author     Daniel Schadt <kingdread@gmx.de>   2022-12-20 20:17:59 +0100
committer  Daniel Schadt <kingdread@gmx.de>   2022-12-20 20:17:59 +0100
commit     7a60619d3f6fd523d42f50753436f3b7e7d72ca4 (patch)
tree       5a5afe7dda6631100f2f52fac0d1935f7627026d
parent     d5830f4000bf8f96d821fd3e93f04898af03132e (diff)
parent     0e0a7e4326a592a6b99ad590e3397e6ddaeaf93a (diff)
Merge branch 'gpx-exodus'
-rw-r--r--  .mypy.ini                                              3
-rw-r--r--  CHANGELOG.rst                                          1
-rw-r--r--  doc/administration/backup.rst                         40
-rw-r--r--  fietsboek/alembic/versions/20221214_c939800af428.py   45
-rw-r--r--  fietsboek/data.py                                    157
-rw-r--r--  fietsboek/models/track.py                            192
-rw-r--r--  fietsboek/scripts/fietsctl.py                         28
-rw-r--r--  fietsboek/summaries.py                                43
-rw-r--r--  fietsboek/updater/__init__.py                         22
-rw-r--r--  fietsboek/updater/cli.py                               5
-rw-r--r--  fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py     64
-rw-r--r--  fietsboek/util.py                                      6
-rw-r--r--  fietsboek/views/browse.py                             24
-rw-r--r--  fietsboek/views/default.py                            10
-rw-r--r--  fietsboek/views/detail.py                             51
-rw-r--r--  fietsboek/views/edit.py                               17
-rw-r--r--  fietsboek/views/upload.py                             10
-rw-r--r--  poetry.lock                                          110
-rw-r--r--  pylint.tests.toml                                      2
-rw-r--r--  pyproject.toml                                         2
-rw-r--r--  tests/conftest.py                                     24
-rw-r--r--  tests/integration/test_browse.py                      16
-rw-r--r--  tests/playwright/test_basic.py                        17
23 files changed, 697 insertions(+), 192 deletions(-)
diff --git a/.mypy.ini b/.mypy.ini
index ed220e3..f77b4ba 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -4,6 +4,9 @@ check_untyped_defs = True
allow_redefinition = True
exclude = fietsboek/updater/scripts/.+\.py
+[mypy-brotli.*]
+ignore_missing_imports = True
+
[mypy-pyramid.*]
ignore_missing_imports = True
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index d3960aa..8b542a2 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -16,6 +16,7 @@ Changed
- The configuration file is now parsed and validated at application startup
with better error reports.
- GPX content is now delivered compressed if the browser supports it.
+- GPX files are now stored outside of the database.
Fixed
^^^^^
diff --git a/doc/administration/backup.rst b/doc/administration/backup.rst
index 3973298..fb379bc 100644
--- a/doc/administration/backup.rst
+++ b/doc/administration/backup.rst
@@ -34,20 +34,36 @@ In addition to the actual data, you should also note down the Fietsboek version
and the database schema version, as backups can only be restored to the same
version!
-To view the fietsboek version in case you are using ``git``, you can use the
+To view the Fietsboek version in case you are using ``git``, you can use the
following command:
.. code:: bash
git rev-parse HEAD
+If you installed Fietsboek via ``pip`` or other means, you can use one of the
+following to see the version:
+
+.. code:: bash
+
+ .venv/bin/pip show fietsboek
+ .venv/bin/fietsctl version
+
+To view the data version, use ``fietsupdate``:
+
+.. code:: bash
+
+ .venv/bin/fietsupdate status -c development.ini
+
To view the database schema version, use ``alembic``:
.. code:: bash
.venv/bin/alembic -c development.ini current
-Note those value in addition to your backup.
+Note those values (Fietsboek version, data version, database schema version) in
+addition to your backup, as you will need them when restoring data later or
+when troubleshooting the restoration process.
Restore
-------
@@ -55,17 +71,31 @@ Restore
The restoration process works in four steps:
First, we ensure that we are on the correct Fietsboek version. If you are using
-``git``, this can be done with ``git checkout``:
+``git``, this can be done with ``git checkout`` before installing it:
.. code:: bash
git checkout NOTED_GIT_VERSION_HERE
-Then, we get the database schema to the right version:
+If you have installed Fietsboek via ``pip``, you can use it to request a
+specific version:
.. code:: bash
- .venv/bin/alembic -c production.ini upgrade NOTED_ALEMBIC_VERSION_HERE
+ .venv/bin/pip install "fietsboek==vX.Y.Z"
+
+Next, we run the data migrations:
+
+.. code:: bash
+
+ .venv/bin/fietsupdate update -c development.ini VERSION_FROM_EARLIER
+
+We can verify that the database has the correct schema version by using the
+same command from earlier and comparing its output to the noted value:
+
+.. code:: bash
+
+ .venv/bin/alembic -c development.ini current
Now, we can restore the data in the database. This step is dependent on the
DBMS that you use, so you should consult its documentation.
diff --git a/fietsboek/alembic/versions/20221214_c939800af428.py b/fietsboek/alembic/versions/20221214_c939800af428.py
new file mode 100644
index 0000000..4b07983
--- /dev/null
+++ b/fietsboek/alembic/versions/20221214_c939800af428.py
@@ -0,0 +1,45 @@
+"""remove gpx data from db
+
+Revision ID: c939800af428
+Revises: d085998b49ca
+Create Date: 2022-12-14 23:58:37.983942
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import logging
+
+
+# revision identifiers, used by Alembic.
+revision = 'c939800af428'
+down_revision = 'd085998b49ca'
+branch_labels = None
+depends_on = None
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ try:
+ op.drop_column('tracks', 'gpx')
+ except sa.exc.OperationalError as exc:
+ # sqlite < 3.35.0 does not know "ALTER TABLE DROP COLUMN", in which
+ # case we probably don't want to break the DB. Instead, we simply
+ # "drop" the column by setting it empty, which should still help in
+ # saving some space.
+ logging.getLogger(__name__).warning(
+ "Your database does not support dropping a column. "
+ "We're setting the content to zero instead (%s).",
+ exc,
+ )
+ op.execute("UPDATE tracks SET gpx = '';")
+ # ### end Alembic commands ###
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ try:
+ op.add_column('tracks', sa.Column('gpx', sa.BLOB(), nullable=True))
+ except sa.exc.OperationalError as exc:
+ logging.getLogger(__name__).warning(
+ "The column already exists - doing nothing (%s).",
+ exc,
+ )
+ # ### end Alembic commands ###
diff --git a/fietsboek/data.py b/fietsboek/data.py
index bd4222b..922600b 100644
--- a/fietsboek/data.py
+++ b/fietsboek/data.py
@@ -8,6 +8,11 @@ import string
import shutil
import uuid
import logging
+from typing import List, BinaryIO, Optional
+from pathlib import Path
+
+import brotli
+from filelock import FileLock
from .util import secure_filename
@@ -15,16 +20,14 @@ from .util import secure_filename
LOGGER = logging.getLogger(__name__)
-def generate_filename(filename):
+def generate_filename(filename: Optional[str]) -> str:
"""Generates a safe-to-use filename for uploads.
If possible, tries to keep parts of the original filename intact, such as
the extension.
:param filename: The original filename.
- :type filename: str
:return: The generated filename.
- :rtype: str
"""
if filename:
good_name = secure_filename(filename)
@@ -42,77 +45,146 @@ class DataManager:
used to access track's images and other on-disk data.
:ivar data_dir: Path to the data folder.
- :vartype data_dir: pathlib.Path
"""
- def __init__(self, data_dir):
- self.data_dir = data_dir
+ def __init__(self, data_dir: Path):
+ self.data_dir: Path = data_dir
def _track_data_dir(self, track_id):
return self.data_dir / "tracks" / str(track_id)
- def images(self, track_id):
- """Returns a list of images that belong to a track.
+ def initialize(self, track_id: int) -> "TrackDataDir":
+ """Creates the data directory for a track.
- :param track_id: Numerical ID of the track.
- :type track_id: int
- :return: A list of image IDs.
- :rtype: list[str]
+ :raises FileExistsError: If the directory already exists.
+ :param track_id: ID of the track.
+ :return: The manager that can be used to manage this track's data.
"""
- image_dir = self._track_data_dir(track_id) / "images"
- if not image_dir.exists():
- return []
- images = []
- for image in image_dir.iterdir():
- images.append(image.name)
- return images
+ path = self._track_data_dir(track_id)
+ path.mkdir(parents=True)
+ return TrackDataDir(track_id, path)
- def purge(self, track_id):
- """Purge all data pertaining to the given track.
+ def purge(self, track_id: int):
+ """Forcefully purges all data from the given track.
This function logs errors but raises no exception, as such it can
always be used to clean up after a track.
+ """
+ TrackDataDir(track_id, self._track_data_dir(track_id)).purge()
+
+ def open(self, track_id: int) -> "TrackDataDir":
+ """Opens a track's data directory.
- :param track_id: The ID of the track.
- :type track_id: int
+ :raises FileNotFoundError: If the track directory does not exist.
+ :param track_id: ID of the track.
+ :return: The manager that can be used to manage this track's data.
+ """
+ path = self._track_data_dir(track_id)
+ if not path.is_dir():
+ raise FileNotFoundError(f"The path {path} is not a directory") from None
+ return TrackDataDir(track_id, path)
+
+
+class TrackDataDir:
+ """Manager for a single track's data."""
+
+ def __init__(self, track_id: int, path: Path):
+ self.track_id: int = track_id
+ self.path: Path = path
+
+ def lock(self) -> FileLock:
+ """Returns a FileLock that can be used to lock access to the track's
+ data.
+
+ :return: The lock responsible for locking this data directory.
+ """
+ return FileLock(self.path / "lock")
+
+ def purge(self):
+ """Purge all data pertaining to the track.
+
+ This function logs errors but raises no exception, as such it can
+ always be used to clean up after a track.
"""
def log_error(_, path, exc_info):
LOGGER.warning("Failed to remove %s", path, exc_info=exc_info)
- path = self._track_data_dir(track_id)
- if path.is_dir():
- shutil.rmtree(path, ignore_errors=False, onerror=log_error)
+ if self.path.is_dir():
+ shutil.rmtree(self.path, ignore_errors=False, onerror=log_error)
+
+ def gpx_path(self) -> Path:
+ """Returns the path of the GPX file.
- def image_path(self, track_id, image_id):
+ This file contains the (brotli) compressed GPX data.
+
+ :return: The path where the GPX is supposed to be.
+ """
+ return self.path / "track.gpx.br"
+
+ def compress_gpx(self, data: bytes, quality: int = 4):
+ """Set the GPX content to the compressed form of data.
+
+ If you want to write compressed data directly, use :meth:`gpx_path` to
+ get the path of the GPX file.
+
+ :param data: The GPX data (uncompressed).
+ :param quality: Compression quality, from 0 to 11 - 11 is highest
+ quality but slowest compression speed.
+ """
+ compressed = brotli.compress(data, quality=quality)
+ self.gpx_path().write_bytes(compressed)
+
+ def decompress_gpx(self) -> bytes:
+ """Returns the GPX bytes decompressed.
+
+ :return: The saved GPX file, decompressed.
+ """
+ return brotli.decompress(self.gpx_path().read_bytes())
+
+ def backup(self):
+ """Create a backup of the GPX file."""
+ shutil.copy(self.gpx_path(), self.backup_path())
+
+ def backup_path(self) -> Path:
+ """Path of the GPX backup file.
+
+ :return: The path of the backup file.
+ """
+ return self.path / "track.bck.gpx.br"
+
+ def images(self) -> List[str]:
+ """Returns a list of images that belong to the track.
+
+ :return: A list of image IDs.
+ """
+ image_dir = self.path / "images"
+ if not image_dir.exists():
+ return []
+ images = [image.name for image in image_dir.iterdir()]
+ return images
+
+ def image_path(self, image_id: str) -> Path:
"""Returns a path to a saved image.
:raises FileNotFoundError: If the given image could not be found.
- :param track_id: ID of the track.
- :type track_id: int
:param image_id: ID of the image.
- :type image_id: str
:return: A path pointing to the requested image.
- :rtype: pathlib.Path
"""
- image = self._track_data_dir(track_id) / "images" / secure_filename(image_id)
+ image = self.path / "images" / secure_filename(image_id)
if not image.exists():
raise FileNotFoundError("The requested image does not exist")
return image
- def add_image(self, track_id, image, filename=None):
+ def add_image(self, image: BinaryIO, filename: Optional[str] = None) -> str:
"""Saves an image to a track.
- :param track_id: ID of the track.
- :type track_id: int
:param image: The image, as a file-like object to read from.
- :type image: file
:param filename: The image's original filename.
- :type filename: str
:return: The ID of the saved image.
- :rtype: str
"""
- image_dir = self._track_data_dir(track_id) / "images"
+ image_dir = self.path / "images"
image_dir.mkdir(parents=True, exist_ok=True)
filename = generate_filename(filename)
@@ -122,18 +194,15 @@ class DataManager:
return filename
- def delete_image(self, track_id, image_id):
+ def delete_image(self, image_id: str):
"""Deletes an image from a track.
:raises FileNotFoundError: If the given image could not be found.
- :param track_id: ID of the track.
- :type track_id: int
:param image_id: ID of the image.
- :type image_id: str
"""
# Be sure to not delete anything else than the image file
image_id = secure_filename(image_id)
if "/" in image_id or "\\" in image_id:
return
- path = self.image_path(track_id, image_id)
+ path = self.image_path(image_id)
path.unlink()
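
For orientation, here is a minimal usage sketch of the new DataManager/TrackDataDir
API introduced above. The data directory path and the track ID 42 are made up for
illustration, and error handling is omitted; only methods defined in this diff are used:

    from pathlib import Path

    from fietsboek.data import DataManager

    manager = DataManager(Path("/var/lib/fietsboek/data"))

    # Create the per-track directory and store brotli-compressed GPX data.
    track_dir = manager.initialize(42)
    with track_dir.lock():
        track_dir.compress_gpx(b"<gpx>...</gpx>")
        track_dir.backup()

    # Later, re-open the directory and read the GPX back, decompressed.
    raw_gpx = manager.open(42).decompress_gpx()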
diff --git a/fietsboek/models/track.py b/fietsboek/models/track.py
index 9eeae55..5f09059 100644
--- a/fietsboek/models/track.py
+++ b/fietsboek/models/track.py
@@ -15,6 +15,7 @@ import enum
import gzip
import datetime
import logging
+from typing import Optional, List, Set, TYPE_CHECKING
from itertools import chain
@@ -33,7 +34,7 @@ from sqlalchemy import (
from sqlalchemy.orm import relationship
from pyramid.httpexceptions import HTTPNotFound
-from pyramid.i18n import TranslationString as _
+from pyramid.i18n import TranslationString as _, Localizer
from pyramid.authorization import (
Allow,
Everyone,
@@ -49,6 +50,9 @@ from babel.numbers import format_decimal
from .meta import Base
from .. import util
+if TYPE_CHECKING:
+ from .. import models
+
LOGGER = logging.getLogger(__name__)
@@ -193,7 +197,6 @@ class Track(Base):
description = Column(Text)
date_raw = Column(DateTime)
date_tz = Column(Integer)
- gpx = Column(LargeBinary)
visibility = Column(Enum(Visibility))
link_secret = Column(Text)
type = Column(Enum(TrackType))
@@ -269,25 +272,6 @@ class Track(Base):
)
return acl
- # GPX files are XML files with a lot of repeated property names. Really, it
- # is quite inefficient to store a whole ton of GPS points in big XML
- # structs. Therefore, we add transparent gzip compression to reduce the
- # file size by quite a bit:
- # 6.7M 20210902_111541.gpx
- # 792K 20210902_111541.gpx.gz
- @property
- def gpx_data(self):
- """The actual GPX data.
-
- Since storing a lot of GPS points in a XML file is inefficient, we
- apply transparent compression to reduce the stored size.
- """
- return gzip.decompress(self.gpx)
-
- @gpx_data.setter
- def gpx_data(self, value):
- self.gpx = gzip.compress(value)
-
@property
def date(self):
"""The time-zone-aware date this track has set.
@@ -344,12 +328,15 @@ class Track(Base):
result = ACLHelper().permits(self, principals, "track.view")
return isinstance(result, ACLAllowed)
- def ensure_cache(self):
- """Ensure that a cached version of this track's metadata exists."""
+ def ensure_cache(self, gpx_data: str):
+ """Ensure that a cached version of this track's metadata exists.
+
+ :param gpx_data: GPX data (uncompressed) from which to build the cache.
+ """
if self.cache is not None:
return
self.cache = TrackCache(track=self)
- meta = util.tour_metadata(self.gpx_data)
+ meta = util.tour_metadata(gpx_data)
self.cache.length = meta["length"]
self.cache.uphill = meta["uphill"]
self.cache.downhill = meta["downhill"]
@@ -418,122 +405,139 @@ class Track(Base):
for i in to_delete[::-1]:
del self.tags[i]
+
+class TrackWithMetadata:
+ """A class to add metadata to a :class:`Track`.
+
+ This basically caches the result of :func:`fietsboek.util.tour_metadata`,
+ or uses the track's cache if possible.
+
+ Loading of the metadata is lazy on first access. The track is accessible as
+ ``track``, but most attributes are proxied read-only.
+ """
+
+ # pylint: disable=too-many-public-methods
+
+ def __init__(self, track: Track, data_manager):
+ self.track = track
+ self.cache = track.cache
+ self.data_manager = data_manager
+ self._cached_meta: Optional[dict] = None
+
+ def _meta(self):
+ # Already loaded, we're done
+ if self._cached_meta:
+ return self._cached_meta
+
+ data = self.data_manager.open(self.track.id).decompress_gpx()
+ self._cached_meta = util.tour_metadata(data)
+ return self._cached_meta
+
@property
- def length(self):
+ def length(self) -> float:
"""Returns the length of the track..
:return: Length of the track in meters.
- :rtype: float
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["length"]
+ return self._meta()["length"]
return self.cache.length
@property
- def downhill(self):
+ def downhill(self) -> float:
"""Returns the downhill of the track.
:return: Downhill in meters.
- :rtype: float
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["downhill"]
+ return self._meta()["downhill"]
return self.cache.downhill
@property
- def uphill(self):
+ def uphill(self) -> float:
"""Returns the uphill of the track.
:return: Uphill in meters.
- :rtype: float
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["uphill"]
+ return self._meta()["uphill"]
return self.cache.uphill
@property
- def moving_time(self):
+ def moving_time(self) -> datetime.timedelta:
"""Returns the moving time.
:return: Moving time in seconds.
- :rtype: datetime.timedelta
"""
if self.cache is None:
- value = util.tour_metadata(self.gpx_data)["moving_time"]
+ value = self._meta()["moving_time"]
else:
value = self.cache.moving_time
return datetime.timedelta(seconds=value)
@property
- def stopped_time(self):
+ def stopped_time(self) -> datetime.timedelta:
"""Returns the stopped time.
:return: Stopped time in seconds.
- :rtype: datetime.timedelta
"""
if self.cache is None:
- value = util.tour_metadata(self.gpx_data)["stopped_time"]
+ value = self._meta()["stopped_time"]
else:
value = self.cache.stopped_time
return datetime.timedelta(seconds=value)
@property
- def max_speed(self):
+ def max_speed(self) -> float:
"""Returns the maximum speed.
:return: Maximum speed in meters/second.
- :rtype: float
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["max_speed"]
+ return self._meta()["max_speed"]
return self.cache.max_speed
@property
- def avg_speed(self):
+ def avg_speed(self) -> float:
"""Returns the average speed.
:return: Average speed in meters/second.
- :rtype: float
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["avg_speed"]
+ return self._meta()["avg_speed"]
return self.cache.avg_speed
@property
- def start_time(self):
+ def start_time(self) -> datetime.datetime:
"""Returns the start time.
This is the time embedded in the GPX file, not the time in the ``date`` column.
:return: Start time.
- :rtype: datetime.datetime
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["start_time"]
+ return self._meta()["start_time"]
return self.cache.start_time
@property
- def end_time(self):
+ def end_time(self) -> datetime.datetime:
"""Returns the end time.
This is the time embedded in the GPX file, not the time in the ``date`` column.
:return: End time.
- :rtype: float
"""
if self.cache is None:
- return util.tour_metadata(self.gpx_data)["end_time"]
+ return self._meta()["end_time"]
return self.cache.end_time
- def html_tooltip(self, localizer):
+ def html_tooltip(self, localizer: Localizer) -> Markup:
"""Generate a quick summary of the track as a HTML element.
This can be used in Bootstrap tooltips.
:param localizer: The localizer used for localization.
- :type localizer: pyramid.i18n.Localizer
:return: The generated HTML.
- :rtype: Markup
"""
def number(num):
@@ -560,6 +564,86 @@ class Track(Base):
]
return Markup(f'<table>{"".join(rows)}</table>')
+ # Proxied properties
+ @property
+ def id(self) -> int:
+ """ID of the underlying track."""
+ return self.track.id
+
+ @property
+ def title(self) -> str:
+ """Title of the underlying track."""
+ return self.track.title
+
+ @property
+ def description(self) -> str:
+ """Description of the underlying track."""
+ return self.track.description
+
+ @property
+ def date(self) -> datetime.datetime:
+ """Date of the underlying track."""
+ return self.track.date
+
+ @property
+ def visibility(self) -> Visibility:
+ """Visibility of the underlying track."""
+ return self.track.visibility
+
+ @property
+ def link_secret(self) -> str:
+ """Link secret of the underlying track."""
+ return self.track.link_secret
+
+ @property
+ def type(self) -> TrackType:
+ """Type of the underlying track."""
+ return self.track.type
+
+ @property
+ def owner(self) -> "models.User":
+ """Owner of the undlerying track."""
+ return self.track.owner
+
+ @property
+ def tagged_people(self) -> List["models.User"]:
+ """Tagged people of the underlying track."""
+ return self.track.tagged_people[:]
+
+ @property
+ def badges(self) -> List["models.Badge"]:
+ """Badges of the underlying track."""
+ return self.track.badges[:]
+
+ @property
+ def tags(self) -> List["models.Tag"]:
+ """Tags of the underlying track."""
+ return self.track.tags[:]
+
+ @property
+ def comments(self) -> List["models.Comment"]:
+ """Comments of the underlying track."""
+ return self.track.comments[:]
+
+ @property
+ def images(self) -> List["models.ImageMetadata"]:
+ """Images of the underlying track."""
+ return self.track.images[:]
+
+ def text_tags(self) -> Set[str]:
+ """Returns a set of textual tags.
+
+ :return: The tags of the track, as a set of strings.
+ """
+ return self.track.text_tags()
+
+ def show_organic_data(self) -> bool:
+ """Proxied method :meth:`Track.show_organic_data`.
+
+ :return: Whether the organic data should be shown.
+ """
+ return self.track.show_organic_data()
+
class TrackCache(Base):
"""Cache for computed track metadata.
diff --git a/fietsboek/scripts/fietsctl.py b/fietsboek/scripts/fietsctl.py
index bd37987..75c615a 100644
--- a/fietsboek/scripts/fietsctl.py
+++ b/fietsboek/scripts/fietsctl.py
@@ -7,7 +7,7 @@ import sys
from pyramid.paster import bootstrap, setup_logging
from sqlalchemy import select
-from .. import models
+from .. import models, __VERSION__
EXIT_OKAY = 0
@@ -136,6 +136,12 @@ def cmd_passwd(env, args):
return EXIT_OKAY
+def cmd_version():
+ """Show the installed fietsboek version."""
+ name = __name__.split(".", 1)[0]
+ print(f"{name} {__VERSION__}")
+
+
def parse_args(argv):
"""Parse the given args.
@@ -150,11 +156,17 @@ def parse_args(argv):
"--config",
dest="config_uri",
help="configuration file, e.g., development.ini",
- required=True,
)
subparsers = parser.add_subparsers(help="available subcommands", required=True)
+ p_version = subparsers.add_parser(
+ "version",
+ help="show the version",
+ description=cmd_version.__doc__,
+ )
+ p_version.set_defaults(func=cmd_version)
+
p_useradd = subparsers.add_parser(
"useradd",
help="create a new user",
@@ -234,14 +246,22 @@ def parse_args(argv):
)
p_passwd.set_defaults(func=cmd_passwd)
- return parser.parse_args(argv[1:])
+ return parser.parse_args(argv[1:]), parser
def main(argv=None):
"""Main entry point."""
if argv is None:
argv = sys.argv
- args = parse_args(argv)
+ args, parser = parse_args(argv)
+
+ if args.func == cmd_version: # pylint: disable=comparison-with-callable
+ cmd_version()
+ sys.exit(EXIT_OKAY)
+
+ if not args.config_uri:
+ parser.error("the following arguments are required: -c/--config")
+
setup_logging(args.config_uri)
env = bootstrap(args.config_uri)
diff --git a/fietsboek/summaries.py b/fietsboek/summaries.py
index 04b74c5..fcbbc86 100644
--- a/fietsboek/summaries.py
+++ b/fietsboek/summaries.py
@@ -1,4 +1,7 @@
"""Module for a yearly/monthly track summary."""
+from typing import List, Dict
+
+from fietsboek.models.track import TrackWithMetadata
class Summary:
@@ -9,22 +12,21 @@ class Summary:
"""
def __init__(self):
- self.years = {}
+ self.years: Dict[int, YearSummary] = {}
def __iter__(self):
items = list(self.years.values())
items.sort(key=lambda y: y.year)
return iter(items)
- def all_tracks(self):
+ def all_tracks(self) -> List[TrackWithMetadata]:
"""Returns all tracks of the summary.
:return: All tracks.
- :rtype: list[fietsboek.model.track.Track]
"""
return [track for year in self for month in year for track in month.all_tracks()]
- def add(self, track):
+ def add(self, track: TrackWithMetadata):
"""Add a track to the summary.
This automatically inserts the track into the right yearly summary.
@@ -36,11 +38,10 @@ class Summary:
self.years.setdefault(year, YearSummary(year)).add(track)
@property
- def total_length(self):
+ def total_length(self) -> float:
"""Returns the total length of all tracks in this summary.
:return: The total length in meters.
- :rtype: float
"""
return sum(track.length for track in self.all_tracks())
@@ -49,45 +50,40 @@ class YearSummary:
"""A summary over a single year.
:ivar year: Year number.
- :vartype year: int
:ivar months: Mapping of month to :class:`MonthSummary`.
- :vartype months: dict[int, MonthSummary]
"""
def __init__(self, year):
- self.year = year
- self.months = {}
+ self.year: int = year
+ self.months: Dict[int, MonthSummary] = {}
def __iter__(self):
items = list(self.months.values())
items.sort(key=lambda x: x.month)
return iter(items)
- def all_tracks(self):
+ def all_tracks(self) -> List[TrackWithMetadata]:
"""Returns all tracks of the summary.
:return: All tracks.
- :rtype: list[fietsboek.model.track.Track]
"""
return [track for month in self for track in month]
- def add(self, track):
+ def add(self, track: TrackWithMetadata):
"""Add a track to the summary.
This automatically inserts the track into the right monthly summary.
:param track: The track to insert.
- :type track: fietsboek.model.track.Track
"""
month = track.date.month
self.months.setdefault(month, MonthSummary(month)).add(track)
@property
- def total_length(self):
+ def total_length(self) -> float:
"""Returns the total length of all tracks in this summary.
:return: The total length in meters.
- :rtype: float
"""
return sum(track.length for track in self.all_tracks())
@@ -96,41 +92,36 @@ class MonthSummary:
"""A summary over a single month.
:ivar month: Month number (1-12).
- :vartype month: int
:ivar tracks: List of tracks in this month.
- :vartype tracks: list[fietsboek.model.track.Track]
"""
def __init__(self, month):
- self.month = month
- self.tracks = []
+ self.month: int = month
+ self.tracks: List[TrackWithMetadata] = []
def __iter__(self):
items = self.tracks[:]
items.sort(key=lambda t: t.date)
return iter(items)
- def all_tracks(self):
+ def all_tracks(self) -> List[TrackWithMetadata]:
"""Returns all tracks of the summary.
:return: All tracks.
- :rtype: list[fietsboek.model.track.Track]
"""
return self.tracks[:]
- def add(self, track):
+ def add(self, track: TrackWithMetadata):
"""Add a track to the summary.
:param track: The track to insert.
- :type track: fietsboek.model.track.Track
"""
self.tracks.append(track)
@property
- def total_length(self):
+ def total_length(self) -> float:
"""Returns the total length of all tracks in this summary.
:return: The total length in meters.
- :rtype: float
"""
return sum(track.length for track in self.all_tracks())
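
A small sketch of how the typed summary classes fit together. The helper is
hypothetical and assumes the tracks are already wrapped in TrackWithMetadata:

    from fietsboek.summaries import Summary

    def print_yearly_totals(tracks_with_metadata):
        # Summary -> YearSummary -> MonthSummary, each exposing total_length.
        summary = Summary()
        for track in tracks_with_metadata:
            summary.add(track)
        for year in summary:
            for month in year:
                print(year.year, month.month, month.total_length / 1000, "km")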
diff --git a/fietsboek/updater/__init__.py b/fietsboek/updater/__init__.py
index 348f713..b136a6c 100644
--- a/fietsboek/updater/__init__.py
+++ b/fietsboek/updater/__init__.py
@@ -86,8 +86,20 @@ class Updater:
# Ensure that each script has an entry
self.backward_dependencies = {script.id: [] for script in self.scripts.values()}
for script in self.scripts.values():
+ down_alembic = None
for prev_id in script.previous:
self.backward_dependencies[prev_id].append(script.id)
+ possible_alembic = self.scripts[prev_id].alembic_version
+ if down_alembic is None:
+ down_alembic = possible_alembic
+ elif down_alembic != possible_alembic:
+ LOGGER.error(
+ "Invalid update graph - two different down alembics for script %s",
+ script.id,
+ )
+ raise ValueError(f"Two alembic downgrades for {script.id}")
+ down_alembic = possible_alembic
+ script.down_alembic = down_alembic
def exists(self, revision_id):
"""Checks if the revision with the given ID exists.
@@ -291,6 +303,7 @@ class UpdateScript:
self.module = importlib.util.module_from_spec(spec) # type: ignore
assert self.module
exec(source, self.module.__dict__) # pylint: disable=exec-used
+ self.down_alembic = None
def __repr__(self):
return f"<{__name__}.{self.__class__.__name__} name={self.name!r} id={self.id!r}>"
@@ -355,10 +368,11 @@ class UpdateScript:
"""
LOGGER.info("[down] Running pre-alembic task for %s", self.id)
self.module.Down().pre_alembic(config)
- LOGGER.info("[down] Running alembic downgrade for %s to %s", self.id, self.alembic_version)
- alembic.command.downgrade(alembic_config, self.alembic_version)
- LOGGER.info("[down] Running post-alembic task for %s", self.id)
- self.module.Down().post_alembic(config)
+ if self.down_alembic:
+ LOGGER.info("[down] Running alembic downgrade for %s to %s", self.id, self.down_alembic)
+ alembic.command.downgrade(alembic_config, self.down_alembic)
+ LOGGER.info("[down] Running post-alembic task for %s", self.id)
+ self.module.Down().post_alembic(config)
def _filename_to_modname(name):
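
The down_alembic resolution above boils down to: every predecessor of a script must
point at the same alembic revision, and that shared revision becomes the downgrade
target. A toy illustration of the rule (script IDs and revisions are made up):

    # Hypothetical update graph: script "C" has predecessors "A" and "B".
    scripts = {
        "A": {"alembic_version": "aaaa00000001", "previous": []},
        "B": {"alembic_version": "aaaa00000001", "previous": []},
        "C": {"alembic_version": "bbbb00000002", "previous": ["A", "B"]},
    }

    candidates = {scripts[prev]["alembic_version"] for prev in scripts["C"]["previous"]}
    if len(candidates) != 1:
        raise ValueError("Two alembic downgrades for C")
    down_alembic = candidates.pop()  # the revision alembic downgrades to past "C"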
diff --git a/fietsboek/updater/cli.py b/fietsboek/updater/cli.py
index 5c97687..d19e444 100644
--- a/fietsboek/updater/cli.py
+++ b/fietsboek/updater/cli.py
@@ -6,6 +6,7 @@ managing migrations between Fietsboek versions. In particular, the updater
takes care of running the database migrations, migrating the data directory and
migrating the configuration.
"""
+import logging.config
import click
from . import Updater
@@ -70,6 +71,7 @@ def update(ctx, config, version, force):
VERSION specifies the version you want to update to. Leave empty to choose
the latest version.
"""
+ logging.config.fileConfig(config)
updater = Updater(config)
updater.load()
if version and not updater.exists(version):
@@ -114,6 +116,7 @@ def downgrade(ctx, config, version, force):
VERSION specifies the version you want to downgrade to.
"""
+ logging.config.fileConfig(config)
updater = Updater(config)
updater.load()
if version and not updater.exists(version):
@@ -143,6 +146,7 @@ def revision(config, revision_id):
This command is useful for developers who work on Fietsboek.
"""
+ logging.config.fileConfig(config)
updater = Updater(config)
updater.load()
current = updater.current_versions()
@@ -163,6 +167,7 @@ def revision(config, revision_id):
@config_option
def status(config):
"""Display information about the current version and available updates."""
+ logging.config.fileConfig(config)
updater = Updater(config)
updater.load()
current = updater.current_versions()
diff --git a/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py b/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py
new file mode 100644
index 0000000..983ea45
--- /dev/null
+++ b/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py
@@ -0,0 +1,64 @@
+"""Revision upgrade script 30ppwg8zi4ujb46f
+
+This script moves the GPX data out of the database and puts them into the data
+directory instead.
+
+Date created: 2022-12-14 22:33:32.837737
+"""
+from fietsboek.updater.script import UpdateScript
+
+import shutil
+import gzip
+import brotli
+from pathlib import Path
+from sqlalchemy import create_engine
+from sqlalchemy.sql import text
+
+update_id = '30ppwg8zi4ujb46f'
+previous = [
+ 'v0.4.0',
+]
+alembic_revision = 'c939800af428'
+
+
+class Up(UpdateScript):
+ def pre_alembic(self, config):
+ engine = create_engine(config["sqlalchemy.url"])
+ connection = engine.connect()
+ data_dir = Path(config["fietsboek.data_dir"])
+
+ for row in connection.execute("SELECT id, gpx FROM tracks;"):
+ self.tell(f"Moving GPX data for track {row.id} from database to disk")
+ track_dir = data_dir / "tracks" / str(row.id)
+ track_dir.mkdir(parents=True, exist_ok=True)
+
+ raw_gpx = gzip.decompress(row.gpx)
+ gpx_path = track_dir / "track.gpx.br"
+ gpx_path.write_bytes(brotli.compress(raw_gpx, quality=5))
+ shutil.copy(gpx_path, track_dir / "track.bck.gpx.br")
+
+ def post_alembic(self, config):
+ pass
+
+
+class Down(UpdateScript):
+ def pre_alembic(self, config):
+ pass
+
+ def post_alembic(self, config):
+ engine = create_engine(config["sqlalchemy.url"])
+ connection = engine.connect()
+ data_dir = Path(config["fietsboek.data_dir"])
+
+ for track_path in (data_dir / "tracks").iterdir():
+ track_id = int(track_path.name)
+ self.tell(f"Moving GPX data for track {track_id} from disk to database")
+ brotli_data = (track_path / "track.gpx.br").read_bytes()
+ gzip_data = gzip.compress(brotli.decompress(brotli_data))
+ connection.execute(
+ text("UPDATE tracks SET gpx = :gpx WHERE id = :id;"),
+ gpx=gzip_data, id=track_id
+ )
+
+ (track_path / "track.gpx.br").unlink()
+ (track_path / "track.bck.gpx.br").unlink(missing_ok=True)
diff --git a/fietsboek/util.py b/fietsboek/util.py
index a4f2891..789fc3b 100644
--- a/fietsboek/util.py
+++ b/fietsboek/util.py
@@ -4,7 +4,7 @@ import re
import os
import unicodedata
import secrets
-from typing import Optional, List
+from typing import Optional, List, Union
# Compat for Python < 3.9
import importlib_resources
@@ -155,7 +155,7 @@ def guess_gpx_timezone(gpx: gpxpy.gpx.GPX) -> datetime.tzinfo:
return datetime.timezone.utc
-def tour_metadata(gpx_data: str) -> dict:
+def tour_metadata(gpx_data: Union[str, bytes]) -> dict:
"""Calculate the metadata of the tour.
Returns a dict with ``length``, ``uphill``, ``downhill``, ``moving_time``,
@@ -165,6 +165,8 @@ def tour_metadata(gpx_data: str) -> dict:
:param gpx_data: The GPX data of the tour.
:return: A dictionary with the computed values.
"""
+ if isinstance(gpx_data, bytes):
+ gpx_data = gpx_data.decode("utf-8")
gpx = gpxpy.parse(gpx_data)
timezone = guess_gpx_timezone(gpx)
uphill, downhill = gpx.get_uphill_downhill()
diff --git a/fietsboek/views/browse.py b/fietsboek/views/browse.py
index 018cb6e..377e073 100644
--- a/fietsboek/views/browse.py
+++ b/fietsboek/views/browse.py
@@ -12,7 +12,7 @@ from sqlalchemy import select, func, or_
from sqlalchemy.orm import aliased
from .. import models, util
-from ..models.track import TrackType
+from ..models.track import TrackType, TrackWithMetadata
class Stream(RawIOBase):
@@ -339,6 +339,7 @@ def browse(request):
query = query.order_by(track.date_raw.desc())
tracks = request.dbsession.execute(query).scalars()
+ tracks = (TrackWithMetadata(track, request.data_manager) for track in tracks)
tracks = [track for track in tracks if filters.apply(track)]
return {
"tracks": tracks,
@@ -356,12 +357,9 @@ def archive(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- # We need to create a separate session, otherwise we will get detached instances
- session = request.registry["dbsession_factory"]()
-
track_ids = set(map(int, request.params.getall("track_id[]")))
tracks = (
- session.execute(select(models.Track).filter(models.Track.id.in_(track_ids)))
+ request.dbsession.execute(select(models.Track).filter(models.Track.id.in_(track_ids)))
.scalars()
.fetchall()
)
@@ -374,15 +372,13 @@ def archive(request):
return HTTPForbidden()
def generate():
- try:
- stream = Stream()
- with ZipFile(stream, "w", ZIP_DEFLATED) as zipfile: # type: ignore
- for track in tracks:
- zipfile.writestr(f"track_{track.id}.gpx", track.gpx_data)
- yield stream.readall()
- yield stream.readall()
- finally:
- session.close()
+ stream = Stream()
+ with ZipFile(stream, "w", ZIP_DEFLATED) as zipfile: # type: ignore
+ for track_id in track_ids:
+ data = request.data_manager.open(track_id).decompress_gpx()
+ zipfile.writestr(f"track_{track_id}.gpx", data)
+ yield stream.readall()
+ yield stream.readall()
return Response(
app_iter=generate(),
diff --git a/fietsboek/views/default.py b/fietsboek/views/default.py
index fe9df08..8d0ad7e 100644
--- a/fietsboek/views/default.py
+++ b/fietsboek/views/default.py
@@ -14,7 +14,7 @@ from markupsafe import Markup
from .. import models, summaries, util, email
from ..models.user import PasswordMismatch, TokenType
-from ..models.track import TrackType
+from ..models.track import TrackType, TrackWithMetadata
@view_config(route_name="home", renderer="fietsboek:templates/home.jinja2")
@@ -55,9 +55,11 @@ def home(request):
summary = summaries.Summary()
for track in request.dbsession.execute(query).scalars():
- track.ensure_cache()
- request.dbsession.add(track.cache)
- summary.add(track)
+ if track.cache is None:
+ gpx_data = request.data_manager.open(track.id).decompress_gpx()
+ track.ensure_cache(gpx_data)
+ request.dbsession.add(track.cache)
+ summary.add(TrackWithMetadata(track, request.data_manager))
return {
"summary": summary,
diff --git a/fietsboek/views/detail.py b/fietsboek/views/detail.py
index 485bad3..a6d845a 100644
--- a/fietsboek/views/detail.py
+++ b/fietsboek/views/detail.py
@@ -1,14 +1,25 @@
"""Track detail views."""
import datetime
+import logging
+import gzip
from pyramid.view import view_config
from pyramid.response import Response, FileResponse
from pyramid.i18n import TranslationString as _
-from pyramid.httpexceptions import HTTPFound, HTTPNotFound, HTTPNotAcceptable
+from pyramid.httpexceptions import (
+ HTTPFound,
+ HTTPNotFound,
+ HTTPNotAcceptable,
+ HTTPInternalServerError,
+)
from sqlalchemy import select
from .. import models, util
+from ..models.track import TrackWithMetadata
+
+
+LOGGER = logging.getLogger(__name__)
@view_config(
@@ -26,8 +37,15 @@ def details(request):
description = util.safe_markdown(track.description)
show_edit_link = track.owner == request.identity
+ on_disk_images = []
+ try:
+ manager = request.data_manager.open(track.id)
+ on_disk_images.extend(manager.images())
+ except FileNotFoundError:
+ pass
+
images = []
- for image_name in request.data_manager.images(track.id):
+ for image_name in on_disk_images:
query = []
if "secret" in request.GET:
query.append(("secret", request.GET["secret"]))
@@ -39,8 +57,9 @@ def details(request):
else:
images.append((img_src, ""))
+ with_meta = TrackWithMetadata(track, request.data_manager)
return {
- "track": track,
+ "track": with_meta,
"show_organic": track.show_organic_data(),
"show_edit_link": show_edit_link,
"mps_to_kph": util.mps_to_kph,
@@ -60,18 +79,32 @@ def gpx(request):
:rtype: pyramid.response.Response
"""
track = request.context
+ try:
+ manager = request.data_manager.open(track.id)
+ except FileNotFoundError:
+ LOGGER.error("Track exists in database, but not on disk: %d", track.id)
+ return HTTPInternalServerError()
# We can be nice to the client if they support it, and deliver the gzipped
# data straight. This saves decompression time on the server and saves a
# lot of bandwidth.
- accepted = request.accept_encoding.acceptable_offers(["gzip", "identity"])
+ accepted = request.accept_encoding.acceptable_offers(["br", "gzip", "identity"])
for encoding, _qvalue in accepted:
- if encoding == "gzip":
- response = Response(
- track.gpx, content_type="application/gpx+xml", content_encoding="gzip"
+ if encoding == "br":
+ response = FileResponse(
+ str(manager.gpx_path()),
+ request,
+ content_type="application/gpx+xml",
+ content_encoding="br",
)
break
+ if encoding == "gzip":
+ # gzip'ed GPX files are so much smaller than uncompressed ones that
+ # it is worth re-compressing them for the client
+ data = gzip.compress(manager.decompress_gpx())
+ response = Response(data, content_type="application/gpx+xml", content_encoding="gzip")
+ break
if encoding == "identity":
- response = Response(track.gpx_data, content_type="application/gpx+xml")
+ response = Response(manager.decompress_gpx(), content_type="application/gpx+xml")
break
else:
return HTTPNotAcceptable("No data with acceptable encoding found")
@@ -138,7 +171,7 @@ def image(request):
"""
track = request.context
try:
- image_path = request.data_manager.image_path(track.id, request.matchdict["image_name"])
+ image_path = request.data_manager.open(track.id).image_path(request.matchdict["image_name"])
except FileNotFoundError:
return HTTPNotFound()
else:
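
The encoding negotiation in the new gpx view follows WebOb's acceptable_offers
ordering: the brotli file is served as-is, gzip is re-compressed on the fly, and
identity gets the decompressed bytes. A standalone sketch of that negotiation,
assuming WebOb's header helpers (the header value is made up):

    from webob.acceptparse import create_accept_encoding_header

    accept_encoding = create_accept_encoding_header("br;q=1.0, gzip;q=0.8")
    offers = accept_encoding.acceptable_offers(["br", "gzip", "identity"])
    # offers is a list of (encoding, qvalue) pairs, best first; the view above
    # serves the first encoding it knows how to produce.
    best, _qvalue = offers[0]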
diff --git a/fietsboek/views/edit.py b/fietsboek/views/edit.py
index 003f7c7..a26452d 100644
--- a/fietsboek/views/edit.py
+++ b/fietsboek/views/edit.py
@@ -36,8 +36,13 @@ def edit(request):
badges = request.dbsession.execute(select(models.Badge)).scalars()
badges = [(badge in track.badges, badge) for badge in badges]
+ on_disk_images = []
+ try:
+ on_disk_images = request.data_manager.open(track.id).images()
+ except FileNotFoundError:
+ pass
images = []
- for image in request.data_manager.images(track.id):
+ for image in on_disk_images:
metadata = request.dbsession.execute(
select(models.ImageMetadata).filter_by(track=track, image_name=image)
).scalar_one_or_none()
@@ -106,9 +111,13 @@ def edit_images(request, track):
:type track: fietsboek.models.track.Track
"""
+ try:
+ manager = request.data_manager.open(track.id)
+ except FileNotFoundError:
+ manager = request.data_manager.initialize(track.id)
# Delete requested images
for image in request.params.getall("delete-image[]"):
- request.data_manager.delete_image(track.id, image)
+ manager.delete_image(image)
image_meta = request.dbsession.execute(
select(models.ImageMetadata).filter_by(track_id=track.id, image_name=image)
).scalar_one_or_none()
@@ -127,14 +136,14 @@ def edit_images(request, track):
continue
upload_id = match.group(1)
- image_name = request.data_manager.add_image(track.id, image.file, image.filename)
+ image_name = manager.add_image(image.file, image.filename)
image_meta = models.ImageMetadata(track=track, image_name=image_name)
image_meta.description = request.params.get(f"image-description[{upload_id}]", "")
request.dbsession.add(image_meta)
LOGGER.debug("Uploaded image %s %s", track.id, image_name)
set_descriptions.add(upload_id)
- images = request.data_manager.images(track.id)
+ images = manager.images()
# Set image descriptions
for param_name, description in request.params.items():
match = re.match("image-description\\[(.+)\\]$", param_name)
diff --git a/fietsboek/views/upload.py b/fietsboek/views/upload.py
index d691e46..f0f7793 100644
--- a/fietsboek/views/upload.py
+++ b/fietsboek/views/upload.py
@@ -173,15 +173,21 @@ def do_finish_upload(request):
track.date = date
tags = request.params.getall("tag[]")
track.sync_tags(tags)
- track.gpx_data = upload.gpx_data
request.dbsession.add(track)
request.dbsession.delete(upload)
request.dbsession.flush()
# Best time to build the cache is right after the upload
- track.ensure_cache()
+ track.ensure_cache(upload.gpx_data)
request.dbsession.add(track.cache)
+ # Save the GPX data
+ LOGGER.debug("Creating a new data folder for %d", track.id)
+ manager = request.data_manager.initialize(track.id)
+ LOGGER.debug("Saving GPX to %s", manager.gpx_path())
+ manager.compress_gpx(upload.gpx_data)
+ manager.backup()
+
# Don't forget to add the images
LOGGER.debug("Starting to edit images for the upload")
edit.edit_images(request, track)
diff --git a/poetry.lock b/poetry.lock
index 596cd1a..b918f8f 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -131,6 +131,14 @@ css = ["tinycss2 (>=1.1.0,<1.2)"]
dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"]
[[package]]
+name = "brotli"
+version = "1.0.9"
+description = "Python bindings for the Brotli compression library"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
name = "certifi"
version = "2022.9.24"
description = "Python package for providing Mozilla's CA Bundle."
@@ -244,6 +252,18 @@ python-versions = ">=3.7"
test = ["pytest (>=6)"]
[[package]]
+name = "filelock"
+version = "3.8.2"
+description = "A platform independent file lock."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+
+[[package]]
name = "gpxpy"
version = "1.5.0"
description = "GPX file parser and GPS track manipulation library"
@@ -1323,7 +1343,7 @@ test = ["zope.testing"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7.2"
-content-hash = "20da091813c8e88b93d645201acd1c5c00ec4316188ed64fd67447da1bac6a1f"
+content-hash = "409175c4ab450ddc362d21b89a10f181b634469077586b046f8a5b9f028bb0e2"
[metadata.files]
alabaster = [
@@ -1372,6 +1392,90 @@ bleach = [
{file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
{file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"},
]
+brotli = [
+ {file = "Brotli-1.0.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70"},
+ {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b"},
+ {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6"},
+ {file = "Brotli-1.0.9-cp27-cp27m-win32.whl", hash = "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa"},
+ {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452"},
+ {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7"},
+ {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031"},
+ {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43"},
+ {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"},
+ {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"},
+ {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"},
+ {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"},
+ {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"},
+ {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"},
+ {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"},
+ {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"},
+ {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a"},
+ {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df"},
+ {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad"},
+ {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde"},
+ {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a"},
+ {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f"},
+ {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d"},
+ {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f"},
+ {file = "Brotli-1.0.9-cp311-cp311-win32.whl", hash = "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d"},
+ {file = "Brotli-1.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679"},
+ {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"},
+ {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"},
+ {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"},
+ {file = "Brotli-1.0.9-cp35-cp35m-win32.whl", hash = "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1"},
+ {file = "Brotli-1.0.9-cp35-cp35m-win_amd64.whl", hash = "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea"},
+ {file = "Brotli-1.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f"},
+ {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"},
+ {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"},
+ {file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"},
+ {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"},
+ {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"},
+ {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"},
+ {file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"},
+ {file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"},
+ {file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"},
+ {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"},
+ {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"},
+ {file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"},
+ {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"},
+ {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"},
+ {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"},
+ {file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"},
+ {file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"},
+ {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"},
+ {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8"},
+ {file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"},
+ {file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"},
+ {file = "Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"},
+ {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"},
+ {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"},
+ {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"},
+ {file = "Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"},
+ {file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"},
+ {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"},
+ {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"},
+ {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"},
+ {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"},
+ {file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"},
+ {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"},
+ {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"},
+ {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"},
+ {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"},
+ {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"},
+ {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"},
+ {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7"},
+ {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019"},
+ {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"},
+ {file = "Brotli-1.0.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8"},
+ {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337"},
+ {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be"},
+ {file = "Brotli-1.0.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a"},
+ {file = "Brotli-1.0.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be"},
+ {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a"},
+ {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7"},
+ {file = "Brotli-1.0.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755"},
+ {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"},
+]
certifi = [
{file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
{file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
@@ -1546,6 +1650,10 @@ exceptiongroup = [
{file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"},
{file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"},
]
+filelock = [
+ {file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"},
+ {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"},
+]
gpxpy = [
{file = "gpxpy-1.5.0.tar.gz", hash = "sha256:e6993a8945eae07a833cd304b88bbc6c3c132d63b2bf4a9b0a5d9097616b8708"},
]
diff --git a/pylint.tests.toml b/pylint.tests.toml
index 1c479e4..9e4b37b 100644
--- a/pylint.tests.toml
+++ b/pylint.tests.toml
@@ -230,7 +230,7 @@ valid-metaclass-classmethod-first-arg = ["cls"]
# ignored-parents =
# Maximum number of arguments for function / method.
-max-args = 5
+max-args = 10
# Maximum number of attributes for a class (see R0902).
max-attributes = 7
diff --git a/pyproject.toml b/pyproject.toml
index e08b92b..4e81701 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -53,6 +53,8 @@ requests = "^2.28.1"
pydantic = "^1.10.2"
termcolor = "^2.1.1"
+filelock = "^3.8.2"
+brotli = "^1.0.9"
[tool.poetry.group.docs]
optional = true
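The two new runtime dependencies back the move of GPX data out of the database: brotli compresses the files that now live on disk, and filelock serializes access to the data directory. A minimal sketch of how the two might fit together (the path layout and helper names below are illustrative assumptions, not the actual fietsboek.data.DataManager implementation):

    # Illustrative only: write a brotli-compressed GPX file below a data
    # directory and read it back, guarding writers with a lock file.
    from pathlib import Path

    import brotli
    from filelock import FileLock


    def write_gpx(data_dir: Path, track_id: int, raw_gpx: bytes) -> Path:
        """Store brotli-compressed GPX bytes below the data directory."""
        track_dir = data_dir / "tracks" / str(track_id)
        track_dir.mkdir(parents=True, exist_ok=True)
        target = track_dir / "track.gpx.br"
        with FileLock(str(target) + ".lock"):
            target.write_bytes(brotli.compress(raw_gpx, quality=11))
        return target


    def read_gpx(data_dir: Path, track_id: int) -> bytes:
        """Read back and decompress a stored GPX file."""
        target = data_dir / "tracks" / str(track_id) / "track.gpx.br"
        return brotli.decompress(target.read_bytes())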
diff --git a/tests/conftest.py b/tests/conftest.py
index 8f7f77c..a203775 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,7 +1,10 @@
+import os
+import shutil
+from pathlib import Path
+
import alembic
import alembic.config
import alembic.command
-import os
from pyramid.paster import get_appsettings
from pyramid.scripting import prepare
from pyramid.testing import DummyRequest, testConfig
@@ -11,8 +14,8 @@ import webtest
from sqlalchemy import delete, select
-from fietsboek import main
-from fietsboek import models
+from fietsboek import main, models
+from fietsboek.data import DataManager
from fietsboek.models.meta import Base
@@ -48,6 +51,21 @@ def dbengine(app_settings, ini_file):
Base.metadata.drop_all(bind=engine)
alembic.command.stamp(alembic_cfg, None, purge=True)
+@pytest.fixture
+def data_manager(app_settings):
+ return DataManager(Path(app_settings["fietsboek.data_dir"]))
+
+@pytest.fixture(autouse=True)
+def _cleanup_data(app_settings):
+ yield
+ engine = models.get_engine(app_settings)
+ connection = engine.connect()
+ for table in reversed(Base.metadata.sorted_tables):
+ connection.execute(table.delete())
+ data_dir = Path(app_settings["fietsboek.data_dir"])
+ if (data_dir / "tracks").is_dir():
+ shutil.rmtree(data_dir / "tracks")
+
@pytest.fixture(scope='session')
def app(app_settings, dbengine, tmp_path_factory):
app_settings["fietsboek.data_dir"] = str(tmp_path_factory.mktemp("data"))
diff --git a/tests/integration/test_browse.py b/tests/integration/test_browse.py
index 493239f..89d6f94 100644
--- a/tests/integration/test_browse.py
+++ b/tests/integration/test_browse.py
@@ -9,7 +9,7 @@ from fietsboek.models.track import Visibility
@contextmanager
-def added_tracks(tm, dbsession, owner):
+def added_tracks(tm, dbsession, owner, data_manager):
"""Adds some tracks to the database session.
This function should be used as a context manager and it ensures that the
@@ -33,8 +33,9 @@ def added_tracks(tm, dbsession, owner):
tagged_people=[],
)
track.date = datetime(2022, 3, 14, 9, 26, 54)
- track.gpx_data = load_gpx_asset("MyTourbook_1.gpx.gz")
dbsession.add(track)
+ dbsession.flush()
+ data_manager.initialize(track.id).compress_gpx(load_gpx_asset("MyTourbook_1.gpx.gz"))
tracks.append(track)
track = models.Track(
@@ -47,8 +48,9 @@ def added_tracks(tm, dbsession, owner):
tagged_people=[],
)
track.date = datetime(2022, 10, 29, 13, 37, 11)
- track.gpx_data = load_gpx_asset("Teasi_1.gpx.gz")
dbsession.add(track)
+ dbsession.flush()
+ data_manager.initialize(track.id).compress_gpx(load_gpx_asset("Teasi_1.gpx.gz"))
tracks.append(track)
tm.begin()
@@ -65,9 +67,9 @@ def added_tracks(tm, dbsession, owner):
tm.doom()
-def test_browse(testapp, dbsession, route_path, logged_in, tm):
+def test_browse(testapp, dbsession, route_path, logged_in, tm, data_manager):
# Ensure there are some tracks in the database
- with added_tracks(tm, dbsession, logged_in):
+ with added_tracks(tm, dbsession, logged_in, data_manager):
# Now go to the browse page
browse = testapp.get(route_path('browse'))
@@ -75,8 +77,8 @@ def test_browse(testapp, dbsession, route_path, logged_in, tm):
assert "Barfoo" in browse.text
-def test_archive(testapp, dbsession, route_path, logged_in, tm):
- with added_tracks(tm, dbsession, logged_in):
+def test_archive(testapp, dbsession, route_path, logged_in, tm, data_manager):
+ with added_tracks(tm, dbsession, logged_in, data_manager):
archive = testapp.get(
route_path('track-archive', _query=[("track_id[]", "1"), ("track_id[]", "2")])
)
diff --git a/tests/playwright/test_basic.py b/tests/playwright/test_basic.py
index b3e340d..f2031d2 100644
--- a/tests/playwright/test_basic.py
+++ b/tests/playwright/test_basic.py
@@ -18,15 +18,13 @@ def john_doe(dbaccess):
This fixture either returns the existing John or creates a new one.
"""
- query = models.User.query_by_email("john@doe.com")
- result = dbaccess.execute(query).scalar_one_or_none()
- if result:
- return result
with dbaccess:
user = models.User(name="John Doe", email="john@doe.com", is_verified=True)
user.set_password("password")
dbaccess.add(user)
dbaccess.commit()
+ dbaccess.refresh(user, ["id"])
+ dbaccess.expunge(user)
return user
@@ -113,19 +111,20 @@ def test_upload(page: Page, john_doe, app_settings, tmp_path, dbaccess):
assert track.description == "Beschreibung der tollen Tour"
-def test_edit(page: Page, john_doe, app_settings, dbaccess):
+def test_edit(page: Page, john_doe, app_settings, dbaccess, data_manager):
do_login(app_settings, page, john_doe)
with dbaccess:
+ john_doe = dbaccess.merge(john_doe)
track = models.Track(
title="Another awesome track",
visibility=Visibility.PRIVATE,
description="Another description",
)
track.date = datetime.datetime.now(datetime.timezone.utc)
- track.gpx_data = load_gpx_asset("Teasi_1.gpx.gz")
john_doe.tracks.append(track)
dbaccess.flush()
track_id = track.id
+ data_manager.initialize(track_id).compress_gpx(load_gpx_asset("Teasi_1.gpx.gz"))
dbaccess.commit()
page.goto(f"/track/{track_id}")
@@ -152,9 +151,10 @@ def test_edit(page: Page, john_doe, app_settings, dbaccess):
assert track.description == "Not so descriptive anymore"
-def test_browse(page: Page, john_doe, app_settings, dbaccess):
+def test_browse(page: Page, john_doe, app_settings, dbaccess, data_manager):
do_login(app_settings, page, john_doe)
with dbaccess:
+ john_doe = dbaccess.merge(john_doe)
track = models.Track(
title="We're looking for this track",
visibility=Visibility.PRIVATE,
@@ -162,8 +162,9 @@ def test_browse(page: Page, john_doe, app_settings, dbaccess):
type=TrackType.ORGANIC,
)
track.date = datetime.datetime.now(datetime.timezone.utc)
- track.gpx_data = load_gpx_asset("Teasi_1.gpx.gz")
john_doe.tracks.append(track)
+ dbaccess.flush()
+ data_manager.initialize(track.id).compress_gpx(load_gpx_asset("Teasi_1.gpx.gz"))
dbaccess.commit()
page.goto("/")