 .gitlab-ci.yml                      |   6
 .mypy.ini                           |  17
 fietsboek/__init__.py               |  25
 fietsboek/alembic/env.py            |   9
 fietsboek/config.py                 |  75
 fietsboek/data.py                   |   3
 fietsboek/email.py                  |  18
 fietsboek/jinja2.py                 |  44
 fietsboek/models/__init__.py        |  22
 fietsboek/models/badge.py           |   5
 fietsboek/models/comment.py         |   5
 fietsboek/models/image.py           |   5
 fietsboek/models/meta.py            |   2
 fietsboek/models/track.py           | 129
 fietsboek/models/user.py            |  96
 fietsboek/pages.py                  |  47
 fietsboek/pshell.py                 |   8
 fietsboek/routes.py                 | 112
 fietsboek/scripts/fietsctl.py       |  46
 fietsboek/security.py               |  13
 fietsboek/summaries.py              |   1
 fietsboek/updater/__init__.py       |  16
 fietsboek/updater/cli.py            |  11
 fietsboek/util.py                   | 125
 fietsboek/views/account.py          |  35
 fietsboek/views/admin.py            |  38
 fietsboek/views/browse.py           | 207
 fietsboek/views/default.py          |  85
 fietsboek/views/detail.py           |  43
 fietsboek/views/edit.py             |  31
 fietsboek/views/notfound.py         |   2
 fietsboek/views/profile.py          |  69
 fietsboek/views/tileproxy.py        | 183
 fietsboek/views/upload.py           |  68
 poetry.lock                         | 512
 pyproject.toml                      |  36
 testing.ini                         |   1
 tests/__init__.py                   |   0
 tests/integration/test_register.py  |  68
 tox.ini                             |  46
 40 files changed, 1523 insertions(+), 741 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index cb3c9ef..9cd9587 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -30,8 +30,8 @@ test-pypy:
lint:
script:
- - tox -e pylint,flake8
+ - tox -e pylint,pylint-tests,flake,black
-lint-tests:
+mypy:
script:
- - tox -e pylint-tests
+ - tox -e mypy
diff --git a/.mypy.ini b/.mypy.ini
new file mode 100644
index 0000000..ed220e3
--- /dev/null
+++ b/.mypy.ini
@@ -0,0 +1,17 @@
+[mypy]
+follow_imports = silent
+check_untyped_defs = True
+allow_redefinition = True
+exclude = fietsboek/updater/scripts/.+\.py
+
+[mypy-pyramid.*]
+ignore_missing_imports = True
+
+[mypy-sqlalchemy.*]
+ignore_missing_imports = True
+
+[mypy-webob.*]
+ignore_missing_imports = True
+
+[mypy-zope.*]
+ignore_missing_imports = True
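Two of the less common switches above deserve a note: `check_untyped_defs` makes mypy type-check the bodies of functions that have no annotations (they are skipped by default), and `allow_redefinition` lets a variable change its type when it is re-assigned in the same block. A minimal illustration of both, not part of the project:

```python
def describe(count):              # unannotated, but the body is still checked
    result = 0                    # inferred as int
    result = "zero" if count == 0 else str(count)   # ok only with allow_redefinition
    return result.upper()         # mypy now knows result is a str
```

`follow_imports = silent` suppresses errors reported from followed modules, and the per-package `ignore_missing_imports` overrides silence the "missing stubs" complaints for pyramid, sqlalchemy, webob and zope.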
diff --git a/fietsboek/__init__.py b/fietsboek/__init__.py
index fe6bf52..a248dc9 100644
--- a/fietsboek/__init__.py
+++ b/fietsboek/__init__.py
@@ -17,7 +17,7 @@ from .pages import Pages
from . import jinja2 as mod_jinja2, config as mod_config
-__VERSION__ = importlib_metadata.version('fietsboek')
+__VERSION__ = importlib_metadata.version("fietsboek")
def locale_negotiator(request):
@@ -48,8 +48,7 @@ def locale_negotiator(request):
def main(_global_config, **settings):
- """ This function returns a Pyramid WSGI application.
- """
+ """This function returns a Pyramid WSGI application."""
parsed_config = mod_config.parse(settings)
def data_manager(request):
@@ -73,15 +72,15 @@ def main(_global_config, **settings):
def pages(_request):
return page_manager
- my_session_factory = SignedCookieSessionFactory(settings['session_key'])
+ my_session_factory = SignedCookieSessionFactory(parsed_config.derive_secret("sessions"))
with Configurator(settings=settings) as config:
- config.include('pyramid_jinja2')
- config.include('.routes')
- config.include('.models')
+ config.include("pyramid_jinja2")
+ config.include(".routes")
+ config.include(".models")
config.scan()
- config.add_translation_dirs('fietsboek:locale/')
+ config.add_translation_dirs("fietsboek:locale/")
for pack in parsed_config.language_packs:
- config.add_translation_dirs(f'{pack}:locale/')
+ config.add_translation_dirs(f"{pack}:locale/")
config.set_session_factory(my_session_factory)
config.set_security_policy(SecurityPolicy())
config.set_csrf_storage_policy(CookieCSRFStoragePolicy())
@@ -93,9 +92,9 @@ def main(_global_config, **settings):
config.add_request_method(config_, name="config", reify=True)
jinja2_env = config.get_jinja2_environment()
- jinja2_env.filters['format_decimal'] = mod_jinja2.filter_format_decimal
- jinja2_env.filters['format_datetime'] = mod_jinja2.filter_format_datetime
- jinja2_env.filters['local_datetime'] = mod_jinja2.filter_local_datetime
- jinja2_env.globals['embed_tile_layers'] = mod_jinja2.global_embed_tile_layers
+ jinja2_env.filters["format_decimal"] = mod_jinja2.filter_format_decimal
+ jinja2_env.filters["format_datetime"] = mod_jinja2.filter_format_datetime
+ jinja2_env.filters["local_datetime"] = mod_jinja2.filter_local_datetime
+ jinja2_env.globals["embed_tile_layers"] = mod_jinja2.global_embed_tile_layers
return config.make_wsgi_app()
diff --git a/fietsboek/alembic/env.py b/fietsboek/alembic/env.py
index ae79f02..644d98b 100644
--- a/fietsboek/alembic/env.py
+++ b/fietsboek/alembic/env.py
@@ -25,7 +25,7 @@ def run_migrations_offline():
script output.
"""
- context.configure(url=settings['sqlalchemy.url'])
+ context.configure(url=settings["sqlalchemy.url"])
with context.begin_transaction():
context.run_migrations()
@@ -37,13 +37,10 @@ def run_migrations_online():
and associate a connection with the context.
"""
- engine = engine_from_config(settings, prefix='sqlalchemy.')
+ engine = engine_from_config(settings, prefix="sqlalchemy.")
connection = engine.connect()
- context.configure(
- connection=connection,
- target_metadata=target_metadata
- )
+ context.configure(connection=connection, target_metadata=target_metadata)
try:
with context.begin_transaction():
diff --git a/fietsboek/config.py b/fietsboek/config.py
index b8a2cfb..41f6a64 100644
--- a/fietsboek/config.py
+++ b/fietsboek/config.py
@@ -12,6 +12,7 @@ Most of the logic is handled by pydantic_.
.. _pydantic: https://pydantic-docs.helpmanual.io/
"""
# pylint: disable=no-name-in-module,no-self-argument,too-few-public-methods
+import hashlib
import logging
import re
import typing
@@ -20,7 +21,12 @@ from enum import Enum
import pydantic
from pydantic import (
- BaseModel, Field, AnyUrl, DirectoryPath, validator, SecretStr,
+ BaseModel,
+ Field,
+ AnyUrl,
+ DirectoryPath,
+ validator,
+ SecretStr,
)
from pyramid import settings
from termcolor import colored
@@ -45,8 +51,14 @@ KNOWN_PYRAMID_SETTINGS = {
}
KNOWN_TILE_LAYERS = [
- "osm", "osmde", "satellite", "opentopo", "topplusopen",
- "opensea", "cycling", "hiking",
+ "osm",
+ "osmde",
+ "satellite",
+ "opentopo",
+ "topplusopen",
+ "opensea",
+ "cycling",
+ "hiking",
]
@@ -60,15 +72,16 @@ class ValidationError(Exception):
self.errors = errors
def __str__(self):
- lines = ['']
+ lines = [""]
for where, error in self.errors:
- lines.append(colored(f'Error in {where}:', 'red'))
+ lines.append(colored(f"Error in {where}:", "red"))
lines.append(str(error))
return "\n".join(lines)
class LayerType(Enum):
"""Enum to distinguish base layers and overlay layers."""
+
BASE = "base"
OVERLAY = "overlay"
@@ -79,6 +92,7 @@ class LayerAccess(Enum):
Note that in the future, a finer-grained distinction might be possible.
"""
+
PUBLIC = "public"
RESTRICTED = "restricted"
@@ -144,7 +158,7 @@ class Config(BaseModel):
language_packs: PyramidList = Field([], alias="fietsboek.language_packs")
"""Additional language packs to load."""
- available_locales: PyramidList = ["en", "de"]
+ available_locales: PyramidList = PyramidList(["en", "de"])
"""Available locales."""
email_from: str = Field(alias="email.from")
@@ -162,8 +176,9 @@ class Config(BaseModel):
pages: PyramidList = Field([], alias="fietsboek.pages")
"""Custom pages."""
- default_tile_layers: PyramidList = Field(KNOWN_TILE_LAYERS,
- alias="fietsboek.default_tile_layers")
+ default_tile_layers: PyramidList = Field(
+ KNOWN_TILE_LAYERS, alias="fietsboek.default_tile_layers"
+ )
"""The subset of the default tile layers that should be enabled.
By default, that's all of them.
@@ -175,8 +190,7 @@ class Config(BaseModel):
thunderforest_maps: PyramidList = Field([], alias="thunderforest.maps")
"""List of enabled Thunderforest maps."""
- thunderforest_access: LayerAccess = Field(LayerAccess.RESTRICTED,
- alias="thunderforest.access")
+ thunderforest_access: LayerAccess = Field(LayerAccess.RESTRICTED, alias="thunderforest.access")
"""Thunderforest access restriction."""
disable_tile_proxy: bool = Field(False, alias="fietsboek.tile_proxy.disable")
@@ -192,13 +206,34 @@ class Config(BaseModel):
"""
if value == "<EDIT THIS>":
raise ValueError("You need to edit the default session key!")
+ return value
@validator("email_smtp_url")
def _known_smtp_url(cls, value):
"""Ensures that the SMTP URL is valid."""
parsed = urllib.parse.urlparse(value)
- if parsed.scheme not in {'debug', 'smtp', 'smtp+ssl', 'smtp+starttls'}:
+ if parsed.scheme not in {"debug", "smtp", "smtp+ssl", "smtp+starttls"}:
raise ValueError(f"Unknown mailing scheme {parsed.scheme}".strip())
+ return value
+
+ def derive_secret(self, what_for):
+ """Derive a secret for other parts of the application.
+
+ All secrets are derived from ``secret_key`` in a deterministic way. See
+ https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/security.html#admonishment-against-secret-sharing
+ on why ``secret_key`` should not be used directly.
+
+ :param what_for: What the secret is used for. Passing the same "use
+ case" will generate the same secret.
+ :type what_for: str
+ :return: The generated secret.
+ :rtype: str
+ """
+ hasher = hashlib.sha256()
+ hasher.update(f"{len(what_for)}".encode("utf-8"))
+ hasher.update(self.session_key.encode("utf-8"))
+ hasher.update(what_for.encode("utf-8"))
+ return hasher.hexdigest()
def parse(config):
@@ -221,27 +256,27 @@ def parse(config):
continue
provider_id = match.group(1)
- prefix = f'{value}.'
- inner = {k[len(prefix):]: v for (k, v) in config.items() if k.startswith(prefix)}
- inner['layer_id'] = provider_id
- inner['name'] = value
+ prefix = f"{value}."
+ inner = {k[len(prefix) :]: v for (k, v) in config.items() if k.startswith(prefix)}
+ inner["layer_id"] = provider_id
+ inner["name"] = value
try:
layer_config = TileLayerConfig.parse_obj(inner)
tile_layers.append(layer_config)
except pydantic.ValidationError as validation_error:
- errors.append((f'tile layer {provider_id}', validation_error))
+ errors.append((f"tile layer {provider_id}", validation_error))
keys.discard(key)
for field in TileLayerConfig.__fields__.values():
- keys.discard(f'{prefix}{_field_name(field)}')
+ keys.discard(f"{prefix}{_field_name(field)}")
- config['tile_layers'] = tile_layers
+ config["tile_layers"] = tile_layers
# Now we can parse the main config
try:
config = Config.parse_obj(config)
except pydantic.ValidationError as validation_error:
- errors.append(('configuration', validation_error))
+ errors.append(("configuration", validation_error))
if errors:
raise ValidationError(errors)
@@ -257,7 +292,7 @@ def parse(config):
def _field_name(field):
- alias = getattr(field, 'alias', None)
+ alias = getattr(field, "alias", None)
if alias:
return alias
return field.name
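The new `Config.derive_secret` is the counterpart to the session-factory change in `fietsboek/__init__.py`: instead of handing the raw `session_key` to every consumer, each use case gets its own deterministic secret hashed out of the master key, which is the point of the Pyramid "secret sharing" admonishment linked in the docstring. A standalone sketch of the same construction (not project code; the purpose strings are examples):

```python
import hashlib

def derive_secret(session_key: str, what_for: str) -> str:
    """Deterministically derive a purpose-scoped secret from the master key."""
    hasher = hashlib.sha256()
    # The length prefix makes the (session_key, what_for) boundary unambiguous.
    hasher.update(f"{len(what_for)}".encode("utf-8"))
    hasher.update(session_key.encode("utf-8"))
    hasher.update(what_for.encode("utf-8"))
    return hasher.hexdigest()

# Same purpose, same secret -- restarting the app keeps sessions valid:
assert derive_secret("master-key", "sessions") == derive_secret("master-key", "sessions")
# Different purposes give unrelated secrets, so leaking one of them does not
# hand out the others or the master key itself:
assert derive_secret("master-key", "sessions") != derive_secret("master-key", "csrf")
```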
diff --git a/fietsboek/data.py b/fietsboek/data.py
index c1cd214..bd4222b 100644
--- a/fietsboek/data.py
+++ b/fietsboek/data.py
@@ -76,6 +76,7 @@ class DataManager:
:param track_id: The ID of the track.
:type track_id: int
"""
+
def log_error(_, path, exc_info):
LOGGER.warning("Failed to remove %s", path, exc_info=exc_info)
@@ -132,7 +133,7 @@ class DataManager:
"""
# Be sure to not delete anything else than the image file
image_id = secure_filename(image_id)
- if '/' in image_id or '\\' in image_id:
+ if "/" in image_id or "\\" in image_id:
return
path = self.image_path(track_id, image_id)
path.unlink()
diff --git a/fietsboek/email.py b/fietsboek/email.py
index 1ebb740..78b0493 100644
--- a/fietsboek/email.py
+++ b/fietsboek/email.py
@@ -25,12 +25,12 @@ def prepare_message(sender, addr_to, subject):
:rtype: email.message.EmailMessage
"""
message = EmailMessage()
- message['To'] = addr_to
- if '<' not in sender and '>' not in sender:
- message['From'] = f'Fietsboek <{sender}>'
+ message["To"] = addr_to
+ if "<" not in sender and ">" not in sender:
+ message["From"] = f"Fietsboek <{sender}>"
else:
- message['From'] = sender
- message['Subject'] = subject
+ message["From"] = sender
+ message["Subject"] = subject
return message
@@ -49,15 +49,15 @@ def send_message(server_url, username, password, message):
:type message: email.message.EmailMessage
"""
parsed_url = urlparse(server_url)
- if parsed_url.scheme == 'debug':
+ if parsed_url.scheme == "debug":
print(message, file=sys.stderr)
return
try:
- if parsed_url.scheme == 'smtp':
+ if parsed_url.scheme == "smtp":
client = smtplib.SMTP(parsed_url.hostname, parsed_url.port)
- elif parsed_url.scheme == 'smtp+ssl':
+ elif parsed_url.scheme == "smtp+ssl":
client = smtplib.SMTP_SSL(parsed_url.hostname, parsed_url.port)
- elif parsed_url.scheme == 'smtp+starttls':
+ elif parsed_url.scheme == "smtp+starttls":
client = smtplib.SMTP(parsed_url.hostname, parsed_url.port)
client.starttls()
if username and password:
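The scheme of the configured SMTP URL picks the transport: `debug` just dumps the message to stderr, `smtp` uses a plain connection, `smtp+ssl` wraps it in TLS from the start, and `smtp+starttls` upgrades after connecting. A usage sketch of the two helpers as they appear in this diff (addresses and URL are placeholders):

```python
from fietsboek import email as mod_email

# A bare sender address is wrapped as "Fietsboek <...>" by prepare_message.
message = mod_email.prepare_message(
    sender="noreply@example.com",
    addr_to="rider@example.com",
    subject="Welcome to Fietsboek",
)
message.set_content("Have a nice ride!")

# With the "debug" scheme nothing is sent; the message is printed to stderr,
# which is handy for local development.
mod_email.send_message("debug://localhost", None, None, message)
```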
diff --git a/fietsboek/jinja2.py b/fietsboek/jinja2.py
index e7ef522..6e5e7b6 100644
--- a/fietsboek/jinja2.py
+++ b/fietsboek/jinja2.py
@@ -22,7 +22,7 @@ def filter_format_decimal(ctx, value):
:return: The formatted decimal.
:rtype: str
"""
- request = ctx.get('request')
+ request = ctx.get("request")
locale = request.localizer.locale_name
return format_decimal(value, locale=locale)
@@ -38,7 +38,7 @@ def filter_format_datetime(ctx, value):
:return: The formatted date.
:rtype: str
"""
- request = ctx.get('request')
+ request = ctx.get("request")
locale = request.localizer.locale_name
return format_datetime(value, locale=locale)
@@ -69,7 +69,7 @@ def filter_local_datetime(ctx, value):
else:
value = value.astimezone(datetime.timezone.utc)
- request = ctx.get('request')
+ request = ctx.get("request")
locale = request.localizer.locale_name
fallback = Markup.escape(format_datetime(value, locale=locale))
@@ -93,24 +93,34 @@ def global_embed_tile_layers(request):
"""
# pylint: disable=import-outside-toplevel,cyclic-import
from .views import tileproxy
+
tile_sources = tileproxy.sources_for(request)
if request.config.disable_tile_proxy:
+
def _url(source):
return source.url_template
+
else:
+
def _url(source):
- return (request.route_url("tile-proxy", provider=source.key, x="{x}", y="{y}", z="{z}")
- .replace("%7Bx%7D", "{x}")
- .replace("%7By%7D", "{y}")
- .replace("%7Bz%7D", "{z}"))
-
- return Markup(json.dumps([
- {
- "name": source.name,
- "url": _url(source),
- "attribution": source.attribution,
- "type": source.layer_type.value,
- }
- for source in tile_sources
- ]))
+ return (
+ request.route_url("tile-proxy", provider=source.key, x="{x}", y="{y}", z="{z}")
+ .replace("%7Bx%7D", "{x}")
+ .replace("%7By%7D", "{y}")
+ .replace("%7Bz%7D", "{z}")
+ )
+
+ return Markup(
+ json.dumps(
+ [
+ {
+ "name": source.name,
+ "url": _url(source),
+ "attribution": source.attribution,
+ "type": source.layer_type.value,
+ }
+ for source in tile_sources
+ ]
+ )
+ )
diff --git a/fietsboek/models/__init__.py b/fietsboek/models/__init__.py
index 53feb22..828b689 100644
--- a/fietsboek/models/__init__.py
+++ b/fietsboek/models/__init__.py
@@ -21,7 +21,7 @@ from .image import ImageMetadata # flake8: noqa
configure_mappers()
-def get_engine(settings, prefix='sqlalchemy.'):
+def get_engine(settings, prefix="sqlalchemy."):
"""Create an SQL Engine from the given settings."""
return engine_from_config(settings, prefix)
@@ -89,9 +89,7 @@ def get_tm_session(session_factory, transaction_manager, request=None):
request = dbsession.info["request"]
"""
dbsession = session_factory(info={"request": request})
- zope.sqlalchemy.register(
- dbsession, transaction_manager=transaction_manager
- )
+ zope.sqlalchemy.register(dbsession, transaction_manager=transaction_manager)
return dbsession
@@ -103,7 +101,7 @@ def includeme(config):
"""
settings = config.get_settings()
- settings['tm.manager_hook'] = 'pyramid_tm.explicit_manager'
+ settings["tm.manager_hook"] = "pyramid_tm.explicit_manager"
# Use ``pyramid_tm`` to hook the transaction lifecycle to the request.
# Note: the packages ``pyramid_tm`` and ``transaction`` work together to
@@ -111,28 +109,26 @@ def includeme(config):
# If your project migrates away from ``pyramid_tm``, you may need to use a
# Pyramid callback function to close the database session after each
# request.
- config.include('pyramid_tm')
+ config.include("pyramid_tm")
# use pyramid_retry to retry a request when transient exceptions occur
- config.include('pyramid_retry')
+ config.include("pyramid_retry")
# hook to share the dbengine fixture in testing
- dbengine = settings.get('dbengine')
+ dbengine = settings.get("dbengine")
if not dbengine:
dbengine = get_engine(settings)
session_factory = get_session_factory(dbengine)
- config.registry['dbsession_factory'] = session_factory
+ config.registry["dbsession_factory"] = session_factory
# make request.dbsession available for use in Pyramid
def dbsession(request):
# hook to share the dbsession fixture in testing
- dbsession = request.environ.get('app.dbsession')
+ dbsession = request.environ.get("app.dbsession")
if dbsession is None:
# request.tm is the transaction manager used by pyramid_tm
- dbsession = get_tm_session(
- session_factory, request.tm, request=request
- )
+ dbsession = get_tm_session(session_factory, request.tm, request=request)
return dbsession
config.add_request_method(dbsession, reify=True)
diff --git a/fietsboek/models/badge.py b/fietsboek/models/badge.py
index 3bbe714..f16e9bf 100644
--- a/fietsboek/models/badge.py
+++ b/fietsboek/models/badge.py
@@ -27,13 +27,14 @@ class Badge(Base):
:ivar tracks: Tracks associated with this badge.
:vartype tracks: list[fietsboek.models.track.Track]
"""
+
# pylint: disable=too-few-public-methods
- __tablename__ = 'badges'
+ __tablename__ = "badges"
id = Column(Integer, primary_key=True)
title = Column(Text)
image = Column(LargeBinary)
- tracks = relationship('Track', secondary='track_badge_assoc', back_populates='badges')
+ tracks = relationship("Track", secondary="track_badge_assoc", back_populates="badges")
@classmethod
def factory(cls, request):
diff --git a/fietsboek/models/comment.py b/fietsboek/models/comment.py
index 23f1871..386dfce 100644
--- a/fietsboek/models/comment.py
+++ b/fietsboek/models/comment.py
@@ -31,6 +31,7 @@ class Comment(Base):
:ivar track: Track that the comment belongs to.
:vartype track: fietsboek.model.track.Track
"""
+
# pylint: disable=too-few-public-methods
__tablename__ = "comments"
id = Column(Integer, primary_key=True)
@@ -40,5 +41,5 @@ class Comment(Base):
title = Column(Text)
text = Column(Text)
- author = relationship('User', back_populates='comments')
- track = relationship('Track', back_populates='comments')
+ author = relationship("User", back_populates="comments")
+ track = relationship("Track", back_populates="comments")
diff --git a/fietsboek/models/image.py b/fietsboek/models/image.py
index 4037619..cf507ec 100644
--- a/fietsboek/models/image.py
+++ b/fietsboek/models/image.py
@@ -30,6 +30,7 @@ class ImageMetadata(Base):
:ivar track: The track that this image belongs to.
:vartype track: fietsboek.models.track.Track
"""
+
# pylint: disable=too-few-public-methods
__tablename__ = "image_metadata"
id = Column(Integer, primary_key=True)
@@ -37,9 +38,9 @@ class ImageMetadata(Base):
image_name = Column(Text, nullable=False)
description = Column(Text)
- track = relationship('Track', back_populates='images')
+ track = relationship("Track", back_populates="images")
- __table_args__ = (UniqueConstraint('track_id', 'image_name'),)
+ __table_args__ = (UniqueConstraint("track_id", "image_name"),)
@classmethod
def get_or_create(cls, dbsession, track, image_name):
diff --git a/fietsboek/models/meta.py b/fietsboek/models/meta.py
index 87c82b2..6b11a09 100644
--- a/fietsboek/models/meta.py
+++ b/fietsboek/models/meta.py
@@ -10,7 +10,7 @@ NAMING_CONVENTION = {
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
- "pk": "pk_%(table_name)s"
+ "pk": "pk_%(table_name)s",
}
metadata = MetaData(naming_convention=NAMING_CONVENTION)
diff --git a/fietsboek/models/track.py b/fietsboek/models/track.py
index ce7b4d0..9eeae55 100644
--- a/fietsboek/models/track.py
+++ b/fietsboek/models/track.py
@@ -35,7 +35,12 @@ from sqlalchemy.orm import relationship
from pyramid.httpexceptions import HTTPNotFound
from pyramid.i18n import TranslationString as _
from pyramid.authorization import (
- Allow, Everyone, Authenticated, ALL_PERMISSIONS, ACLHelper, ACLAllowed,
+ Allow,
+ Everyone,
+ Authenticated,
+ ALL_PERMISSIONS,
+ ACLHelper,
+ ACLAllowed,
)
from markupsafe import Markup
@@ -58,12 +63,13 @@ class Tag(Base):
:ivar track: The track object that this tag belongs to.
:vartype track: Track
"""
+
# pylint: disable=too-few-public-methods
- __tablename__ = 'tags'
+ __tablename__ = "tags"
track_id = Column(Integer, ForeignKey("tracks.id"), primary_key=True)
tag = Column(Text, primary_key=True)
- track = relationship('Track', back_populates='tags')
+ track = relationship("Track", back_populates="tags")
class Visibility(enum.Enum):
@@ -72,6 +78,7 @@ class Visibility(enum.Enum):
Note that the track is always visible to tagged people and via the sharing
link.
"""
+
PRIVATE = enum.auto()
"""Only the owner of the track can see it."""
FRIENDS = enum.auto()
@@ -178,9 +185,10 @@ class Track(Base):
:ivar images: Metadata of the images saved for this track.
:vartype images: list[fietsboek.models.image.ImageMetadata]
"""
- __tablename__ = 'tracks'
+
+ __tablename__ = "tracks"
id = Column(Integer, primary_key=True)
- owner_id = Column(Integer, ForeignKey('users.id'))
+ owner_id = Column(Integer, ForeignKey("users.id"))
title = Column(Text)
description = Column(Text)
date_raw = Column(DateTime)
@@ -190,15 +198,17 @@ class Track(Base):
link_secret = Column(Text)
type = Column(Enum(TrackType))
- owner = relationship('User', back_populates='tracks')
- cache = relationship('TrackCache', back_populates='track', uselist=False,
- cascade="all, delete-orphan")
- tagged_people = relationship('User', secondary=track_people_assoc,
- back_populates='tagged_tracks')
- badges = relationship('Badge', secondary=track_badge_assoc, back_populates='tracks')
- tags = relationship('Tag', back_populates='track', cascade="all, delete-orphan")
- comments = relationship('Comment', back_populates='track', cascade="all, delete-orphan")
- images = relationship('ImageMetadata', back_populates='track', cascade="all, delete-orphan")
+ owner = relationship("User", back_populates="tracks")
+ cache = relationship(
+ "TrackCache", back_populates="track", uselist=False, cascade="all, delete-orphan"
+ )
+ tagged_people = relationship(
+ "User", secondary=track_people_assoc, back_populates="tagged_tracks"
+ )
+ badges = relationship("Badge", secondary=track_badge_assoc, back_populates="tracks")
+ tags = relationship("Tag", back_populates="track", cascade="all, delete-orphan")
+ comments = relationship("Comment", back_populates="track", cascade="all, delete-orphan")
+ images = relationship("ImageMetadata", back_populates="track", cascade="all, delete-orphan")
@classmethod
def factory(cls, request):
@@ -214,7 +224,7 @@ class Track(Base):
:return: The track.
:type: Track
"""
- track_id = request.matchdict['track_id']
+ track_id = request.matchdict["track_id"]
query = select(cls).filter_by(id=track_id)
track = request.dbsession.execute(query).scalar_one_or_none()
if track is None:
@@ -224,24 +234,27 @@ class Track(Base):
def __acl__(self):
# Basic ACL: Permissions for the admin, the owner and the share link
acl = [
- (Allow, 'group:admins', ALL_PERMISSIONS),
- (Allow, f'user:{self.owner_id}',
- ['track.view', 'track.edit', 'track.unshare', 'track.comment']),
- (Allow, f'secret:{self.link_secret}', 'track.view'),
+ (Allow, "group:admins", ALL_PERMISSIONS),
+ (
+ Allow,
+ f"user:{self.owner_id}",
+ ["track.view", "track.edit", "track.unshare", "track.comment"],
+ ),
+ (Allow, f"secret:{self.link_secret}", "track.view"),
]
# Tagged people may always see the track
for tagged in self.tagged_people:
- acl.append((Allow, f'user:{tagged.id}', ['track.view', 'track.comment']))
+ acl.append((Allow, f"user:{tagged.id}", ["track.view", "track.comment"]))
if self.visibility == Visibility.PUBLIC:
- acl.append((Allow, Everyone, 'track.view'))
- acl.append((Allow, Authenticated, 'track.comment'))
+ acl.append((Allow, Everyone, "track.view"))
+ acl.append((Allow, Authenticated, "track.comment"))
elif self.visibility == Visibility.LOGGED_IN:
- acl.append((Allow, Authenticated, ['track.view', 'track.comment']))
+ acl.append((Allow, Authenticated, ["track.view", "track.comment"]))
elif self.visibility == Visibility.FRIENDS:
acl.extend(
- (Allow, f'user:{friend.id}', ['track.view', 'track.comment'])
+ (Allow, f"user:{friend.id}", ["track.view", "track.comment"])
for friend in self.owner.get_friends()
)
elif self.visibility == Visibility.FRIENDS_TAGGED:
@@ -251,7 +264,7 @@ class Track(Base):
for friend in person.get_friends()
)
acl.extend(
- (Allow, f'user:{friend.id}', ['track.view', 'track.comment'])
+ (Allow, f"user:{friend.id}", ["track.view", "track.comment"])
for friend in all_friends
)
return acl
@@ -294,8 +307,11 @@ class Track(Base):
@date.setter
def date(self, value):
if value.tzinfo is None:
- LOGGER.debug('Non-aware datetime passed (track_id=%d, value=%s), assuming offset=0',
- self.id or -1, value)
+ LOGGER.debug(
+ "Non-aware datetime passed (track_id=%d, value=%s), assuming offset=0",
+ self.id or -1,
+ value,
+ )
self.date_tz = 0
else:
self.date_tz = value.tzinfo.utcoffset(value).total_seconds() // 60
@@ -325,7 +341,7 @@ class Track(Base):
if user:
principals.append(Authenticated)
principals.extend(user.principals())
- result = ACLHelper().permits(self, principals, 'track.view')
+ result = ACLHelper().permits(self, principals, "track.view")
return isinstance(result, ACLAllowed)
def ensure_cache(self):
@@ -519,19 +535,24 @@ class Track(Base):
:return: The generated HTML.
:rtype: Markup
"""
+
def number(num):
return format_decimal(num, locale=localizer.locale_name)
rows = [
- (_("tooltip.table.length"), f'{number(round(self.length / 1000, 2))} km'),
- (_("tooltip.table.uphill"), f'{number(round(self.uphill, 2))} m'),
- (_("tooltip.table.downhill"), f'{number(round(self.downhill, 2))} m'),
- (_("tooltip.table.moving_time"), f'{self.moving_time}'),
- (_("tooltip.table.stopped_time"), f'{self.stopped_time}'),
- (_("tooltip.table.max_speed"),
- f'{number(round(util.mps_to_kph(self.max_speed), 2))} km/h'),
- (_("tooltip.table.avg_speed"),
- f'{number(round(util.mps_to_kph(self.avg_speed), 2))} km/h'),
+ (_("tooltip.table.length"), f"{number(round(self.length / 1000, 2))} km"),
+ (_("tooltip.table.uphill"), f"{number(round(self.uphill, 2))} m"),
+ (_("tooltip.table.downhill"), f"{number(round(self.downhill, 2))} m"),
+ (_("tooltip.table.moving_time"), f"{self.moving_time}"),
+ (_("tooltip.table.stopped_time"), f"{self.stopped_time}"),
+ (
+ _("tooltip.table.max_speed"),
+ f"{number(round(util.mps_to_kph(self.max_speed), 2))} km/h",
+ ),
+ (
+ _("tooltip.table.avg_speed"),
+ f"{number(round(util.mps_to_kph(self.avg_speed), 2))} km/h",
+ ),
]
rows = [
f"<tr><td>{localizer.translate(name)}</td><td>{value}</td></tr>"
@@ -574,9 +595,10 @@ class TrackCache(Base):
:ivar track: The track that belongs to this cache entry.
:vartype track: Track
"""
+
# pylint: disable=too-many-instance-attributes,too-few-public-methods
- __tablename__ = 'track_cache'
- track_id = Column(Integer, ForeignKey('tracks.id'), primary_key=True)
+ __tablename__ = "track_cache"
+ track_id = Column(Integer, ForeignKey("tracks.id"), primary_key=True)
length = Column(Float)
uphill = Column(Float)
downhill = Column(Float)
@@ -589,7 +611,7 @@ class TrackCache(Base):
end_time_raw = Column(DateTime)
end_time_tz = Column(Integer)
- track = relationship('Track', back_populates='cache')
+ track = relationship("Track", back_populates="cache")
@property
def start_time(self):
@@ -607,8 +629,11 @@ class TrackCache(Base):
@start_time.setter
def start_time(self, value):
if value.tzinfo is None:
- LOGGER.debug('Non-aware datetime passed (cache_id=%d, value=%s), assuming offset=0',
- self.id or -1, value)
+ LOGGER.debug(
+ "Non-aware datetime passed (cache_id=%d, value=%s), assuming offset=0",
+ self.id or -1,
+ value,
+ )
self.start_time_tz = 0
else:
self.start_time_tz = value.tzinfo.utcoffset(value).total_seconds() // 60
@@ -630,8 +655,11 @@ class TrackCache(Base):
@end_time.setter
def end_time(self, value):
if value.tzinfo is None:
- LOGGER.debug('Non-aware datetime passed (cache_id=%d, value=%s), assuming offset=0',
- self.id or -1, value)
+ LOGGER.debug(
+ "Non-aware datetime passed (cache_id=%d, value=%s), assuming offset=0",
+ self.id or -1,
+ value,
+ )
self.end_time_tz = 0
else:
self.end_time_tz = value.tzinfo.utcoffset(value).total_seconds() // 60
@@ -655,14 +683,15 @@ class Upload(Base):
:ivar owner: Uploader of this track.
:vartype owner: fietsboek.model.user.User
"""
+
# pylint: disable=too-many-instance-attributes,too-few-public-methods
- __tablename__ = 'uploads'
+ __tablename__ = "uploads"
id = Column(Integer, primary_key=True)
uploaded_at = Column(DateTime)
- owner_id = Column(Integer, ForeignKey('users.id'))
+ owner_id = Column(Integer, ForeignKey("users.id"))
gpx = Column(LargeBinary)
- owner = relationship('User', back_populates='uploads')
+ owner = relationship("User", back_populates="uploads")
@classmethod
def factory(cls, request):
@@ -678,7 +707,7 @@ class Upload(Base):
:return: The upload.
:type: Track
"""
- query = select(cls).filter_by(id=request.matchdict['upload_id'])
+ query = select(cls).filter_by(id=request.matchdict["upload_id"])
upload = request.dbsession.execute(query).scalar_one_or_none()
if upload is None:
raise HTTPNotFound()
@@ -686,8 +715,8 @@ class Upload(Base):
def __acl__(self):
return [
- (Allow, 'group:admins', ALL_PERMISSIONS),
- (Allow, f'user:{self.owner_id}', 'upload.finish'),
+ (Allow, "group:admins", ALL_PERMISSIONS),
+ (Allow, f"user:{self.owner_id}", "upload.finish"),
]
@property
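The `__acl__` methods reformatted above are ordinary Pyramid ACLs: `ACLHelper.permits` walks the list top to bottom and the first entry matching both a principal and the requested permission decides the outcome, which is also how `Track.is_visible_to` answers its question a few hunks earlier. A minimal, self-contained evaluation sketch (stand-in context, not project code) using the same principal naming:

```python
from pyramid.authorization import ACLAllowed, ACLHelper, Allow, Authenticated, Everyone

class PublicTrack:
    """Stand-in with the ACL shape a public Track ends up with (owner id 42)."""

    def __acl__(self):
        return [
            (Allow, "user:42", ["track.view", "track.edit", "track.unshare", "track.comment"]),
            (Allow, Everyone, "track.view"),
            (Allow, Authenticated, "track.comment"),
        ]

helper = ACLHelper()
anonymous = [Everyone]
owner = [Everyone, Authenticated, "user:42"]

assert isinstance(helper.permits(PublicTrack(), anonymous, "track.view"), ACLAllowed)
assert not isinstance(helper.permits(PublicTrack(), anonymous, "track.edit"), ACLAllowed)
assert isinstance(helper.permits(PublicTrack(), owner, "track.edit"), ACLAllowed)
```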
diff --git a/fietsboek/models/user.py b/fietsboek/models/user.py
index 3a267d8..0fe7877 100644
--- a/fietsboek/models/user.py
+++ b/fietsboek/models/user.py
@@ -41,10 +41,10 @@ class PasswordMismatch(Exception):
# The parameters were chosen according to the documentation in
# https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt
SCRYPT_PARAMETERS = {
- 'length': 32,
- 'n': 2**14,
- 'r': 8,
- 'p': 1,
+ "length": 32,
+ "n": 2**14,
+ "r": 8,
+ "p": 1,
}
SALT_LENGTH = 32
@@ -87,7 +87,8 @@ class User(Base):
:ivar comments: List of comments left by this user.
:vartype comments: list[fietsboek.model.comment.Comment]
"""
- __tablename__ = 'users'
+
+ __tablename__ = "users"
id = Column(Integer, primary_key=True)
name = Column(Text)
password = Column(LargeBinary)
@@ -96,18 +97,27 @@ class User(Base):
is_admin = Column(Boolean, default=False)
is_verified = Column(Boolean, default=False)
- tracks = relationship('Track', back_populates='owner', cascade="all, delete-orphan")
- tagged_tracks = relationship('Track', secondary='track_people_assoc',
- back_populates='tagged_people')
- uploads = relationship('Upload', back_populates='owner', cascade="all, delete-orphan")
- tokens = relationship('Token', back_populates='user', cascade="all, delete-orphan")
- comments = relationship('Comment', back_populates='author', cascade="all, delete-orphan")
+ tracks = relationship("Track", back_populates="owner", cascade="all, delete-orphan")
+ tagged_tracks = relationship(
+ "Track", secondary="track_people_assoc", back_populates="tagged_people"
+ )
+ uploads = relationship("Upload", back_populates="owner", cascade="all, delete-orphan")
+ tokens = relationship("Token", back_populates="user", cascade="all, delete-orphan")
+ comments = relationship("Comment", back_populates="author", cascade="all, delete-orphan")
# We don't use them, but include them to ensure our cascading works
- friends_1 = relationship('User', secondary='friends_assoc', back_populates='friends_2',
- foreign_keys=[friends_assoc.c.user_1_id])
- friends_2 = relationship('User', secondary='friends_assoc', back_populates='friends_1',
- foreign_keys=[friends_assoc.c.user_2_id])
+ friends_1 = relationship(
+ "User",
+ secondary="friends_assoc",
+ back_populates="friends_2",
+ foreign_keys=[friends_assoc.c.user_1_id],
+ )
+ friends_2 = relationship(
+ "User",
+ secondary="friends_assoc",
+ back_populates="friends_1",
+ foreign_keys=[friends_assoc.c.user_2_id],
+ )
@classmethod
def query_by_email(cls, email):
@@ -131,7 +141,7 @@ class User(Base):
:param new_password: The new password of the user.
:type new_password: str
"""
- new_password = new_password.encode('utf-8')
+ new_password = new_password.encode("utf-8")
salt = secrets.token_bytes(SALT_LENGTH)
scrypt = Scrypt(salt=salt, **SCRYPT_PARAMETERS)
password = scrypt.derive(new_password)
@@ -148,7 +158,7 @@ class User(Base):
:param password: The password to check.
:type password: str
"""
- password = password.encode('utf-8')
+ password = password.encode("utf-8")
scrypt = Scrypt(salt=self.salt, **SCRYPT_PARAMETERS)
try:
scrypt.verify(password, self.password)
@@ -164,9 +174,9 @@ class User(Base):
:return: The security principals that this user fulfills.
:rtype: list[str]
"""
- principals = [f'user:{self.id}']
+ principals = [f"user:{self.id}"]
if self.is_admin:
- principals.append('group:admins')
+ principals.append("group:admins")
return principals
def all_tracks_query(self):
@@ -191,6 +201,7 @@ class User(Base):
# Late import to avoid cycles
# pylint: disable=import-outside-toplevel
from .track import Track
+
own = select(Track).where(with_parent(self, User.tracks))
friends = select(Track).where(with_parent(self, User.tagged_tracks))
# Create a fresh select so we can apply filter operations
@@ -214,6 +225,7 @@ class User(Base):
# Late import to avoid cycles
# pylint: disable=import-outside-toplevel,protected-access
from .track import Track, Visibility, track_people_assoc
+
# We build the list of visible tracks in multiple steps, and then union
# them later.
queries = []
@@ -233,8 +245,9 @@ class User(Base):
select(Track)
# The owner also counts as a "tagged person", so we need to
# include FRIENDS_TAGGED here as well.
- .where(Track.visibility.in_([Visibility.FRIENDS, Visibility.FRIENDS_TAGGED]))
- .where(Track.owner_id.in_(friend_ids))
+ .where(Track.visibility.in_([Visibility.FRIENDS, Visibility.FRIENDS_TAGGED])).where(
+ Track.owner_id.in_(friend_ids)
+ )
)
# Step 5: Am I a friend of a tagged person?
# We do this via a big join:
@@ -261,12 +274,12 @@ class User(Base):
return union(*queries)
def _friend_query(self):
- qry1 = (select(User)
- .filter(friends_assoc.c.user_1_id == self.id,
- friends_assoc.c.user_2_id == User.id))
- qry2 = (select(User)
- .filter(friends_assoc.c.user_2_id == self.id,
- friends_assoc.c.user_1_id == User.id))
+ qry1 = select(User).filter(
+ friends_assoc.c.user_1_id == self.id, friends_assoc.c.user_2_id == User.id
+ )
+ qry2 = select(User).filter(
+ friends_assoc.c.user_2_id == self.id, friends_assoc.c.user_1_id == User.id
+ )
return union(qry1, qry2)
def get_friends(self):
@@ -314,7 +327,7 @@ class User(Base):
return reduce(lambda acc, track: acc | track.text_tags(), self.tracks, set())
-Index('idx_users_email', User.email, unique=True)
+Index("idx_users_email", User.email, unique=True)
class FriendRequest(Base):
@@ -333,19 +346,22 @@ class FriendRequest(Base):
:ivar recipient: Recipient of the friendship.
:vartype recipient: User
"""
+
# pylint: disable=too-few-public-methods
- __tablename__ = 'friend_requests'
+ __tablename__ = "friend_requests"
id = Column(Integer, primary_key=True)
- sender_id = Column(Integer, ForeignKey('users.id'))
- recipient_id = Column(Integer, ForeignKey('users.id'))
+ sender_id = Column(Integer, ForeignKey("users.id"))
+ recipient_id = Column(Integer, ForeignKey("users.id"))
date = Column(DateTime)
- sender = relationship('User', primaryjoin='User.id == FriendRequest.sender_id',
- backref='outgoing_requests')
- recipient = relationship('User', primaryjoin='User.id == FriendRequest.recipient_id',
- backref='incoming_requests')
+ sender = relationship(
+ "User", primaryjoin="User.id == FriendRequest.sender_id", backref="outgoing_requests"
+ )
+ recipient = relationship(
+ "User", primaryjoin="User.id == FriendRequest.recipient_id", backref="incoming_requests"
+ )
- __table_args__ = (UniqueConstraint('sender_id', 'recipient_id'),)
+ __table_args__ = (UniqueConstraint("sender_id", "recipient_id"),)
class TokenType(enum.Enum):
@@ -354,6 +370,7 @@ class TokenType(enum.Enum):
A token can be used either to verify the user's email, or it can be used to
reset the password.
"""
+
VERIFY_EMAIL = enum.auto()
"""A token that can be used to verify a user's email."""
RESET_PASSWORD = enum.auto()
@@ -383,15 +400,16 @@ class Token(Base):
:ivar user: User that this token belongs to.
:vartype user: User
"""
+
# pylint: disable=too-few-public-methods
__tablename__ = "tokens"
id = Column(Integer, primary_key=True)
- user_id = Column(Integer, ForeignKey('users.id'))
+ user_id = Column(Integer, ForeignKey("users.id"))
uuid = Column(Text)
token_type = Column(Enum(TokenType))
date = Column(DateTime)
- user = relationship('User', back_populates='tokens')
+ user = relationship("User", back_populates="tokens")
@classmethod
def generate(cls, user, token_type):
@@ -409,4 +427,4 @@ class Token(Base):
return cls(user=user, uuid=token_uuid, date=now, token_type=token_type)
-Index('idx_token_uuid', Token.uuid, unique=True)
+Index("idx_token_uuid", Token.uuid, unique=True)
diff --git a/fietsboek/pages.py b/fietsboek/pages.py
index e94a493..f11b126 100644
--- a/fietsboek/pages.py
+++ b/fietsboek/pages.py
@@ -1,6 +1,7 @@
"""Module containing logic to support "static" pages."""
import enum
import re
+from typing import List, Optional
import markdown
@@ -41,8 +42,16 @@ class Page:
:vartype menu_index: int
"""
- def __init__(self, slug, title, content, link_name, locale_filter=None,
- user_filter=UserFilter.EVERYONE, menu_index=0):
+ def __init__(
+ self,
+ slug,
+ title,
+ content,
+ link_name,
+ locale_filter=None,
+ user_filter=UserFilter.EVERYONE,
+ menu_index=0,
+ ):
# pylint: disable=too-many-arguments
self.slug = slug
self.title = title
@@ -68,8 +77,7 @@ class Page:
if self.locale_filter is not None:
return any(
- lfilter.match(request.localizer.locale_name)
- for lfilter in self.locale_filter
+ lfilter.match(request.localizer.locale_name) for lfilter in self.locale_filter
)
return True
@@ -91,36 +99,39 @@ class Page:
parser = markdown.Markdown(extensions=["meta"])
content = parser.convert(text)
- title = parser.Meta.get('title', [''])[0]
+ title = parser.Meta.get("title", [""])[0] # type: ignore
if not title:
raise PageException("Missing `title`")
- link_name = parser.Meta.get('link-name', [''])[0]
+ link_name = parser.Meta.get("link-name", [""])[0] # type: ignore
if not link_name:
raise PageException("Missing `link-name`")
- slug = parser.Meta.get('slug', [''])[0]
+ slug = parser.Meta.get("slug", [""])[0] # type: ignore
if not slug:
raise PageException("Missing `slug`")
+ locale_filter: Optional[List[re.Pattern]]
try:
- locale_filter = list(map(re.compile, parser.Meta.get('locale', [])))
+ locale_filter = list(map(re.compile, parser.Meta.get("locale", []))) # type: ignore
except re.error as exc:
raise PageException("Invalid locale regex") from exc
if not locale_filter:
locale_filter = None
filter_map = {
- 'logged-in': UserFilter.LOGGED_IN,
- 'logged-out': UserFilter.LOGGED_OUT,
- 'everyone': UserFilter.EVERYONE,
+ "logged-in": UserFilter.LOGGED_IN,
+ "logged-out": UserFilter.LOGGED_OUT,
+ "everyone": UserFilter.EVERYONE,
}
- user_filter = filter_map.get(parser.Meta.get('show-to', ['everyone'])[0].lower())
+ user_filter = filter_map.get(
+ parser.Meta.get("show-to", ["everyone"])[0].lower() # type: ignore
+ )
if user_filter is None:
raise PageException("Invalid `show-to` filter")
try:
- menu_index = int(parser.Meta.get('index', ['0'])[0])
+ menu_index = int(parser.Meta.get("index", ["0"])[0]) # type: ignore
except ValueError as exc:
raise PageException("Invalid value for `index`") from exc
@@ -195,10 +206,7 @@ class Pages:
:return: A list of menu entries to show.
:rtype: list[Page]
"""
- return [
- page for page in self.collection
- if page.menu_index < 0 and page.matches(request)
- ]
+ return [page for page in self.collection if page.menu_index < 0 and page.matches(request)]
def post_menu_items(self, request):
"""Return all items that should appear after Fietsboek's main menu.
@@ -208,7 +216,4 @@ class Pages:
:return: A list of menu entries to show.
:rtype: list[Page]
"""
- return [
- page for page in self.collection
- if page.menu_index > 0 and page.matches(request)
- ]
+ return [page for page in self.collection if page.menu_index > 0 and page.matches(request)]
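The `# type: ignore` comments added in `Page.parse` are there because the markdown `meta` extension injects the `Meta` attribute at runtime, invisible to mypy. Its shape also explains the `[0]` indexing: keys are lower-cased and every value is a list of lines. A small sketch with a hypothetical page source (not one shipped with Fietsboek):

```python
import markdown

SOURCE = """\
Title: About this site
Link-Name: about
Slug: about
Show-To: everyone

Some **static** page content.
"""

parser = markdown.Markdown(extensions=["meta"])
html = parser.convert(SOURCE)

# The meta extension lower-cases keys and stores each value as a list of lines.
assert parser.Meta["title"] == ["About this site"]
assert parser.Meta["link-name"] == ["about"]
assert "<strong>static</strong>" in html
```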
diff --git a/fietsboek/pshell.py b/fietsboek/pshell.py
index cc6988f..0907e80 100644
--- a/fietsboek/pshell.py
+++ b/fietsboek/pshell.py
@@ -8,12 +8,12 @@ def setup(env):
:param env: The environment to set up.
:type env: pyramid.scripting.AppEnvironment
"""
- request = env['request']
+ request = env["request"]
# start a transaction
request.tm.begin()
# inject some vars into the shell builtins
- env['tm'] = request.tm
- env['dbsession'] = request.dbsession
- env['models'] = models
+ env["tm"] = request.tm
+ env["dbsession"] = request.dbsession
+ env["models"] = models
diff --git a/fietsboek/routes.py b/fietsboek/routes.py
index 9286f13..9e71686 100644
--- a/fietsboek/routes.py
+++ b/fietsboek/routes.py
@@ -3,57 +3,61 @@
def includeme(config):
# pylint: disable=missing-function-docstring
- config.add_static_view('static', 'static', cache_max_age=3600)
- config.add_route('home', '/')
- config.add_route('login', '/login')
- config.add_route('logout', '/logout')
- config.add_route('browse', '/track/')
-
- config.add_route('static-page', '/page/{slug}')
-
- config.add_route('track-archive', '/track/archive')
-
- config.add_route('password-reset', '/password-reset')
- config.add_route('use-token', '/token/{uuid}')
- config.add_route('create-account', '/create-account')
-
- config.add_route('upload', '/upload')
- config.add_route('preview', '/preview/{upload_id}.gpx',
- factory='fietsboek.models.Upload.factory')
- config.add_route('finish-upload', '/upload/{upload_id}',
- factory='fietsboek.models.Upload.factory')
- config.add_route('cancel-upload', '/cancel/{upload_id}',
- factory='fietsboek.models.Upload.factory')
-
- config.add_route('details', '/track/{track_id}',
- factory='fietsboek.models.Track.factory')
- config.add_route('edit', '/track/{track_id}/edit',
- factory='fietsboek.models.Track.factory')
- config.add_route('gpx', '/gpx/{track_id}.gpx',
- factory='fietsboek.models.Track.factory')
- config.add_route('invalidate-share', '/track/{track_id}/invalidate-link',
- factory='fietsboek.models.Track.factory')
- config.add_route('delete-track', '/track/{track_id}/delete',
- factory='fietsboek.models.Track.factory')
- config.add_route('add-comment', '/track/{track_id}/comment',
- factory='fietsboek.models.Track.factory')
- config.add_route('image', '/track/{track_id}/images/{image_name}',
- factory='fietsboek.models.Track.factory')
-
- config.add_route('badge', '/badge/{badge_id}',
- factory='fietsboek.models.Badge.factory')
-
- config.add_route('admin', '/admin')
- config.add_route('admin-badge-add', '/admin/add-badge')
- config.add_route('admin-badge-edit', '/admin/edit-badge')
- config.add_route('admin-badge-delete', '/admin/delete-badge')
-
- config.add_route('profile', '/me')
- config.add_route('change-profile', '/me/personal-data')
- config.add_route('add-friend', '/me/send-friend-request')
- config.add_route('delete-friend', '/me/delete-friend')
- config.add_route('accept-friend', '/me/accept-friend')
- config.add_route('json-friends', '/me/friends.json')
-
- config.add_route('tile-proxy',
- '/tile/{provider}/{z:\\d+}/{x:\\d+}/{y:\\d+}')
+ config.add_static_view("static", "static", cache_max_age=3600)
+ config.add_route("home", "/")
+ config.add_route("login", "/login")
+ config.add_route("logout", "/logout")
+ config.add_route("browse", "/track/")
+
+ config.add_route("static-page", "/page/{slug}")
+
+ config.add_route("track-archive", "/track/archive")
+
+ config.add_route("password-reset", "/password-reset")
+ config.add_route("use-token", "/token/{uuid}")
+ config.add_route("create-account", "/create-account")
+
+ config.add_route("upload", "/upload")
+ config.add_route(
+ "preview", "/preview/{upload_id}.gpx", factory="fietsboek.models.Upload.factory"
+ )
+ config.add_route(
+ "finish-upload", "/upload/{upload_id}", factory="fietsboek.models.Upload.factory"
+ )
+ config.add_route(
+ "cancel-upload", "/cancel/{upload_id}", factory="fietsboek.models.Upload.factory"
+ )
+
+ config.add_route("details", "/track/{track_id}", factory="fietsboek.models.Track.factory")
+ config.add_route("edit", "/track/{track_id}/edit", factory="fietsboek.models.Track.factory")
+ config.add_route("gpx", "/gpx/{track_id}.gpx", factory="fietsboek.models.Track.factory")
+ config.add_route(
+ "invalidate-share",
+ "/track/{track_id}/invalidate-link",
+ factory="fietsboek.models.Track.factory",
+ )
+ config.add_route(
+ "delete-track", "/track/{track_id}/delete", factory="fietsboek.models.Track.factory"
+ )
+ config.add_route(
+ "add-comment", "/track/{track_id}/comment", factory="fietsboek.models.Track.factory"
+ )
+ config.add_route(
+ "image", "/track/{track_id}/images/{image_name}", factory="fietsboek.models.Track.factory"
+ )
+
+ config.add_route("badge", "/badge/{badge_id}", factory="fietsboek.models.Badge.factory")
+
+ config.add_route("admin", "/admin")
+ config.add_route("admin-badge-add", "/admin/add-badge")
+ config.add_route("admin-badge-edit", "/admin/edit-badge")
+ config.add_route("admin-badge-delete", "/admin/delete-badge")
+
+ config.add_route("profile", "/me")
+ config.add_route("change-profile", "/me/personal-data")
+ config.add_route("add-friend", "/me/send-friend-request")
+ config.add_route("delete-friend", "/me/delete-friend")
+ config.add_route("accept-friend", "/me/accept-friend")
+ config.add_route("json-friends", "/me/friends.json")
+
+ config.add_route("tile-proxy", "/tile/{provider}/{z:\\d+}/{x:\\d+}/{y:\\d+}")
diff --git a/fietsboek/scripts/fietsctl.py b/fietsboek/scripts/fietsctl.py
index e8f7b3f..bd37987 100644
--- a/fietsboek/scripts/fietsctl.py
+++ b/fietsboek/scripts/fietsctl.py
@@ -83,7 +83,7 @@ def cmd_userdel(env, args):
print(user.email)
if not args.force:
query = input("Really delete this user? [y/N] ")
- if query not in {'Y', 'y'}:
+ if query not in {"Y", "y"}:
print("Aborted by user.")
return EXIT_FAILURE
dbsession.delete(user)
@@ -103,9 +103,9 @@ def cmd_userlist(env, args):
dbsession = env["request"].dbsession
users = dbsession.execute(select(models.User).order_by(models.User.id)).scalars()
for user in users:
- tag = '[{}{}]'.format(
- 'a' if user.is_admin else '-',
- 'v' if user.is_verified else '-',
+ tag = "[{}{}]".format(
+ "a" if user.is_admin else "-",
+ "v" if user.is_verified else "-",
)
print(f"{tag} {user.id} - {user.email} - {user.name}")
return EXIT_OKAY
@@ -146,13 +146,14 @@ def parse_args(argv):
"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
- '-c', '--config',
- dest='config_uri',
- help='configuration file, e.g., development.ini',
+ "-c",
+ "--config",
+ dest="config_uri",
+ help="configuration file, e.g., development.ini",
required=True,
)
- subparsers = parser.add_subparsers(help='available subcommands', required=True)
+ subparsers = parser.add_subparsers(help="available subcommands", required=True)
p_useradd = subparsers.add_parser(
"useradd",
@@ -160,20 +161,20 @@ def parse_args(argv):
description=cmd_useradd.__doc__,
)
p_useradd.add_argument(
- '--email',
+ "--email",
help="email address of the user",
)
p_useradd.add_argument(
- '--name',
+ "--name",
help="name of the user",
)
p_useradd.add_argument(
- '--password',
+ "--password",
help="password of the user",
)
p_useradd.add_argument(
- '--admin',
- action='store_true',
+ "--admin",
+ action="store_true",
help="make the new user an admin",
)
p_useradd.set_defaults(func=cmd_useradd)
@@ -184,18 +185,21 @@ def parse_args(argv):
description=cmd_userdel.__doc__,
)
p_userdel.add_argument(
- '--force', '-f',
- action='store_true',
+ "--force",
+ "-f",
+ action="store_true",
help="override the safety check",
)
group = p_userdel.add_mutually_exclusive_group(required=True)
group.add_argument(
- '--id', '-i',
+ "--id",
+ "-i",
type=int,
help="database ID of the user",
)
group.add_argument(
- '--email', '-e',
+ "--email",
+ "-e",
help="email of the user",
)
p_userdel.set_defaults(func=cmd_userdel)
@@ -213,17 +217,19 @@ def parse_args(argv):
description=cmd_userdel.__doc__,
)
p_passwd.add_argument(
- '--password',
+ "--password",
help="password of the user",
)
group = p_passwd.add_mutually_exclusive_group(required=True)
group.add_argument(
- '--id', '-i',
+ "--id",
+ "-i",
type=int,
help="database ID of the user",
)
group.add_argument(
- '--email', '-e',
+ "--email",
+ "-e",
help="email of the user",
)
p_passwd.set_defaults(func=cmd_passwd)
diff --git a/fietsboek/security.py b/fietsboek/security.py
index a5cafd4..84dd88a 100644
--- a/fietsboek/security.py
+++ b/fietsboek/security.py
@@ -9,11 +9,12 @@ from sqlalchemy import select
from . import models
-ADMIN_PERMISSIONS = {'admin'}
+ADMIN_PERMISSIONS = {"admin"}
class SecurityPolicy:
"""Implementation of the Pyramid security policy."""
+
def __init__(self):
self.helper = SessionAuthenticationHelper()
@@ -39,12 +40,12 @@ class SecurityPolicy:
# If the context is not there, we are on a static site that does not use ACL
if isinstance(context, DefaultRootFactory):
if identity is None:
- return Denied('User is not signed in.')
+ return Denied("User is not signed in.")
if permission not in ADMIN_PERMISSIONS:
- return Allowed('User is signed in.')
+ return Allowed("User is signed in.")
if identity.is_admin:
- return Allowed('User is an administrator.')
- return Denied('User is not an administrator.')
+ return Allowed("User is an administrator.")
+ return Denied("User is not an administrator.")
# If the context is there, use ACL
principals = [Everyone]
@@ -52,7 +53,7 @@ class SecurityPolicy:
principals.append(Authenticated)
principals.extend(identity.principals())
- if 'secret' in request.GET:
+ if "secret" in request.GET:
principals.append(f'secret:{request.GET["secret"]}')
return ACLHelper().permits(context, principals, permission)
diff --git a/fietsboek/summaries.py b/fietsboek/summaries.py
index 9d4c0aa..04b74c5 100644
--- a/fietsboek/summaries.py
+++ b/fietsboek/summaries.py
@@ -100,6 +100,7 @@ class MonthSummary:
:ivar tracks: List of tracks in this month.
:vartype tracks: list[fietsboek.model.track.Track]
"""
+
def __init__(self, month):
self.month = month
self.tracks = []
diff --git a/fietsboek/updater/__init__.py b/fietsboek/updater/__init__.py
index a5bcf0e..348f713 100644
--- a/fietsboek/updater/__init__.py
+++ b/fietsboek/updater/__init__.py
@@ -5,6 +5,7 @@ import random
import string
import importlib.util
from pathlib import Path
+from typing import List
# Compat for Python < 3.9
import importlib_resources
@@ -81,13 +82,9 @@ class Updater:
scripts = _load_update_scripts()
for script in scripts:
self.scripts[script.id] = script
- self.forward_dependencies = {
- script.id: script.previous for script in self.scripts.values()
- }
+ self.forward_dependencies = {script.id: script.previous for script in self.scripts.values()}
# Ensure that each script has an entry
- self.backward_dependencies = {
- script.id: [] for script in self.scripts.values()
- }
+ self.backward_dependencies = {script.id: [] for script in self.scripts.values()}
for script in self.scripts.values():
for prev_id in script.previous:
self.backward_dependencies[prev_id].append(script.id)
@@ -151,7 +148,7 @@ class Updater:
def _make_schedule(self, wanted, dependencies):
wanted = set(wanted)
- queue = []
+ queue: List[str] = []
while wanted:
next_updates = {
update
@@ -233,7 +230,7 @@ class Updater:
current_alembic = context.get_current_heads()
LOGGER.debug("Found alembic versions: %s", current_alembic)
assert len(current_alembic) == 1
- current_alembic = current_alembic[0]
+ current_alembic = current_alembic[0] # type: ignore
loader = jinja2.DictLoader({"revision.py": TEMPLATE})
env = jinja2.Environment(loader=loader, autoescape=False)
@@ -291,7 +288,8 @@ class UpdateScript:
def __init__(self, source, name):
self.name = name
spec = importlib.util.spec_from_loader(f"{__name__}.{name}", None)
- self.module = importlib.util.module_from_spec(spec)
+ self.module = importlib.util.module_from_spec(spec) # type: ignore
+ assert self.module
exec(source, self.module.__dict__) # pylint: disable=exec-used
def __repr__(self):
diff --git a/fietsboek/updater/cli.py b/fietsboek/updater/cli.py
index a6ea6c1..5c97687 100644
--- a/fietsboek/updater/cli.py
+++ b/fietsboek/updater/cli.py
@@ -18,7 +18,8 @@ from . import Updater
# https://github.com/pallets/click/issues/295
# https://github.com/pallets/click/issues/814
config_option = click.option(
- "-c", "--config",
+ "-c",
+ "--config",
type=click.Path(exists=True, dir_okay=False),
required=True,
help="Path to the Fietsboek configuration file",
@@ -44,7 +45,7 @@ def user_confirm(verb):
@click.group(
help=__doc__,
- context_settings={'help_option_names': ['-h', '--help']},
+ context_settings={"help_option_names": ["-h", "--help"]},
)
def cli():
"""CLI main entry point."""
@@ -53,7 +54,8 @@ def cli():
@cli.command("update")
@config_option
@click.option(
- "-f", "--force",
+ "-f",
+ "--force",
is_flag=True,
help="Skip the safety question and just run the update",
)
@@ -97,7 +99,8 @@ def update(ctx, config, version, force):
@cli.command("downgrade")
@config_option
@click.option(
- "-f", "--force",
+ "-f",
+ "--force",
is_flag=True,
help="Skip the safety question and just run the downgrade",
)
diff --git a/fietsboek/util.py b/fietsboek/util.py
index 6b2190b..63414ae 100644
--- a/fietsboek/util.py
+++ b/fietsboek/util.py
@@ -4,6 +4,7 @@ import re
import os
import unicodedata
import secrets
+from typing import Optional, List
# Compat for Python < 3.9
import importlib_resources
@@ -11,20 +12,27 @@ import babel
import markdown
import bleach
import gpxpy
+import webob
+import sqlalchemy
from pyramid.i18n import TranslationString as _
from pyramid.httpexceptions import HTTPBadRequest
+from pyramid.request import Request
from markupsafe import Markup
from sqlalchemy import select
-ALLOWED_TAGS = (bleach.sanitizer.ALLOWED_TAGS +
- # Allow headings
- ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] +
- ['p'] + ['img'])
+ALLOWED_TAGS = (
+ bleach.sanitizer.ALLOWED_TAGS
+ +
+ # Allow headings
+ ["h1", "h2", "h3", "h4", "h5", "h6"]
+ + ["p"]
+ + ["img"]
+)
ALLOWED_ATTRIBUTES = dict(bleach.sanitizer.ALLOWED_ATTRIBUTES)
-ALLOWED_ATTRIBUTES['img'] = ['alt', 'src']
+ALLOWED_ATTRIBUTES["img"] = ["alt", "src"]
# Arbitrarily chosen, just make sure they are representable
DEFAULT_START_TIME = datetime.datetime(1977, 5, 25, 8, 0)
@@ -47,23 +55,21 @@ _windows_device_files = (
)
-def safe_markdown(md_source):
+def safe_markdown(md_source: str) -> Markup:
"""Transform a markdown document into a safe HTML document.
This uses ``markdown`` to first parse the markdown source into HTML, and
then ``bleach`` to strip any disallowed HTML tags.
:param md_source: The markdown source.
- :type md_source: str
:return: The safe HTML transformed version.
- :rtype: Markup
"""
- html = markdown.markdown(md_source, output_format='html5')
+ html = markdown.markdown(md_source, output_format="html")
html = bleach.clean(html, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)
return Markup(html)
-def fix_iso_timestamp(timestamp):
+def fix_iso_timestamp(timestamp: str) -> str:
"""Fixes an ISO timestamp to make it parseable by
:meth:`datetime.datetime.fromisoformat`.
@@ -71,24 +77,21 @@ def fix_iso_timestamp(timestamp):
it with '+00:00'.
:param timestamp: The timestamp to fix.
- :type timestamp: str
:return: The fixed timestamp.
- :rtype: str
"""
- if timestamp.endswith('Z'):
- return timestamp[:-1] + '+00:00'
+ if timestamp.endswith("Z"):
+ return timestamp[:-1] + "+00:00"
return timestamp
-def round_timedelta_to_multiple(value, multiples):
+def round_timedelta_to_multiple(
+ value: datetime.timedelta, multiples: datetime.timedelta
+) -> datetime.timedelta:
"""Round the timedelta `value` to be a multiple of `multiples`.
:param value: The value to be rounded.
- :type value: datetime.timedelta
:param multiples: The size of each multiple.
- :type multiples: datetime.timedelta
:return: The rounded value.
- :rtype: datetime.timedelta
"""
lower = value.total_seconds() // multiples.total_seconds() * multiples.total_seconds()
second_offset = value.total_seconds() - lower
@@ -99,16 +102,14 @@ def round_timedelta_to_multiple(value, multiples):
return datetime.timedelta(seconds=lower) + multiples
-def guess_gpx_timezone(gpx):
+def guess_gpx_timezone(gpx: gpxpy.gpx.GPX) -> datetime.tzinfo:
"""Guess which timezone the GPX file was recorded in.
This looks at a few timestamps to see if they have timezone information
attached, including some known GPX extensions.
:param gpx: The parsed GPX file to analyse.
- :type gpx: gpxpy.GPX
:return: The timezone information.
- :rtype: datetime.timezone
"""
time_bounds = gpx.get_time_bounds()
times = [
@@ -131,12 +132,14 @@ def guess_gpx_timezone(gpx):
time = times[0]
local_time = None
for extension in track.extensions:
- if extension.tag.lower() == 'localtime':
+ if extension.tag.lower() == "localtime":
local_time = datetime.datetime.fromisoformat(
- fix_iso_timestamp(extension.text)).replace(tzinfo=None)
- elif extension.tag.lower() == 'time':
+ fix_iso_timestamp(extension.text)
+ ).replace(tzinfo=None)
+ elif extension.tag.lower() == "time":
time = datetime.datetime.fromisoformat(
- fix_iso_timestamp(extension.text)).replace(tzinfo=None)
+ fix_iso_timestamp(extension.text)
+ ).replace(tzinfo=None)
if local_time is not None:
# We found a pair that we can use!
offset = local_time - time
@@ -152,7 +155,7 @@ def guess_gpx_timezone(gpx):
return datetime.timezone.utc
-def tour_metadata(gpx_data):
+def tour_metadata(gpx_data: str) -> dict:
"""Calculate the metadata of the tour.
Returns a dict with ``length``, ``uphill``, ``downhill``, ``moving_time``,
@@ -160,9 +163,7 @@ def tour_metadata(gpx_data):
``end_time``.
:param gpx_data: The GPX data of the tour.
- :type gpx_data: str
:return: A dictionary with the computed values.
- :rtype: dict
"""
gpx = gpxpy.parse(gpx_data)
timezone = guess_gpx_timezone(gpx)
@@ -174,58 +175,56 @@ def tour_metadata(gpx_data):
except ZeroDivisionError:
avg_speed = 0.0
return {
- 'length': gpx.length_3d(),
- 'uphill': uphill,
- 'downhill': downhill,
- 'moving_time': moving_data.moving_time,
- 'stopped_time': moving_data.stopped_time,
- 'max_speed': moving_data.max_speed,
- 'avg_speed': avg_speed,
- 'start_time': (time_bounds.start_time or DEFAULT_START_TIME).astimezone(timezone),
- 'end_time': (time_bounds.end_time or DEFAULT_END_TIME).astimezone(timezone),
+ "length": gpx.length_3d(),
+ "uphill": uphill,
+ "downhill": downhill,
+ "moving_time": moving_data.moving_time,
+ "stopped_time": moving_data.stopped_time,
+ "max_speed": moving_data.max_speed,
+ "avg_speed": avg_speed,
+ "start_time": (time_bounds.start_time or DEFAULT_START_TIME).astimezone(timezone),
+ "end_time": (time_bounds.end_time or DEFAULT_END_TIME).astimezone(timezone),
}
-def mps_to_kph(mps):
+def mps_to_kph(mps: float) -> float:
"""Converts meters/second to kilometers/hour.
:param mps: Input meters/second.
- :type mps: float
:return: The converted km/h value.
- :rtype: float
"""
return mps / 1000 * 60 * 60
-def month_name(request, month):
+def month_name(request: Request, month: int) -> str:
"""Returns the localized name for the month with the given number.
:param request: The pyramid request.
- :type request: pyramid.request.Request
:param month: Number of the month, 1 = January.
- :type month: int
:return: The localized month name.
- :rtype: str
"""
assert 1 <= month <= 12
locale = babel.Locale.parse(request.localizer.locale_name)
return locale.months["stand-alone"]["wide"][month]
-def random_link_secret(nbytes=20):
+def random_link_secret(nbytes: int = 20) -> str:
"""Safely generates a secret suitable for the link share.
The returned string consists of characters that are safe to use in a URL.
:param nbytes: Number of random bytes to use.
- :type nbytes: int
:return: A randomly drawn string.
- :rtype: str
"""
return secrets.token_urlsafe(nbytes)
-def retrieve_multiple(dbsession, model, params, name):
+def retrieve_multiple(
+ dbsession: "sqlalchemy.orm.session.Session",
+ model: type,
+ params: "webob.multidict.NestedMultiDict",
+ name: str,
+) -> list:
"""Parses a reply to retrieve multiple database objects.
This is usable for arrays sent by HTML forms, for example to retrieve all
@@ -237,15 +236,10 @@ def retrieve_multiple(dbsession, model, params, name):
:raises pyramid.httpexceptions.HTTPBadRequest: If an object could not be
found.
:param dbsession: The database session.
- :type dbsession: sqlalchemy.orm.session.Session
:param model: The model class to retrieve.
- :type model: class
:param params: The form parameters.
- :type params: webob.multidict.NestedMultiDict
:param name: Name of the parameter to look for.
- :type name: str
:return: A list of elements found.
- :rtype: list[model]
"""
objects = []
for obj_id in params.getall(name):
@@ -259,7 +253,7 @@ def retrieve_multiple(dbsession, model, params, name):
return objects
-def check_password_constraints(password, repeat_password=None):
+def check_password_constraints(password: str, repeat_password: Optional[str] = None):
"""Verifies that the password constraints match for the given password.
This is usually also verified client-side, but for people that bypass the
@@ -273,9 +267,7 @@ def check_password_constraints(password, repeat_password=None):
:class:`~pyramid.i18n.TranslationString` with the message of why the
verification failed.
:param password: The password which to verify.
- :type password: str
:param repeat_password: The password repeat.
- :type repeat_password: str
"""
if repeat_password is not None:
if repeat_password != password:
@@ -284,7 +276,9 @@ def check_password_constraints(password, repeat_password=None):
raise ValueError(_("password_constraint.length"))
-def read_localized_resource(locale_name, path, locale_packages=None, raise_on_error=False):
+def read_localized_resource(
+    locale_name: str,
+    path: str,
+    locale_packages: Optional[List[str]] = None,
+    raise_on_error: bool = False,
+) -> str:
"""Reads a localized resource.
Localized resources are located in the ``fietsboek/locale/**`` directory.
@@ -293,16 +287,13 @@ def read_localized_resource(locale_name, path, locale_packages=None, raise_on_er
If the resource could not be found, a placeholder string is returned instead.
:param locale_name: Name of the locale.
- :type locale_name: str
+ :param path: Path of the resource.
:param locale_packages: Names of packages in which locale data is searched.
By default, only built-in locales are searched.
- :type locale_packages: list[str]
:param raise_on_error: Raise an error instead of returning a placeholder.
- :type raise_on_error: bool
:raises FileNotFoundError: If the path could not be found and
``raise_on_error`` is ``True``.
:return: The text content of the resource.
- :rtype: str
"""
locales = [locale_name]
# Second chance: If the locale is a specific form of a more general
@@ -311,11 +302,11 @@ def read_localized_resource(locale_name, path, locale_packages=None, raise_on_er
locales.append(locale_name.split("_", 1)[0])
if locale_packages is None:
- locale_packages = ['fietsboek']
+ locale_packages = ["fietsboek"]
for locale in locales:
for package in locale_packages:
- locale_dir = importlib_resources.files(package) / 'locale' / locale
+ locale_dir = importlib_resources.files(package) / "locale" / locale
resource_path = locale_dir / path
try:
return resource_path.read_text()
@@ -326,7 +317,7 @@ def read_localized_resource(locale_name, path, locale_packages=None, raise_on_er
return f"{locale_name}:{path}"
-def secure_filename(filename):
+def secure_filename(filename: str) -> str:
r"""Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
@@ -346,9 +337,7 @@ def secure_filename(filename):
generate a random filename if the function returned an empty one.
:param filename: the filename to secure
- :type filename: str
:return: The secure filename.
- :rtype: str
"""
# Taken from
# https://github.com/pallets/werkzeug/blob/main/src/werkzeug/utils.py
@@ -359,9 +348,7 @@ def secure_filename(filename):
for sep in os.path.sep, os.path.altsep:
if sep:
filename = filename.replace(sep, " ")
- filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip(
- "._"
- )
+ filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip("._")
# on nt a couple of special files are present in each folder. We
# have to ensure that the target file is not such a filename. In
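
For reference, the `fix_iso_timestamp` change above is easy to exercise on its own. Below is a minimal standalone sketch that re-declares the helper exactly as it appears in the hunk, so it runs without fietsboek installed; the point is that `datetime.datetime.fromisoformat` on Python versions before 3.11 rejects a trailing "Z", hence the rewrite to "+00:00".

    import datetime

    def fix_iso_timestamp(timestamp: str) -> str:
        # Mirrors the helper above: fromisoformat() (before Python 3.11)
        # rejects a trailing "Z", so rewrite it as an explicit UTC offset.
        if timestamp.endswith("Z"):
            return timestamp[:-1] + "+00:00"
        return timestamp

    parsed = datetime.datetime.fromisoformat(fix_iso_timestamp("2022-12-25T08:30:00Z"))
    print(parsed)  # 2022-12-25 08:30:00+00:00
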
diff --git a/fietsboek/views/account.py b/fietsboek/views/account.py
index f9f48e9..b181148 100644
--- a/fietsboek/views/account.py
+++ b/fietsboek/views/account.py
@@ -7,8 +7,11 @@ from .. import models, util, email
from ..models.user import TokenType
-@view_config(route_name="create-account", renderer="fietsboek:templates/create_account.jinja2",
- request_method="GET")
+@view_config(
+ route_name="create-account",
+ renderer="fietsboek:templates/create_account.jinja2",
+ request_method="GET",
+)
def create_account(request):
"""Shows the "create account" page.
@@ -23,8 +26,11 @@ def create_account(request):
return {}
-@view_config(route_name="create-account", renderer="fietsboek:templates/create_account.jinja2",
- request_method="POST")
+@view_config(
+ route_name="create-account",
+ renderer="fietsboek:templates/create_account.jinja2",
+ request_method="POST",
+)
def do_create_account(request):
"""Shows the "create account" page.
@@ -41,17 +47,17 @@ def do_create_account(request):
util.check_password_constraints(password, request.params["repeat-password"])
except ValueError as exc:
request.session.flash(request.localizer.translate(exc.args[0]))
- return HTTPFound(request.route_url('create-account'))
+ return HTTPFound(request.route_url("create-account"))
name = request.params["name"]
if not name:
- request.session.flash(request.localizer.translate(_('flash.invalid_name')))
- return HTTPFound(request.route_url('create-account'))
+ request.session.flash(request.localizer.translate(_("flash.invalid_name")))
+ return HTTPFound(request.route_url("create-account"))
email_addr = request.params["email"]
if not email_addr:
- request.session.flash(request.localizer.translate(_('flash.invalid_email')))
- return HTTPFound(request.route_url('create-account'))
+ request.session.flash(request.localizer.translate(_("flash.invalid_email")))
+ return HTTPFound(request.route_url("create-account"))
user = models.User(name=name, email=email_addr)
user.set_password(password)
@@ -63,10 +69,13 @@ def do_create_account(request):
message = email.prepare_message(
request.config.email_from,
user.email,
- request.localizer.translate(_('email.verify_mail.subject')),
+ request.localizer.translate(_("email.verify_mail.subject")),
+ )
+ message.set_content(
+ request.localizer.translate(_("email.verify.text")).format(
+ request.route_url("use-token", uuid=token.uuid)
+ )
)
- message.set_content(request.localizer.translate(_('email.verify.text'))
- .format(request.route_url('use-token', uuid=token.uuid)))
email.send_message(
request.config.email_smtp_url,
request.config.email_username,
@@ -75,4 +84,4 @@ def do_create_account(request):
)
request.session.flash(request.localizer.translate(_("flash.a_confirmation_link_has_been_sent")))
- return HTTPFound(request.route_url('login'))
+ return HTTPFound(request.route_url("login"))
diff --git a/fietsboek/views/admin.py b/fietsboek/views/admin.py
index 7ad6372..454d1e1 100644
--- a/fietsboek/views/admin.py
+++ b/fietsboek/views/admin.py
@@ -8,8 +8,12 @@ from sqlalchemy import select
from .. import models
-@view_config(route_name='admin', renderer='fietsboek:templates/admin.jinja2',
- request_method="GET", permission="admin")
+@view_config(
+ route_name="admin",
+ renderer="fietsboek:templates/admin.jinja2",
+ request_method="GET",
+ permission="admin",
+)
def admin(request):
"""Renders the main admin overview.
@@ -20,11 +24,11 @@ def admin(request):
"""
badges = request.dbsession.execute(select(models.Badge)).scalars()
return {
- 'badges': badges,
+ "badges": badges,
}
-@view_config(route_name='admin-badge-add', permission="admin", request_method="POST")
+@view_config(route_name="admin-badge-add", permission="admin", request_method="POST")
def do_badge_add(request):
"""Adds a badge.
@@ -36,17 +40,17 @@ def do_badge_add(request):
:rtype: pyramid.response.Response
"""
- image = request.params['badge-image'].file.read()
- title = request.params['badge-title']
+ image = request.params["badge-image"].file.read()
+ title = request.params["badge-title"]
badge = models.Badge(title=title, image=image)
request.dbsession.add(badge)
request.session.flash(request.localizer.translate(_("flash.badge_added")))
- return HTTPFound(request.route_url('admin'))
+ return HTTPFound(request.route_url("admin"))
-@view_config(route_name='admin-badge-edit', permission="admin", request_method="POST")
+@view_config(route_name="admin-badge-edit", permission="admin", request_method="POST")
def do_badge_edit(request):
"""Modifies an already existing badge.
@@ -58,19 +62,19 @@ def do_badge_edit(request):
:rtype: pyramid.response.Response
"""
badge = request.dbsession.execute(
- select(models.Badge).filter_by(id=request.params["badge-edit-id"])
- ).scalar_one()
+ select(models.Badge).filter_by(id=request.params["badge-edit-id"])
+ ).scalar_one()
try:
- badge.image = request.params['badge-image'].file.read()
+ badge.image = request.params["badge-image"].file.read()
except AttributeError:
pass
- badge.title = request.params['badge-title']
+ badge.title = request.params["badge-title"]
request.session.flash(request.localizer.translate(_("flash.badge_modified")))
- return HTTPFound(request.route_url('admin'))
+ return HTTPFound(request.route_url("admin"))
-@view_config(route_name='admin-badge-delete', permission="admin", request_method="POST")
+@view_config(route_name="admin-badge-delete", permission="admin", request_method="POST")
def do_badge_delete(request):
"""Removes a badge.
@@ -82,9 +86,9 @@ def do_badge_delete(request):
:rtype: pyramid.response.Response
"""
badge = request.dbsession.execute(
- select(models.Badge).filter_by(id=request.params["badge-delete-id"])
- ).scalar_one()
+ select(models.Badge).filter_by(id=request.params["badge-delete-id"])
+ ).scalar_one()
request.dbsession.delete(badge)
request.session.flash(request.localizer.translate(_("flash.badge_deleted")))
- return HTTPFound(request.route_url('admin'))
+ return HTTPFound(request.route_url("admin"))
diff --git a/fietsboek/views/browse.py b/fietsboek/views/browse.py
index c01d4f6..018cb6e 100644
--- a/fietsboek/views/browse.py
+++ b/fietsboek/views/browse.py
@@ -1,6 +1,7 @@
"""Views for browsing all tracks."""
import datetime
from io import RawIOBase
+from typing import List
from zipfile import ZipFile, ZIP_DEFLATED
from pyramid.view import view_config
@@ -39,21 +40,21 @@ def _get_int(request, name):
try:
return int(request.params.get(name))
except ValueError as exc:
- raise HTTPBadRequest(f'Invalid integer in {name!r}') from exc
+ raise HTTPBadRequest(f"Invalid integer in {name!r}") from exc
def _get_date(request, name):
try:
return datetime.date.fromisoformat(request.params.get(name))
except ValueError as exc:
- raise HTTPBadRequest(f'Invalid date in {name!r}') from exc
+ raise HTTPBadRequest(f"Invalid date in {name!r}") from exc
def _get_enum(enum, value):
try:
return enum[value]
except KeyError as exc:
- raise HTTPBadRequest(f'Invalid enum value {value!r}') from exc
+ raise HTTPBadRequest(f"Invalid enum value {value!r}") from exc
class Filter:
@@ -123,10 +124,12 @@ class TagFilter(Filter):
def compile(self, query, track, track_cache):
lower_tags = [tag.lower() for tag in self.tags]
for tag in lower_tags:
- exists_query = (select(models.Tag)
- .where(models.Tag.track_id == track.id)
- .where(func.lower(models.Tag.tag) == tag)
- .exists())
+ exists_query = (
+ select(models.Tag)
+ .where(models.Tag.track_id == track.id)
+ .where(func.lower(models.Tag.tag) == tag)
+ .exists()
+ )
query = query.where(exists_query)
return query
@@ -146,15 +149,19 @@ class PersonFilter(Filter):
lower_names = [name.lower() for name in self.names]
for name in lower_names:
tpa = models.track.track_people_assoc
- exists_query = (select(tpa)
- .join(models.User, tpa.c.user_id == models.User.id)
- .where(tpa.c.track_id == track.id)
- .where(func.lower(models.User.name) == name)
- .exists())
- is_owner = (select(models.User.id)
- .where(models.User.id == track.owner_id)
- .where(func.lower(models.User.name) == name)
- .exists())
+ exists_query = (
+ select(tpa)
+ .join(models.User, tpa.c.user_id == models.User.id)
+ .where(tpa.c.track_id == track.id)
+ .where(func.lower(models.User.name) == name)
+ .exists()
+ )
+ is_owner = (
+ select(models.User.id)
+ .where(models.User.id == track.owner_id)
+ .where(func.lower(models.User.name) == name)
+ .exists()
+ )
query = query.where(or_(exists_query, is_owner))
return query
@@ -172,13 +179,17 @@ class UserTaggedFilter(Filter):
def compile(self, query, track, track_cache):
tpa = models.track.track_people_assoc
- return query.where(or_(
- track.owner == self.user,
- (select(tpa)
- .where(tpa.c.track_id == track.id)
- .where(tpa.c.user_id == self.user.id)
- .exists()),
- ))
+ return query.where(
+ or_(
+ track.owner == self.user,
+ (
+ select(tpa)
+ .where(tpa.c.track_id == track.id)
+ .where(tpa.c.user_id == self.user.id)
+ .exists()
+ ),
+ )
+ )
def apply(self, track):
return track.owner == self.user or self.user in track.tagged_people
@@ -212,84 +223,105 @@ class FilterCollection(Filter):
:rtype: FilterCollection
"""
# pylint: disable=singleton-comparison
- filters = []
- if request.params.get('search-terms'):
- term = request.params.get('search-terms').strip()
+ filters: List[Filter] = []
+ if request.params.get("search-terms"):
+ term = request.params.get("search-terms").strip()
filters.append(SearchFilter([term]))
- if request.params.get('tags'):
- tags = [tag.strip() for tag in request.params.get('tags').split('&&')]
+ if request.params.get("tags"):
+ tags = [tag.strip() for tag in request.params.get("tags").split("&&")]
tags = list(filter(bool, tags))
filters.append(TagFilter(tags))
- if request.params.get('tagged-person'):
- names = [name.strip() for name in request.params.get('tagged-person').split('&&')]
+ if request.params.get("tagged-person"):
+ names = [name.strip() for name in request.params.get("tagged-person").split("&&")]
names = list(filter(bool, names))
filters.append(PersonFilter(names))
- if request.params.get('min-length'):
+ if request.params.get("min-length"):
# Value is given in km, so convert it to m
min_length = _get_int(request, "min-length") * 1000
- filters.append(LambdaFilter(
- lambda query, track, track_cache:
- query.where(or_(track_cache.length >= min_length,
- track_cache.length == None)), # noqa: E711
- lambda track: track.length >= min_length,
- ))
-
- if request.params.get('max-length'):
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(
+ or_(
+ track_cache.length >= min_length,
+ track_cache.length == None, # noqa: E711
+ )
+ ),
+ lambda track: track.length >= min_length,
+ )
+ )
+
+ if request.params.get("max-length"):
max_length = _get_int(request, "max-length") * 1000
- filters.append(LambdaFilter(
- lambda query, track, track_cache:
- query.where(or_(track_cache.length <= max_length,
- track_cache.length == None)), # noqa: E711
- lambda track: track.length <= max_length,
- ))
-
- if request.params.get('min-date'):
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(
+ or_(
+ track_cache.length <= max_length,
+ track_cache.length == None, # noqa: E711
+ )
+ ),
+ lambda track: track.length <= max_length,
+ )
+ )
+
+ if request.params.get("min-date"):
min_date = _get_date(request, "min-date")
min_date = datetime.datetime.combine(min_date, datetime.time.min)
- filters.append(LambdaFilter(
- lambda query, track, track_cache: query.where(track.date_raw >= min_date),
- lambda track: track.date.replace(tzinfo=None) >= min_date,
- ))
-
- if request.params.get('max-date'):
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(track.date_raw >= min_date),
+ lambda track: track.date.replace(tzinfo=None) >= min_date,
+ )
+ )
+
+ if request.params.get("max-date"):
max_date = _get_date(request, "max-date")
max_date = datetime.datetime.combine(max_date, datetime.time.max)
- filters.append(LambdaFilter(
- lambda query, track, track_cache: query.where(track.date_raw <= max_date),
- lambda track: track.date.replace(tzinfo=None) <= max_date,
- ))
-
- if "mine" in request.params.getall('show-only[]'):
- filters.append(LambdaFilter(
- lambda query, track, track_cache: query.where(track.owner == request.identity),
- lambda track: track.owner == request.identity,
- ))
-
- if "friends" in request.params.getall('show-only[]') and request.identity:
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(track.date_raw <= max_date),
+ lambda track: track.date.replace(tzinfo=None) <= max_date,
+ )
+ )
+
+ if "mine" in request.params.getall("show-only[]"):
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(track.owner == request.identity),
+ lambda track: track.owner == request.identity,
+ )
+ )
+
+ if "friends" in request.params.getall("show-only[]") and request.identity:
friend_ids = {friend.id for friend in request.identity.get_friends()}
- filters.append(LambdaFilter(
- lambda query, track, track_cache: query.where(track.owner_id.in_(friend_ids)),
- lambda track: track.owner in request.identity.get_friends(),
- ))
-
- if "user-tagged" in request.params.getall('show-only[]') and request.identity:
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(track.owner_id.in_(friend_ids)),
+ lambda track: track.owner in request.identity.get_friends(),
+ )
+ )
+
+ if "user-tagged" in request.params.getall("show-only[]") and request.identity:
filters.append(UserTaggedFilter(request.identity))
- if 'type[]' in request.params:
- types = {_get_enum(TrackType, value) for value in request.params.getall('type[]')}
- filters.append(LambdaFilter(
- lambda query, track, track_cache: query.where(track.type.in_(types)),
- lambda track: track.type in types,
- ))
+ if "type[]" in request.params:
+ types = {_get_enum(TrackType, value) for value in request.params.getall("type[]")}
+ filters.append(
+ LambdaFilter(
+ lambda query, track, track_cache: query.where(track.type.in_(types)),
+ lambda track: track.type in types,
+ )
+ )
return cls(filters)
-@view_config(route_name="browse", renderer="fietsboek:templates/browse.jinja2",
- request_method="GET")
+@view_config(
+ route_name="browse", renderer="fietsboek:templates/browse.jinja2", request_method="GET"
+)
def browse(request):
"""Returns the page that lets a user browse all visible tracks.
@@ -309,9 +341,9 @@ def browse(request):
tracks = request.dbsession.execute(query).scalars()
tracks = [track for track in tracks if filters.apply(track)]
return {
- 'tracks': tracks,
- 'mps_to_kph': util.mps_to_kph,
- 'used_filters': bool(filters),
+ "tracks": tracks,
+ "mps_to_kph": util.mps_to_kph,
+ "used_filters": bool(filters),
}
@@ -325,11 +357,14 @@ def archive(request):
:rtype: pyramid.response.Response
"""
# We need to create a separate session, otherwise we will get detached instances
- session = request.registry['dbsession_factory']()
+ session = request.registry["dbsession_factory"]()
track_ids = set(map(int, request.params.getall("track_id[]")))
- tracks = session.execute(
- select(models.Track).filter(models.Track.id.in_(track_ids))).scalars().fetchall()
+ tracks = (
+ session.execute(select(models.Track).filter(models.Track.id.in_(track_ids)))
+ .scalars()
+ .fetchall()
+ )
if len(tracks) != len(track_ids):
return HTTPNotFound()
@@ -341,7 +376,7 @@ def archive(request):
def generate():
try:
stream = Stream()
- with ZipFile(stream, "w", ZIP_DEFLATED) as zipfile:
+ with ZipFile(stream, "w", ZIP_DEFLATED) as zipfile: # type: ignore
for track in tracks:
zipfile.writestr(f"track_{track.id}.gpx", track.gpx_data)
yield stream.readall()
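
The reformatted `FilterCollection.parse` above still reads the same form parameters; as a quick illustration, this is roughly what a browse query built from those names looks like (the values are invented examples, and `type[]` takes `TrackType` member names):

    from urllib.parse import urlencode

    # Parameter names as read by FilterCollection.parse(); the values are examples.
    params = [
        ("search-terms", "alps"),
        ("tags", "vacation && gravel"),   # several tags are joined with "&&"
        ("tagged-person", "Alice && Bob"),
        ("min-length", "40"),             # kilometres; the view converts to metres
        ("show-only[]", "mine"),
        ("type[]", "ORGANIC"),            # TrackType member name
    ]
    print("/browse?" + urlencode(params))
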
diff --git a/fietsboek/views/default.py b/fietsboek/views/default.py
index 516aef7..883d8d7 100644
--- a/fietsboek/views/default.py
+++ b/fietsboek/views/default.py
@@ -16,7 +16,7 @@ from ..models.user import PasswordMismatch, TokenType
from ..models.track import TrackType
-@view_config(route_name='home', renderer='fietsboek:templates/home.jinja2')
+@view_config(route_name="home", renderer="fietsboek:templates/home.jinja2")
def home(request):
"""Renders the home page.
@@ -27,13 +27,13 @@ def home(request):
"""
if not request.identity:
# See if the admin set a custom home page
- page = request.pages.find('/', request)
+ page = request.pages.find("/", request)
if page is not None:
return render_to_response(
- 'fietsboek:templates/static-page.jinja2',
+ "fietsboek:templates/static-page.jinja2",
{
- 'title': page.title,
- 'content': Markup(page.content),
+ "title": page.title,
+ "content": Markup(page.content),
},
request,
)
@@ -46,7 +46,7 @@ def home(request):
locale_packages=request.config.language_packs,
)
return {
- 'home_content': content,
+ "home_content": content,
}
query = request.identity.all_tracks_query()
@@ -59,12 +59,12 @@ def home(request):
summary.add(track)
return {
- 'summary': summary,
- 'month_name': util.month_name,
+ "summary": summary,
+ "month_name": util.month_name,
}
-@view_config(route_name='static-page', renderer='fietsboek:templates/static-page.jinja2')
+@view_config(route_name="static-page", renderer="fietsboek:templates/static-page.jinja2")
def static_page(request):
"""Renders a static page.
@@ -73,17 +73,17 @@ def static_page(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- page = request.pages.find(request.matchdict['slug'], request)
+ page = request.pages.find(request.matchdict["slug"], request)
if page is None:
return HTTPNotFound()
return {
- 'title': page.title,
- 'content': Markup(page.content),
+ "title": page.title,
+ "content": Markup(page.content),
}
-@view_config(route_name='login', renderer='fietsboek:templates/login.jinja2', request_method='GET')
+@view_config(route_name="login", renderer="fietsboek:templates/login.jinja2", request_method="GET")
def login(request):
"""Renders the login page.
@@ -96,7 +96,7 @@ def login(request):
return {}
-@view_config(route_name='login', request_method='POST')
+@view_config(route_name="login", request_method="POST")
def do_login(request):
"""Endpoint for the login form.
@@ -105,24 +105,24 @@ def do_login(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- query = models.User.query_by_email(request.params['email'])
+ query = models.User.query_by_email(request.params["email"])
try:
user = request.dbsession.execute(query).scalar_one()
- user.check_password(request.params['password'])
+ user.check_password(request.params["password"])
except (NoResultFound, PasswordMismatch):
- request.session.flash(request.localizer.translate(_('flash.invalid_credentials')))
- return HTTPFound(request.route_url('login'))
+ request.session.flash(request.localizer.translate(_("flash.invalid_credentials")))
+ return HTTPFound(request.route_url("login"))
if not user.is_verified:
- request.session.flash(request.localizer.translate(_('flash.account_not_verified')))
- return HTTPFound(request.route_url('login'))
+ request.session.flash(request.localizer.translate(_("flash.account_not_verified")))
+ return HTTPFound(request.route_url("login"))
- request.session.flash(request.localizer.translate(_('flash.logged_in')))
+ request.session.flash(request.localizer.translate(_("flash.logged_in")))
headers = remember(request, str(user.id))
- return HTTPFound('/', headers=headers)
+ return HTTPFound("/", headers=headers)
-@view_config(route_name='logout')
+@view_config(route_name="logout")
def logout(request):
"""Logs the user out.
@@ -131,13 +131,16 @@ def logout(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- request.session.flash(request.localizer.translate(_('flash.logged_out')))
+ request.session.flash(request.localizer.translate(_("flash.logged_out")))
headers = forget(request)
- return HTTPFound('/', headers=headers)
+ return HTTPFound("/", headers=headers)
-@view_config(route_name="password-reset", request_method="GET",
- renderer="fietsboek:templates/request_password.jinja2")
+@view_config(
+ route_name="password-reset",
+ request_method="GET",
+ renderer="fietsboek:templates/request_password.jinja2",
+)
def password_reset(request):
"""Form to request a new password.
@@ -159,11 +162,11 @@ def do_password_reset(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- query = models.User.query_by_email(request.params['email'])
+ query = models.User.query_by_email(request.params["email"])
user = request.dbsession.execute(query).scalar_one_or_none()
if user is None:
request.session.flash(request.localizer.translate(_("flash.reset_invalid_email")))
- return HTTPFound(request.route_url('password-reset'))
+ return HTTPFound(request.route_url("password-reset"))
token = models.Token.generate(user, TokenType.RESET_PASSWORD)
request.dbsession.add(token)
@@ -175,9 +178,9 @@ def do_password_reset(request):
request.localizer.translate(_("page.password_reset.email.subject")),
)
mail.set_content(
- request.localizer
- .translate(_("page.password_reset.email.body"))
- .format(request.route_url('use-token', uuid=token.uuid))
+ request.localizer.translate(_("page.password_reset.email.body")).format(
+ request.route_url("use-token", uuid=token.uuid)
+ )
)
email.send_message(
request.config.email_smtp_url,
@@ -186,7 +189,7 @@ def do_password_reset(request):
mail,
)
- return HTTPFound(request.route_url('password-reset'))
+ return HTTPFound(request.route_url("password-reset"))
@view_config(route_name="use-token")
@@ -200,25 +203,25 @@ def use_token(request):
:rtype: pyramid.response.Response
"""
token = request.dbsession.execute(
- select(models.Token).filter_by(uuid=request.matchdict['uuid'])
- ).scalar_one_or_none()
+ select(models.Token).filter_by(uuid=request.matchdict["uuid"])
+ ).scalar_one_or_none()
if token is None:
return HTTPNotFound()
if token.token_type == TokenType.VERIFY_EMAIL:
token.user.is_verified = True
request.dbsession.delete(token)
- request.session.flash(request.localizer.translate(_('flash.email_verified')))
- return HTTPFound(request.route_url('login'))
- if request.method == 'GET' and token.token_type == TokenType.RESET_PASSWORD:
- return render_to_response('fietsboek:templates/password_reset.jinja2', {}, request)
- if request.method == 'POST' and token.token_type == TokenType.RESET_PASSWORD:
+ request.session.flash(request.localizer.translate(_("flash.email_verified")))
+ return HTTPFound(request.route_url("login"))
+ if request.method == "GET" and token.token_type == TokenType.RESET_PASSWORD:
+ return render_to_response("fietsboek:templates/password_reset.jinja2", {}, request)
+ if request.method == "POST" and token.token_type == TokenType.RESET_PASSWORD:
password = request.params["password"]
try:
util.check_password_constraints(password, request.params["repeat-password"])
except ValueError as exc:
request.session.flash(request.localizer.translate(exc.args[0]))
- return HTTPFound(request.route_url('use-token', uuid=token.uuid))
+ return HTTPFound(request.route_url("use-token", uuid=token.uuid))
token.user.set_password(password)
request.dbsession.delete(token)
diff --git a/fietsboek/views/detail.py b/fietsboek/views/detail.py
index a135916..e0ca113 100644
--- a/fietsboek/views/detail.py
+++ b/fietsboek/views/detail.py
@@ -11,8 +11,9 @@ from sqlalchemy import select
from .. import models, util
-@view_config(route_name='details', renderer='fietsboek:templates/details.jinja2',
- permission='track.view')
+@view_config(
+ route_name="details", renderer="fietsboek:templates/details.jinja2", permission="track.view"
+)
def details(request):
"""Renders the detail page for a given track.
@@ -23,13 +24,13 @@ def details(request):
"""
track = request.context
description = util.safe_markdown(track.description)
- show_edit_link = (track.owner == request.identity)
+ show_edit_link = track.owner == request.identity
images = []
for image_name in request.data_manager.images(track.id):
query = []
- if 'secret' in request.GET:
- query.append(('secret', request.GET['secret']))
+ if "secret" in request.GET:
+ query.append(("secret", request.GET["secret"]))
img_src = request.route_url("image", track_id=track.id, image_name=image_name, _query=query)
query = select(models.ImageMetadata).filter_by(track=track, image_name=image_name)
image_metadata = request.dbsession.execute(query).scalar_one_or_none()
@@ -39,17 +40,17 @@ def details(request):
images.append((img_src, ""))
return {
- 'track': track,
- 'show_organic': track.show_organic_data(),
- 'show_edit_link': show_edit_link,
- 'mps_to_kph': util.mps_to_kph,
- 'comment_md_to_html': util.safe_markdown,
- 'description': description,
- 'images': images,
+ "track": track,
+ "show_organic": track.show_organic_data(),
+ "show_edit_link": show_edit_link,
+ "mps_to_kph": util.mps_to_kph,
+ "comment_md_to_html": util.safe_markdown,
+ "description": description,
+ "images": images,
}
-@view_config(route_name='gpx', http_cache=3600, permission='track.view')
+@view_config(route_name="gpx", http_cache=3600, permission="track.view")
def gpx(request):
"""Returns the actual GPX data from the stored track.
@@ -62,7 +63,7 @@ def gpx(request):
# We can be nice to the client if they support it, and deliver the gzipped
# data straight. This saves decompression time on the server and saves a
# lot of bandwidth.
- if 'gzip' in request.accept_encoding:
+ if "gzip" in request.accept_encoding:
response = Response(track.gpx, content_type="application/gpx+xml", content_encoding="gzip")
else:
response = Response(track.gpx_data, content_type="application/gpx+xml")
@@ -70,7 +71,7 @@ def gpx(request):
return response
-@view_config(route_name='invalidate-share', request_method='POST', permission='track.unshare')
+@view_config(route_name="invalidate-share", request_method="POST", permission="track.unshare")
def invalidate_share(request):
"""Endpoint to invalidate the share link.
@@ -81,10 +82,10 @@ def invalidate_share(request):
"""
track = request.context
track.link_secret = util.random_link_secret()
- return HTTPFound(request.route_url('details', track_id=track.id))
+ return HTTPFound(request.route_url("details", track_id=track.id))
-@view_config(route_name='delete-track', request_method='POST', permission='track.delete')
+@view_config(route_name="delete-track", request_method="POST", permission="track.delete")
def delete_track(request):
"""Endpoint to delete the track.
@@ -98,10 +99,10 @@ def delete_track(request):
request.dbsession.delete(track)
request.data_manager.purge(track_id)
request.session.flash(request.localizer.translate(_("flash.track_deleted")))
- return HTTPFound(request.route_url('home'))
+ return HTTPFound(request.route_url("home"))
-@view_config(route_name='badge', http_cache=3600)
+@view_config(route_name="badge", http_cache=3600)
def badge(request):
"""Returns the image data associated with a badge.
@@ -115,7 +116,7 @@ def badge(request):
return response
-@view_config(route_name='image', http_cache=3600, permission='track.view')
+@view_config(route_name="image", http_cache=3600, permission="track.view")
def image(request):
"""Returns the image data for the requested image.
@@ -129,7 +130,7 @@ def image(request):
"""
track = request.context
try:
- image_path = request.data_manager.image_path(track.id, request.matchdict['image_name'])
+ image_path = request.data_manager.image_path(track.id, request.matchdict["image_name"])
except FileNotFoundError:
return HTTPNotFound()
else:
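
The gzip branch in `gpx()` above hands out the stored compressed blob whenever the client can decompress it itself; a rough standalone sketch of that negotiation (the header is a plain string here, whereas the view inspects `request.accept_encoding`, and the GPX data is made up):

    import gzip
    from pyramid.response import Response

    gpx_data = b"<gpx></gpx>"               # stands in for track.gpx_data
    gpx_gzipped = gzip.compress(gpx_data)   # stands in for the stored track.gpx blob
    accept_encoding = "gzip, deflate"       # normally taken from the request headers

    if "gzip" in accept_encoding:
        # Client decompresses itself: send the stored gzip data unchanged.
        response = Response(gpx_gzipped, content_type="application/gpx+xml",
                            content_encoding="gzip")
    else:
        response = Response(gpx_data, content_type="application/gpx+xml")
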
diff --git a/fietsboek/views/edit.py b/fietsboek/views/edit.py
index ff71282..003f7c7 100644
--- a/fietsboek/views/edit.py
+++ b/fietsboek/views/edit.py
@@ -18,8 +18,12 @@ ImageEmbed = namedtuple("ImageEmbed", "name url description")
LOGGER = logging.getLogger(__name__)
-@view_config(route_name='edit', renderer='fietsboek:templates/edit.jinja2',
- permission='track.edit', request_method='GET')
+@view_config(
+ route_name="edit",
+ renderer="fietsboek:templates/edit.jinja2",
+ permission="track.edit",
+ request_method="GET",
+)
def edit(request):
"""Renders the edit form.
@@ -35,8 +39,8 @@ def edit(request):
images = []
for image in request.data_manager.images(track.id):
metadata = request.dbsession.execute(
- select(models.ImageMetadata).filter_by(track=track, image_name=image)
- ).scalar_one_or_none()
+ select(models.ImageMetadata).filter_by(track=track, image_name=image)
+ ).scalar_one_or_none()
if metadata:
description = metadata.description
else:
@@ -45,13 +49,13 @@ def edit(request):
images.append(ImageEmbed(image, img_src, description))
return {
- 'track': track,
- 'badges': badges,
- 'images': images,
+ "track": track,
+ "badges": badges,
+ "images": images,
}
-@view_config(route_name='edit', permission='track.edit', request_method='POST')
+@view_config(route_name="edit", permission="track.edit", request_method="POST")
def do_edit(request):
"""Endpoint for saving the edited data.
@@ -65,8 +69,9 @@ def do_edit(request):
user_friends = request.identity.get_friends()
badges = util.retrieve_multiple(request.dbsession, models.Badge, request.params, "badge[]")
- tagged_people = util.retrieve_multiple(request.dbsession, models.User,
- request.params, "tagged-friend[]")
+ tagged_people = util.retrieve_multiple(
+ request.dbsession, models.User, request.params, "tagged-friend[]"
+ )
if any(user not in track.tagged_people and user not in user_friends for user in tagged_people):
return HTTPBadRequest()
@@ -86,7 +91,7 @@ def do_edit(request):
edit_images(request, request.context)
- return HTTPFound(request.route_url('details', track_id=track.id))
+ return HTTPFound(request.route_url("details", track_id=track.id))
def edit_images(request, track):
@@ -105,8 +110,8 @@ def edit_images(request, track):
for image in request.params.getall("delete-image[]"):
request.data_manager.delete_image(track.id, image)
image_meta = request.dbsession.execute(
- select(models.ImageMetadata).filter_by(track_id=track.id, image_name=image)
- ).scalar_one_or_none()
+ select(models.ImageMetadata).filter_by(track_id=track.id, image_name=image)
+ ).scalar_one_or_none()
LOGGER.debug("Deleted image %s %s (metadata: %s)", track.id, image, image_meta)
if image_meta:
request.dbsession.delete(image_meta)
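
`do_edit` above leans on `util.retrieve_multiple` to turn repeated form fields into model instances; this tiny sketch shows only the multidict convention underneath it (webob alone, no database or models, made-up ids):

    from webob.multidict import MultiDict

    # HTML forms send "badge[]" / "tagged-friend[]" once per selected value;
    # getall() is what retrieve_multiple() iterates to load each object by id.
    params = MultiDict()
    params.add("tagged-friend[]", "3")
    params.add("tagged-friend[]", "7")

    print(params.getall("tagged-friend[]"))  # ['3', '7']
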
diff --git a/fietsboek/views/notfound.py b/fietsboek/views/notfound.py
index f7117fe..ac24008 100644
--- a/fietsboek/views/notfound.py
+++ b/fietsboek/views/notfound.py
@@ -2,7 +2,7 @@
from pyramid.view import notfound_view_config
-@notfound_view_config(renderer='fietsboek:templates/404.jinja2')
+@notfound_view_config(renderer="fietsboek:templates/404.jinja2")
def notfound_view(request):
"""Renders the 404 response.
diff --git a/fietsboek/views/profile.py b/fietsboek/views/profile.py
index f4acd3d..7a929aa 100644
--- a/fietsboek/views/profile.py
+++ b/fietsboek/views/profile.py
@@ -10,8 +10,12 @@ from sqlalchemy import select
from .. import models, util
-@view_config(route_name='profile', renderer='fietsboek:templates/profile.jinja2',
- permission='user', request_method="GET")
+@view_config(
+ route_name="profile",
+ renderer="fietsboek:templates/profile.jinja2",
+ permission="user",
+ request_method="GET",
+)
def profile(request):
"""Provides the profile overview.
@@ -22,15 +26,15 @@ def profile(request):
"""
coming_requests = request.dbsession.execute(
- select(models.FriendRequest).filter_by(recipient_id=request.identity.id)
- ).scalars()
+ select(models.FriendRequest).filter_by(recipient_id=request.identity.id)
+ ).scalars()
going_requests = request.dbsession.execute(
- select(models.FriendRequest).filter_by(sender_id=request.identity.id)
- ).scalars()
+ select(models.FriendRequest).filter_by(sender_id=request.identity.id)
+ ).scalars()
return {
- 'user': request.identity,
- 'outgoing_friend_requests': going_requests,
- 'incoming_friend_requests': coming_requests,
+ "user": request.identity,
+ "outgoing_friend_requests": going_requests,
+ "incoming_friend_requests": coming_requests,
}
@@ -49,16 +53,16 @@ def do_change_profile(request):
util.check_password_constraints(password, request.params["repeat-password"])
except ValueError as exc:
request.session.flash(request.localizer.translate(exc.args[0]))
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
request.identity.set_password(request.params["password"])
name = request.params["name"]
if request.identity.name != name:
request.identity.name = name
request.session.flash(request.localizer.translate(_("flash.personal_data_updated")))
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
-@view_config(route_name='add-friend', permission='user', request_method='POST')
+@view_config(route_name="add-friend", permission="user", request_method="POST")
def do_add_friend(request):
"""Sends a friend request.
@@ -69,18 +73,17 @@ def do_add_friend(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- email = request.params['friend-email']
- candidate = (request.dbsession
- .execute(models.User.query_by_email(email))
- .scalar_one_or_none())
+ email = request.params["friend-email"]
+ candidate = request.dbsession.execute(models.User.query_by_email(email)).scalar_one_or_none()
if candidate is None:
request.session.flash(request.localizer.translate(_("flash.friend_not_found")))
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
- if (candidate in request.identity.get_friends()
- or candidate in [x.recipient for x in request.identity.outgoing_requests]):
+ if candidate in request.identity.get_friends() or candidate in [
+ x.recipient for x in request.identity.outgoing_requests
+ ]:
request.session.flash(request.localizer.translate(_("flash.friend_already_exists")))
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
for incoming in request.identity.incoming_requests:
if incoming.sender == candidate:
@@ -88,7 +91,7 @@ def do_add_friend(request):
request.identity.add_friend(candidate)
request.dbsession.delete(incoming)
request.session.flash(request.localizer.translate(_("flash.friend_added")))
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
# Nothing helped, so we send the friend request
friend_req = models.FriendRequest(
@@ -98,10 +101,10 @@ def do_add_friend(request):
)
request.dbsession.add(friend_req)
request.session.flash(request.localizer.translate(_("flash.friend_request_sent")))
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
-@view_config(route_name='delete-friend', permission='user', request_method='POST')
+@view_config(route_name="delete-friend", permission="user", request_method="POST")
def do_delete_friend(request):
"""Deletes a friend.
@@ -113,14 +116,14 @@ def do_delete_friend(request):
:rtype: pyramid.response.Response
"""
friend = request.dbsession.execute(
- select(models.User).filter_by(id=request.params["friend-id"])
- ).scalar_one_or_none()
+ select(models.User).filter_by(id=request.params["friend-id"])
+ ).scalar_one_or_none()
if friend:
request.identity.remove_friend(friend)
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
-@view_config(route_name='accept-friend', permission='user', request_method='POST')
+@view_config(route_name="accept-friend", permission="user", request_method="POST")
def do_accept_friend(request):
"""Accepts a friend request.
@@ -132,8 +135,8 @@ def do_accept_friend(request):
:rtype: pyramid.response.Response
"""
friend_request = request.dbsession.execute(
- select(models.FriendRequest).filter_by(id=request.params["request-id"])
- ).scalar_one_or_none()
+ select(models.FriendRequest).filter_by(id=request.params["request-id"])
+ ).scalar_one_or_none()
if friend_request is None:
return HTTPNotFound()
if friend_request.recipient != request.identity:
@@ -141,10 +144,10 @@ def do_accept_friend(request):
friend_request.sender.add_friend(friend_request.recipient)
request.dbsession.delete(friend_request)
- return HTTPFound(request.route_url('profile'))
+ return HTTPFound(request.route_url("profile"))
-@view_config(route_name='json-friends', renderer='json', permission='user')
+@view_config(route_name="json-friends", renderer="json", permission="user")
def json_friends(request):
"""Returns a JSON-ified list of the user's friends.
@@ -153,7 +156,5 @@ def json_friends(request):
:return: The HTTP response.
:rtype: pyramid.response.Response
"""
- friends = [
- {'name': friend.name, 'id': friend.id} for friend in request.identity.get_friends()
- ]
+ friends = [{"name": friend.name, "id": friend.id} for friend in request.identity.get_friends()]
return friends
diff --git a/fietsboek/views/tileproxy.py b/fietsboek/views/tileproxy.py
index 3e2abc1..f0612dc 100644
--- a/fietsboek/views/tileproxy.py
+++ b/fietsboek/views/tileproxy.py
@@ -9,7 +9,7 @@ Additionally, this protects the users' IP, as only fietsboek can see it.
import datetime
import random
import logging
-from typing import NamedTuple
+from typing import NamedTuple, Optional
from itertools import chain
from pyramid.view import view_config
@@ -25,6 +25,7 @@ from ..config import LayerType, LayerAccess
class TileSource(NamedTuple):
"""Represents a remote server that can provide tiles to us."""
+
key: str
"""Key to indicate this source in URLs."""
name: str
@@ -33,7 +34,7 @@ class TileSource(NamedTuple):
"""URL with placeholders."""
layer_type: LayerType
"""Type of this layer."""
- zoom: int
+ zoom: Optional[int]
"""Max zoom of this layer."""
access: LayerAccess
"""Access restrictions to use this layer."""
@@ -54,88 +55,107 @@ _jb_copy = _href("https://www.j-berkemeier.de/GPXViewer", "GPXViewer")
DEFAULT_TILE_LAYERS = [
# Main base layers
TileSource(
- 'osm',
- 'OSM',
- 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
+ "osm",
+ "OSM",
+ "https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",
LayerType.BASE,
19,
LayerAccess.PUBLIC,
- ''.join([
- _jb_copy, ' | Map data &copy; ',
- _href("https://www.openstreetmap.org/", "OpenStreetMap"), ' and contributors ',
- _href("https://creativecommons.org/licenses/by-sa/2.0/", "CC-BY-SA"),
- ]),
+ "".join(
+ [
+ _jb_copy,
+ " | Map data &copy; ",
+ _href("https://www.openstreetmap.org/", "OpenStreetMap"),
+ " and contributors ",
+ _href("https://creativecommons.org/licenses/by-sa/2.0/", "CC-BY-SA"),
+ ]
+ ),
),
TileSource(
- 'satellite',
- 'Satellit',
- 'https://server.arcgisonline.com/ArcGIS/rest/services/'
- 'World_Imagery/MapServer/tile/{z}/{y}/{x}',
+ "satellite",
+ "Satellit",
+ "https://server.arcgisonline.com/ArcGIS/rest/services/"
+ "World_Imagery/MapServer/tile/{z}/{y}/{x}",
LayerType.BASE,
21,
LayerAccess.PUBLIC,
- ''.join([
- _jb_copy, ' | Map data &copy; ', _href("https://www.esri.com", "Esri"),
- ', i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, ',
- 'IGP, UPR-EGP, and the GIS User Community',
- ]),
+ "".join(
+ [
+ _jb_copy,
+ " | Map data &copy; ",
+ _href("https://www.esri.com", "Esri"),
+ ", i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, ",
+ "IGP, UPR-EGP, and the GIS User Community",
+ ]
+ ),
),
TileSource(
- 'osmde',
- 'OSMDE',
- 'https://{s}.tile.openstreetmap.de/tiles/osmde/{z}/{x}/{y}.png',
+ "osmde",
+ "OSMDE",
+ "https://{s}.tile.openstreetmap.de/tiles/osmde/{z}/{x}/{y}.png",
LayerType.BASE,
19,
LayerAccess.PUBLIC,
- ''.join([
- _jb_copy, ' | Map data &copy; ',
- _href("https://www.openstreetmap.org/", "OpenStreetMap"), ' and contributors ',
- _href("https://creativecommons.org/licenses/by-sa/2.0/", "CC-BY-SA")
- ]),
+ "".join(
+ [
+ _jb_copy,
+ " | Map data &copy; ",
+ _href("https://www.openstreetmap.org/", "OpenStreetMap"),
+ " and contributors ",
+ _href("https://creativecommons.org/licenses/by-sa/2.0/", "CC-BY-SA"),
+ ]
+ ),
),
TileSource(
- 'opentopo',
- 'Open Topo',
- 'https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png',
+ "opentopo",
+ "Open Topo",
+ "https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png",
LayerType.BASE,
17,
LayerAccess.PUBLIC,
- ''.join([
- _jb_copy,
- ' | Kartendaten: © OpenStreetMap-Mitwirkende, SRTM | Kartendarstellung: © ',
- _href("https://opentopomap.org/about", "OpenTopoMap"), ' (CC-BY-SA)',
- ]),
+ "".join(
+ [
+ _jb_copy,
+ " | Kartendaten: © OpenStreetMap-Mitwirkende, SRTM | Kartendarstellung: © ",
+ _href("https://opentopomap.org/about", "OpenTopoMap"),
+ " (CC-BY-SA)",
+ ]
+ ),
),
TileSource(
- 'topplusopen',
- 'TopPlusOpen',
- 'https://sgx.geodatenzentrum.de/wmts_topplus_open/tile/'
- '1.0.0/web/default/WEBMERCATOR/{z}/{y}/{x}.png',
+ "topplusopen",
+ "TopPlusOpen",
+ "https://sgx.geodatenzentrum.de/wmts_topplus_open/tile/"
+ "1.0.0/web/default/WEBMERCATOR/{z}/{y}/{x}.png",
LayerType.BASE,
18,
LayerAccess.PUBLIC,
- ''.join([
- _jb_copy, ' | Kartendaten: © ',
- _href("https://www.bkg.bund.de/SharedDocs/Produktinformationen"
- "/BKG/DE/P-2017/170922-TopPlus-Web-Open.html",
- "Bundesamt für Kartographie und Geodäsie"),
- ]),
+ "".join(
+ [
+ _jb_copy,
+ " | Kartendaten: © ",
+ _href(
+ "https://www.bkg.bund.de/SharedDocs/Produktinformationen"
+ "/BKG/DE/P-2017/170922-TopPlus-Web-Open.html",
+ "Bundesamt für Kartographie und Geodäsie",
+ ),
+ ]
+ ),
),
-
# Overlay layers
TileSource(
- 'opensea',
- 'OpenSea',
- 'https://tiles.openseamap.org/seamark/{z}/{x}/{y}.png',
+ "opensea",
+ "OpenSea",
+ "https://tiles.openseamap.org/seamark/{z}/{x}/{y}.png",
LayerType.OVERLAY,
None,
LayerAccess.PUBLIC,
'Kartendaten: © <a href="http://www.openseamap.org">OpenSeaMap</a> contributors',
),
TileSource(
- 'hiking',
- 'Hiking',
- 'https://tile.waymarkedtrails.org/hiking/{z}/{x}/{y}.png',
+ "hiking",
+ "Hiking",
+ "https://tile.waymarkedtrails.org/hiking/{z}/{x}/{y}.png",
LayerType.OVERLAY,
None,
LayerAccess.PUBLIC,
@@ -143,9 +163,9 @@ DEFAULT_TILE_LAYERS = [
f'({_href("https://creativecommons.org/licenses/by-sa/3.0/", "CC-BY-SA")})',
),
TileSource(
- 'cycling',
- 'Cycling',
- 'https://tile.waymarkedtrails.org/cycling/{z}/{x}/{y}.png',
+ "cycling",
+ "Cycling",
+ "https://tile.waymarkedtrails.org/cycling/{z}/{x}/{y}.png",
LayerType.OVERLAY,
None,
LayerAccess.PUBLIC,
@@ -167,7 +187,7 @@ PUNISHMENT_THRESHOLD = 10
"""Block a provider after that many requests have timed out."""
-@view_config(route_name='tile-proxy', http_cache=3600)
+@view_config(route_name="tile-proxy", http_cache=3600)
def tile_proxy(request):
"""Requests the given tile from the proxy.
@@ -179,13 +199,16 @@ def tile_proxy(request):
if request.config.disable_tile_proxy:
raise HTTPBadRequest("Tile proxying is disabled")
- provider = request.matchdict['provider']
+ provider = request.matchdict["provider"]
tile_sources = {source.key: source for source in sources_for(request)}
if provider not in tile_sources:
raise HTTPBadRequest("Invalid provider")
- x, y, z = (int(request.matchdict['x']), int(request.matchdict['y']),
- int(request.matchdict['z']))
+ x, y, z = (
+ int(request.matchdict["x"]),
+ int(request.matchdict["y"]),
+ int(request.matchdict["z"]),
+ )
cache_key = f"tile:{provider}-{x}-{y}-{z}"
content_type = "image/png"
@@ -220,8 +243,7 @@ def tile_proxy(request):
resp.raise_for_status()
except requests.HTTPError as exc:
LOGGER.info("Proxy request failed for %s: %s", provider, exc)
- return Response(f"Failed to get tile from {provider}",
- status_code=resp.status_code)
+ return Response(f"Failed to get tile from {provider}", status_code=resp.status_code)
request.redis.set(cache_key, resp.content, ex=TTL)
return Response(resp.content, content_type=resp.headers.get("Content-type", content_type))
@@ -235,9 +257,13 @@ def sources_for(request):
:rtype: list[TileSource]
"""
return [
- source for source in chain(
- (default_layer for default_layer in DEFAULT_TILE_LAYERS
- if default_layer.key in request.config.default_tile_layers),
+ source
+ for source in chain(
+ (
+ default_layer
+ for default_layer in DEFAULT_TILE_LAYERS
+ if default_layer.key in request.config.default_tile_layers
+ ),
extract_tile_layers(request.config),
)
if source.access == LayerAccess.PUBLIC or request.identity is not None
@@ -263,17 +289,26 @@ def _extract_thunderforest(config):
tf_api_key = config.thunderforest_key.get_secret_value()
if tf_api_key:
tf_access = config.thunderforest_access
- tf_attribution = ' | '.join([
- _jb_copy,
- _href("https://www.thunderforest.com/", "Thunderforest"),
- _href("https://www.openstreetmap.org/", "OpenStreetMap"),
- ])
+ tf_attribution = " | ".join(
+ [
+ _jb_copy,
+ _href("https://www.thunderforest.com/", "Thunderforest"),
+ _href("https://www.openstreetmap.org/", "OpenStreetMap"),
+ ]
+ )
for tf_map in config.thunderforest_maps:
- url = (f"https://tile.thunderforest.com/{tf_map}/"
- f"{{z}}/{{x}}/{{y}}.png?apikey={tf_api_key}")
+ url = (
+ f"https://tile.thunderforest.com/{tf_map}/"
+ f"{{z}}/{{x}}/{{y}}.png?apikey={tf_api_key}"
+ )
yield TileSource(
- f"tf-{tf_map}", f"TF {tf_map.title()}", url,
- LayerType.BASE, 22, tf_access, tf_attribution,
+ f"tf-{tf_map}",
+ f"TF {tf_map.title()}",
+ url,
+ LayerType.BASE,
+ 22,
+ tf_access,
+ tf_attribution,
)
@@ -287,5 +322,5 @@ def _extract_user_layers(config):
layer.layer_type,
layer.zoom,
layer.access,
- layer.attribution
+ layer.attribution,
)
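
Additional providers follow the same positional `TileSource(...)` layout as the DEFAULT_TILE_LAYERS entries above. A hypothetical example (key, URL, and attribution are made up; the imports assume an installed fietsboek package):

    from fietsboek.config import LayerAccess, LayerType
    from fietsboek.views.tileproxy import TileSource

    my_layer = TileSource(
        "mymap",                                      # key identifying the source in URLs
        "My Map",                                     # display name
        "https://tiles.example.com/{z}/{x}/{y}.png",  # URL with {x}/{y}/{z} placeholders
        LayerType.BASE,
        18,                                           # max zoom; overlays above pass None
        LayerAccess.PUBLIC,
        "&copy; My Map contributors",                 # attribution markup
    )
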
diff --git a/fietsboek/views/upload.py b/fietsboek/views/upload.py
index f63f45d..d691e46 100644
--- a/fietsboek/views/upload.py
+++ b/fietsboek/views/upload.py
@@ -19,8 +19,12 @@ from ..models.track import Visibility, TrackType
LOGGER = logging.getLogger(__name__)
-@view_config(route_name='upload', renderer='fietsboek:templates/upload.jinja2',
- request_method='GET', permission='upload')
+@view_config(
+ route_name="upload",
+ renderer="fietsboek:templates/upload.jinja2",
+ request_method="GET",
+ permission="upload",
+)
def show_upload(request):
"""Renders the main upload form.
@@ -33,7 +37,7 @@ def show_upload(request):
return {}
-@view_config(route_name='upload', request_method='POST', permission='upload')
+@view_config(route_name="upload", request_method="POST", permission="upload")
def do_upload(request):
"""Endpoint to store the uploaded file.
@@ -47,10 +51,10 @@ def do_upload(request):
:rtype: pyramid.response.Response
"""
try:
- gpx = request.POST['gpx'].file.read()
+ gpx = request.POST["gpx"].file.read()
except AttributeError:
- request.session.flash(request.localizer.translate(_('flash.no_file_selected')))
- return HTTPFound(request.route_url('upload'))
+ request.session.flash(request.localizer.translate(_("flash.no_file_selected")))
+ return HTTPFound(request.route_url("upload"))
# Before we do anything, we check if we can parse the file.
# gpxpy might throw different exceptions, so we simply catch `Exception`
@@ -59,9 +63,9 @@ def do_upload(request):
try:
gpxpy.parse(gpx)
except Exception as exc:
- request.session.flash(request.localizer.translate(_('flash.invalid_file')))
+ request.session.flash(request.localizer.translate(_("flash.invalid_file")))
LOGGER.info("Could not parse gpx: %s", exc)
- return HTTPFound(request.route_url('upload'))
+ return HTTPFound(request.route_url("upload"))
now = datetime.datetime.utcnow()
@@ -73,10 +77,10 @@ def do_upload(request):
request.dbsession.add(upload)
request.dbsession.flush()
- return HTTPFound(request.route_url('finish-upload', upload_id=upload.id))
+ return HTTPFound(request.route_url("finish-upload", upload_id=upload.id))
-@view_config(route_name='preview', permission='upload.finish')
+@view_config(route_name="preview", permission="upload.finish")
def preview(request):
"""Allows a preview of the uploaded track by returning the GPX data of a
:class:`~fietsboek.models.track.Upload`
@@ -87,11 +91,15 @@ def preview(request):
:rtype: pyramid.response.Response
"""
upload = request.context
- return Response(upload.gpx_data, content_type='application/gpx+xml')
+ return Response(upload.gpx_data, content_type="application/gpx+xml")
-@view_config(route_name='finish-upload', renderer='fietsboek:templates/finish_upload.jinja2',
- request_method='GET', permission='upload.finish')
+@view_config(
+ route_name="finish-upload",
+ renderer="fietsboek:templates/finish_upload.jinja2",
+ request_method="GET",
+ permission="upload.finish",
+)
def finish_upload(request):
"""Renders the form that allows the user to finish the upload.
@@ -108,6 +116,7 @@ def finish_upload(request):
date = gpx.time or gpx.get_time_bounds().start_time or datetime.datetime.now()
date = date.astimezone(timezone)
tz_offset = timezone.utcoffset(date)
+ tz_offset = 0 if tz_offset is None else tz_offset.total_seconds()
track_name = ""
for track in gpx.tracks:
if track.name:
@@ -115,20 +124,20 @@ def finish_upload(request):
break
return {
- 'preview_id': upload.id,
- 'upload_title': gpx.name or track_name,
- 'upload_date': date,
- 'upload_date_tz': int(tz_offset.total_seconds() // 60),
- 'upload_visibility': Visibility.PRIVATE,
- 'upload_type': TrackType.ORGANIC,
- 'upload_description': gpx.description,
- 'upload_tags': set(),
- 'upload_tagged_people': [],
- 'badges': badges,
+ "preview_id": upload.id,
+ "upload_title": gpx.name or track_name,
+ "upload_date": date,
+ "upload_date_tz": int(tz_offset // 60),
+ "upload_visibility": Visibility.PRIVATE,
+ "upload_type": TrackType.ORGANIC,
+ "upload_description": gpx.description,
+ "upload_tags": set(),
+ "upload_tagged_people": [],
+ "badges": badges,
}
-@view_config(route_name='finish-upload', request_method='POST', permission='upload.finish')
+@view_config(route_name="finish-upload", request_method="POST", permission="upload.finish")
def do_finish_upload(request):
"""Endpoint for the "finishing upload" form.
@@ -140,8 +149,9 @@ def do_finish_upload(request):
upload = request.context
user_friends = request.identity.get_friends()
badges = util.retrieve_multiple(request.dbsession, models.Badge, request.params, "badge[]")
- tagged_people = util.retrieve_multiple(request.dbsession, models.User,
- request.params, "tagged-friend[]")
+ tagged_people = util.retrieve_multiple(
+ request.dbsession, models.User, request.params, "tagged-friend[]"
+ )
if any(user not in user_friends for user in tagged_people):
return HTTPBadRequest()
@@ -178,10 +188,10 @@ def do_finish_upload(request):
request.session.flash(request.localizer.translate(_("flash.upload_success")))
- return HTTPFound(request.route_url('details', track_id=track.id))
+ return HTTPFound(request.route_url("details", track_id=track.id))
-@view_config(route_name='cancel-upload', permission='upload.finish', request_method="POST")
+@view_config(route_name="cancel-upload", permission="upload.finish", request_method="POST")
def cancel_upload(request):
"""Cancels the upload and clears the temporary data.
@@ -193,4 +203,4 @@ def cancel_upload(request):
upload = request.context
request.dbsession.delete(upload)
request.session.flash(request.localizer.translate(_("flash.upload_cancelled")))
- return HTTPFound(request.route_url('upload'))
+ return HTTPFound(request.route_url("upload"))
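
Note on the finish_upload hunk above: it guards against tzinfo.utcoffset() returning None before converting the offset to whole minutes. A minimal standalone sketch of that normalization (hypothetical helper name, standard library only, not the project's actual code):

    from datetime import datetime, timedelta, timezone

    def offset_minutes(tz, when):
        """Return tz's UTC offset at `when` in whole minutes, falling back to 0."""
        offset = tz.utcoffset(when)  # a timedelta, or None for some tzinfo objects
        seconds = 0 if offset is None else offset.total_seconds()
        return int(seconds // 60)

    # A fixed +02:00 zone yields 120 minutes; a tzinfo returning None yields 0.
    assert offset_minutes(timezone(timedelta(hours=2)), datetime(2022, 7, 1)) == 120
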
diff --git a/poetry.lock b/poetry.lock
index a5e6951..327b7e5 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -24,6 +24,23 @@ SQLAlchemy = ">=1.3.0"
tz = ["python-dateutil"]
[[package]]
+name = "astroid"
+version = "2.12.13"
+description = "An abstract syntax tree for Python with inference support."
+category = "dev"
+optional = false
+python-versions = ">=3.7.2"
+
+[package.dependencies]
+lazy-object-proxy = ">=1.4.0"
+typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
+typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
+wrapt = [
+ {version = ">=1.11,<2", markers = "python_version < \"3.11\""},
+ {version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
+]
+
+[[package]]
name = "async-timeout"
version = "4.0.2"
description = "Timeout context manager for asyncio programs"
@@ -38,8 +55,8 @@ typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
name = "attrs"
version = "22.1.0"
description = "Classes Without Boilerplate"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.5"
[package.extras]
@@ -63,8 +80,8 @@ pytz = ">=2015.7"
name = "beautifulsoup4"
version = "4.11.1"
description = "Screen-scraping library"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.6.0"
[package.dependencies]
@@ -75,6 +92,29 @@ html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]]
+name = "black"
+version = "22.12.0"
+description = "The uncompromising code formatter."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
+typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
name = "bleach"
version = "5.0.1"
description = "An easy safelist-based HTML-sanitizing tool."
@@ -144,8 +184,8 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7
name = "coverage"
version = "6.5.0"
description = "Code coverage measurement for Python"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.7"
[package.dependencies]
@@ -174,6 +214,17 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
[[package]]
+name = "dill"
+version = "0.3.6"
+description = "serialize all of python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+graph = ["objgraph (>=1.7.2)"]
+
+[[package]]
name = "docutils"
version = "0.19"
description = "Docutils -- Python Documentation Utilities"
@@ -185,8 +236,8 @@ python-versions = ">=3.7"
name = "exceptiongroup"
version = "1.0.4"
description = "Backport of PEP 654 (exception groups)"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.7"
[package.extras]
@@ -276,11 +327,25 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec
name = "iniconfig"
version = "1.1.1"
description = "iniconfig: brain-dead simple config-ini parsing"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = "*"
[[package]]
+name = "isort"
+version = "5.10.1"
+description = "A Python utility / library to sort Python imports."
+category = "dev"
+optional = false
+python-versions = ">=3.6.1,<4.0"
+
+[package.extras]
+colors = ["colorama (>=0.4.3,<0.5.0)"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
+
+[[package]]
name = "jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
@@ -295,6 +360,14 @@ MarkupSafe = ">=2.0"
i18n = ["Babel (>=2.7)"]
[[package]]
+name = "lazy-object-proxy"
+version = "1.8.0"
+description = "A fast and thorough lazy object proxy."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
name = "mako"
version = "1.2.4"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
@@ -334,6 +407,42 @@ optional = false
python-versions = ">=3.7"
[[package]]
+name = "mccabe"
+version = "0.7.0"
+description = "McCabe checker, plugin for flake8"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "mypy"
+version = "0.991"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
name = "packaging"
version = "21.3"
description = "Core utilities for Python packages"
@@ -361,6 +470,14 @@ paste = ["Paste"]
testing = ["Paste", "pytest", "pytest-cov"]
[[package]]
+name = "pathspec"
+version = "0.10.2"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
name = "plaster"
version = "1.1.2"
description = "A loader interface around multiple config file formats."
@@ -391,11 +508,23 @@ plaster = ">=0.5"
testing = ["pytest", "pytest-cov"]
[[package]]
+name = "platformdirs"
+version = "2.6.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
+test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+
+[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.6"
[package.dependencies]
@@ -440,6 +569,29 @@ python-versions = ">=3.6"
plugins = ["importlib-metadata"]
[[package]]
+name = "pylint"
+version = "2.15.8"
+description = "python code static checker"
+category = "dev"
+optional = false
+python-versions = ">=3.7.2"
+
+[package.dependencies]
+astroid = ">=2.12.13,<=2.14.0-dev0"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+dill = ">=0.2"
+isort = ">=4.2.5,<6"
+mccabe = ">=0.6,<0.8"
+platformdirs = ">=2.2.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+tomlkit = ">=0.10.1"
+typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
+
+[[package]]
name = "pyparsing"
version = "3.0.9"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
@@ -561,8 +713,8 @@ testing = ["WebTest", "coverage (>=5.0)", "pytest", "pytest-cov"]
name = "pytest"
version = "7.2.0"
description = "pytest: simple powerful testing with Python"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.7"
[package.dependencies]
@@ -582,8 +734,8 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.
name = "pytest-cov"
version = "4.0.0"
description = "Pytest plugin for measuring coverage."
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.6"
[package.dependencies]
@@ -682,8 +834,8 @@ python-versions = "*"
name = "soupsieve"
version = "2.3.2.post1"
description = "A modern CSS selector implementation for Beautiful Soup."
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.6"
[[package]]
@@ -837,11 +989,19 @@ tests = ["pytest", "pytest-cov"]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.7"
[[package]]
+name = "tomlkit"
+version = "0.11.6"
+description = "Style preserving TOML library"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
name = "transaction"
version = "3.0.1"
description = "Transaction management for Python"
@@ -869,6 +1029,76 @@ python-versions = "*"
docs = ["Sphinx (>=1.3.1)", "docutils", "pylons-sphinx-themes"]
[[package]]
+name = "typed-ast"
+version = "1.5.4"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "types-babel"
+version = "2.11.0.7"
+description = "Typing stubs for babel"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+types-pytz = "*"
+
+[[package]]
+name = "types-bleach"
+version = "5.0.3.1"
+description = "Typing stubs for bleach"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "types-markdown"
+version = "3.4.2.1"
+description = "Typing stubs for Markdown"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "types-pytz"
+version = "2022.6.0.1"
+description = "Typing stubs for pytz"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "types-redis"
+version = "4.3.21.6"
+description = "Typing stubs for redis"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "types-requests"
+version = "2.28.11.5"
+description = "Typing stubs for requests"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+types-urllib3 = "<1.27"
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.25.4"
+description = "Typing stubs for urllib3"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
name = "typing-extensions"
version = "4.4.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
@@ -937,8 +1167,8 @@ testing = ["coverage", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"]
name = "webtest"
version = "3.0.0"
description = "Helper to test WSGI applications"
-category = "main"
-optional = true
+category = "dev"
+optional = false
python-versions = ">=3.6, <4"
[package.dependencies]
@@ -951,6 +1181,14 @@ docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.8)"]
tests = ["PasteDeploy", "WSGIProxy2", "coverage", "pyquery", "pytest", "pytest-cov"]
[[package]]
+name = "wrapt"
+version = "1.14.1"
+description = "Module for decorators, wrappers and monkey patching."
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[[package]]
name = "zipp"
version = "3.11.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
@@ -1010,13 +1248,10 @@ transaction = ">=1.6.0"
[package.extras]
test = ["zope.testing"]
-[extras]
-testing = ["WebTest", "pytest", "pytest-cov"]
-
[metadata]
lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "08ae6927db708c957185f78e6276fd5438ee36a0a30231e38bf89840c9a4778d"
+python-versions = "^3.7.2"
+content-hash = "8f437e4819a20de837375ca3cdf59530da0f190389484750eb32998b9930a964"
[metadata.files]
alabaster = [
@@ -1027,6 +1262,10 @@ alembic = [
{file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"},
{file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"},
]
+astroid = [
+ {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"},
+ {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"},
+]
async-timeout = [
{file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
{file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
@@ -1043,6 +1282,20 @@ beautifulsoup4 = [
{file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"},
{file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"},
]
+black = [
+ {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
+ {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
+ {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
+ {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
+ {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
+ {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
+ {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
+ {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
+ {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
+ {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
+ {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
+ {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
+]
bleach = [
{file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
{file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"},
@@ -1209,6 +1462,10 @@ cryptography = [
{file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"},
{file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"},
]
+dill = [
+ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
+ {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
+]
docutils = [
{file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"},
{file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"},
@@ -1306,10 +1563,35 @@ iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
+isort = [
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
+]
jinja2 = [
{file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
{file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
]
+lazy-object-proxy = [
+ {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"},
+ {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"},
+ {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"},
+ {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"},
+ {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"},
+ {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"},
+ {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"},
+ {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"},
+ {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"},
+ {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"},
+ {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"},
+ {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"},
+ {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"},
+ {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"},
+ {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"},
+ {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"},
+ {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"},
+ {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"},
+ {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"},
+]
mako = [
{file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"},
{file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"},
@@ -1360,6 +1642,46 @@ markupsafe = [
{file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
{file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
]
+mccabe = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+mypy = [
+ {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
+ {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
+ {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
+ {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
+ {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
+ {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
+ {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
+ {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
+ {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
+ {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
+ {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
+ {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
+ {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
+ {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
+ {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
+ {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
+ {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
+ {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
+ {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
+ {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
+ {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
+ {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
+ {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
+ {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
+ {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
+ {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
+ {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
+ {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
+ {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
+ {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
+]
+mypy-extensions = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
@@ -1368,6 +1690,10 @@ pastedeploy = [
{file = "PasteDeploy-3.0.1-py3-none-any.whl", hash = "sha256:6195c921b1c3ed9722e4e3e6aa29b70deebb2429b4ca3ff3d49185c8e80003bb"},
{file = "PasteDeploy-3.0.1.tar.gz", hash = "sha256:5f4b4d5fddd39b8947ea727161e366bf55b90efc60a4d1dd7976b9031d0b4e5f"},
]
+pathspec = [
+ {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"},
+ {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"},
+]
plaster = [
{file = "plaster-1.1.2-py2.py3-none-any.whl", hash = "sha256:42992ab1f4865f1278e2ad740e8ad145683bb4022e03534265528f0c23c0df2d"},
{file = "plaster-1.1.2.tar.gz", hash = "sha256:f8befc54bf8c1147c10ab40297ec84c2676fa2d4ea5d6f524d9436a80074ef98"},
@@ -1376,6 +1702,10 @@ plaster-pastedeploy = [
{file = "plaster_pastedeploy-1.0.1-py2.py3-none-any.whl", hash = "sha256:ad3550cc744648969ed3b810f33c9344f515ee8d8a8cec18e8f2c4a643c2181f"},
{file = "plaster_pastedeploy-1.0.1.tar.gz", hash = "sha256:be262e6d2e41a7264875daa2fe2850cbb0615728bcdc92828fdc72736e381412"},
]
+platformdirs = [
+ {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"},
+ {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"},
+]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
@@ -1426,6 +1756,10 @@ pygments = [
{file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},
{file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"},
]
+pylint = [
+ {file = "pylint-2.15.8-py3-none-any.whl", hash = "sha256:ea82cd6a1e11062dc86d555d07c021b0fb65afe39becbe6fe692efd6c4a67443"},
+ {file = "pylint-2.15.8.tar.gz", hash = "sha256:ec4a87c33da054ab86a6c79afa6771dc8765cb5631620053e727fcf3ef8cbed7"},
+]
pyparsing = [
{file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
{file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
@@ -1573,6 +1907,10 @@ tomli = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
+tomlkit = [
+ {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
+ {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
+]
transaction = [
{file = "transaction-3.0.1-py2.py3-none-any.whl", hash = "sha256:2329a6e6b82d1d8d4de9267ea6ee790532c375e5911d3c7633a234e94a4a0a9e"},
{file = "transaction-3.0.1.tar.gz", hash = "sha256:0c15ef0b7ff3518357ceea75722a30d974c3f85e11aa5cec5d5a2b6a40cfcf68"},
@@ -1581,6 +1919,60 @@ translationstring = [
{file = "translationstring-1.4-py2.py3-none-any.whl", hash = "sha256:5f4dc4d939573db851c8d840551e1a0fb27b946afe3b95aafc22577eed2d6262"},
{file = "translationstring-1.4.tar.gz", hash = "sha256:bf947538d76e69ba12ab17283b10355a9ecfbc078e6123443f43f2107f6376f3"},
]
+typed-ast = [
+ {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"},
+ {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"},
+ {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"},
+ {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"},
+ {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"},
+ {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"},
+ {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"},
+ {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"},
+ {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"},
+ {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"},
+ {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"},
+ {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"},
+ {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"},
+ {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"},
+ {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
+ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
+]
+types-babel = [
+ {file = "types-babel-2.11.0.7.tar.gz", hash = "sha256:07c84fe265570b8ec4040abc6c88c6d20465b9e8cc35189d1b2930ae14bd003e"},
+ {file = "types_babel-2.11.0.7-py3-none-any.whl", hash = "sha256:df99729ab2855638562b1f914524fd58a54e79c6583099a9a31acd90fa1e143f"},
+]
+types-bleach = [
+ {file = "types-bleach-5.0.3.1.tar.gz", hash = "sha256:ce8772ea5126dab1883851b41e3aeff229aa5213ced36096990344e632e92373"},
+ {file = "types_bleach-5.0.3.1-py3-none-any.whl", hash = "sha256:af5f1b3a54ff279f54c29eccb2e6988ebb6718bc4061469588a5fd4880a79287"},
+]
+types-markdown = [
+ {file = "types-Markdown-3.4.2.1.tar.gz", hash = "sha256:03c0904cf5886a7d8193e2f50bcf842afc89e0ab80f060f389f6c2635c65628f"},
+ {file = "types_Markdown-3.4.2.1-py3-none-any.whl", hash = "sha256:b2333f6f4b8f69af83de359e10a097e4a3f14bbd6d2484e1829d9b0ec56fa0cb"},
+]
+types-pytz = [
+ {file = "types-pytz-2022.6.0.1.tar.gz", hash = "sha256:d078196374d1277e9f9984d49373ea043cf2c64d5d5c491fbc86c258557bd46f"},
+ {file = "types_pytz-2022.6.0.1-py3-none-any.whl", hash = "sha256:bea605ce5d5a5d52a8e1afd7656c9b42476e18a0f888de6be91587355313ddf4"},
+]
+types-redis = [
+ {file = "types-redis-4.3.21.6.tar.gz", hash = "sha256:f7969f73a0f79e9e7895f053a06d8b429fb7b5d4fe1269b8ee40463388f653ad"},
+ {file = "types_redis-4.3.21.6-py3-none-any.whl", hash = "sha256:615e5a9142993789ffc22ee54435769b600da3e528bb51cf38430e5cd82af306"},
+]
+types-requests = [
+ {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"},
+ {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"},
+]
+types-urllib3 = [
+ {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"},
+ {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"},
+]
typing-extensions = [
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
@@ -1609,6 +2001,72 @@ webtest = [
{file = "WebTest-3.0.0-py3-none-any.whl", hash = "sha256:2a001a9efa40d2a7e5d9cd8d1527c75f41814eb6afce2c3d207402547b1e5ead"},
{file = "WebTest-3.0.0.tar.gz", hash = "sha256:54bd969725838d9861a9fa27f8d971f79d275d94ae255f5c501f53bb6d9929eb"},
]
+wrapt = [
+ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"},
+ {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"},
+ {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"},
+ {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"},
+ {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"},
+ {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"},
+ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"},
+ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"},
+ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"},
+ {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"},
+ {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"},
+ {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"},
+ {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"},
+ {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"},
+ {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"},
+ {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"},
+ {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"},
+ {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"},
+ {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"},
+ {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"},
+ {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"},
+ {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"},
+ {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"},
+ {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"},
+ {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"},
+ {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"},
+ {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"},
+ {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"},
+ {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"},
+ {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"},
+ {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"},
+ {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"},
+ {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"},
+ {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"},
+ {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"},
+ {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"},
+ {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"},
+ {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"},
+ {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"},
+ {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"},
+ {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"},
+ {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"},
+ {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"},
+ {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"},
+ {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"},
+ {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"},
+ {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"},
+ {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"},
+ {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"},
+ {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"},
+ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
+ {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
+]
zipp = [
{file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"},
{file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"},
diff --git a/pyproject.toml b/pyproject.toml
index 42f8ea3..067631f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,7 +24,7 @@ repository = "https://gitlab.com/dunj3/fietsboek"
keywords = ["web", "gpx"]
[tool.poetry.dependencies]
-python = "^3.7"
+python = "^3.7.2"
pyramid = "^2"
pyramid_jinja2 = "^2.10"
@@ -51,9 +51,6 @@ bleach = "^5"
Click = "^8.1"
requests = "^2.28.1"
-WebTest = {version = "^3", optional = true}
-pytest = {version = "^7.2", optional = true}
-pytest-cov = {version = "*", optional = true}
pydantic = "^1.10.2"
termcolor = "^2.1.1"
@@ -63,8 +60,31 @@ optional = true
[tool.poetry.group.docs.dependencies]
Sphinx = "^5.3"
-[tool.poetry.extras]
-testing = ["WebTest", "pytest", "pytest-cov"]
+[tool.poetry.group.testing]
+optional = true
+
+[tool.poetry.group.testing.dependencies]
+pytest = "^7.2.0"
+webtest = "^3.0.0"
+pytest-cov = "^4.0.0"
+
+[tool.poetry.group.linters]
+optional = true
+
+[tool.poetry.group.linters.dependencies]
+pylint = "^2.15.8"
+black = "^22.12.0"
+
+[tool.poetry.group.types]
+optional = true
+
+[tool.poetry.group.types.dependencies]
+mypy = "^0.991"
+types-markdown = "^3.4.2.1"
+types-requests = "^2.28.11.5"
+types-bleach = "^5.0.3.1"
+types-babel = "^2.11.0.7"
+types-redis = "^4.3.21.6"
[tool.poetry.scripts]
fietsctl = "fietsboek.scripts.fietsctl:main"
@@ -72,3 +92,7 @@ fietsupdate = "fietsboek.updater.cli:cli"
[tool.poetry.plugins."paste.app_factory"]
main = "fietsboek:main"
+
+[tool.black]
+line-length = 100
+extend-exclude = '''upd_.+\.py|^/fietsboek/alembic/versions/.+'''
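
To illustrate what the new [tool.black] extend-exclude pattern covers, here is a small sketch (file names are hypothetical; it assumes black applies the regex with re.search to project-relative paths prefixed with a leading slash):

    import re

    # The extend-exclude pattern added above, as a plain regex.
    EXCLUDE = re.compile(r"upd_.+\.py|^/fietsboek/alembic/versions/.+")

    # Hypothetical paths, for illustration only.
    assert EXCLUDE.search("/fietsboek/updater/scripts/upd_30_add_visibility.py")
    assert EXCLUDE.search("/fietsboek/alembic/versions/abc123_initial.py")
    assert EXCLUDE.search("/fietsboek/views/upload.py") is None  # still formatted
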
diff --git a/testing.ini b/testing.ini
index 62c1e26..e68d0cc 100644
--- a/testing.ini
+++ b/testing.ini
@@ -16,6 +16,7 @@ sqlalchemy.url = sqlite:///%(here)s/testing.sqlite
redis.url = redis://localhost
fietsboek.default_tile_layers =
+enable_account_registration = true
email.from = Test <test@localhost>
email.smtp_url = debug://
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/__init__.py
+++ /dev/null
diff --git a/tests/integration/test_register.py b/tests/integration/test_register.py
new file mode 100644
index 0000000..af1e313
--- /dev/null
+++ b/tests/integration/test_register.py
@@ -0,0 +1,68 @@
+import re
+
+import fietsboek.email
+from fietsboek import models
+
+VERIFICATION_LINK_PATTERN = re.compile("http://example.com(/token/[A-Za-z0-9-]+)")
+
+
+def test_registration_working(testapp, dbsession, route_path, monkeypatch):
+ """Ensures that a user can register, including using the verification link."""
+ mails = []
+ def send_message(server_url, username, password, message):
+ mails.append(message)
+
+ monkeypatch.setattr(fietsboek.email, "send_message", send_message)
+
+ registration = testapp.get(route_path('create-account'))
+ form = registration.form
+ form['email'] = 'foo-new@bar.com'
+ form['name'] = 'The new Foo'
+ form['password'] = 'foobarpassword'
+ form['repeat-password'] = 'foobarpassword'
+ response = form.submit().maybe_follow()
+
+ assert b'A confirmation link has been sent' in response.body
+ assert len(mails) == 1
+
+ user = dbsession.execute(models.User.query_by_email('foo-new@bar.com')).scalar_one()
+ assert not user.is_verified
+
+ body = mails[0].get_body().get_content()
+ token_path = VERIFICATION_LINK_PATTERN.search(body).group(1)
+ testapp.get(token_path)
+
+ assert user.is_verified
+
+
+def test_registration_short_password(testapp, route_path):
+ """Ensures that passwords that are too short are rejected."""
+ registration = testapp.get(route_path('create-account'))
+ form = registration.form
+ form['email'] = 'foo-new@bar.com'
+ form['name'] = 'The new Foo'
+ form['password'] = 'foo'
+ form['repeat-password'] = 'foo'
+ response = form.submit().maybe_follow()
+
+ assert re.search(
+ b'<div class="alert alert-primary" role="alert">\\s*Password not long enough\\s*</div>',
+ response.body,
+ )
+
+
+def test_registration_password_mismatch(testapp, route_path):
+ """Ensures that passwords that do not match are rejected."""
+ registration = testapp.get(route_path('create-account'))
+ form = registration.form
+ form['email'] = 'foo-new@bar.com'
+ form['name'] = 'The new Foo'
+ form['password'] = 'foobarfoobar'
+ form['repeat-password'] = 'foobarfoo'
+ response = form.submit().maybe_follow()
+
+ assert re.search(
+ b'<div class="alert alert-primary" role="alert">\\s*'
+ b'Passwords don&#39;t match\\s*</div>',
+ response.body,
+ )
diff --git a/tox.ini b/tox.ini
index 66e9870..4080616 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,46 +1,64 @@
[flake8]
max-line-length = 100
exclude = fietsboek/alembic
+extend-ignore = E203
per-file-ignores =
fietsboek/models/__init__.py:F401
[tox]
-envlist = python,pylint,pylint-tests,flake8
+envlist = python,pylint,pylint-tests,flake,mypy,black
+# This can be removed in tox 4 as it will be the default:
isolated_build = true
[testenv]
-deps =
- pytest
- lxml: lxml
-extras = testing
+deps = poetry
+skip_install = true
passenv =
TERM
+commands_pre =
+ poetry install -v --with testing
commands =
pytest {posargs}
[testenv:pylint]
-deps = pylint
-usedevelop = true
+commands_pre =
+ poetry install -v --with linters
commands =
pylint --rcfile=pylint.toml fietsboek
[testenv:pylint-tests]
-deps = pylint
-usedevelop = true
allowlist_externals = bash
+commands_pre =
+ poetry install -v --with linters,testing
commands =
bash -c "pylint --rcfile=pylint.tests.toml tests/**/*.py"
-[testenv:flake8]
-deps = flake8
-usedevelop = true
+[testenv:flake]
+# This is a bit of a hack: flake8 specifies Python 3.8 as its minimum required
+# version, while fietsboek only specifies 3.7. Thus, we manually install
+# flake8 via pip and bypass poetry's locking.
+commands_pre =
+ poetry install -v
+ pip install flake8
commands =
flake8 fietsboek
[testenv:sphinx]
-deps = sphinx
-usedevelop = true
allowlist_externals = make
changedir={toxinidir}{/}doc
+commands_pre =
+ poetry install -v --with docs
commands =
make html
+
+[testenv:mypy]
+commands_pre =
+ poetry install --with types
+commands =
+ mypy fietsboek
+
+[testenv:black]
+deps = black
+commands_pre =
+commands =
+ black --diff --check fietsboek