sam 2024-04-10 20:59:57 +02:00
parent 8c1db3fadb
commit 72aea7e07c
49 changed files with 5559 additions and 5497 deletions

.gitignore

@@ -1,9 +1,9 @@
__pycache__/
.pytest_cache/
.env
node_modules
build
.svelte-kit
package
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

Dockerfile

@@ -1,30 +1,30 @@
FROM docker.io/python:3.11-alpine as python-base

ENV POETRY_VERSION=1.3.2
ENV POETRY_HOME=/opt/poetry
ENV POETRY_VENV=/opt/poetry-venv
ENV POETRY_CACHE_DIR=/opt/.cache

RUN apk add --no-cache tini libmagic libpq vips vips-dev

FROM python-base as poetry-base

RUN python3 -m venv $POETRY_VENV \
    && $POETRY_VENV/bin/pip install -U pip setuptools \
    && $POETRY_VENV/bin/pip install poetry==${POETRY_VERSION} \
    && $POETRY_VENV/bin/pip install poethepoet

FROM python-base as app

COPY --from=poetry-base ${POETRY_VENV} ${POETRY_VENV}
ENV PATH="${PATH}:${POETRY_VENV}/bin"

WORKDIR /app
COPY poetry.lock pyproject.toml ./
RUN poetry install --no-interaction --no-cache --without dev
COPY . /app

ENTRYPOINT ["/sbin/tini", "--"]
CMD ["sh", "./entry.sh"]

LICENSE

@@ -1,201 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

README.md

@@ -1,12 +1,12 @@
# pronouns.cc

pronouns.cc rewrite in Python, using Quart for routing, SQLAlchemy for the database, and Celery for background tasks.

## Running

This isn't anywhere *near* complete yet. For now, you can install [`poe`](https://github.com/nat-n/poethepoet),
run `poe migrate`, and then `poe server`.

For configuration, a `.env` file is used. See `foxnouns/settings.py`; all keys are required unless specified otherwise.

(Note that the docker-compose file doesn't work yet)
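
A hypothetical minimal `.env` for local development, purely illustrative: the variable names below are assumptions pieced together from elsewhere in this commit (`BASE_DOMAIN` and `SECRET_KEY` are imported from `foxnouns/settings.py`, the `DATABASE_*` keys appear in the docker-compose file); `foxnouns/settings.py` remains the authoritative list of required keys.

BASE_DOMAIN=localhost:8000
SECRET_KEY=change-me
DATABASE_USER=postgres
DATABASE_PASSWORD=postgres
DATABASE_HOST=localhost
DATABASE_NAME=postgres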

@@ -1,12 +1,12 @@
- `user`
  - `user.read_hidden`: read the current user's hidden data.
    This includes data such as timezone and whether the user's member list is hidden.
  - `user.read_privileged`: read privileged user data, such as authentication methods.
  - `user.update`: update the current user. This scope cannot update privileged data. This scope implies `user.read_hidden`.
- `member`
  - `member.read`: read the member list, even if it's hidden, including hidden members.
  - `member.update`: update and delete existing members.
    While `update` and `delete` could be separate, that might lull users into a false sense of security,
    as it would still be possible to clear members and scramble their names,
    which would be equivalent to `delete` anyway.
  - `member.create`: create new members.
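
A minimal sketch of how these scopes could be enforced on an endpoint. This assumes the `g.token` object set by the app's `before_request` hook (see `foxnouns/app.py` below) exposes the `scopes` array from the `tokens` table; `require_scope` itself is a hypothetical helper, not part of this commit.

from functools import wraps

from quart import g


def require_scope(scope: str):
    """Reject the request unless the authenticated token carries `scope`."""

    def decorator(fn):
        @wraps(fn)
        async def wrapper(*args, **kwargs):
            token = getattr(g, "token", None)
            if token is None or scope not in token.scopes:
                # Mirror the {"code": ..., "message": ...} error shape the
                # app's error handlers use, with a 403 for a missing scope.
                return {"code": 403, "message": "Missing required scope"}, 403
            return await fn(*args, **kwargs)

        return wrapper

    return decorator


# Hypothetical usage:
# @bp.patch("/api/v2/users/@me/members/<member_ref>", host=BASE_DOMAIN)
# @require_scope("member.update")
# async def patch_member(member_ref: str): ...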

alembic.ini

@@ -1,115 +1,115 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
file_template = %%(epoch)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/env.py

@@ -1,80 +1,80 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context

from foxnouns.db import Base
from foxnouns.db.sync import SYNC_DATABASE_URL

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = SYNC_DATABASE_URL
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engine_config = config.get_section(config.config_ini_section, {})
    engine_config["sqlalchemy.url"] = SYNC_DATABASE_URL

    connectable = engine_from_config(
        engine_config,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

alembic/script.py.mako

@@ -1,26 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

@@ -1,65 +1,65 @@
"""Init
Revision ID: b39613fd7327
Revises:
Create Date: 2024-03-09 16:32:28.590145
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b39613fd7327"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
"fediverse_apps",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("instance", sa.Text(), nullable=False),
sa.Column("client_id", sa.Text(), nullable=False),
sa.Column("client_secret", sa.Text(), nullable=False),
sa.Column("instance_type", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("instance"),
)
op.create_table(
"users",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("username", sa.Text(), nullable=False),
sa.Column("display_name", sa.Text(), nullable=True),
sa.Column("bio", sa.Text(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("username"),
)
op.create_table(
"auth_methods",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("auth_type", sa.Integer(), nullable=False),
sa.Column("remote_id", sa.Text(), nullable=False),
sa.Column("remote_username", sa.Text(), nullable=True),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("fediverse_app_id", sa.BigInteger(), nullable=True),
sa.ForeignKeyConstraint(
["fediverse_app_id"],
["fediverse_apps.id"],
),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
def downgrade() -> None:
op.drop_table("auth_methods")
op.drop_table("users")
op.drop_table("fediverse_apps")
"""Init
Revision ID: b39613fd7327
Revises:
Create Date: 2024-03-09 16:32:28.590145
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b39613fd7327"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
"fediverse_apps",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("instance", sa.Text(), nullable=False),
sa.Column("client_id", sa.Text(), nullable=False),
sa.Column("client_secret", sa.Text(), nullable=False),
sa.Column("instance_type", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("instance"),
)
op.create_table(
"users",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("username", sa.Text(), nullable=False),
sa.Column("display_name", sa.Text(), nullable=True),
sa.Column("bio", sa.Text(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("username"),
)
op.create_table(
"auth_methods",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("auth_type", sa.Integer(), nullable=False),
sa.Column("remote_id", sa.Text(), nullable=False),
sa.Column("remote_username", sa.Text(), nullable=True),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("fediverse_app_id", sa.BigInteger(), nullable=True),
sa.ForeignKeyConstraint(
["fediverse_app_id"],
["fediverse_apps.id"],
),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
def downgrade() -> None:
op.drop_table("auth_methods")
op.drop_table("users")
op.drop_table("fediverse_apps")

@@ -1,39 +1,39 @@
"""Add tokens
Revision ID: 0b63f7c8ab96
Revises: b39613fd7327
Create Date: 2024-03-13 17:01:50.434602
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "0b63f7c8ab96"
down_revision: Union[str, None] = "b39613fd7327"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
"tokens",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("expires_at", sa.DateTime(), nullable=False),
sa.Column("scopes", postgresql.ARRAY(sa.Text()), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
def downgrade() -> None:
op.drop_table("tokens")
"""Add tokens
Revision ID: 0b63f7c8ab96
Revises: b39613fd7327
Create Date: 2024-03-13 17:01:50.434602
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "0b63f7c8ab96"
down_revision: Union[str, None] = "b39613fd7327"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
"tokens",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("expires_at", sa.DateTime(), nullable=False),
sa.Column("scopes", postgresql.ARRAY(sa.Text()), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
def downgrade() -> None:
op.drop_table("tokens")

@@ -1,56 +1,56 @@
"""Add names/pronouns/fields
Revision ID: 1d8f8443a7f5
Revises: 0b63f7c8ab96
Create Date: 2024-03-20 15:36:08.756635
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "1d8f8443a7f5"
down_revision: Union[str, None] = "0b63f7c8ab96"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column(
"users",
sa.Column(
"names",
postgresql.JSONB(astext_type=sa.Text()),
nullable=False,
server_default="[]",
),
)
op.add_column(
"users",
sa.Column(
"pronouns",
postgresql.JSONB(astext_type=sa.Text()),
nullable=False,
server_default="[]",
),
)
op.add_column(
"users",
sa.Column(
"fields",
postgresql.JSONB(astext_type=sa.Text()),
nullable=False,
server_default="[]",
),
)
def downgrade() -> None:
op.drop_column("users", "fields")
op.drop_column("users", "pronouns")
op.drop_column("users", "names")
"""Add names/pronouns/fields
Revision ID: 1d8f8443a7f5
Revises: 0b63f7c8ab96
Create Date: 2024-03-20 15:36:08.756635
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "1d8f8443a7f5"
down_revision: Union[str, None] = "0b63f7c8ab96"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column(
"users",
sa.Column(
"names",
postgresql.JSONB(astext_type=sa.Text()),
nullable=False,
server_default="[]",
),
)
op.add_column(
"users",
sa.Column(
"pronouns",
postgresql.JSONB(astext_type=sa.Text()),
nullable=False,
server_default="[]",
),
)
op.add_column(
"users",
sa.Column(
"fields",
postgresql.JSONB(astext_type=sa.Text()),
nullable=False,
server_default="[]",
),
)
def downgrade() -> None:
op.drop_column("users", "fields")
op.drop_column("users", "pronouns")
op.drop_column("users", "names")

@@ -1,47 +1,47 @@
"""Add members
Revision ID: 17cc8cb77be5
Revises: 1d8f8443a7f5
Create Date: 2024-03-20 16:00:59.251354
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "17cc8cb77be5"
down_revision: Union[str, None] = "1d8f8443a7f5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"members",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("display_name", sa.Text(), nullable=True),
sa.Column("bio", sa.Text(), nullable=True),
sa.Column("names", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column("pronouns", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column("fields", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("members")
# ### end Alembic commands ###
"""Add members
Revision ID: 17cc8cb77be5
Revises: 1d8f8443a7f5
Create Date: 2024-03-20 16:00:59.251354
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "17cc8cb77be5"
down_revision: Union[str, None] = "1d8f8443a7f5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"members",
sa.Column("id", sa.BigInteger(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("display_name", sa.Text(), nullable=True),
sa.Column("bio", sa.Text(), nullable=True),
sa.Column("names", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column("pronouns", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column("fields", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("members")
# ### end Alembic commands ###

@@ -1,32 +1,32 @@
"""Add unique index to members
Revision ID: a000d800f45f
Revises: 17cc8cb77be5
Create Date: 2024-03-21 15:52:09.403257
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "a000d800f45f"
down_revision: Union[str, None] = "17cc8cb77be5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_index(
"members_user_name_idx",
"members",
["user_id", sa.text("lower(name)")],
unique=True,
)
def downgrade() -> None:
op.drop_index("members_user_name_idx", table_name="members")
"""Add unique index to members
Revision ID: a000d800f45f
Revises: 17cc8cb77be5
Create Date: 2024-03-21 15:52:09.403257
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "a000d800f45f"
down_revision: Union[str, None] = "17cc8cb77be5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_index(
"members_user_name_idx",
"members",
["user_id", sa.text("lower(name)")],
unique=True,
)
def downgrade() -> None:
op.drop_index("members_user_name_idx", table_name="members")

@@ -1,29 +1,29 @@
"""Add avatars
Revision ID: 7503d2a6094c
Revises: a000d800f45f
Create Date: 2024-03-27 15:36:49.749722
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "7503d2a6094c"
down_revision: Union[str, None] = "a000d800f45f"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column("members", sa.Column("avatar", sa.Text(), nullable=True))
op.add_column("users", sa.Column("avatar", sa.Text(), nullable=True))
def downgrade() -> None:
op.drop_column("users", "avatar")
op.drop_column("members", "avatar")
"""Add avatars
Revision ID: 7503d2a6094c
Revises: a000d800f45f
Create Date: 2024-03-27 15:36:49.749722
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "7503d2a6094c"
down_revision: Union[str, None] = "a000d800f45f"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column("members", sa.Column("avatar", sa.Text(), nullable=True))
op.add_column("users", sa.Column("avatar", sa.Text(), nullable=True))
def downgrade() -> None:
op.drop_column("users", "avatar")
op.drop_column("members", "avatar")

docker-compose.yml

@@ -1,36 +1,36 @@
version: "3"

services:
  app:
    image: foxnouns
    build: .
    environment:
      - DATABASE_USER=postgres
      - DATABASE_PASSWORD=postgres
      - DATABASE_HOST=postgres
      - DATABASE_NAME=postgres
    volumes:
      - "./.env:/app/.env"
    ports:
      - "8000:8000"
    networks:
      - default

  postgres:
    image: docker.io/postgres:15-alpine
    volumes:
      - "postgres_data:/var/lib/postgresql/data"
    command: ["postgres",
      "-c", "max_connections=1000",
      "-c", "timezone=Etc/UTC",
      "-c", "max_wal_size=1GB",
      "-c", "min_wal_size=80MB",
      "-c", "shared_buffers=128MB"]
    environment:
      - "POSTGRES_PASSWORD=postgres"
    restart: unless-stopped
    networks:
      - default

volumes:
  postgres_data:

entry.sh

@@ -1,5 +1,5 @@
#!/bin/sh
source .env
# poetry run alembic upgrade head
# poetry run uvicorn --workers=${WORKERS:-2} --host=0.0.0.0 --port=8000 'foxnouns.app:app'
poe migrate && poe server

foxnouns/app.py

@@ -1,62 +1,62 @@
from quart import Quart, g, request
from quart_cors import cors
from quart_schema import QuartSchema, RequestSchemaValidationError

from . import blueprints
from .db.aio import async_session
from .db.util import validate_token
from .exceptions import ErrorCode, ExpectedError
from .settings import BASE_DOMAIN, SECRET_KEY

app = Quart(__name__, host_matching=True, static_host=BASE_DOMAIN)
app.secret_key = SECRET_KEY
app = cors(
    app,
    allow_origin="*",
    allow_methods="*",
    allow_headers=["Content-Type", "Authorization", "User-Agent"],
    max_age=86400,
)
QuartSchema(app)

for bp in blueprints.__all__:
    app.register_blueprint(bp)


@app.errorhandler(RequestSchemaValidationError)
async def handle_request_validation_error(error: RequestSchemaValidationError):
    # TODO: parse the error and return a format closer to the draft APIv2
    return {"code": ErrorCode.BadRequest, "message": "Bad request"}, 400


@app.errorhandler(ExpectedError)
async def handle_expected_error(error: ExpectedError):
    return {"code": error.type, "message": error.msg}, error.status_code


@app.errorhandler(404)
async def handle_404(_):
    return {"code": 404, "message": "Not found"}, 404


@app.errorhandler(500)
async def handle_500(_):
    return {"code": 500, "message": "Internal server error"}, 500


@app.before_request
async def get_user_from_token():
    """Get the current user from a token given in the `Authorization` header.
    If no token is set, does nothing; if an invalid token is set, raises an error."""
    token = request.headers.get("Authorization", None)
    if not token:
        return

    async with async_session() as session:
        try:
            token, user = await validate_token(session, token)
            g.token = token
            g.user = user
        except:
            raise

foxnouns/blueprints/__init__.py

@@ -1,6 +1,6 @@
from .v2.auth.discord import bp as discord_auth_blueprint
from .v2.members import bp as members_blueprint
from .v2.meta import bp as meta_blueprint
from .v2.users import bp as users_blueprint

__all__ = [users_blueprint, members_blueprint, meta_blueprint, discord_auth_blueprint]

foxnouns/blueprints/v2/auth/__init__.py

@@ -1,61 +1,61 @@
from datetime import datetime
from pydantic import BaseModel, Field
from quart import Blueprint
from quart_schema import validate_response
from foxnouns.models.user import SelfUserModel
from foxnouns.settings import BASE_DOMAIN
bp = Blueprint("auth_v2", __name__)
class URLsResponse(BaseModel):
discord: str | None = Field(default=None)
google: str | None = Field(default=None)
tumblr: str | None = Field(default=None)
@bp.post("/api/v2/auth/urls", host=BASE_DOMAIN)
@validate_response(URLsResponse, 200)
async def urls():
# TODO: build authorization URLs + callback URLs, store state in Redis
raise NotImplementedError()
class OAuthCallbackRequest(BaseModel):
callback_domain: str
code: str
state: str
class BaseCallbackResponse(BaseModel):
"""The base class for callback responses."""
has_account: bool
class ExistingUserCallbackResponse(BaseCallbackResponse):
"""The class returned when a user already exists."""
token: str
user: SelfUserModel
class NewUserCallbackResponse(BaseCallbackResponse):
"""The class returned when the user is new and has to create an account."""
remote_username: str
ticket: str
require_captcha: bool
class DeletedUserCallbackResponse(BaseCallbackResponse):
"""The class returned when the user has been deleted."""
token: str
user: SelfUserModel
deleted_at: datetime
self_delete: bool
delete_reason: str | None = Field(default=None)
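As an illustration of how these response models serialize (all field values hypothetical):

resp = NewUserCallbackResponse(
    has_account=False,
    remote_username="sam#1234",
    ticket="opaque-signup-ticket",
    require_captcha=True,
)
print(resp.model_dump_json())
# -> {"has_account": false, "remote_username": "sam#1234", "ticket": "...", "require_captcha": true}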
@@ -1,15 +1,17 @@
from quart import Blueprint
from quart_schema import validate_request, validate_response
from foxnouns.settings import BASE_DOMAIN
from . import BaseCallbackResponse, OAuthCallbackRequest
bp = Blueprint("discord_v2", __name__)
@bp.post("/api/v2/auth/discord/callback", host=BASE_DOMAIN)
@validate_request(OAuthCallbackRequest)
@validate_response(BaseCallbackResponse)
async def discord_callback(data: OAuthCallbackRequest):
raise NotImplementedError()
from quart import Blueprint
from quart_schema import validate_request, validate_response
from foxnouns import settings
from foxnouns.decorators import require_config_key
from . import BaseCallbackResponse, OAuthCallbackRequest
bp = Blueprint("discord_v2", __name__)
@bp.post("/api/v2/auth/discord/callback", host=settings.BASE_DOMAIN)
@require_config_key(keys=[settings.DISCORD_CLIENT_ID, settings.DISCORD_CLIENT_SECRET])
@validate_request(OAuthCallbackRequest)
@validate_response(BaseCallbackResponse)
async def discord_callback(data: OAuthCallbackRequest):
raise NotImplementedError()

@@ -1,61 +1,61 @@
from pydantic import Field
from quart import Blueprint, g
from quart_schema import validate_request, validate_response
from foxnouns import tasks
from foxnouns.auth import require_auth
from foxnouns.db import Member
from foxnouns.db.aio import async_session
from foxnouns.db.util import user_from_ref
from foxnouns.exceptions import ErrorCode, NotFoundError
from foxnouns.models.member import FullMemberModel, MemberPatchModel
from foxnouns.settings import BASE_DOMAIN
bp = Blueprint("members_v2", __name__)
@bp.get("/api/v2/users/<user_ref>/members", host=BASE_DOMAIN)
@validate_response(list[FullMemberModel], 200)
async def get_members(user_ref: str):
async with async_session() as session:
user = await user_from_ref(session, user_ref)
if not user:
raise NotFoundError("User not found", type=ErrorCode.UserNotFound)
return [FullMemberModel.model_validate(m) for m in user.members]
class MemberCreateModel(MemberPatchModel):
name: str = Field(
min_length=1,
max_length=100,
pattern=r"^[^@\?!#\/\\\[\]\"\{\}'$%&()+<=>^|~`,\*]{1,100}$",
)
@bp.post("/api/v2/members", host=BASE_DOMAIN)
@require_auth(scope="member.create")
@validate_request(MemberCreateModel)
@validate_response(FullMemberModel, 200)
async def create_member(data: MemberCreateModel):
async with async_session() as session:
member = Member(
user_id=g.user.id,
name=data.name,
bio=data.bio,
names=[e.model_dump() for e in data.names],
pronouns=[e.model_dump() for e in data.pronouns],
fields=[e.model_dump() for e in data.fields],
)
session.add(member)
await session.commit()
# This has to be fetched before we can pass the model to Pydantic.
# In a normal SELECT this is automatically fetched, but because we just created the object,
# we have to do it manually.
await member.awaitable_attrs.user
if data.avatar:
tasks.process_member_avatar.delay(member.id, data.avatar)
return FullMemberModel.model_validate(member)
from pydantic import Field
from quart import Blueprint, g
from quart_schema import validate_request, validate_response
from foxnouns import tasks
from foxnouns.decorators import require_auth
from foxnouns.db import Member
from foxnouns.db.aio import async_session
from foxnouns.db.util import user_from_ref
from foxnouns.exceptions import ErrorCode, NotFoundError
from foxnouns.models.member import FullMemberModel, MemberPatchModel
from foxnouns.settings import BASE_DOMAIN
bp = Blueprint("members_v2", __name__)
@bp.get("/api/v2/users/<user_ref>/members", host=BASE_DOMAIN)
@validate_response(list[FullMemberModel], 200)
async def get_members(user_ref: str):
async with async_session() as session:
user = await user_from_ref(session, user_ref)
if not user:
raise NotFoundError("User not found", type=ErrorCode.UserNotFound)
return [FullMemberModel.model_validate(m) for m in user.members]
class MemberCreateModel(MemberPatchModel):
name: str = Field(
min_length=1,
max_length=100,
pattern=r"^[^@\?!#\/\\\[\]\"\{\}'$%&()+<=>^|~`,\*]{1,100}$",
)
@bp.post("/api/v2/members", host=BASE_DOMAIN)
@require_auth(scope="member.create")
@validate_request(MemberCreateModel)
@validate_response(FullMemberModel, 200)
async def create_member(data: MemberCreateModel):
async with async_session() as session:
member = Member(
user_id=g.user.id,
name=data.name,
bio=data.bio,
names=[e.model_dump() for e in data.names],
pronouns=[e.model_dump() for e in data.pronouns],
fields=[e.model_dump() for e in data.fields],
)
session.add(member)
await session.commit()
# This has to be fetched before we can pass the model to Pydantic.
# In a normal SELECT this is automatically fetched, but because we just created the object,
# we have to do it manually.
await member.awaitable_attrs.user
if data.avatar:
tasks.process_member_avatar.delay(member.id, data.avatar)
return FullMemberModel.model_validate(member)
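For context, a request body accepted by the create endpoint above might look like this (a sketch; all values, including the status strings, are hypothetical):

payload = {
    "name": "Example",
    "bio": "Hello!",
    "names": [{"value": "Example", "status": "favourite"}],
    "pronouns": [{"value": "they/them", "status": "okay", "display": "they"}],
    "fields": [{"name": "Likes", "entries": [{"value": "foxes", "status": "okay"}]}],
}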

@@ -1,26 +1,26 @@
from pydantic import BaseModel
from quart import Blueprint
from quart_schema import validate_response
from sqlalchemy import select
from sqlalchemy.sql import func
from foxnouns.db import Member, User
from foxnouns.db.aio import async_session
from foxnouns.settings import BASE_DOMAIN
bp = Blueprint("meta_v2", __name__)
class MetaResponse(BaseModel):
users: int
members: int
@bp.get("/api/v2/meta", host=BASE_DOMAIN)
@validate_response(MetaResponse)
async def meta():
async with async_session() as session:
user_count = await session.scalar(select(func.count()).select_from(User))
member_count = await session.scalar(select(func.count()).select_from(Member))
return MetaResponse(users=user_count, members=member_count)
@@ -1,105 +1,105 @@
from pydantic import Field, field_validator
from quart import Blueprint, g
from quart_schema import validate_request, validate_response
from sqlalchemy import select
from foxnouns import tasks
from foxnouns.auth import require_auth
from foxnouns.db import User
from foxnouns.db.aio import async_session
from foxnouns.db.snowflake import Snowflake
from foxnouns.db.util import create_token, generate_token, is_self, user_from_ref
from foxnouns.exceptions import ErrorCode, NotFoundError
from foxnouns.models import BasePatchModel
from foxnouns.models.user import SelfUserModel, UserModel, check_username
from foxnouns.settings import BASE_DOMAIN
bp = Blueprint("users_v2", __name__)
@bp.get("/api/v2/users/<user_ref>", host=BASE_DOMAIN)
@validate_response(UserModel, 200)
async def get_user(user_ref: str):
async with async_session() as session:
user = await user_from_ref(session, user_ref)
if not user:
raise NotFoundError("User not found", type=ErrorCode.UserNotFound)
return (
SelfUserModel.model_validate(user)
if is_self(user)
else UserModel.model_validate(user)
)
class EditUserRequest(BasePatchModel):
username: str | None = Field(
min_length=2, max_length=40, pattern=r"^[\w\-\.]{2,40}$", default=None
)
display_name: str | None = Field(max_length=100, default=None)
bio: str | None = Field(max_length=1024, default=None)
avatar: str | None = Field(max_length=1_000_000, default=None)
@field_validator("username")
@classmethod
def check_username(cls, value):
return check_username(value)
@bp.patch("/api/v2/users/@me", host=BASE_DOMAIN)
@require_auth(scope="user.update")
@validate_request(EditUserRequest)
@validate_response(SelfUserModel, 200)
async def edit_user(data: EditUserRequest):
"""Updates the current user."""
async with async_session() as session:
user = await session.scalar(select(User).where(User.id == g.user.id))
await user.awaitable_attrs.members
if data.username:
user.username = data.username
if data.is_set("display_name"):
user.display_name = data.display_name
if data.is_set("bio"):
user.bio = data.bio
await session.commit()
if data.is_set("avatar"):
if data.avatar:
tasks.process_user_avatar.delay(user.id, data.avatar)
else:
tasks.delete_user_avatar.delay(user.id)
return SelfUserModel.model_validate(user)
class DebugUserData(BasePatchModel):
username: str
class DebugUserResponse(SelfUserModel):
token: str
@bp.post("/api/v2/users/debug", host=BASE_DOMAIN)
@validate_request(DebugUserData)
@validate_response(DebugUserResponse, 200)
async def debug_create_user(data: DebugUserData):
"""Creates a user from just a username, and returns it along with a token.
FIXME: this must be removed **BEFORE** deploying to production (or even public testing)
"""
async with async_session() as session:
user = User(id=Snowflake.generate_int(), username=data.username)
        session.add(user)
        await session.commit()
token = await create_token(session, user, ["*"])
await session.commit()
await user.awaitable_attrs.members
user.token = generate_token(token)
return DebugUserResponse.model_validate(user)
from pydantic import Field, field_validator
from quart import Blueprint, g
from quart_schema import validate_request, validate_response
from sqlalchemy import select
from foxnouns import tasks
from foxnouns.decorators import require_auth
from foxnouns.db import User
from foxnouns.db.aio import async_session
from foxnouns.db.snowflake import Snowflake
from foxnouns.db.util import create_token, generate_token, is_self, user_from_ref
from foxnouns.exceptions import ErrorCode, NotFoundError
from foxnouns.models import BasePatchModel
from foxnouns.models.user import SelfUserModel, UserModel, check_username
from foxnouns.settings import BASE_DOMAIN
bp = Blueprint("users_v2", __name__)
@bp.get("/api/v2/users/<user_ref>", host=BASE_DOMAIN)
@validate_response(UserModel, 200)
async def get_user(user_ref: str):
async with async_session() as session:
user = await user_from_ref(session, user_ref)
if not user:
raise NotFoundError("User not found", type=ErrorCode.UserNotFound)
return (
SelfUserModel.model_validate(user)
if is_self(user)
else UserModel.model_validate(user)
)
class EditUserRequest(BasePatchModel):
username: str | None = Field(
min_length=2, max_length=40, pattern=r"^[\w\-\.]{2,40}$", default=None
)
display_name: str | None = Field(max_length=100, default=None)
bio: str | None = Field(max_length=1024, default=None)
avatar: str | None = Field(max_length=1_000_000, default=None)
@field_validator("username")
@classmethod
def check_username(cls, value):
return check_username(value)
@bp.patch("/api/v2/users/@me", host=BASE_DOMAIN)
@require_auth(scope="user.update")
@validate_request(EditUserRequest)
@validate_response(SelfUserModel, 200)
async def edit_user(data: EditUserRequest):
"""Updates the current user."""
async with async_session() as session:
user = await session.scalar(select(User).where(User.id == g.user.id))
await user.awaitable_attrs.members
if data.username:
user.username = data.username
if data.is_set("display_name"):
user.display_name = data.display_name
if data.is_set("bio"):
user.bio = data.bio
await session.commit()
if data.is_set("avatar"):
if data.avatar:
tasks.process_user_avatar.delay(user.id, data.avatar)
else:
tasks.delete_user_avatar.delay(user.id)
return SelfUserModel.model_validate(user)
class DebugUserData(BasePatchModel):
username: str
class DebugUserResponse(SelfUserModel):
token: str
@bp.post("/api/v2/users/debug", host=BASE_DOMAIN)
@validate_request(DebugUserData)
@validate_response(DebugUserResponse, 200)
async def debug_create_user(data: DebugUserData):
"""Creates a user from just a username, and returns it along with a token.
FIXME: this must be removed **BEFORE** deploying to production (or even public testing)
"""
async with async_session() as session:
user = User(id=Snowflake.generate_int(), username=data.username)
        session.add(user)
        await session.commit()
token = await create_token(session, user, ["*"])
await session.commit()
await user.awaitable_attrs.members
user.token = generate_token(token)
return DebugUserResponse.model_validate(user)
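A small sketch of the PATCH semantics used above: `is_set` distinguishes a field that was omitted from one explicitly set to null, which is what makes avatar deletion work.

req = EditUserRequest.model_validate({"bio": None})
assert req.is_set("bio")         # present but null -> clear the bio
assert not req.is_set("avatar")  # omitted entirely -> leave the avatar alone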

@@ -1,5 +1,5 @@
from .base import Base
from .member import Member
from .user import AuthMethod, FediverseApp, Token, User
__all__ = ["Base", "User", "Token", "AuthMethod", "FediverseApp", "Member"]
@@ -1,17 +1,17 @@
from sqlalchemy import URL
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from foxnouns.settings import DATABASE, ECHO_SQL
ASYNC_DATABASE_URL = URL.create(
"postgresql+asyncpg",
username=DATABASE["USER"],
password=DATABASE["PASSWORD"],
host=DATABASE["HOST"],
database=DATABASE["NAME"],
)
engine = create_async_engine(ASYNC_DATABASE_URL, echo=ECHO_SQL)
async_session = async_sessionmaker(engine, expire_on_commit=False)
__all__ = ["engine", "async_session"]
@@ -1,6 +1,6 @@
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase
class Base(AsyncAttrs, DeclarativeBase):
pass
@@ -1,34 +1,34 @@
from typing import Any
from sqlalchemy import BigInteger, ForeignKey, Index, Text, func, text
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship
from .base import Base
from .snowflake import Snowflake
from .user import User
class Member(Base):
__tablename__ = "members"
id: Mapped[int] = mapped_column(
BigInteger(), primary_key=True, default=Snowflake.generate_int
)
name: Mapped[str] = mapped_column(Text(), nullable=False)
display_name: Mapped[str | None] = mapped_column(Text(), nullable=True)
bio: Mapped[str | None] = mapped_column(Text(), nullable=True)
avatar: Mapped[str | None] = mapped_column(Text(), nullable=True)
names: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
pronouns: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
fields: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
user: Mapped[User] = relationship(back_populates="members", lazy="immediate")
__table_args__ = (
Index(
"members_user_name_idx", "user_id", func.lower(text("name")), unique=True
),
)
@@ -1,5 +1,5 @@
from redis import asyncio as aioredis
from foxnouns.settings import REDIS_URL
redis = aioredis.from_url(REDIS_URL)
@@ -1,110 +1,110 @@
import os
import threading
from datetime import datetime, timezone
from random import randrange
_local = threading.local()
def _get_increment() -> int:
if not hasattr(_local, "increment"):
_local.increment = randrange(0, 4095)
increment = _local.increment
_local.increment += 1
return increment
class Snowflake:
"""A Snowflake ID (https://en.wikipedia.org/wiki/Snowflake_ID).
This class wraps an integer and adds convenience functions."""
EPOCH = 1_640_995_200_000 # 2022-01-01 at 00:00:00 UTC
_raw: int
def __init__(self, src: int):
self._raw = src
def __str__(self) -> str:
return str(self.id)
def __repr__(self) -> str:
return f"Snowflake<{self.id}, {self.process}, {self.thread}, {self.increment}, {self.timestamp}>"
def __int__(self) -> int:
return self._raw
def __float__(self) -> float:
return float(self._raw)
def __lt__(self, y: "Snowflake"):
return self.id < y.id
def __le__(self, y: "Snowflake"):
return self.id <= y.id
def __eq__(self, y: "Snowflake"):
return self.id == y.id
def __ne__(self, y: "Snowflake"):
return self.id != y.id
def __gt__(self, y: "Snowflake"):
return self.id > y.id
def __ge__(self, y: "Snowflake"):
return self.id >= y.id
@property
def id(self) -> int:
"""The raw integer value of the snowflake."""
return self._raw
@property
def time(self) -> datetime:
"""The time embedded into the snowflake."""
return datetime.fromtimestamp(self.timestamp, tz=timezone.utc)
@property
def timestamp(self) -> float:
"""The unix timestamp embedded into the snowflake."""
return ((self._raw >> 22) + self.EPOCH) / 1000
@property
def process(self) -> int:
"""The process ID embedded into the snowflake."""
return (self._raw & 0x3E0000) >> 17
@property
def thread(self) -> int:
"""The thread ID embedded into the snowflake."""
return (self._raw & 0x1F000) >> 12
@property
def increment(self) -> int:
"""The increment embedded into the snowflake."""
return self._raw & 0xFFF
@classmethod
def generate(cls, time: datetime | None = None):
"""Generates a new snowflake.
If `time` is set, use that time for the snowflake, otherwise, use the current time.
"""
process_id = os.getpid()
thread_id = threading.get_native_id()
increment = _get_increment()
now = time if time else datetime.now(tz=timezone.utc)
timestamp = round(now.timestamp() * 1000) - cls.EPOCH
return cls(
timestamp << 22
| (process_id % 32) << 17
| (thread_id % 32) << 12
| (increment % 4096)
)
@classmethod
def generate_int(cls, time: datetime | None = None):
return cls.generate(time).id
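A quick demonstration of the layout described above (the exact values vary by time, process, and thread):

sf = Snowflake.generate()
print(int(sf))     # the raw 64-bit integer
print(sf.time)     # creation time, recovered from the timestamp bits
print(sf.process, sf.thread, sf.increment)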
@@ -1,17 +1,17 @@
from sqlalchemy import URL, create_engine
from sqlalchemy.orm import sessionmaker
from foxnouns.settings import DATABASE, ECHO_SQL
SYNC_DATABASE_URL = URL.create(
"postgresql+psycopg",
username=DATABASE["USER"],
password=DATABASE["PASSWORD"],
host=DATABASE["HOST"],
database=DATABASE["NAME"],
)
engine = create_engine(SYNC_DATABASE_URL, echo=ECHO_SQL)
session = sessionmaker(engine)
__all__ = ["engine", "session"]
@@ -1,117 +1,117 @@
import enum
from datetime import datetime
from typing import TYPE_CHECKING, Any
from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, Text
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship
from .base import Base
from .snowflake import Snowflake
if TYPE_CHECKING:
from .member import Member
class User(Base):
__tablename__ = "users"
id: Mapped[int] = mapped_column(
BigInteger(), primary_key=True, default=Snowflake.generate_int
)
username: Mapped[str] = mapped_column(Text(), unique=True, nullable=False)
display_name: Mapped[str | None] = mapped_column(Text(), nullable=True)
bio: Mapped[str | None] = mapped_column(Text(), nullable=True)
avatar: Mapped[str | None] = mapped_column(Text(), nullable=True)
names: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
pronouns: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
fields: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
tokens: Mapped[list["Token"]] = relationship(
back_populates="user", cascade="all, delete-orphan"
)
auth_methods: Mapped[list["AuthMethod"]] = relationship(
back_populates="user", cascade="all, delete-orphan"
)
members: Mapped[list["Member"]] = relationship(
back_populates="user", cascade="all, delete-orphan"
)
def __repr__(self):
return f"User(id={self.id!r}, username={self.username!r})"
class Token(Base):
__tablename__ = "tokens"
id: Mapped[int] = mapped_column(
BigInteger(), primary_key=True, default=Snowflake.generate_int
)
expires_at: Mapped[datetime] = mapped_column(DateTime(), nullable=False)
scopes: Mapped[list[str]] = mapped_column(ARRAY(Text), nullable=False)
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
user: Mapped[User] = relationship(back_populates="tokens", lazy="immediate")
def __repr__(self):
return f"Token(id={self.id!r}, user={self.user_id!r})"
def has_scope(self, scope: str):
"""Returns whether this token can be used for the given scope."""
# `*` is a special scope for site tokens, which grants access to all endpoints.
if "*" in self.scopes:
return True
# Some scopes have sub-scopes, indicated by a `.` (i.e. `user.edit` is contained in `user`)
# Tokens can have these narrower scopes given to them, or the wider, more privileged scopes
# This way, both `user` and `user.edit` tokens will grant access to `user.edit` endpoints.
return scope in self.scopes or scope.split(".")[0] in self.scopes
class AuthType(enum.IntEnum):
DISCORD = 1
GOOGLE = 2
TUMBLR = 3
FEDIVERSE = 4
EMAIL = 5
class AuthMethod(Base):
__tablename__ = "auth_methods"
id: Mapped[int] = mapped_column(
BigInteger(), primary_key=True, default=Snowflake.generate_int
)
auth_type: Mapped[AuthType] = mapped_column(Integer(), nullable=False)
remote_id: Mapped[str] = mapped_column(Text(), nullable=False)
remote_username: Mapped[str | None] = mapped_column(Text(), nullable=True)
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
user: Mapped[User] = relationship(back_populates="auth_methods", lazy="immediate")
    fediverse_app_id: Mapped[int | None] = mapped_column(
        ForeignKey("fediverse_apps.id"), nullable=True
    )
fediverse_app: Mapped["FediverseApp"] = relationship(lazy="immediate")
class FediverseInstanceType(enum.IntEnum):
MASTODON_API = 1
MISSKEY_API = 2
class FediverseApp(Base):
__tablename__ = "fediverse_apps"
id: Mapped[int] = mapped_column(
BigInteger(), primary_key=True, default=Snowflake.generate_int
)
instance: Mapped[str] = mapped_column(Text(), unique=True, nullable=False)
client_id: Mapped[str] = mapped_column(Text(), nullable=False)
client_secret: Mapped[str] = mapped_column(Text(), nullable=False)
instance_type: Mapped[FediverseInstanceType] = mapped_column(
Integer(), nullable=False
)
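To illustrate the scope rules above (throwaway in-memory Token objects, never persisted):

t = Token(scopes=["user"])
assert t.has_scope("user.edit")          # the wider "user" scope covers "user.edit"
assert not t.has_scope("member.create")
assert Token(scopes=["*"]).has_scope("member.create")  # site tokens match everything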
@@ -1,89 +1,89 @@
import datetime
from itsdangerous import BadSignature
from itsdangerous.url_safe import URLSafeTimedSerializer
from quart import g
from sqlalchemy import insert, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from foxnouns.exceptions import ErrorCode, ForbiddenError
from foxnouns.settings import SECRET_KEY
from .member import Member
from .user import Token, User
async def user_from_ref(session: AsyncSession, user_ref: str):
"""Returns a user from a `user_ref` value. If `user_ref` is `@me`, returns the current user.
Otherwise, tries to convert the user to a snowflake ID and queries that. Otherwise, returns a user with that username.
"""
query = select(User).options(selectinload(User.members))
if user_ref == "@me":
if "user" in g:
if g.token.has_scope("user.read"):
query = query.where(User.id == g.user.id)
else:
raise ForbiddenError(
"Missing scope 'user.read'", type=ErrorCode.MissingScope
)
else:
raise ForbiddenError("Not authenticated")
else:
try:
id = int(user_ref)
query = query.where(User.id == id)
except ValueError:
query = query.where(User.username == user_ref)
return await session.scalar(query)
async def user_members(session: AsyncSession, user: User):
query = select(Member).where(Member.user_id == user.id)
res = await session.scalars(query)
return res.all()
serializer = URLSafeTimedSerializer(SECRET_KEY)
def generate_token(token: Token):
return serializer.dumps(token.id)
async def create_token(session: AsyncSession, user: User, scopes: list[str] = ["*"]):
expires = datetime.datetime.now() + datetime.timedelta(days=90)
query = (
insert(Token)
.values(user_id=user.id, expires_at=expires, scopes=scopes)
.returning(Token)
)
return await session.scalar(query)
async def validate_token(session: AsyncSession, header: str) -> tuple[Token, User]:
try:
token_id = serializer.loads(header)
except BadSignature:
raise ForbiddenError("Invalid token", type=ErrorCode.InvalidToken)
row = (
await session.execute(
select(Token, User).join(Token.user).where(Token.id == token_id)
)
).first()
if not row or not row.Token:
raise ForbiddenError("Invalid token", type=ErrorCode.InvalidToken)
if row.Token.expires_at < datetime.datetime.now():
raise ForbiddenError("Token has expired", type=ErrorCode.InvalidToken)
return (row.Token, row.User)
def is_self(user: User) -> bool:
return "user" in g and g.user.id == user.id
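A minimal sketch of the signing round-trip used by generate_token and validate_token (with a throwaway secret):

from itsdangerous.url_safe import URLSafeTimedSerializer

s = URLSafeTimedSerializer("dev-secret")
header = s.dumps(1234)          # what generate_token() produces from a token ID
assert s.loads(header) == 1234  # what validate_token() recovers; tampering raises BadSignature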
foxnouns/decorators.py (new file)
@@ -0,0 +1,45 @@
from typing import Any
from functools import wraps
from quart import g
from foxnouns.exceptions import ErrorCode, ForbiddenError, UnsupportedEndpointError
def require_auth(*, scope: str | None = None):
"""Decorator that requires a token with the given scopes.
If no token is given or the required scopes aren't set on it, execution is aborted."""
def decorator(func):
@wraps(func)
async def wrapper(*args, **kwargs):
if "user" not in g or "token" not in g:
raise ForbiddenError("Not authenticated", type=ErrorCode.Forbidden)
if scope and not g.token.has_scope(scope):
raise ForbiddenError(
f"Missing scope '{scope}'", type=ErrorCode.MissingScope
)
return await func(*args, **kwargs)
return wrapper
return decorator
def require_config_key(*, keys: list[Any]):
"""Decorator that requires one or more config keys to be set.
If any of them are None, execution is aborted."""
def decorator(func):
@wraps(func)
async def wrapper(*args, **kwargs):
for key in keys:
if not key:
raise UnsupportedEndpointError()
return await func(*args, **kwargs)
return wrapper
return decorator
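For illustration, how these decorators combine on a route (a hypothetical endpoint, following the pattern used in the Discord callback):

@bp.post("/api/v2/example", host=settings.BASE_DOMAIN)
@require_config_key(keys=[settings.DISCORD_CLIENT_ID, settings.DISCORD_CLIENT_SECRET])
@require_auth(scope="user.update")
async def example_endpoint():
    # Only reached when both Discord credentials are configured and the
    # caller's token carries "user.update" (or the wider "user", or "*").
    return {"ok": True}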

@@ -1,82 +1,93 @@
import enum
class ErrorCode(enum.IntEnum):
BadRequest = 400
Forbidden = 403
NotFound = 404
MethodNotAllowed = 405
TooManyRequests = 429
InternalServerError = 500 # catch-all code for unknown errors
# Login/authorize error codes
InvalidState = 1001
InvalidOAuthCode = 1002
InvalidToken = 1003 # a token was supplied, but it is invalid
InviteRequired = 1004
InvalidTicket = 1005 # invalid signup ticket
InvalidUsername = 1006 # invalid username (when signing up)
UsernameTaken = 1007 # username taken (when signing up)
InvitesDisabled = 1008 # invites are disabled (unneeded)
InviteLimitReached = 1009 # invite limit reached (when creating invites)
InviteAlreadyUsed = 1010 # invite already used (when signing up)
DeletionPending = 1011 # own user deletion pending, returned with undo code
RecentExport = 1012 # latest export is too recent
UnsupportedInstance = 1013 # unsupported fediverse software
AlreadyLinked = 1014 # user already has linked account of the same type
NotLinked = 1015 # user already doesn't have a linked account
LastProvider = (
1016 # unlinking provider would leave account with no authentication method
)
InvalidCaptcha = 1017 # invalid or missing captcha response
MissingScope = 1018 # missing the required scope for this endpoint
# User-related error codes
UserNotFound = 2001
MemberListPrivate = 2002
FlagLimitReached = 2003
RerollingTooQuickly = 2004
# Member-related error codes
MemberNotFound = 3001
MemberLimitReached = 3002
MemberNameInUse = 3003
NotOwnMember = 3004
# General request error codes
RequestTooBig = 4001
MissingPermissions = 4002
# Moderation related error codes
ReportAlreadyHandled = 5001
NotSelfDelete = 5002
class ExpectedError(Exception):
msg: str
type: ErrorCode
status_code: int = 500
def __init__(self, msg: str, type: ErrorCode):
self.msg = msg
self.type = type
super().__init__(msg)
def __str__(self):
return f"{self.__class__.__name__}({self.msg})"
class NotFoundError(ExpectedError):
status_code = 404
def __init__(self, msg: str, type=ErrorCode.NotFound):
self.type = type
super().__init__(msg, type)
class ForbiddenError(ExpectedError):
status_code = 403
def __init__(self, msg: str, type=ErrorCode.Forbidden):
self.type = type
super().__init__(msg, type)
import enum
class ErrorCode(enum.IntEnum):
BadRequest = 400
Forbidden = 403
NotFound = 404
MethodNotAllowed = 405
TooManyRequests = 429
InternalServerError = 500 # catch-all code for unknown errors
# Login/authorize error codes
InvalidState = 1001
InvalidOAuthCode = 1002
InvalidToken = 1003 # a token was supplied, but it is invalid
InviteRequired = 1004
InvalidTicket = 1005 # invalid signup ticket
InvalidUsername = 1006 # invalid username (when signing up)
UsernameTaken = 1007 # username taken (when signing up)
InvitesDisabled = 1008 # invites are disabled (unneeded)
InviteLimitReached = 1009 # invite limit reached (when creating invites)
InviteAlreadyUsed = 1010 # invite already used (when signing up)
DeletionPending = 1011 # own user deletion pending, returned with undo code
RecentExport = 1012 # latest export is too recent
UnsupportedInstance = 1013 # unsupported fediverse software
AlreadyLinked = 1014 # user already has linked account of the same type
NotLinked = 1015 # user already doesn't have a linked account
LastProvider = (
1016 # unlinking provider would leave account with no authentication method
)
InvalidCaptcha = 1017 # invalid or missing captcha response
MissingScope = 1018 # missing the required scope for this endpoint
# User-related error codes
UserNotFound = 2001
MemberListPrivate = 2002
FlagLimitReached = 2003
RerollingTooQuickly = 2004
# Member-related error codes
MemberNotFound = 3001
MemberLimitReached = 3002
MemberNameInUse = 3003
NotOwnMember = 3004
# General request error codes
RequestTooBig = 4001
MissingPermissions = 4002
# Moderation related error codes
ReportAlreadyHandled = 5001
NotSelfDelete = 5002
class ExpectedError(Exception):
msg: str
type: ErrorCode
status_code: int = 500
def __init__(self, msg: str, type: ErrorCode):
self.msg = msg
self.type = type
super().__init__(msg)
def __str__(self):
return f"{self.__class__.__name__}({self.msg})"
class NotFoundError(ExpectedError):
status_code = 404
def __init__(self, msg: str, type=ErrorCode.NotFound):
self.type = type
super().__init__(msg, type)
class ForbiddenError(ExpectedError):
status_code = 403
def __init__(self, msg: str, type=ErrorCode.Forbidden):
self.type = type
super().__init__(msg, type)
class UnsupportedEndpointError(ExpectedError):
status_code = 404
def __init__(self):
self.type = ErrorCode.NotFound
super().__init__(
"Endpoint is not supported on this instance",
type=ErrorCode.NotFound,
)
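Tying this to the error handlers in the app module: raising one of these inside a route surfaces as JSON, with the code taken from the enum above. For example:

raise NotFoundError("User not found", type=ErrorCode.UserNotFound)
# handle_expected_error turns this into
#   {"code": 2001, "message": "User not found"} with HTTP status 404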

@@ -1,21 +1,21 @@
from pydantic import BaseModel, field_validator
class BasePatchModel(BaseModel):
model_config = {"from_attributes": True}
def is_set(self, key: str) -> bool:
return key in self.model_fields_set
class BaseSnowflakeModel(BaseModel):
"""A base model with a Snowflake ID that is serialized as a string.
Also sets `model_config.from_attributes` to `True`."""
id: str
@field_validator("id", mode="before")
def transform_id_to_str(cls, value) -> str:
return str(value)
model_config = {"from_attributes": True}
@@ -1,17 +1,17 @@
from pydantic import BaseModel, Field
class FieldEntry(BaseModel):
value: str = Field(max_length=128)
status: str
class ProfileField(BaseModel):
name: str = Field(max_length=128)
entries: list[FieldEntry]
class PronounEntry(BaseModel):
value: str = Field(max_length=128)
status: str
display: str | None = Field(max_length=128, default=None)
@@ -1,32 +1,32 @@
from pydantic import Field, field_validator
from . import BasePatchModel
from .fields import FieldEntry, ProfileField, PronounEntry
from .user import BaseMemberModel, BaseUserModel
class FullMemberModel(BaseMemberModel):
user: BaseUserModel
class MemberPatchModel(BasePatchModel):
name: str | None = Field(
min_length=1,
max_length=100,
default=None,
pattern=r"^[^@\?!#\/\\\[\]\"\{\}'$%&()+<=>^|~`,\*]{1,100}$",
)
bio: str | None = Field(max_length=1024, default=None)
avatar: str | None = Field(max_length=1_000_000, default=None)
names: list[FieldEntry] = Field(default=[])
pronouns: list[PronounEntry] = Field(default=[])
fields: list[ProfileField] = Field(default=[])
@field_validator("name")
@classmethod
def check_name(cls, value):
if value in [".", "..", "edit"]:
raise ValueError("Name is not allowed")
return value
@@ -1,56 +1,56 @@
from pydantic import Field
from . import BaseSnowflakeModel
from .fields import FieldEntry, ProfileField, PronounEntry
class BaseUserModel(BaseSnowflakeModel):
name: str = Field(alias="username")
display_name: str | None
bio: str | None
avatar: str | None
names: list[FieldEntry] = Field(default=[])
pronouns: list[PronounEntry] = Field(default=[])
fields: list[ProfileField] = Field(default=[])
class UserModel(BaseUserModel):
members: list["BaseMemberModel"] = Field(default=[])
class BaseMemberModel(BaseSnowflakeModel):
name: str
display_name: str | None
bio: str | None
avatar: str | None
names: list[FieldEntry] = Field(default=[])
pronouns: list[PronounEntry] = Field(default=[])
fields: list[ProfileField] = Field(default=[])
class SelfUserModel(UserModel):
pass
def check_username(value):
if not value:
return value
if value.lower() in (
"..",
"admin",
"administrator",
"mod",
"moderator",
"api",
"page",
"pronouns",
"settings",
"pronouns.cc",
"pronounscc",
):
raise ValueError("Username is not allowed")
return value
@@ -1,41 +1,45 @@
from environs import Env
# read .env file
env = Env()
env.read_env()
# Format: postgresql+{driver}://{user}:{password}@{host}/{name}
# Note that the driver is set by the application.
with env.prefixed("DATABASE_"):
DATABASE = {
"USER": env("USER"),
"PASSWORD": env("PASSWORD"),
"HOST": env("HOST"),
"NAME": env("NAME"),
}
# The Redis database used for Celery and ephemeral storage.
REDIS_URL = env("REDIS_URL", "redis://localhost")
with env.prefixed("MINIO_"):
MINIO = {
"ENDPOINT": env("ENDPOINT"),
"ACCESS_KEY": env("ACCESS_KEY"),
"SECRET_KEY": env("SECRET_KEY"),
"BUCKET": env("BUCKET"),
"SECURE": env.bool("SECURE", True),
"REGION": env("REGION", "auto"),
}
# The base domain the API is served on. This must be set.
BASE_DOMAIN = env("BASE_DOMAIN")
# The base domain for short URLs.
SHORT_DOMAIN = env("SHORT_DOMAIN", "prns.localhost")
# The base URL used for the frontend. This will usually be the same as BASE_DOMAIN prefixed with https://.
FRONTEND_BASE = env("FRONTEND_DOMAIN", f"https://{BASE_DOMAIN}")
# Secret key for signing tokens, generate with (for example) `openssl rand -base64 32`
SECRET_KEY = env("SECRET_KEY")
# Whether to echo SQL statements to the logs.
ECHO_SQL = env.bool("ECHO_SQL", False)
from environs import Env
# read .env file
env = Env()
env.read_env()
# Format: postgresql+{driver}://{user}:{password}@{host}/{name}
# Note that the driver is set by the application.
with env.prefixed("DATABASE_"):
DATABASE = {
"USER": env("USER"),
"PASSWORD": env("PASSWORD"),
"HOST": env("HOST"),
"NAME": env("NAME"),
}
# The Redis database used for Celery and ephemeral storage.
REDIS_URL = env("REDIS_URL", "redis://localhost")
with env.prefixed("MINIO_"):
MINIO = {
"ENDPOINT": env("ENDPOINT"),
"ACCESS_KEY": env("ACCESS_KEY"),
"SECRET_KEY": env("SECRET_KEY"),
"BUCKET": env("BUCKET"),
"SECURE": env.bool("SECURE", True),
"REGION": env("REGION", "auto"),
}
# Discord OAuth credentials. If these are not set the Discord OAuth endpoints will not work.
DISCORD_CLIENT_ID = env("DISCORD_CLIENT_ID", None)
DISCORD_CLIENT_SECRET = env("DISCORD_CLIENT_SECRET", None)
# The base domain the API is served on. This must be set.
BASE_DOMAIN = env("BASE_DOMAIN")
# The base domain for short URLs.
SHORT_DOMAIN = env("SHORT_DOMAIN", "prns.localhost")
# The base URL used for the frontend. This will usually be the same as BASE_DOMAIN prefixed with https://.
FRONTEND_BASE = env("FRONTEND_DOMAIN", f"https://{BASE_DOMAIN}")
# Secret key for signing tokens, generate with (for example) `openssl rand -base64 32`
SECRET_KEY = env("SECRET_KEY")
# Whether to echo SQL statements to the logs.
ECHO_SQL = env.bool("ECHO_SQL", False)
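For reference, a minimal .env covering the required keys might look like this (every value here is a placeholder):

DATABASE_USER=postgres
DATABASE_PASSWORD=postgres
DATABASE_HOST=localhost
DATABASE_NAME=foxnouns
MINIO_ENDPOINT=localhost:9000
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin
MINIO_BUCKET=foxnouns
BASE_DOMAIN=pronouns.localhost
SECRET_KEY=generate-me-with-openssl-rand-base64-32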

@@ -1,155 +1,155 @@
import base64
import hashlib
from io import BytesIO
import pyvips
from celery import Celery
from celery.utils.log import get_task_logger
from minio import Minio
from sqlalchemy import select, update
from foxnouns.db import Member, User
from foxnouns.db.sync import session
from foxnouns.settings import MINIO, REDIS_URL
app = Celery("tasks", broker=REDIS_URL)
logger = get_task_logger(__name__)
minio = Minio(
MINIO["ENDPOINT"],
access_key=MINIO["ACCESS_KEY"],
secret_key=MINIO["SECRET_KEY"],
secure=MINIO["SECURE"],
region=MINIO["REGION"],
)
bucket = MINIO["BUCKET"]
def convert_avatar(uri: str) -> bytes:
"""Converts a base64 data URI into a WebP image.
Images are resized and cropped to 512x512 and exported with quality 95.
Only PNG, WebP, and JPEG images are allowed as input."""
if not uri.startswith("data:image/"):
raise ValueError("Not a data URI")
content_type, encoded = uri.removeprefix("data:").split(";base64,", 1)
if content_type not in ["image/png", "image/webp", "image/jpeg"]:
raise ValueError("Invalid content type for image")
img = pyvips.Image.thumbnail_buffer(
base64.b64decode(encoded),
512,
height=512,
size=pyvips.Size.BOTH,
crop=pyvips.Interesting.CENTRE,
)
return img.write_to_buffer(".webp", Q=95)
@app.task
def process_user_avatar(user_id: int, avatar: str) -> None:
"""Processes an avatar string, uploads it to S3, and updates the user's avatar hash.
Also deletes the old avatar if one was already set."""
with session() as conn:
user = conn.scalar(select(User).where(User.id == user_id))
if not user:
raise ValueError("process_user_avatar was passed the ID of a nonexistent user")
img = convert_avatar(avatar)
hash = hashlib.new("sha256", data=img).hexdigest()
old_hash = user.avatar
minio.put_object(
bucket,
f"users/{user_id}/avatars/{hash}.webp",
BytesIO(img),
len(img),
"image/webp",
)
with session() as conn:
conn.execute(update(User).values(avatar=hash).where(User.id == user_id))
conn.commit()
if old_hash and old_hash != hash:
minio.remove_object(bucket, f"users/{user_id}/avatars/{old_hash}.webp")
@app.task
def delete_user_avatar(user_id: int) -> None:
"""Deletes a user's avatar."""
with session() as conn:
user = conn.scalar(select(User).where(User.id == user_id))
if not user:
raise ValueError("delete_user_avatar was passed the ID of a nonexistent user")
if not user.avatar:
logger.info(
"delete_user_avatar was called for a user with a null avatar (%d)", user_id
)
return
minio.remove_object(bucket, f"users/{user_id}/avatars/{user.avatar}.webp")
with session() as conn:
conn.execute(update(User).values(avatar=None).where(User.id == user_id))
conn.commit()
@app.task
def process_member_avatar(member_id: int, avatar: str) -> None:
"""Processes an avatar string, uploads it to S3, and updates the member's avatar hash.
Also deletes the old avatar if one was already set."""
with session() as conn:
member = conn.scalar(select(Member).where(Member.id == member_id))
if not member:
raise ValueError(
"process_member_avatar was passed the ID of a nonexistent member"
)
img = convert_avatar(avatar)
hash = hashlib.new("sha256", data=img).hexdigest()
old_hash = member.avatar
minio.put_object(
bucket,
f"members/{member_id}/avatars/{hash}.webp",
BytesIO(img),
len(img),
"image/webp",
)
with session() as conn:
conn.execute(update(Member).values(avatar=hash).where(Member.id == member_id))
conn.commit()
if old_hash and old_hash != hash:
minio.remove_object(bucket, f"members/{member_id}/avatars/{old_hash}.webp")
@app.task
def delete_member_avatar(member_id: int) -> None:
"""Deletes a member's avatar."""
with session() as conn:
member = conn.scalar(select(Member).where(Member.id == member_id))
if not member:
raise ValueError(
"delete_member_avatar was passed the ID of a nonexistent member"
)
if not member.avatar:
logger.info(
"delete_member_avatar was called for a member with a null avatar (%d)",
member_id,
)
return
minio.remove_object(bucket, f"members/{member_id}/avatars/{member.avatar}.webp")
with session() as conn:
conn.execute(update(Member).values(avatar=None).where(Member.id == member_id))
conn.commit()
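For context, the avatar strings consumed by convert_avatar are plain base64 data URIs; building one is a one-liner (hypothetical file path):

import base64

with open("avatar.png", "rb") as f:
    avatar_uri = "data:image/png;base64," + base64.b64encode(f.read()).decode()
# process_user_avatar.delay(user_id, avatar_uri) then resizes it, converts it
# to WebP, and uploads it to the bucket.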


@@ -1,32 +1,32 @@
export type User = {
  id: string;
  name: string;
  display_name: string | null;
  bio: string | null;
  avatar: string | null;
  names: FieldEntry[];
  pronouns: PronounEntry[];
  fields: ProfileField[];
};

export type FieldEntry = {
  value: string;
  status: string;
};

export type ProfileField = {
  name: string;
  entries: FieldEntry[];
};

export type PronounEntry = {
  value: string;
  status: string;
  display: string | null;
};

export type Meta = {
  users: number;
  members: number;
};
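As a reference for keeping both sides in sync, here is a hedged sketch of how these shapes could be mirrored on the Python side with pydantic (already a backend dependency). The class names and defaults below are assumptions for illustration, not the backend's actual schema.

from pydantic import BaseModel


class FieldEntry(BaseModel):
    value: str
    status: str


class PronounEntry(BaseModel):
    value: str
    status: str
    display: str | None = None


class ProfileField(BaseModel):
    name: str
    entries: list[FieldEntry]


class UserModel(BaseModel):
    # Assumed mirror of the frontend `User` type; IDs serialize as strings.
    id: str
    name: str
    display_name: str | None = None
    bio: str | None = None
    avatar: str | None = None
    names: list[FieldEntry]
    pronouns: list[PronounEntry]
    fields: list[ProfileField]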


@@ -1,83 +1,83 @@
import type { Cookies, ServerLoadEvent } from "@sveltejs/kit";

export type FetchOptions = {
  fetchFn?: typeof fetch;
  token?: string;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  data?: any;
  version?: number;
  extraHeaders?: Record<string, string>;
};

/**
 * Fetch a path from the API and parse the response.
 * To make sure the request is authenticated in load functions,
 * pass `fetch` from the request object into opts.
 *
 * @param method The HTTP method, i.e. GET, POST, PATCH
 * @param path The path to request, minus the leading `/api/v2`
 * @param opts Extra options for this request
 * @returns The response body, parsed as `T`
 * @throws APIError
 */
export default async function request<T>(
  method: string,
  path: string,
  opts: FetchOptions = {},
): Promise<T> {
  const { token, data, version, extraHeaders } = opts;
  const fetchFn = opts.fetchFn ?? fetch;

  const resp = await fetchFn(`/api/v${version ?? 2}${path}`, {
    method,
    body: data ? JSON.stringify(data) : undefined,
    headers: {
      ...extraHeaders,
      ...(token ? { Authorization: token } : {}),
      "Content-Type": "application/json",
    },
  });

  if (resp.status < 200 || resp.status >= 400) throw await resp.json();
  return (await resp.json()) as T;
}

/**
 * Fetch a path from the API and discard the response.
 * To make sure the request is authenticated in load functions,
 * pass `fetch` from the request object into opts.
 *
 * @param method The HTTP method, i.e. GET, POST, PATCH
 * @param path The path to request, minus the leading `/api/v2`
 * @param opts Extra options for this request
 * @throws APIError
 */
export async function fastRequest(
  method: string,
  path: string,
  opts: FetchOptions = {},
): Promise<void> {
  const { token, data, version, extraHeaders } = opts;
  const fetchFn = opts.fetchFn ?? fetch;

  const resp = await fetchFn(`/api/v${version ?? 2}${path}`, {
    method,
    body: data ? JSON.stringify(data) : undefined,
    headers: {
      ...extraHeaders,
      ...(token ? { Authorization: token } : {}),
      "Content-Type": "application/json",
    },
  });

  if (resp.status < 200 || resp.status >= 400) throw await resp.json();
}

/**
 * Helper function to get a token from a request cookie.
 * Accepts either a `Cookies` object or a `ServerLoadEvent`.
 * @param s A Cookies or ServerLoadEvent object
 * @returns The token, or `undefined` if no token is set.
 */
export const getToken = (s: Cookies | ServerLoadEvent) =>
  "cookies" in s ? s.cookies.get("pronounscc-token") : s.get("pronounscc-token");


@@ -1,13 +1,13 @@
import request, { getToken } from "$lib/request";
import type { User, Meta } from "$lib/entities";

export async function load({ fetch, cookies }) {
  const meta = await request<Meta>("GET", "/meta", { fetchFn: fetch });

  let user: User | undefined;
  if (cookies.get("pronounscc-token")) {
    user = await request<User>("GET", "/users/@me", { fetchFn: fetch, token: getToken(cookies) });
  }

  return { meta, user, token: getToken(cookies) };
}


@@ -1,15 +1,15 @@
<script lang="ts">
  import "bootstrap/scss/bootstrap.scss";
  import "bootstrap-icons/font/bootstrap-icons.scss";

  import type { LayoutData } from "./$types";

  export let data: LayoutData;
</script>

{JSON.stringify(data.meta)}
{#if data.user}
  {JSON.stringify(data.user)}
{/if}

<slot />

File diff suppressed because it is too large

3032
poetry.lock generated

File diff suppressed because it is too large


@@ -1,71 +1,71 @@
[tool.poetry]
name = "foxnouns"
version = "0.1.0"
description = ""
authors = ["sam <sam@sleepycat.moe>"]
license = "Apache-2.0"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.11"
sqlalchemy = { extras = ["asyncio"], version = "^2.0.28" }
psycopg = "^3.1.18"
celery = { extras = ["redis"], version = "^5.3.6" }
quart = "^0.19.4"
# Temporary until a release containing this commit is made:
# https://github.com/pgjones/quart-schema/commit/9f4455a1363c6edd2b23b898c554e52a9ce6d00f
quart-schema = { git = "https://github.com/pgjones/quart-schema.git" }
# quart-schema = { extras = ["pydantic"], version = "^0.19.1" }
pydantic = "^2.6.3"
itsdangerous = "^2.1.2"
uvicorn = "^0.28.0"
asyncpg = "^0.29.0"
environs = "^11.0.0"
alembic = "^1.13.1"
quart-cors = "^0.7.0"
minio = "^7.2.5"
pyvips = "^2.2.2"
redis = "^5.0.3"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
ruff = "^0.3.4"

[tool.poetry.group.test]
optional = true

[tool.poetry.group.test.dependencies]
pytest = "^8.0.2"
pytest-asyncio = "^0.23.5.post1"

[tool.poe.tasks.dev]
help = "Run a development server with auto-reload"
cmd = "env QUART_APP=foxnouns.app:app quart --debug run --reload"

[tool.poe.tasks.celery]
help = "Run a Celery task worker"
cmd = "celery -A foxnouns.tasks worker"

[tool.poe.tasks.server]
help = "Run a production server"
cmd = "uvicorn 'foxnouns.app:app'"

[tool.poe.tasks.migrate]
help = "Migrate the database to the latest revision"
cmd = "alembic upgrade head"

[tool.poe.tasks]
test = "pytest"
lint = "ruff check"
format = "ruff format"
"check-imports" = "ruff check --select I"
"sort-imports" = "ruff check --select I --fix"

[tool.pytest.ini_options]
addopts = ["--import-mode=importlib"]

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"


@@ -1,60 +1,60 @@
import pytest
import pytest_asyncio
from sqlalchemy import delete, text

from foxnouns.db import Base
from foxnouns.settings import DATABASE

# Override the database name to the testing database
DATABASE["NAME"] = f"{DATABASE['NAME']}_test"


def pytest_collection_modifyitems(items):
    """Ensure that all async tests use the same event loop."""
    pytest_asyncio_tests = (
        item for item in items if pytest_asyncio.is_async_test(item)
    )
    session_scope_marker = pytest.mark.asyncio(scope="session")
    for async_test in pytest_asyncio_tests:
        async_test.add_marker(session_scope_marker, append=False)


@pytest.fixture(scope="session", autouse=True)
def setup():
    """Migrate the testing database to the latest migration, and once the tests complete, clear the database again."""
    from alembic import command, config

    from foxnouns.db.sync import engine

    cfg = config.Config("alembic.ini")
    cfg.attributes["connection"] = engine.connect()
    command.upgrade(cfg, "head")

    yield

    with engine.begin() as conn:
        Base.metadata.drop_all(conn)
        conn.execute(text("DROP TABLE alembic_version"))
        conn.commit()


@pytest.fixture(scope="function", autouse=True)
def clean_tables_after_tests():
    """Clean tables after every test."""
    yield

    from foxnouns.db.sync import engine

    with engine.begin() as conn:
        for table in reversed(Base.metadata.sorted_tables):
            conn.execute(delete(table))
        conn.commit()


@pytest_asyncio.fixture(scope="session", autouse=True)
async def app():
    from foxnouns.app import app

    return app


@@ -1,13 +1,13 @@
import pytest
from quart import Quart


@pytest.mark.asyncio
class TestUsers:
    async def test_get_me_returns_403_if_unauthenticated(self, app: Quart):
        resp = await app.test_client().get("/api/v2/users/@me")
        assert resp.status_code == 403

    async def test_get_users_returns_404_if_user_not_found(self, app: Quart):
        resp = await app.test_client().get("/api/v2/users/unknown_user")
        assert resp.status_code == 404
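Following the same pattern, a hedged sketch of a test for the /api/v2/meta endpoint that the frontend's load function already calls; the exact response keys are assumptions based on the frontend's Meta type.

import pytest
from quart import Quart


@pytest.mark.asyncio
class TestMeta:
    async def test_get_meta_returns_counts(self, app: Quart):
        resp = await app.test_client().get("/api/v2/meta")
        assert resp.status_code == 200

        data = await resp.get_json()
        # The frontend `Meta` type expects `users` and `members` counts.
        assert {"users", "members"} <= set(data.keys())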