oh no
parent 8c1db3fadb
commit 72aea7e07c

49 changed files with 5559 additions and 5497 deletions

.gitignore (vendored): 18 changed lines

@@ -1,9 +1,9 @@
__pycache__/
.pytest_cache/
.env
node_modules
build
.svelte-kit
package
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

Dockerfile: 60 changed lines

@@ -1,30 +1,30 @@
FROM docker.io/python:3.11-alpine as python-base

ENV POETRY_VERSION=1.3.2
ENV POETRY_HOME=/opt/poetry
ENV POETRY_VENV=/opt/poetry-venv
ENV POETRY_CACHE_DIR=/opt/.cache

RUN apk add --no-cache tini libmagic libpq vips vips-dev

FROM python-base as poetry-base

RUN python3 -m venv $POETRY_VENV \
    && $POETRY_VENV/bin/pip install -U pip setuptools \
    && $POETRY_VENV/bin/pip install poetry==${POETRY_VERSION} \
    && $POETRY_VENV/bin/pip install poethepoet

FROM python-base as app

COPY --from=poetry-base ${POETRY_VENV} ${POETRY_VENV}
ENV PATH="${PATH}:${POETRY_VENV}/bin"

WORKDIR /app

COPY poetry.lock pyproject.toml ./
RUN poetry install --no-interaction --no-cache --without dev

COPY . /app

ENTRYPOINT ["/sbin/tini", "--"]
CMD ["sh", "./entry.sh"]

LICENSE: 402 changed lines

@@ -1,201 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

README.md: 24 changed lines

@@ -1,12 +1,12 @@
# pronouns.cc

pronouns.cc rewrite in Python, using Quart for routing, SQLAlchemy for the database, and Celery for background tasks.

## Running

This isn't anywhere *near* complete yet. For now, you can install [`poe`](https://github.com/nat-n/poethepoet),
run `poe migrate`, and then `poe server`.

For configuration, a `.env` file is used. See `foxnouns/settings.py`--all keys are required unless specified otherwise.

(Note that the docker-compose file doesn't work yet)

SCOPES.md: 24 changed lines

@@ -1,12 +1,12 @@
- `user`
  - `user.read_hidden`: read current user's hidden data.
    This includes data such as timezone and whether the user's member list is hidden.
  - `user.read_privileged`: read privileged user data such as authentication methods
  - `user.update`: update current user. This scope cannot update privileged data. This scope implies `user.read_hidden`.
- `member`
  - `member.read`: read member list, even if it's hidden, including hidden members.
  - `member.update`: update and delete existing members.
    While `update` and `delete` could be separate, that might lull users into a false sense of security,
    as it would still be possible to clear members and scramble their names,
    which would be equivalent to `delete` anyway.
  - `member.create`: create new members
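
Since `user.update` is documented to imply `user.read_hidden`, a permission check has to expand implied scopes before comparing. A minimal sketch of that expansion, using illustrative names (`IMPLIED_SCOPES`, `has_scope`) that are not from this codebase:

```python
# Hypothetical sketch; the mapping and helpers are illustrative only.
IMPLIED_SCOPES: dict[str, set[str]] = {
    "user.update": {"user.read_hidden"},  # per SCOPES.md
}


def effective_scopes(granted: set[str]) -> set[str]:
    """Expand a set of granted scopes with every scope they imply."""
    expanded = set(granted)
    for scope in granted:
        expanded |= IMPLIED_SCOPES.get(scope, set())
    return expanded


def has_scope(granted: set[str], required: str) -> bool:
    return required in effective_scopes(granted)


# A token granted only `user.update` can still read hidden user data,
# but not privileged data such as authentication methods.
assert has_scope({"user.update"}, "user.read_hidden")
assert not has_scope({"user.update"}, "user.read_privileged")
```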

alembic.ini: 230 changed lines

@@ -1,115 +1,115 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
file_template = %%(epoch)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/env.py: 160 changed lines

@@ -1,80 +1,80 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context
from foxnouns.db import Base
from foxnouns.db.sync import SYNC_DATABASE_URL

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = SYNC_DATABASE_URL
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    engine_config = config.get_section(config.config_ini_section, {})
    engine_config["sqlalchemy.url"] = SYNC_DATABASE_URL

    connectable = engine_from_config(
        engine_config,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
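
`env.py` imports `SYNC_DATABASE_URL` from `foxnouns.db.sync`, a module not included in this commit. The name suggests the application itself connects through an async driver while Alembic is handed a synchronous URL; the sketch below is purely an assumption about what such a helper might do:

```python
# Hypothetical helper; foxnouns.db.sync is not shown in this diff, so both
# the driver names and the approach are assumptions.
def to_sync_url(async_url: str) -> str:
    """Swap an asyncpg-style URL for a psycopg one usable by Alembic."""
    return async_url.replace("postgresql+asyncpg://", "postgresql+psycopg://", 1)


assert (
    to_sync_url("postgresql+asyncpg://user:pass@localhost/foxnouns")
    == "postgresql+psycopg://user:pass@localhost/foxnouns"
)
```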

alembic/script.py.mako

@@ -1,26 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

alembic/versions (revision b39613fd7327, "Init")

@@ -1,65 +1,65 @@
"""Init

Revision ID: b39613fd7327
Revises:
Create Date: 2024-03-09 16:32:28.590145

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "b39613fd7327"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "fediverse_apps",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("instance", sa.Text(), nullable=False),
        sa.Column("client_id", sa.Text(), nullable=False),
        sa.Column("client_secret", sa.Text(), nullable=False),
        sa.Column("instance_type", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("instance"),
    )
    op.create_table(
        "users",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("username", sa.Text(), nullable=False),
        sa.Column("display_name", sa.Text(), nullable=True),
        sa.Column("bio", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username"),
    )
    op.create_table(
        "auth_methods",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("auth_type", sa.Integer(), nullable=False),
        sa.Column("remote_id", sa.Text(), nullable=False),
        sa.Column("remote_username", sa.Text(), nullable=True),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        sa.Column("fediverse_app_id", sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(
            ["fediverse_app_id"],
            ["fediverse_apps.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )


def downgrade() -> None:
    op.drop_table("auth_methods")
    op.drop_table("users")
    op.drop_table("fediverse_apps")

alembic/versions (revision 0b63f7c8ab96, "Add tokens")

@@ -1,39 +1,39 @@
"""Add tokens

Revision ID: 0b63f7c8ab96
Revises: b39613fd7327
Create Date: 2024-03-13 17:01:50.434602

"""

from typing import Sequence, Union

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "0b63f7c8ab96"
down_revision: Union[str, None] = "b39613fd7327"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "tokens",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("expires_at", sa.DateTime(), nullable=False),
        sa.Column("scopes", postgresql.ARRAY(sa.Text()), nullable=False),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )


def downgrade() -> None:
    op.drop_table("tokens")
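
The `tokens` table pairs an `expires_at` timestamp with a Postgres text array of scope strings like those in SCOPES.md. A minimal sketch of an expiry check against the timezone-naive `sa.DateTime()` column; the `Token` dataclass and function names are illustrative, not from this codebase:

```python
from dataclasses import dataclass, field
from datetime import datetime, timedelta


@dataclass
class Token:
    """Illustrative stand-in for a row in the tokens table."""

    user_id: int
    expires_at: datetime  # naive UTC, matching sa.DateTime()
    scopes: list[str] = field(default_factory=list)


def token_is_live(token: Token, now: datetime | None = None) -> bool:
    """A token is usable only strictly before its expiry timestamp."""
    now = now or datetime.utcnow()  # naive UTC to match the naive column
    return now < token.expires_at


# Example: a token that expires in an hour is still live.
t = Token(user_id=1, expires_at=datetime.utcnow() + timedelta(hours=1),
          scopes=["user.read_hidden"])
assert token_is_live(t)
```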

alembic/versions (revision 1d8f8443a7f5, "Add names/pronouns/fields")

@@ -1,56 +1,56 @@
"""Add names/pronouns/fields

Revision ID: 1d8f8443a7f5
Revises: 0b63f7c8ab96
Create Date: 2024-03-20 15:36:08.756635

"""

from typing import Sequence, Union

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "1d8f8443a7f5"
down_revision: Union[str, None] = "0b63f7c8ab96"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.add_column(
        "users",
        sa.Column(
            "names",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            server_default="[]",
        ),
    )
    op.add_column(
        "users",
        sa.Column(
            "pronouns",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            server_default="[]",
        ),
    )
    op.add_column(
        "users",
        sa.Column(
            "fields",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            server_default="[]",
        ),
    )


def downgrade() -> None:
    op.drop_column("users", "fields")
    op.drop_column("users", "pronouns")
    op.drop_column("users", "names")

alembic/versions (revision 17cc8cb77be5, "Add members")

@@ -1,47 +1,47 @@
"""Add members

Revision ID: 17cc8cb77be5
Revises: 1d8f8443a7f5
Create Date: 2024-03-20 16:00:59.251354

"""

from typing import Sequence, Union

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "17cc8cb77be5"
down_revision: Union[str, None] = "1d8f8443a7f5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "members",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("name", sa.Text(), nullable=False),
        sa.Column("display_name", sa.Text(), nullable=True),
        sa.Column("bio", sa.Text(), nullable=True),
        sa.Column("names", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column("pronouns", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column("fields", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("members")
    # ### end Alembic commands ###

alembic/versions (revision a000d800f45f, "Add unique index to members")

@@ -1,32 +1,32 @@
"""Add unique index to members

Revision ID: a000d800f45f
Revises: 17cc8cb77be5
Create Date: 2024-03-21 15:52:09.403257

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "a000d800f45f"
down_revision: Union[str, None] = "17cc8cb77be5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_index(
        "members_user_name_idx",
        "members",
        ["user_id", sa.text("lower(name)")],
        unique=True,
    )


def downgrade() -> None:
    op.drop_index("members_user_name_idx", table_name="members")
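
Because the index covers `(user_id, lower(name))`, member names are unique per user regardless of case, so lookups should compare the same way. A sketch under that assumption, with a minimal stand-in `Member` model (the real model is not part of this diff):

```python
from sqlalchemy import BigInteger, Text, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Member(Base):
    """Minimal stand-in for the members table defined in 17cc8cb77be5."""

    __tablename__ = "members"
    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    user_id: Mapped[int] = mapped_column(BigInteger)
    name: Mapped[str] = mapped_column(Text)


def member_by_name(user_id: int, name: str):
    """Build a lookup that compares names the way the unique index does."""
    return select(Member).where(
        Member.user_id == user_id,
        func.lower(Member.name) == name.lower(),
    )
```

With the index in place, "Foxy" and "foxy" collide for the same user, so the case-insensitive lookup and the constraint agree on what counts as a duplicate.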

alembic/versions (revision 7503d2a6094c, "Add avatars")

@@ -1,29 +1,29 @@
"""Add avatars

Revision ID: 7503d2a6094c
Revises: a000d800f45f
Create Date: 2024-03-27 15:36:49.749722

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "7503d2a6094c"
down_revision: Union[str, None] = "a000d800f45f"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.add_column("members", sa.Column("avatar", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("avatar", sa.Text(), nullable=True))


def downgrade() -> None:
    op.drop_column("users", "avatar")
    op.drop_column("members", "avatar")
@@ -1,36 +1,36 @@
version: "3"

services:
  app:
    image: foxnouns
    build: .
    environment:
      - DATABASE_USER=postgres
      - DATABASE_PASSWORD=postgres
      - DATABASE_HOST=postgres
      - DATABASE_NAME=postgres
    volumes:
      - "./.env:/app/.env"
    ports:
      - "8000:8000"
    networks:
      - default

  postgres:
    image: docker.io/postgres:15-alpine
    volumes:
      - "postgres_data:/var/lib/postgresql/data"
    command: ["postgres",
      "-c", "max_connections=1000",  # PostgreSQL option names use underscores; "max-connections" is rejected at startup
      "-c", "timezone=Etc/UTC",
      "-c", "max_wal_size=1GB",
      "-c", "min_wal_size=80MB",
      "-c", "shared_buffers=128MB"]
    environment:
      - "POSTGRES_PASSWORD=postgres"
    restart: unless-stopped
    networks:
      - default

volumes:
  postgres_data:
10
entry.sh
@@ -1,5 +1,5 @@
#!/bin/sh
source .env
# poetry run alembic upgrade head
# poetry run uvicorn --workers=${WORKERS:-2} --host=0.0.0.0 --port=8000 'foxnouns.app:app'
poe migrate && poe server
124
foxnouns/app.py
@@ -1,62 +1,62 @@
from quart import Quart, g, request
from quart_cors import cors
from quart_schema import QuartSchema, RequestSchemaValidationError

from . import blueprints
from .db.aio import async_session
from .db.util import validate_token
from .exceptions import ErrorCode, ExpectedError
from .settings import BASE_DOMAIN, SECRET_KEY

app = Quart(__name__, host_matching=True, static_host=BASE_DOMAIN)
app.secret_key = SECRET_KEY
app = cors(
    app,
    allow_origin="*",
    allow_methods="*",
    allow_headers=["Content-Type", "Authorization", "User-Agent"],
    max_age=86400,
)
QuartSchema(app)

for bp in blueprints.__all__:
    app.register_blueprint(bp)


@app.errorhandler(RequestSchemaValidationError)
async def handle_request_validation_error(error: RequestSchemaValidationError):
    # TODO: parse the error and return a format closer to the draft APIv2
    return {"code": ErrorCode.BadRequest, "message": "Bad request"}, 400


@app.errorhandler(ExpectedError)
async def handle_expected_error(error: ExpectedError):
    return {"code": error.type, "message": error.msg}, error.status_code


@app.errorhandler(404)
async def handle_404(_):
    return {"code": 404, "message": "Not found"}, 404


@app.errorhandler(500)
async def handle_500(_):
    return {"code": 500, "message": "Internal server error"}, 500


@app.before_request
async def get_user_from_token():
    """Get the current user from a token given in the `Authorization` header.
    If no token is set, does nothing; if an invalid token is set, raises an error."""

    token = request.headers.get("Authorization", None)
    if not token:
        return

    async with async_session() as session:
        try:
            token, user = await validate_token(session, token)
            g.token = token
            g.user = user
        except:
            raise
@@ -1,6 +1,6 @@
from .v2.auth.discord import bp as discord_auth_blueprint
from .v2.members import bp as members_blueprint
from .v2.meta import bp as meta_blueprint
from .v2.users import bp as users_blueprint

__all__ = [users_blueprint, members_blueprint, meta_blueprint, discord_auth_blueprint]
@@ -1,61 +1,61 @@
from datetime import datetime

from pydantic import BaseModel, Field
from quart import Blueprint
from quart_schema import validate_response

from foxnouns.models.user import SelfUserModel
from foxnouns.settings import BASE_DOMAIN

bp = Blueprint("auth_v2", __name__)


class URLsResponse(BaseModel):
    discord: str | None = Field(default=None)
    google: str | None = Field(default=None)
    tumblr: str | None = Field(default=None)


@bp.post("/api/v2/auth/urls", host=BASE_DOMAIN)
@validate_response(URLsResponse, 200)
async def urls():
    # TODO: build authorization URLs + callback URLs, store state in Redis
    raise NotImplementedError()


class OAuthCallbackRequest(BaseModel):
    callback_domain: str
    code: str
    state: str


class BaseCallbackResponse(BaseModel):
    """The base class for callback responses."""

    has_account: bool


class ExistingUserCallbackResponse(BaseCallbackResponse):
    """The class returned when a user already exists."""

    token: str
    user: SelfUserModel


class NewUserCallbackResponse(BaseCallbackResponse):
    """The class returned when the user is new and has to create an account."""

    remote_username: str
    ticket: str
    require_captcha: bool


class DeletedUserCallbackResponse(BaseCallbackResponse):
    """The class returned when the user has been deleted."""

    token: str
    user: SelfUserModel

    deleted_at: datetime
    self_delete: bool
    delete_reason: str | None = Field(default=None)
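The three callback response classes above form a loose discriminated union keyed on `has_account`. A minimal sketch of how a client might branch on a decoded payload; the `resp` dict and its handling are hypothetical, not part of this commit:

# Hypothetical client-side handling of a callback response payload.
def handle_callback(resp: dict):
    if resp["has_account"]:
        # ExistingUserCallbackResponse: the token can be used immediately.
        return resp["token"]
    # NewUserCallbackResponse: the ticket is exchanged for an account later.
    return resp["ticket"]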
@@ -1,15 +1,17 @@
from quart import Blueprint
from quart_schema import validate_request, validate_response

-from foxnouns.settings import BASE_DOMAIN
+from foxnouns import settings
+from foxnouns.decorators import require_config_key

from . import BaseCallbackResponse, OAuthCallbackRequest

bp = Blueprint("discord_v2", __name__)


-@bp.post("/api/v2/auth/discord/callback", host=BASE_DOMAIN)
+@bp.post("/api/v2/auth/discord/callback", host=settings.BASE_DOMAIN)
+@require_config_key(keys=[settings.DISCORD_CLIENT_ID, settings.DISCORD_CLIENT_SECRET])
@validate_request(OAuthCallbackRequest)
@validate_response(BaseCallbackResponse)
async def discord_callback(data: OAuthCallbackRequest):
    raise NotImplementedError()
@@ -1,61 +1,61 @@
from pydantic import Field
from quart import Blueprint, g
from quart_schema import validate_request, validate_response

from foxnouns import tasks
-from foxnouns.auth import require_auth
+from foxnouns.decorators import require_auth
from foxnouns.db import Member
from foxnouns.db.aio import async_session
from foxnouns.db.util import user_from_ref
from foxnouns.exceptions import ErrorCode, NotFoundError
from foxnouns.models.member import FullMemberModel, MemberPatchModel
from foxnouns.settings import BASE_DOMAIN

bp = Blueprint("members_v2", __name__)


@bp.get("/api/v2/users/<user_ref>/members", host=BASE_DOMAIN)
@validate_response(list[FullMemberModel], 200)
async def get_members(user_ref: str):
    async with async_session() as session:
        user = await user_from_ref(session, user_ref)
        if not user:
            raise NotFoundError("User not found", type=ErrorCode.UserNotFound)

        return [FullMemberModel.model_validate(m) for m in user.members]


class MemberCreateModel(MemberPatchModel):
    name: str = Field(
        min_length=1,
        max_length=100,
        pattern=r"^[^@\?!#\/\\\[\]\"\{\}'$%&()+<=>^|~`,\*]{1,100}$",
    )


@bp.post("/api/v2/members", host=BASE_DOMAIN)
@require_auth(scope="member.create")
@validate_request(MemberCreateModel)
@validate_response(FullMemberModel, 200)
async def create_member(data: MemberCreateModel):
    async with async_session() as session:
        member = Member(
            user_id=g.user.id,
            name=data.name,
            bio=data.bio,
            names=[e.model_dump() for e in data.names],
            pronouns=[e.model_dump() for e in data.pronouns],
            fields=[e.model_dump() for e in data.fields],
        )

        session.add(member)
        await session.commit()
        # This has to be fetched before we can pass the model to Pydantic.
        # In a normal SELECT this is automatically fetched, but because we just created the object,
        # we have to do it manually.
        await member.awaitable_attrs.user

        if data.avatar:
            tasks.process_member_avatar.delay(member.id, data.avatar)

        return FullMemberModel.model_validate(member)
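The `awaitable_attrs` comment above touches on an SQLAlchemy 2.0 asyncio detail: lazy relationship loads cannot run implicitly inside async code, so the relationship is awaited explicitly before Pydantic reads it. A minimal sketch of the pattern, with hypothetical values, assuming the same `Member` and `async_session` as above:

# Sketch: explicitly await a lazy relationship under SQLAlchemy's asyncio extension.
async with async_session() as session:
    member = Member(user_id=1, name="example")  # hypothetical values
    session.add(member)
    await session.commit()
    user = await member.awaitable_attrs.user  # loads Member.user without implicit IO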
@@ -1,26 +1,26 @@
from pydantic import BaseModel
from quart import Blueprint
from quart_schema import validate_response
from sqlalchemy import select
from sqlalchemy.sql import func

from foxnouns.db import Member, User
from foxnouns.db.aio import async_session
from foxnouns.settings import BASE_DOMAIN

bp = Blueprint("meta_v2", __name__)


class MetaResponse(BaseModel):
    users: int
    members: int


@bp.get("/api/v2/meta", host=BASE_DOMAIN)
@validate_response(MetaResponse)
async def meta():
    async with async_session() as session:
        user_count = await session.scalar(select(func.count()).select_from(User))
        member_count = await session.scalar(select(func.count()).select_from(Member))

        return MetaResponse(users=user_count, members=member_count)
@@ -1,105 +1,105 @@
from pydantic import Field, field_validator
from quart import Blueprint, g
from quart_schema import validate_request, validate_response
from sqlalchemy import select

from foxnouns import tasks
-from foxnouns.auth import require_auth
+from foxnouns.decorators import require_auth
from foxnouns.db import User
from foxnouns.db.aio import async_session
from foxnouns.db.snowflake import Snowflake
from foxnouns.db.util import create_token, generate_token, is_self, user_from_ref
from foxnouns.exceptions import ErrorCode, NotFoundError
from foxnouns.models import BasePatchModel
from foxnouns.models.user import SelfUserModel, UserModel, check_username
from foxnouns.settings import BASE_DOMAIN

bp = Blueprint("users_v2", __name__)


@bp.get("/api/v2/users/<user_ref>", host=BASE_DOMAIN)
@validate_response(UserModel, 200)
async def get_user(user_ref: str):
    async with async_session() as session:
        user = await user_from_ref(session, user_ref)
        if not user:
            raise NotFoundError("User not found", type=ErrorCode.UserNotFound)

        return (
            SelfUserModel.model_validate(user)
            if is_self(user)
            else UserModel.model_validate(user)
        )


class EditUserRequest(BasePatchModel):
    username: str | None = Field(
        min_length=2, max_length=40, pattern=r"^[\w\-\.]{2,40}$", default=None
    )
    display_name: str | None = Field(max_length=100, default=None)
    bio: str | None = Field(max_length=1024, default=None)

    avatar: str | None = Field(max_length=1_000_000, default=None)

    @field_validator("username")
    @classmethod
    def check_username(cls, value):
        return check_username(value)


@bp.patch("/api/v2/users/@me", host=BASE_DOMAIN)
@require_auth(scope="user.update")
@validate_request(EditUserRequest)
@validate_response(SelfUserModel, 200)
async def edit_user(data: EditUserRequest):
    """Updates the current user."""

    async with async_session() as session:
        user = await session.scalar(select(User).where(User.id == g.user.id))
        await user.awaitable_attrs.members

        if data.username:
            user.username = data.username
        if data.is_set("display_name"):
            user.display_name = data.display_name
        if data.is_set("bio"):
            user.bio = data.bio

        await session.commit()

        if data.is_set("avatar"):
            if data.avatar:
                tasks.process_user_avatar.delay(user.id, data.avatar)
            else:
                tasks.delete_user_avatar.delay(user.id)

        return SelfUserModel.model_validate(user)


class DebugUserData(BasePatchModel):
    username: str


class DebugUserResponse(SelfUserModel):
    token: str


@bp.post("/api/v2/users/debug", host=BASE_DOMAIN)
@validate_request(DebugUserData)
@validate_response(DebugUserResponse, 200)
async def debug_create_user(data: DebugUserData):
    """Creates a user from just a username, and returns it along with a token.
    FIXME: this must be removed **BEFORE** deploying to production (or even public testing)
    """

    async with async_session() as session:
        user = User(id=Snowflake.generate_int(), username=data.username)
        # The user must be added to the session before the first commit,
        # otherwise that commit persists nothing.
        session.add(user)
        await session.commit()

        token = await create_token(session, user, ["*"])
        await session.commit()
        await user.awaitable_attrs.members

        user.token = generate_token(token)
        return DebugUserResponse.model_validate(user)
@@ -1,5 +1,5 @@
from .base import Base
from .member import Member
from .user import AuthMethod, FediverseApp, Token, User

__all__ = [Base, User, Token, AuthMethod, FediverseApp, Member]
@@ -1,17 +1,17 @@
from sqlalchemy import URL
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from foxnouns.settings import DATABASE, ECHO_SQL

ASYNC_DATABASE_URL = URL.create(
    "postgresql+asyncpg",
    username=DATABASE["USER"],
    password=DATABASE["PASSWORD"],
    host=DATABASE["HOST"],
    database=DATABASE["NAME"],
)

engine = create_async_engine(ASYNC_DATABASE_URL, echo=ECHO_SQL)
async_session = async_sessionmaker(engine, expire_on_commit=False)

__all__ = [engine, async_session]
@@ -1,6 +1,6 @@
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase


class Base(AsyncAttrs, DeclarativeBase):
    pass
@@ -1,34 +1,34 @@
from typing import Any

from sqlalchemy import BigInteger, ForeignKey, Index, Text, func, text
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship

from .base import Base
from .snowflake import Snowflake
from .user import User


class Member(Base):
    __tablename__ = "members"

    id: Mapped[int] = mapped_column(
        BigInteger(), primary_key=True, default=Snowflake.generate_int
    )
    name: Mapped[str] = mapped_column(Text(), nullable=False)
    display_name: Mapped[str | None] = mapped_column(Text(), nullable=True)
    bio: Mapped[str | None] = mapped_column(Text(), nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text(), nullable=True)

    names: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
    pronouns: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
    fields: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])

    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    user: Mapped[User] = relationship(back_populates="members", lazy="immediate")

    __table_args__ = (
        Index(
            "members_user_name_idx", "user_id", func.lower(text("name")), unique=True
        ),
    )
@@ -1,5 +1,5 @@
from redis import asyncio as aioredis

from foxnouns.settings import REDIS_URL

redis = aioredis.from_url(REDIS_URL)
@@ -1,110 +1,110 @@
import os
import threading
from datetime import datetime, timezone
from random import randrange

_local = threading.local()


def _get_increment() -> int:
    if not hasattr(_local, "increment"):
        _local.increment = randrange(0, 4095)

    increment = _local.increment
    _local.increment += 1
    return increment


class Snowflake:
    """A Snowflake ID (https://en.wikipedia.org/wiki/Snowflake_ID).
    This class wraps an integer and adds convenience functions."""

    EPOCH = 1_640_995_200_000  # 2022-01-01 at 00:00:00 UTC

    _raw: int

    def __init__(self, src: int):
        self._raw = src

    def __str__(self) -> str:
        return str(self.id)

    def __repr__(self) -> str:
        return f"Snowflake<{self.id}, {self.process}, {self.thread}, {self.increment}, {self.timestamp}>"

    def __int__(self) -> int:
        return self._raw

    def __float__(self) -> float:
        return float(self._raw)

    def __lt__(self, y: "Snowflake"):
        return self.id < y.id

    def __le__(self, y: "Snowflake"):
        return self.id <= y.id

    def __eq__(self, y: "Snowflake"):
        return self.id == y.id

    def __ne__(self, y: "Snowflake"):
        return self.id != y.id

    def __gt__(self, y: "Snowflake"):
        return self.id > y.id

    def __ge__(self, y: "Snowflake"):
        return self.id >= y.id

    @property
    def id(self) -> int:
        """The raw integer value of the snowflake."""
        return self._raw

    @property
    def time(self) -> datetime:
        """The time embedded into the snowflake."""
        return datetime.fromtimestamp(self.timestamp, tz=timezone.utc)

    @property
    def timestamp(self) -> float:
        """The unix timestamp embedded into the snowflake."""
        return ((self._raw >> 22) + self.EPOCH) / 1000

    @property
    def process(self) -> int:
        """The process ID embedded into the snowflake."""
        return (self._raw & 0x3E0000) >> 17

    @property
    def thread(self) -> int:
        """The thread ID embedded into the snowflake."""
        return (self._raw & 0x1F000) >> 12

    @property
    def increment(self) -> int:
        """The increment embedded into the snowflake."""
        return self._raw & 0xFFF

    @classmethod
    def generate(cls, time: datetime | None = None):
        """Generates a new snowflake.
        If `time` is set, use that time for the snowflake, otherwise, use the current time.
        """

        process_id = os.getpid()
        thread_id = threading.get_native_id()
        increment = _get_increment()
        now = time if time else datetime.now(tz=timezone.utc)
        timestamp = round(now.timestamp() * 1000) - cls.EPOCH

        return cls(
            timestamp << 22
            | (process_id % 32) << 17
            | (thread_id % 32) << 12
            | (increment % 4096)
        )

    @classmethod
    def generate_int(cls, time: datetime | None = None):
        return cls.generate(time).id
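As a quick sanity check of the bit layout above (timestamp in bits 22 and up, process in bits 17-21, thread in bits 12-16, increment in bits 0-11), here is a hypothetical decode round trip, not part of the commit itself:

from datetime import datetime, timezone

# Sketch: the properties just mask and shift the raw integer back apart.
flake = Snowflake.generate()
raw = int(flake)
assert flake.increment == raw & 0xFFF
assert flake.thread == (raw & 0x1F000) >> 12
assert flake.process == (raw & 0x3E0000) >> 17
assert flake.time == datetime.fromtimestamp(
    ((raw >> 22) + Snowflake.EPOCH) / 1000, tz=timezone.utc
)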
@@ -1,17 +1,17 @@
from sqlalchemy import URL, create_engine
from sqlalchemy.orm import sessionmaker

from foxnouns.settings import DATABASE, ECHO_SQL

SYNC_DATABASE_URL = URL.create(
    "postgresql+psycopg",
    username=DATABASE["USER"],
    password=DATABASE["PASSWORD"],
    host=DATABASE["HOST"],
    database=DATABASE["NAME"],
)

engine = create_engine(SYNC_DATABASE_URL, echo=ECHO_SQL)
session = sessionmaker(engine)

__all__ = [engine, session]
@@ -1,117 +1,117 @@
import enum
from datetime import datetime
from typing import TYPE_CHECKING, Any

from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, Text
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship

from .base import Base
from .snowflake import Snowflake

if TYPE_CHECKING:
    from .member import Member


class User(Base):
    __tablename__ = "users"

    id: Mapped[int] = mapped_column(
        BigInteger(), primary_key=True, default=Snowflake.generate_int
    )
    username: Mapped[str] = mapped_column(Text(), unique=True, nullable=False)
    display_name: Mapped[str | None] = mapped_column(Text(), nullable=True)
    bio: Mapped[str | None] = mapped_column(Text(), nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text(), nullable=True)

    names: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
    pronouns: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])
    fields: Mapped[list[Any]] = mapped_column(JSONB(), nullable=False, default=[])

    tokens: Mapped[list["Token"]] = relationship(
        back_populates="user", cascade="all, delete-orphan"
    )
    auth_methods: Mapped[list["AuthMethod"]] = relationship(
        back_populates="user", cascade="all, delete-orphan"
    )
    members: Mapped[list["Member"]] = relationship(
        back_populates="user", cascade="all, delete-orphan"
    )

    def __repr__(self):
        return f"User(id={self.id!r}, username={self.username!r})"


class Token(Base):
    __tablename__ = "tokens"

    id: Mapped[int] = mapped_column(
        BigInteger(), primary_key=True, default=Snowflake.generate_int
    )
    expires_at: Mapped[datetime] = mapped_column(DateTime(), nullable=False)
    scopes: Mapped[list[str]] = mapped_column(ARRAY(Text), nullable=False)

    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    user: Mapped[User] = relationship(back_populates="tokens", lazy="immediate")

    def __repr__(self):
        return f"Token(id={self.id!r}, user={self.user_id!r})"

    def has_scope(self, scope: str):
        """Returns whether this token can be used for the given scope."""

        # `*` is a special scope for site tokens, which grants access to all endpoints.
        if "*" in self.scopes:
            return True

        # Some scopes have sub-scopes, indicated by a `.` (i.e. `user.edit` is contained in `user`)
        # Tokens can have these narrower scopes given to them, or the wider, more privileged scopes
        # This way, both `user` and `user.edit` tokens will grant access to `user.edit` endpoints.
        return scope in self.scopes or scope.split(".")[0] in self.scopes


class AuthType(enum.IntEnum):
    DISCORD = 1
    GOOGLE = 2
    TUMBLR = 3
    FEDIVERSE = 4
    EMAIL = 5


class AuthMethod(Base):
    __tablename__ = "auth_methods"

    id: Mapped[int] = mapped_column(
        BigInteger(), primary_key=True, default=Snowflake.generate_int
    )
    auth_type: Mapped[AuthType] = mapped_column(Integer(), nullable=False)

    remote_id: Mapped[str] = mapped_column(Text(), nullable=False)
    remote_username: Mapped[str | None] = mapped_column(Text(), nullable=True)

    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    user: Mapped[User] = relationship(back_populates="auth_methods", lazy="immediate")

    fediverse_app_id: Mapped[int] = mapped_column(
        ForeignKey("fediverse_apps.id"), nullable=True
    )
    fediverse_app: Mapped["FediverseApp"] = relationship(lazy="immediate")


class FediverseInstanceType(enum.IntEnum):
    MASTODON_API = 1
    MISSKEY_API = 2


class FediverseApp(Base):
    __tablename__ = "fediverse_apps"

    id: Mapped[int] = mapped_column(
        BigInteger(), primary_key=True, default=Snowflake.generate_int
    )
    instance: Mapped[str] = mapped_column(Text(), unique=True, nullable=False)
    client_id: Mapped[str] = mapped_column(Text(), nullable=False)
    client_secret: Mapped[str] = mapped_column(Text(), nullable=False)
    instance_type: Mapped[FediverseInstanceType] = mapped_column(
        Integer(), nullable=False
    )
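The scope rules in `has_scope` above (a `*` wildcard plus `.`-separated sub-scopes that fall back to their parent) can be illustrated with a small sketch, assuming tokens constructed in memory rather than loaded from the database:

# Sketch: a parent scope covers its sub-scopes; `*` covers everything.
t = Token(scopes=["user"])       # hypothetical in-memory token
assert t.has_scope("user.edit")  # "user.edit".split(".")[0] == "user" is in scopes
assert not t.has_scope("member.create")
assert Token(scopes=["*"]).has_scope("member.create")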
@@ -1,89 +1,89 @@
import datetime

from itsdangerous import BadSignature
from itsdangerous.url_safe import URLSafeTimedSerializer
from quart import g
from sqlalchemy import insert, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from foxnouns.exceptions import ErrorCode, ForbiddenError
from foxnouns.settings import SECRET_KEY

from .member import Member
from .user import Token, User


async def user_from_ref(session: AsyncSession, user_ref: str):
    """Returns a user from a `user_ref` value. If `user_ref` is `@me`, returns the current user.
    Otherwise, tries to interpret `user_ref` as a snowflake ID, falling back to a username lookup.
    """
    query = select(User).options(selectinload(User.members))

    if user_ref == "@me":
        if "user" in g:
            if g.token.has_scope("user.read"):
                query = query.where(User.id == g.user.id)
            else:
                raise ForbiddenError(
                    "Missing scope 'user.read'", type=ErrorCode.MissingScope
                )
        else:
            raise ForbiddenError("Not authenticated")
    else:
        try:
            id = int(user_ref)
            query = query.where(User.id == id)
        except ValueError:
            query = query.where(User.username == user_ref)

    return await session.scalar(query)


async def user_members(session: AsyncSession, user: User):
    query = select(Member).where(Member.user_id == user.id)

    res = await session.scalars(query)
    return res.all()


serializer = URLSafeTimedSerializer(SECRET_KEY)


def generate_token(token: Token):
    return serializer.dumps(token.id)


async def create_token(session: AsyncSession, user: User, scopes: list[str] = ["*"]):
    expires = datetime.datetime.now() + datetime.timedelta(days=90)
    query = (
        insert(Token)
        .values(user_id=user.id, expires_at=expires, scopes=scopes)
        .returning(Token)
    )
    return await session.scalar(query)


async def validate_token(session: AsyncSession, header: str) -> tuple[Token, User]:
    try:
        token_id = serializer.loads(header)
    except BadSignature:
        raise ForbiddenError("Invalid token", type=ErrorCode.InvalidToken)

    row = (
        await session.execute(
            select(Token, User).join(Token.user).where(Token.id == token_id)
        )
    ).first()

    if not row or not row.Token:
        raise ForbiddenError("Invalid token", type=ErrorCode.InvalidToken)

    if row.Token.expires_at < datetime.datetime.now():
        raise ForbiddenError("Token has expired", type=ErrorCode.InvalidToken)

    return (row.Token, row.User)


def is_self(user: User) -> bool:
    return "user" in g and g.user.id == user.id
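The token strings produced by `generate_token` above are just the token's snowflake ID run through itsdangerous' `URLSafeTimedSerializer`, so validation is a signature check followed by a database lookup. A minimal round-trip sketch with a hypothetical key, not the project's real `SECRET_KEY`:

from itsdangerous.url_safe import URLSafeTimedSerializer

s = URLSafeTimedSerializer("example-secret")  # hypothetical key
header = s.dumps(123456789)           # what generate_token() hands to the client
assert s.loads(header) == 123456789   # what validate_token() recovers; a tampered
                                      # header raises BadSignature instead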
45
foxnouns/decorators.py
Normal file
@@ -0,0 +1,45 @@
from typing import Any
from functools import wraps

from quart import g

from foxnouns.exceptions import ErrorCode, ForbiddenError, UnsupportedEndpointError


def require_auth(*, scope: str | None = None):
    """Decorator that requires a token with the given scopes.
    If no token is given or the required scopes aren't set on it, execution is aborted."""

    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            if "user" not in g or "token" not in g:
                raise ForbiddenError("Not authenticated", type=ErrorCode.Forbidden)

            if scope and not g.token.has_scope(scope):
                raise ForbiddenError(
                    f"Missing scope '{scope}'", type=ErrorCode.MissingScope
                )

            return await func(*args, **kwargs)

        return wrapper

    return decorator


def require_config_key(*, keys: list[Any]):
    """Decorator that requires one or more config keys to be set.
    If any of them are None, execution is aborted."""

    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            for key in keys:
                if not key:
                    raise UnsupportedEndpointError()
            return await func(*args, **kwargs)

        return wrapper

    return decorator
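Usage of the new decorators follows the pattern already visible in the blueprints: `require_auth` sits between the route decorator and the validators, so the auth check aborts the request before any body parsing happens. A short sketch, assuming a hypothetical route on an existing blueprint `bp`:

@bp.get("/api/v2/example", host=BASE_DOMAIN)  # hypothetical route
@require_auth(scope="user.read")  # raises ForbiddenError / MissingScope first
async def example():
    return {"id": g.user.id}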
@@ -1,82 +1,93 @@
import enum


class ErrorCode(enum.IntEnum):
    BadRequest = 400
    Forbidden = 403
    NotFound = 404
    MethodNotAllowed = 405
    TooManyRequests = 429
    InternalServerError = 500  # catch-all code for unknown errors

    # Login/authorize error codes
    InvalidState = 1001
    InvalidOAuthCode = 1002
    InvalidToken = 1003  # a token was supplied, but it is invalid
    InviteRequired = 1004
    InvalidTicket = 1005  # invalid signup ticket
    InvalidUsername = 1006  # invalid username (when signing up)
    UsernameTaken = 1007  # username taken (when signing up)
    InvitesDisabled = 1008  # invites are disabled (unneeded)
    InviteLimitReached = 1009  # invite limit reached (when creating invites)
    InviteAlreadyUsed = 1010  # invite already used (when signing up)
    DeletionPending = 1011  # own user deletion pending, returned with undo code
    RecentExport = 1012  # latest export is too recent
    UnsupportedInstance = 1013  # unsupported fediverse software
    AlreadyLinked = 1014  # user already has linked account of the same type
    NotLinked = 1015  # user already doesn't have a linked account
    LastProvider = (
        1016  # unlinking provider would leave account with no authentication method
    )
    InvalidCaptcha = 1017  # invalid or missing captcha response
    MissingScope = 1018  # missing the required scope for this endpoint

    # User-related error codes
    UserNotFound = 2001
    MemberListPrivate = 2002
    FlagLimitReached = 2003
    RerollingTooQuickly = 2004

    # Member-related error codes
    MemberNotFound = 3001
    MemberLimitReached = 3002
    MemberNameInUse = 3003
    NotOwnMember = 3004

    # General request error codes
    RequestTooBig = 4001
    MissingPermissions = 4002

    # Moderation related error codes
    ReportAlreadyHandled = 5001
    NotSelfDelete = 5002


class ExpectedError(Exception):
    msg: str
    type: ErrorCode
    status_code: int = 500

    def __init__(self, msg: str, type: ErrorCode):
        self.msg = msg
        self.type = type
        super().__init__(msg)

    def __str__(self):
        return f"{self.__class__.__name__}({self.msg})"


class NotFoundError(ExpectedError):
    status_code = 404

    def __init__(self, msg: str, type=ErrorCode.NotFound):
        self.type = type
        super().__init__(msg, type)


class ForbiddenError(ExpectedError):
    status_code = 403

    def __init__(self, msg: str, type=ErrorCode.Forbidden):
        self.type = type
        super().__init__(msg, type)


class UnsupportedEndpointError(ExpectedError):
    status_code = 404

    def __init__(self):
        self.type = ErrorCode.NotFound
        super().__init__(
            "Endpoint is not supported on this instance",
            type=ErrorCode.NotFound,
        )

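The split between status_code (the HTTP status) and type (the machine-readable code) suggests a single error handler that serializes any ExpectedError. A minimal sketch, with assumed JSON field names; the commit's actual handler is not shown in this diff:

from quart import Quart, jsonify

from foxnouns.exceptions import ExpectedError

app = Quart(__name__)


@app.errorhandler(ExpectedError)
async def handle_expected_error(err: ExpectedError):
    # For codes below 1000 the enum value doubles as the HTTP status,
    # so both the numeric code and the status are returned.
    return jsonify({"code": int(err.type), "message": err.msg}), err.status_code
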
@@ -1,21 +1,21 @@
from pydantic import BaseModel, field_validator


class BasePatchModel(BaseModel):
    model_config = {"from_attributes": True}

    def is_set(self, key: str) -> bool:
        return key in self.model_fields_set


class BaseSnowflakeModel(BaseModel):
    """A base model with a Snowflake ID that is serialized as a string.

    Also sets `model_config.from_attributes` to `True`."""

    id: str

    @field_validator("id", mode="before")
    def transform_id_to_str(cls, value) -> str:
        return str(value)

    model_config = {"from_attributes": True}

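is_set exists to tell "field omitted" apart from "field explicitly set to null" in PATCH bodies, which None defaults alone cannot do. A short sketch using the MemberPatchModel defined later in this diff (the module path is assumed):

from foxnouns.models.member import MemberPatchModel

patch = MemberPatchModel.model_validate({"bio": None})
patch.is_set("bio")   # True: the client explicitly cleared the bio
patch.is_set("name")  # False: the name was omitted, so leave it unchanged
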
@@ -1,17 +1,17 @@
from pydantic import BaseModel, Field


class FieldEntry(BaseModel):
    value: str = Field(max_length=128)
    status: str


class ProfileField(BaseModel):
    name: str = Field(max_length=128)
    entries: list[FieldEntry]


class PronounEntry(BaseModel):
    value: str = Field(max_length=128)
    status: str
    display: str | None = Field(max_length=128, default=None)

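These length limits are enforced at validation time. A quick sketch of an accepted and a rejected entry (the status values and module path are assumptions, since allowed statuses aren't defined in this diff):

from pydantic import ValidationError

from foxnouns.models.fields import PronounEntry

PronounEntry(value="she/her", status="favourite", display="she")  # accepted

try:
    PronounEntry(value="x" * 200, status="okay")  # value exceeds max_length=128
except ValidationError as e:
    print(e.error_count())  # 1
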
@@ -1,32 +1,32 @@
from pydantic import Field, field_validator

from . import BasePatchModel
from .fields import FieldEntry, ProfileField, PronounEntry
from .user import BaseMemberModel, BaseUserModel


class FullMemberModel(BaseMemberModel):
    user: BaseUserModel


class MemberPatchModel(BasePatchModel):
    name: str | None = Field(
        min_length=1,
        max_length=100,
        default=None,
        pattern=r"^[^@\?!#\/\\\[\]\"\{\}'$%&()+<=>^|~`,\*]{1,100}$",
    )
    bio: str | None = Field(max_length=1024, default=None)

    avatar: str | None = Field(max_length=1_000_000, default=None)

    names: list[FieldEntry] = Field(default=[])
    pronouns: list[PronounEntry] = Field(default=[])
    fields: list[ProfileField] = Field(default=[])

    @field_validator("name")
    @classmethod
    def check_name(cls, value):
        if value in [".", "..", "edit"]:
            raise ValueError("Name is not allowed")
        return value

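The name checks run in two stages: the regex rejects reserved characters, then check_name rejects names that would collide with frontend routes. A sketch (module path assumed):

from pydantic import ValidationError

from foxnouns.models.member import MemberPatchModel

MemberPatchModel(name="Kit")  # accepted

try:
    MemberPatchModel(name="edit")  # passes the regex, rejected by check_name
except ValidationError:
    pass

try:
    MemberPatchModel(name="a@b")  # "@" is rejected by the regex itself
except ValidationError:
    pass
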
@@ -1,56 +1,56 @@
from pydantic import Field

from . import BaseSnowflakeModel
from .fields import FieldEntry, ProfileField, PronounEntry


class BaseUserModel(BaseSnowflakeModel):
    name: str = Field(alias="username")
    display_name: str | None
    bio: str | None
    avatar: str | None

    names: list[FieldEntry] = Field(default=[])
    pronouns: list[PronounEntry] = Field(default=[])
    fields: list[ProfileField] = Field(default=[])


class UserModel(BaseUserModel):
    members: list["BaseMemberModel"] = Field(default=[])


class BaseMemberModel(BaseSnowflakeModel):
    name: str
    display_name: str | None
    bio: str | None
    avatar: str | None

    names: list[FieldEntry] = Field(default=[])
    pronouns: list[PronounEntry] = Field(default=[])
    fields: list[ProfileField] = Field(default=[])


class SelfUserModel(UserModel):
    pass


def check_username(value):
    if not value:
        return value

    if value.lower() in (
        "..",
        "admin",
        "administrator",
        "mod",
        "moderator",
        "api",
        "page",
        "pronouns",
        "settings",
        "pronouns.cc",
        "pronounscc",
    ):
        raise ValueError("Username is not allowed")

    return value

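Two behaviours worth noting from the base classes: the username alias and the before-mode ID validator. A sketch (module path assumed):

from foxnouns.models.user import BaseUserModel

user = BaseUserModel.model_validate(
    {"id": 123, "username": "sam", "display_name": None, "bio": None, "avatar": None}
)
user.name  # "sam", populated via the "username" alias
user.id    # "123", coerced to a string by transform_id_to_str
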
@@ -1,41 +1,45 @@
from environs import Env

# read .env file
env = Env()
env.read_env()

# Format: postgresql+{driver}//{user}:{password}@{host}/{name}
# Note that the driver is set by the application.
with env.prefixed("DATABASE_"):
    DATABASE = {
        "USER": env("USER"),
        "PASSWORD": env("PASSWORD"),
        "HOST": env("HOST"),
        "NAME": env("NAME"),
    }

# The Redis database used for Celery and ephemeral storage.
REDIS_URL = env("REDIS_URL", "redis://localhost")

with env.prefixed("MINIO_"):
    MINIO = {
        "ENDPOINT": env("ENDPOINT"),
        "ACCESS_KEY": env("ACCESS_KEY"),
        "SECRET_KEY": env("SECRET_KEY"),
        "BUCKET": env("BUCKET"),
        "SECURE": env.bool("SECURE", True),
        "REGION": env("REGION", "auto"),
    }

# Discord OAuth credentials. If these are not set the Discord OAuth endpoints will not work.
DISCORD_CLIENT_ID = env("DISCORD_CLIENT_ID", None)
DISCORD_CLIENT_SECRET = env("DISCORD_CLIENT_SECRET", None)

# The base domain the API is served on. This must be set.
BASE_DOMAIN = env("BASE_DOMAIN")
# The base domain for short URLs.
SHORT_DOMAIN = env("SHORT_DOMAIN", "prns.localhost")
# The base URL used for the frontend. This will usually be the same as BASE_DOMAIN prefixed with https://.
FRONTEND_BASE = env("FRONTEND_DOMAIN", f"https://{BASE_DOMAIN}")

# Secret key for signing tokens, generate with (for example) `openssl rand -base64 32`
SECRET_KEY = env("SECRET_KEY")

# Whether to echo SQL statements to the logs.
ECHO_SQL = env.bool("ECHO_SQL", False)

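For reference, a minimal .env that satisfies the required settings above could look like this; every value is a placeholder, not part of this commit:

DATABASE_USER=foxnouns
DATABASE_PASSWORD=foxnouns
DATABASE_HOST=localhost
DATABASE_NAME=foxnouns
MINIO_ENDPOINT=localhost:9000
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin
MINIO_BUCKET=foxnouns
BASE_DOMAIN=pronouns.localhost
SECRET_KEY=generate-with-openssl-rand-base64-32
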
@@ -1,155 +1,155 @@
import base64
import hashlib
from io import BytesIO

import pyvips
from celery import Celery
from celery.utils.log import get_task_logger
from minio import Minio
from sqlalchemy import select, update

from foxnouns.db import Member, User
from foxnouns.db.sync import session
from foxnouns.settings import MINIO, REDIS_URL

app = Celery("tasks", broker=REDIS_URL)

logger = get_task_logger(__name__)

minio = Minio(
    MINIO["ENDPOINT"],
    access_key=MINIO["ACCESS_KEY"],
    secret_key=MINIO["SECRET_KEY"],
    secure=MINIO["SECURE"],
    region=MINIO["REGION"],
)
bucket = MINIO["BUCKET"]


def convert_avatar(uri: str) -> bytes:
    """Converts a base64 data URI into a WebP image.
    Images are resized and cropped to 512x512 and exported with quality 95.
    Only PNG, WebP, and JPEG images are allowed as input."""

    if not uri.startswith("data:image/"):
        raise ValueError("Not a data URI")

    content_type, encoded = uri.removeprefix("data:").split(";base64,", 1)
    if content_type not in ["image/png", "image/webp", "image/jpeg"]:
        raise ValueError("Invalid content type for image")

    img = pyvips.Image.thumbnail_buffer(
        base64.b64decode(encoded),
        512,
        height=512,
        size=pyvips.Size.BOTH,
        crop=pyvips.Interesting.CENTRE,
    )
    return img.write_to_buffer(".webp", Q=95)


@app.task
def process_user_avatar(user_id: int, avatar: str) -> None:
    """Processes an avatar string, uploads it to S3, and updates the user's avatar hash.
    Also deletes the old avatar if one was already set."""

    with session() as conn:
        user = conn.scalar(select(User).where(User.id == user_id))
    if not user:
        raise ValueError("process_user_avatar was passed the ID of a nonexistent user")

    img = convert_avatar(avatar)
    hash = hashlib.new("sha256", data=img).hexdigest()
    old_hash = user.avatar

    minio.put_object(
        bucket,
        f"users/{user_id}/avatars/{hash}.webp",
        BytesIO(img),
        len(img),
        "image/webp",
    )

    with session() as conn:
        conn.execute(update(User).values(avatar=hash).where(User.id == user_id))
        conn.commit()

    if old_hash and old_hash != hash:
        minio.remove_object(bucket, f"users/{user_id}/avatars/{old_hash}.webp")


@app.task
def delete_user_avatar(user_id: int) -> None:
    """Deletes a user's avatar."""

    with session() as conn:
        user = conn.scalar(select(User).where(User.id == user_id))
    if not user:
        raise ValueError("delete_user_avatar was passed the ID of a nonexistent user")
    if not user.avatar:
        logger.info(
            "delete_user_avatar was called for a user with a null avatar (%d)", user_id
        )
        return

    minio.remove_object(bucket, f"users/{user_id}/avatars/{user.avatar}.webp")

    with session() as conn:
        conn.execute(update(User).values(avatar=None).where(User.id == user_id))
        conn.commit()


@app.task
def process_member_avatar(member_id: int, avatar: str) -> None:
    """Processes an avatar string, uploads it to S3, and updates the member's avatar hash.
    Also deletes the old avatar if one was already set."""

    with session() as conn:
        member = conn.scalar(select(Member).where(Member.id == member_id))
    if not member:
        raise ValueError(
            "process_member_avatar was passed the ID of a nonexistent member"
        )

    img = convert_avatar(avatar)
    hash = hashlib.new("sha256", data=img).hexdigest()
    old_hash = member.avatar

    minio.put_object(
        bucket,
        f"members/{member_id}/avatars/{hash}.webp",
        BytesIO(img),
        len(img),
        "image/webp",
    )

    with session() as conn:
        conn.execute(update(Member).values(avatar=hash).where(Member.id == member_id))
        conn.commit()

    if old_hash and old_hash != hash:
        minio.remove_object(bucket, f"members/{member_id}/avatars/{old_hash}.webp")


@app.task
def delete_member_avatar(member_id: int) -> None:
    """Deletes a member's avatar."""

    with session() as conn:
        member = conn.scalar(select(Member).where(Member.id == member_id))
    if not member:
        raise ValueError(
            "delete_member_avatar was passed the ID of a nonexistent member"
        )
    if not member.avatar:
        logger.info(
            "delete_member_avatar was called for a member with a null avatar (%d)",
            member_id,
        )
        return

    minio.remove_object(bucket, f"members/{member_id}/avatars/{member.avatar}.webp")

    with session() as conn:
        conn.execute(update(Member).values(avatar=None).where(Member.id == member_id))
        conn.commit()

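These tasks would be enqueued from the request handlers rather than called inline, so image conversion and S3 uploads never block a response. A sketch of the calling side (not part of this diff; the IDs and data URI are placeholders):

from foxnouns.tasks import delete_user_avatar, process_user_avatar

user_id = 12345  # hypothetical user ID from the request context
avatar_uri = "data:image/png;base64,AAAA"  # placeholder, not a real image

# Enqueue instead of calling directly, so the work runs on the Celery worker:
process_user_avatar.delay(user_id, avatar_uri)

# Clearing an avatar is also a task:
delete_user_avatar.delay(user_id)
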
@@ -1,32 +1,32 @@
export type User = {
  id: string;
  name: string;
  display_name: string | null;
  bio: string | null;
  avatar: string | null;

  names: FieldEntry[];
  pronouns: PronounEntry[];
  fields: ProfileField[];
};

export type FieldEntry = {
  value: string;
  status: string;
};

export type ProfileField = {
  name: string;
  entries: FieldEntry[];
};

export type PronounEntry = {
  value: string;
  status: string;
  display: string | null;
};

export type Meta = {
  users: number;
  members: number;
};

@@ -1,83 +1,83 @@
import type { Cookies, ServerLoadEvent } from "@sveltejs/kit";

export type FetchOptions = {
  fetchFn?: typeof fetch;
  token?: string;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  data?: any;
  version?: number;
  extraHeaders?: Record<string, string>;
};

/**
 * Fetch a path from the API and parse the response.
 * To make sure the request is authenticated in load functions,
 * pass `fetch` from the request object into opts.
 *
 * @param method The HTTP method, i.e. GET, POST, PATCH
 * @param path The path to request, minus the leading `/api/v2`
 * @param opts Extra options for this request
 * @returns T
 * @throws APIError
 */
export default async function request<T>(
  method: string,
  path: string,
  opts: FetchOptions = {},
): Promise<T> {
  const { token, data, version, extraHeaders } = opts;
  const fetchFn = opts.fetchFn ?? fetch;

  const resp = await fetchFn(`/api/v${version ?? 2}${path}`, {
    method,
    body: data ? JSON.stringify(data) : undefined,
    headers: {
      ...extraHeaders,
      ...(token ? { Authorization: token } : {}),
      "Content-Type": "application/json",
    },
  });

  if (resp.status < 200 || resp.status >= 400) throw await resp.json();
  return (await resp.json()) as T;
}

/**
 * Fetch a path from the API and discard the response.
 * To make sure the request is authenticated in load functions,
 * pass `fetch` from the request object into opts.
 *
 * @param method The HTTP method, i.e. GET, POST, PATCH
 * @param path The path to request, minus the leading `/api/v2`
 * @param opts Extra options for this request
 * @throws APIError
 */
export async function fastRequest(
  method: string,
  path: string,
  opts: FetchOptions = {},
): Promise<void> {
  const { token, data, version, extraHeaders } = opts;
  const fetchFn = opts.fetchFn ?? fetch;

  const resp = await fetchFn(`/api/v${version ?? 2}${path}`, {
    method,
    body: data ? JSON.stringify(data) : undefined,
    headers: {
      ...extraHeaders,
      ...(token ? { Authorization: token } : {}),
      "Content-Type": "application/json",
    },
  });

  if (resp.status < 200 || resp.status >= 400) throw await resp.json();
}

/**
 * Helper function to get a token from a request cookie.
 * Accepts either a Cookies object or a ServerLoadEvent (request) object.
 * @param s A Cookies or ServerLoadEvent object
 * @returns A token, or `undefined` if no token is set.
 */
export const getToken = (s: Cookies | ServerLoadEvent) =>
  "cookies" in s ? s.cookies.get("pronounscc-token") : s.get("pronounscc-token");

@@ -1,13 +1,13 @@
import request, { getToken } from "$lib/request";
import type { User, Meta } from "$lib/entities";

export async function load({ fetch, cookies }) {
  const meta = await request<Meta>("GET", "/meta", { fetchFn: fetch });

  let user;
  if (cookies.get("pronounscc-token")) {
    user = await request<User>("GET", "/users/@me", { fetchFn: fetch, token: getToken(cookies) });
  }

  return { meta, user, token: getToken(cookies) };
}

@@ -1,15 +1,15 @@
<script lang="ts">
  import "bootstrap/scss/bootstrap.scss";
  import "bootstrap-icons/font/bootstrap-icons.scss";
  import type { LayoutData } from "./$types";

  export let data: LayoutData;
</script>

{JSON.stringify(data.meta)}

{#if data.user}
  {JSON.stringify(data.user)}
{/if}

<slot />

3520 frontend/yarn.lock
File diff suppressed because it is too large
3032 poetry.lock (generated)
File diff suppressed because it is too large
142 pyproject.toml
@@ -1,71 +1,71 @@
[tool.poetry]
name = "foxnouns"
version = "0.1.0"
description = ""
authors = ["sam <sam@sleepycat.moe>"]
license = "Apache-2.0"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.11"
sqlalchemy = { extras = ["asyncio"], version = "^2.0.28" }
psycopg = "^3.1.18"
celery = { extras = ["redis"], version = "^5.3.6" }
quart = "^0.19.4"
# Temporary until a release containing this commit is made:
# https://github.com/pgjones/quart-schema/commit/9f4455a1363c6edd2b23b898c554e52a9ce6d00f
quart-schema = { git = "https://github.com/pgjones/quart-schema.git" }
# quart-schema = { extras = ["pydantic"], version = "^0.19.1" }
pydantic = "^2.6.3"
itsdangerous = "^2.1.2"
uvicorn = "^0.28.0"
asyncpg = "^0.29.0"
environs = "^11.0.0"
alembic = "^1.13.1"
quart-cors = "^0.7.0"
minio = "^7.2.5"
pyvips = "^2.2.2"
redis = "^5.0.3"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
ruff = "^0.3.4"

[tool.poetry.group.test]
optional = true

[tool.poetry.group.test.dependencies]
pytest = "^8.0.2"
pytest-asyncio = "^0.23.5.post1"

[tool.poe.tasks.dev]
help = "Run a development server with auto-reload"
cmd = "env QUART_APP=foxnouns.app:app quart --debug run --reload"

[tool.poe.tasks.celery]
help = "Run a Celery task worker"
cmd = "celery -A foxnouns.tasks worker"

[tool.poe.tasks.server]
help = "Run a production server"
cmd = "uvicorn 'foxnouns.app:app'"

[tool.poe.tasks.migrate]
help = "Migrate the database to the latest revision"
cmd = "alembic upgrade head"

[tool.poe.tasks]
test = "pytest"
lint = "ruff check"
format = "ruff format"
"check-imports" = "ruff check --select I"
"sort-imports" = "ruff check --select I --fix"

[tool.pytest.ini_options]
addopts = ["--import-mode=importlib"]

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

@@ -1,60 +1,60 @@
import pytest
import pytest_asyncio
from sqlalchemy import delete, text

from foxnouns.db import Base
from foxnouns.settings import DATABASE

# Override the database name to the testing database
DATABASE["NAME"] = f"{DATABASE['NAME']}_test"


def pytest_collection_modifyitems(items):
    """Ensure that all async tests use the same event loop."""

    pytest_asyncio_tests = (
        item for item in items if pytest_asyncio.is_async_test(item)
    )
    session_scope_marker = pytest.mark.asyncio(scope="session")
    for async_test in pytest_asyncio_tests:
        async_test.add_marker(session_scope_marker, append=False)


@pytest.fixture(scope="session", autouse=True)
def setup():
    """Migrate the testing database to the latest migration, and once the tests complete, clear the database again."""

    from alembic import command, config
    from foxnouns.db.sync import engine

    cfg = config.Config("alembic.ini")
    cfg.attributes["connection"] = engine.connect()
    command.upgrade(cfg, "head")

    yield

    with engine.begin() as session:
        Base.metadata.drop_all(session)
        session.execute(text("DROP TABLE alembic_version"))
        session.commit()


@pytest.fixture(scope="function", autouse=True)
def clean_tables_after_tests():
    """Clean tables after every test."""

    yield

    from foxnouns.db.sync import engine

    with engine.begin() as session:
        for table in reversed(Base.metadata.sorted_tables):
            session.execute(delete(table))
        session.commit()


@pytest_asyncio.fixture(scope="session", autouse=True)
async def app():
    from foxnouns.app import app

    return app

@@ -1,13 +1,13 @@
import pytest
from quart import Quart


@pytest.mark.asyncio
class TestUsers:
    async def test_get_me_returns_403_if_unauthenticated(self, app: Quart):
        resp = await app.test_client().get("/api/v2/users/@me")
        assert resp.status_code == 403

    async def test_get_users_returns_404_if_user_not_found(self, app: Quart):
        resp = await app.test_client().get("/api/v2/users/unknown_user")
        assert resp.status_code == 404