Initial commit
176 .gitignore vendored Normal file
@@ -0,0 +1,176 @@
# Created by https://www.toptal.com/developers/gitignore/api/python
# Edit at https://www.toptal.com/developers/gitignore?templates=python

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml

# ruff
.ruff_cache/

# LSP config files
pyrightconfig.json

# End of https://www.toptal.com/developers/gitignore/api/python
3 .idea/.gitignore generated vendored Normal file
@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml
6 .idea/inspectionProfiles/profiles_settings.xml generated Normal file
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
7 .idea/misc.xml generated Normal file
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Black">
    <option name="sdkName" value="Python 3.13 (survive)" />
  </component>
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (survive)" project-jdk-type="Python SDK" />
</project>
8 .idea/modules.xml generated Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/survive.iml" filepath="$PROJECT_DIR$/.idea/survive.iml" />
    </modules>
  </component>
</project>
17 .idea/survive.iml generated Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/.venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.13 (survive)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
  <component name="PyDocumentationSettings">
    <option name="format" value="PLAIN" />
    <option name="myDocStringFormat" value="Plain" />
  </component>
  <component name="TestRunnerService">
    <option name="PROJECT_TEST_RUNNER" value="py.test" />
  </component>
</module>
19 Dockerfile Normal file
@@ -0,0 +1,19 @@
FROM python:3.9-bookworm

WORKDIR /app

ENV TZ=Europe/Warsaw
ENV PORT=8000

RUN python -m venv .venv \
    && .venv/bin/pip install --upgrade pip

COPY ./requirements.txt .
RUN .venv/bin/pip install --no-cache-dir -r requirements.txt
RUN .venv/bin/pip install --no-cache-dir lxml

COPY . .

RUN chmod +x /app/entrypoint.sh

ENTRYPOINT ["/app/entrypoint.sh"]
30 README.md Normal file
@@ -0,0 +1,30 @@
# Quick Setup

## Run with Docker

```bash
# Build the Docker image
docker build -t myapp .

# Run the container
docker run -p 8000:8000 myapp
```

## Run locally

```bash
# Clone the repository, then create and activate a virtual environment
python3 -m venv .venv
source .venv/bin/activate      # Linux/macOS
# .venv\Scripts\activate       # Windows (cmd)

# Upgrade pip and install dependencies
pip install --upgrade pip
pip install -r requirements.txt

# Run migrations
alembic upgrade head

# Start the app
uvicorn src:app --host 0.0.0.0 --port 8000 --log-level info
```
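For reference, below is a minimal client sketch (not part of this commit) that exercises the API once the server is up. It assumes the server listens on localhost:8000 and that a user has already been created with `python main.py --add-user`; the credentials are placeholders. It uses `httpx`, which is already pinned in requirements.txt.

```python
# Hypothetical client sketch: log in, then call the waypoint and user endpoints.
import httpx

BASE_URL = "http://localhost:8000"

with httpx.Client(base_url=BASE_URL) as client:
    # POST /users/login returns {"access_token": ..., "token_type": "bearer"}
    resp = client.post("/users/login", json={"name": "alice", "password": "change-me"})
    resp.raise_for_status()
    token = resp.json()["access_token"]
    headers = {"Authorization": f"Bearer {token}"}

    # Protected endpoint: greets the authenticated user
    print(client.get("/users/info", headers=headers).json())

    # Create and list waypoints
    created = client.post("/maps/waypoints",
                          json={"name": "Base", "x": 1.0, "y": 2.0},
                          headers=headers)
    print(created.json())
    print(client.get("/maps/waypoints", headers=headers).json())
```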
147 alembic.ini Normal file
@@ -0,0 +1,147 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/src/alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .


# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = sqlite+pysqlite:///database.db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
6 config.json Normal file
@@ -0,0 +1,6 @@
{
  "jwt_secret": "secret",
  "db": {
    "connection_string": "sqlite:///database.sqlite3"
  }
}
BIN database.sqlite3 Normal file
Binary file not shown.
8 entrypoint.sh Normal file
@@ -0,0 +1,8 @@
#!/usr/bin/bash

source /app/.venv/bin/activate

cd /app/ || exit 1

alembic upgrade head
uvicorn src:app --host 0.0.0.0 --port "${PORT:-8000}" --log-level info
27 main.py Normal file
@@ -0,0 +1,27 @@
import argparse
import uvicorn
from src.db import SessionLocal
from src.users.models import User

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="User management console")
    parser.add_argument("--add-user", action="store_true", help="Add a new user to the database")
    parser.add_argument("--server", action="store_true", help="Run the FastAPI server")
    args = parser.parse_args()

    if args.add_user:
        name = input("Enter username: ").strip()
        password = input("Enter password: ").strip()
        if name and password:
            # User.add_user expects a database session as its first argument
            with SessionLocal() as session:
                User.add_user(session, name, password)
        else:
            print("Username and password cannot be empty.")

    elif args.server:
        uvicorn.run(
            "src:app",
            host="0.0.0.0",
            port=8000,
            reload=True
        )
    else:
        print("No action specified. Use --add-user to add a user or --server to run the server.")
51 requirements.txt Normal file
@@ -0,0 +1,51 @@
alembic==1.16.5
annotated-types==0.7.0
anyio==4.11.0
bcrypt==4.0.1
certifi==2025.8.3
cffi==2.0.0
click==8.3.0
cryptography==46.0.2
dnspython==2.8.0
email-validator==2.3.0
fastapi==0.118.0
fastapi-cli==0.0.13
fastapi-cloud-cli==0.3.0
greenlet==3.2.4
h11==0.16.0
httpcore==1.0.9
httptools==0.6.4
httpx==0.28.1
idna==3.10
Jinja2==3.1.6
jwt==1.4.0
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
mdurl==0.1.2
passlib==1.7.4
pycparser==2.23
pydantic==2.11.9
pydantic_core==2.33.2
Pygments==2.19.2
PyJWT==2.10.1
python-dotenv==1.1.1
python-multipart==0.0.20
PyYAML==6.0.3
rich==14.1.0
rich-toolkit==0.15.1
rignore==0.7.0
sentry-sdk==2.39.0
shellingham==1.5.4
sniffio==1.3.1
SQLAlchemy==2.0.43
sqlmodel==0.0.25
starlette==0.48.0
typer==0.19.2
typing-inspection==0.4.2
typing_extensions==4.15.0
urllib3==2.5.0
uvicorn==0.37.0
uvloop==0.21.0
watchfiles==1.1.0
websockets==15.0.1
9 src/__init__.py Normal file
@@ -0,0 +1,9 @@
from fastapi import FastAPI

app = FastAPI()

from src.users.router import users_router
from src.maps.router import maps_router

app.include_router(users_router, prefix="/users")
app.include_router(maps_router, prefix="/maps")
1 src/alembic/README Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
85 src/alembic/env.py Normal file
@@ -0,0 +1,85 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from sqlmodel import SQLModel
import sqlmodel

from src.db import Base
from src.utils.config import get

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

config.set_main_option('sqlalchemy.url', get('db', 'connection_string'))

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
29 src/alembic/script.py.mako Normal file
@@ -0,0 +1,29 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
48 src/alembic/versions/2607b8f9586f_.py Normal file
@@ -0,0 +1,48 @@
"""empty message

Revision ID: 2607b8f9586f
Revises:
Create Date: 2025-10-04 15:15:16.698698

"""
from typing import Sequence, Union

import sqlmodel
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '2607b8f9586f'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('password', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=True)
    op.create_table('waypoint',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('x', sa.Float(), nullable=False),
    sa.Column('y', sa.Float(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('waypoint')
    op.drop_index(op.f('ix_user_name'), table_name='user')
    op.drop_table('user')
    # ### end Alembic commands ###
16 src/db/__init__.py Normal file
@@ -0,0 +1,16 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base

from src.utils.config import get

engine = create_engine(get('db', 'connection_string'), echo=True)
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False)

Base = declarative_base()

def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
0 src/maps/__init__.py Normal file
10 src/maps/models.py Normal file
@@ -0,0 +1,10 @@
from typing import Optional
from sqlmodel import SQLModel, Field

class WaypointBase(SQLModel):
    name: str
    x: float
    y: float

class Waypoint(WaypointBase, table=True):
    id: Optional[int] = Field(default=None, primary_key=True, sa_column_kwargs={"autoincrement": True})
37 src/maps/router.py Normal file
@@ -0,0 +1,37 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List

from sqlmodel import select

from src.db import get_db
from src.maps.models import Waypoint
from src.maps.schemas import WaypointCreate, WaypointResponse
from src.utils.decorators import auth_required

maps_router = APIRouter()

@auth_required()
@maps_router.post("/waypoints", response_model=WaypointResponse)
def create_waypoint(waypoint: WaypointCreate, db: Session = Depends(get_db)):
    db_wp = Waypoint.model_validate(waypoint)
    db.add(db_wp)
    db.commit()
    db.refresh(db_wp)
    return db_wp


@auth_required()
@maps_router.get("/waypoints", response_model=List[WaypointResponse])
def get_waypoints(db: Session = Depends(get_db)):
    waypoints = db.execute(select(Waypoint)).scalars().all()
    return waypoints


@auth_required()
@maps_router.get("/waypoints/{waypoint_id}", response_model=WaypointResponse)
def get_waypoint(waypoint_id: int, db: Session = Depends(get_db)):
    wp = db.get(Waypoint, waypoint_id)
    if not wp:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Waypoint not found")
    return wp
12 src/maps/schemas.py Normal file
@@ -0,0 +1,12 @@
from pydantic import BaseModel, ConfigDict


class WaypointCreate(BaseModel):
    name: str
    x: float
    y: float

class WaypointResponse(WaypointCreate):
    id: int

    model_config = ConfigDict(from_attributes=True)
85 src/maps/tests.py Normal file
@@ -0,0 +1,85 @@
import pytest
from fastapi.testclient import TestClient
from sqlalchemy import StaticPool
from sqlmodel import SQLModel, create_engine, Session
from sqlalchemy.orm import sessionmaker

from src import app, maps_router
from src.maps.models import Waypoint
from src.db import get_db

# --- Setup in-memory SQLite ---
TEST_DATABASE_URL = "sqlite:///:memory:"
engine = create_engine(
    TEST_DATABASE_URL,
    connect_args={"check_same_thread": False},
    poolclass=StaticPool  # <-- crucial for in-memory DB
)
TestingSessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False)

# --- Override dependency ---
def override_get_db():
    db = TestingSessionLocal()
    try:
        yield db
    finally:
        db.close()

app.dependency_overrides[get_db] = override_get_db

# Include router (no need to check prefix)
app.include_router(maps_router, prefix="/maps")

client = TestClient(app)

# --- Fixture to create tables ---
@pytest.fixture(scope="module", autouse=True)
def setup_db():
    SQLModel.metadata.create_all(engine)
    yield
    SQLModel.metadata.drop_all(engine)

# --- Bypass auth decorator for tests ---
def fake_auth_dependency():
    return lambda: True

# Monkeypatch your auth_required to do nothing in tests
from src.utils.decorators import auth_required
auth_required = lambda *args, **kwargs: (lambda x: x)

# --- Tests ---
def test_create_waypoint():
    payload = {"name": "TestPoint", "x": 10.5, "y": 20.5}
    response = client.post("/maps/waypoints", json=payload)
    assert response.status_code == 200
    data = response.json()
    assert data["name"] == payload["name"]
    assert data["x"] == payload["x"]
    assert data["y"] == payload["y"]
    assert "id" in data

def test_get_waypoints():
    response = client.get("/maps/waypoints")
    assert response.status_code == 200
    data = response.json()
    assert isinstance(data, list)
    assert len(data) >= 1

def test_get_waypoint_by_id():
    # Create waypoint first
    payload = {"name": "Point1", "x": 1.0, "y": 2.0}
    create_resp = client.post("/maps/waypoints", json=payload)
    wp_id = create_resp.json()["id"]

    # Fetch by ID
    response = client.get(f"/maps/waypoints/{wp_id}")
    assert response.status_code == 200
    data = response.json()
    assert data["id"] == wp_id
    assert data["name"] == payload["name"]

def test_get_waypoint_not_found():
    response = client.get("/maps/waypoints/9999")
    assert response.status_code == 404
    data = response.json()
    assert data["detail"] == "Waypoint not found"
0 src/users/__init__.py Normal file
31 src/users/models.py Normal file
@@ -0,0 +1,31 @@
from sqlmodel import SQLModel, Field, Session
from passlib.context import CryptContext
from typing import Optional

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


class User(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(index=True, unique=True)
    password: str

    @staticmethod
    def make_password(plain_password: str) -> str:
        return pwd_context.hash(plain_password)

    def verify_password(self, plain_password: str) -> bool:
        return pwd_context.verify(plain_password, self.password)

    @classmethod
    def add_user(cls, session: Session, name: str, password: str):
        try:
            user = cls(name=name, password=cls.make_password(password))
            session.add(user)
            session.commit()
            session.refresh(user)
            print(f"User '{name}' added successfully.")
            return user
        except Exception as e:
            session.rollback()
            print(f"Error: Could not add user '{name}': {e}")
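A short illustrative snippet (assumed example, not part of the commit) of the password helpers on `User`: `make_password` produces a bcrypt hash via passlib, and `verify_password` checks a plaintext candidate against the stored hash. The name and password values are placeholders.

```python
# Hypothetical round trip of the User password helpers.
from src.users.models import User

hashed = User.make_password("change-me")          # bcrypt hash produced via passlib
user = User(name="alice", password=hashed)

assert user.verify_password("change-me") is True  # plaintext checked against the stored hash
assert user.verify_password("wrong") is False
```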
31 src/users/router.py Normal file
@@ -0,0 +1,31 @@
from fastapi import Request
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session

from src.db import get_db
from src.users.models import User
from src.users.schemas import LoginRequest
from src.utils.decorators import auth_required
from src.utils.jwt import create_access_token


users_router = APIRouter()


@users_router.post("/login")
def login(request: LoginRequest, db: Session = Depends(get_db)):
    user = db.query(User).filter(User.name == request.name).first()
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")

    if not user.verify_password(request.password):
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")

    access_token = create_access_token(data={"sub": user.id})
    return {"access_token": access_token, "token_type": "bearer"}

@users_router.get("/info")
@auth_required()
async def get_info(request: Request):
    user_name = request.state.user.name
    return {"message": f"Hello, {user_name}!"}
6 src/users/schemas.py Normal file
@@ -0,0 +1,6 @@
from pydantic import BaseModel


class LoginRequest(BaseModel):
    name: str
    password: str
0 src/utils/__init__.py Normal file
35 src/utils/config.py Normal file
@@ -0,0 +1,35 @@
import json
import os
from enum import Enum
from typing import Any


class Environment(Enum):
    PRODUCTION = 'production'
    DEVELOPMENT = 'development'


def get_config_file() -> dict:
    with open('config.json', 'r', encoding='utf-8') as f:
        return json.load(f)


def get(*keys: str) -> Any:
    env_var = '__'.join(keys).upper()
    if env_var in os.environ:
        return os.environ[env_var]

    config = get_config_file()
    for key in keys:
        if not isinstance(config, dict) or key not in config:
            raise KeyError(f"Key path {' -> '.join(keys)} not found in config.")
        config = config[key]

    return config

def get_environment() -> Environment:
    value = get('environment')
    try:
        return Environment(value)
    except ValueError:
        raise ValueError(f"Invalid environment value '{value}' in config. Must be one of {[e.value for e in Environment]}")
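For clarity, a small sketch (hypothetical values, not part of the commit) of the lookup order in `get()`: an environment variable named by joining the keys with `__` and upper-casing takes precedence over `config.json`.

```python
# Illustration of src.utils.config.get() lookup order (hypothetical override value).
# Assumes it runs from the project root so config.json is found.
import os
from src.utils.config import get

print(get("db", "connection_string"))   # from config.json: sqlite:///database.sqlite3

os.environ["DB__CONNECTION_STRING"] = "sqlite:///override.sqlite3"
print(get("db", "connection_string"))   # env var DB__CONNECTION_STRING now wins

print(get("jwt_secret"))                # single-key lookup: JWT_SECRET env var, else config.json
```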
88 src/utils/decorators.py Normal file
@@ -0,0 +1,88 @@
from functools import wraps
from fastapi import Request, HTTPException, status
import jwt

from src.utils.config import get
from src.users.models import User
from src.db import get_db
from sqlalchemy.orm import Session

SECRET_KEY = get('jwt_secret')
ALGORITHM = "HS256"

def auth_required():
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            request: Request | None = kwargs.get("request")

            # Try to locate Request in args if not found in kwargs
            if not request:
                for arg in args:
                    if isinstance(arg, Request):
                        request = arg
                        break

            if not request:
                # HTTPException (not a bare Exception) so FastAPI returns a proper 400 response
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="Request object not found"
                )

            auth_header = request.headers.get("Authorization")
            if not auth_header or not auth_header.startswith("Bearer "):
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authorization header missing or invalid",
                    headers={"WWW-Authenticate": "Bearer"}
                )

            token = auth_header.split(" ")[1]

            try:
                payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
                user_id = payload.get("sub")
                if not user_id:
                    raise HTTPException(
                        status_code=status.HTTP_401_UNAUTHORIZED,
                        detail="Token payload invalid",
                        headers={"WWW-Authenticate": "Bearer"}
                    )
            except jwt.ExpiredSignatureError:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Token has expired",
                    headers={"WWW-Authenticate": "Bearer"}
                )
            except jwt.InvalidTokenError:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Invalid token",
                    headers={"WWW-Authenticate": "Bearer"}
                )

            # Optional: Fetch the user from the DB and pass it to the route
            try:
                db: Session = kwargs.get("db") or next(get_db())
                user = db.query(User).filter(User.id == user_id).first()
            except Exception:
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail="Database error"
                )

            if not user:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="User not found",
                    headers={"WWW-Authenticate": "Bearer"}
                )

            # Inject user into request.state
            request.state.user = user

            return await func(*args, **kwargs)

        return wrapper

    return decorator
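A minimal sketch of wiring `auth_required()` onto a route (the router and path below are hypothetical; `/users/info` in src/users/router.py is the real example in this commit). The wrapper looks for a `fastapi.Request` among the handler's arguments and injects the authenticated user into `request.state`, so the endpoint must declare a `request: Request` parameter, and the router decorator goes on top so the wrapped (auth-checked) function is what gets registered.

```python
# Hypothetical protected endpoint, not part of this commit.
from fastapi import APIRouter, Request

from src.utils.decorators import auth_required

example_router = APIRouter()

@example_router.get("/me")          # router decorator on top: it registers the wrapped handler
@auth_required()                    # validates the Bearer token, loads the user from the DB
async def read_me(request: Request):
    # request.state.user is set by auth_required after successful validation
    return {"name": request.state.user.name}
```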
21 src/utils/jwt.py Normal file
@@ -0,0 +1,21 @@
from datetime import datetime, timedelta
import jwt

from src.utils.config import get

SECRET_KEY = get('jwt_secret')
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 6000

def create_access_token(data: dict, expires_delta: timedelta | None = None) -> str:
    to_encode = data.copy()

    if "sub" in to_encode:
        to_encode["sub"] = str(to_encode["sub"])

    expire = datetime.utcnow() + (expires_delta or timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES))
    to_encode.update({"exp": expire})

    token = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
    return token
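An illustrative round trip (hypothetical user id, not part of the commit), assuming the `jwt` module in use is PyJWT: `create_access_token` stringifies the `sub` claim and adds an `exp` claim before signing with the shared secret.

```python
# Hypothetical token round trip using the module's own constants.
from datetime import timedelta
import jwt

from src.utils.jwt import create_access_token, SECRET_KEY, ALGORITHM

token = create_access_token({"sub": 1}, expires_delta=timedelta(minutes=30))
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
print(payload["sub"])   # "1" - the subject is stringified before encoding
```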