Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -243,6 +243,18 @@ pdm run ruff format .

All code needs to pass ruff formatting and linting before it can be merged.

### Logging

To add a logger to a new service or file, use the `LOGGER_NAME` helper function in `app/utilities/constants.py`:

```python
import logging

from app.utilities.constants import LOGGER_NAME

log = logging.getLogger(LOGGER_NAME("my_service"))
```

If you'd like to create a new logger name in the hierarchy, you'll need to add it to `alembic.ini` under the logging configuration section, or it won't show up. Follow the pre-existing `logger_uvicorn` entry as an example.

### Frontend

#### Prettier
Expand Down
12 changes: 12 additions & 0 deletions backend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -136,3 +136,15 @@ To apply the migration, run the following command:
```bash
pdm run alembic upgrade head
```

### Logging

To add a logger to a new service or file, use the `LOGGER_NAME` helper function in `app/utilities/constants.py`:

```python
import logging

from app.utilities.constants import LOGGER_NAME

log = logging.getLogger(LOGGER_NAME("my_service"))
```

If you'd like to create a new logger name in the hierarchy, you'll need to add it to `alembic.ini` under the logging configuration section, or it won't show up. Follow the pre-existing `logger_uvicorn` entry as an example.
13 changes: 10 additions & 3 deletions backend/alembic.ini
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# output_encoding = utf-8

# Updated in env.py using the POSTGRES_DATABASE_URL environment variable
# sqlalchemy.url =
# sqlalchemy.url =


[post_write_hooks]
Expand All @@ -83,8 +83,10 @@ ruff.executable = %(here)s/.venv/bin/ruff
ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration
# Every time you want to define a new sub-logger, you need to add it to loggers or it won't show up.
# Would recommend just using uvicorn."name of area you want to log" to identify a smaller scope
[loggers]
keys = root,sqlalchemy,alembic
keys = root,sqlalchemy,alembic,uvicorn

[handlers]
keys = console
Expand All @@ -107,12 +109,17 @@ level = INFO
handlers =
qualname = alembic

[logger_uvicorn]
level = INFO
handlers =
qualname = uvicorn

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
8 changes: 8 additions & 0 deletions backend/app/models/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
import logging

from alembic import command
from alembic.config import Config

from app.utilities.constants import LOGGER_NAME

# Make sure all models are here to reflect all current models
# when autogenerating new migration
from .Base import Base
Expand All @@ -10,8 +14,12 @@
# Used to avoid import errors for the models
__all__ = ["Base", "User", "Role"]

log = logging.getLogger(LOGGER_NAME("models"))


def run_migrations():
    """Apply all pending Alembic migrations up to the latest revision.

    Reads the Alembic configuration from ``alembic.ini`` in the current
    working directory and emulates ``alembic upgrade head``. Called on
    server startup so the database schema is brought current.
    """
    log.info("Running run_migrations in models/__init__ on server startup")

    alembic_cfg = Config("alembic.ini")
    # Emulates `alembic upgrade head` to migrate up to latest revision
    command.upgrade(alembic_cfg, "head")
12 changes: 6 additions & 6 deletions backend/app/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,18 @@
from contextlib import asynccontextmanager
from typing import Union

from backend.app.routes import send_email
from dotenv import load_dotenv
from fastapi import FastAPI

from . import models
from .routes import send_email, user
from .utilities.constants import LOGGER_NAME
from .utilities.firebase_init import initialize_firebase

load_dotenv()

# we need to load env variables before initialization code runs
from . import models # noqa: E402
from .routes import user # noqa: E402
from .utilities.firebase_init import initialize_firebase # noqa: E402

log = logging.getLogger("uvicorn")
log = logging.getLogger(LOGGER_NAME("server"))


@asynccontextmanager
Expand Down
3 changes: 2 additions & 1 deletion backend/app/services/implementations/user_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,13 @@
UserCreateResponse,
UserRole,
)
from app.utilities.constants import LOGGER_NAME


class UserService(IUserService):
def __init__(self, db: Session):
self.db = db
self.logger = logging.getLogger(__name__)
self.logger = logging.getLogger(LOGGER_NAME("user_service"))

async def create_user(self, user: UserCreateRequest) -> UserCreateResponse:
firebase_user = None
Expand Down
5 changes: 5 additions & 0 deletions backend/app/utilities/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# All application loggers hang off the uvicorn root logger so they pick up
# the handlers configured for it (see alembic.ini's logging section).
SERVER_LOGGER_NAME = "uvicorn"


def LOGGER_NAME(name: str):
    """Return the dotted name of a child logger under the uvicorn root.

    E.g. ``LOGGER_NAME("my_service")`` -> ``"uvicorn.my_service"``.
    """
    return ".".join((SERVER_LOGGER_NAME, name))
8 changes: 8 additions & 0 deletions backend/app/utilities/firebase_init.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,19 @@
import logging
import os

import firebase_admin
from firebase_admin import credentials

from app.utilities.constants import LOGGER_NAME

log = logging.getLogger(LOGGER_NAME("firebase_init"))


def initialize_firebase():
    """Initialize the firebase_admin SDK for the application.

    Loads the service-account credentials from ``serviceAccountKey.json``
    in the current working directory and registers the default Firebase
    app. Propagates any error raised by ``credentials.Certificate`` if the
    key file is missing or unreadable.
    """
    log.info("Running initialize_firebase")
    cwd = os.getcwd()
    # Key file is expected next to where the server is launched from.
    service_account_path = os.path.join(cwd, "serviceAccountKey.json")
    cred = credentials.Certificate(service_account_path)

    firebase_admin.initialize_app(cred)
    log.info("Finished initializing firebase")
23 changes: 18 additions & 5 deletions backend/migrations/env.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import os
from logging.config import fileConfig

Expand All @@ -9,27 +10,33 @@

load_dotenv()

log = logging.getLogger("alembic")
log.info("Entering env.py for alembic migration")
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
log.info("Finished setting up alembic config object")

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
fileConfig(config.config_file_name, disable_existing_loggers=False)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

log.info("Pulling model metadata")

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
config.set_main_option("sqlalchemy.url", os.environ["POSTGRES_DATABASE_URL"])
log.info("Finished migration env config setup")


def run_migrations_offline() -> None:
Expand Down Expand Up @@ -69,13 +76,19 @@ def run_migrations_online() -> None:
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
try:
log.info("Established database connection")
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)

with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
log.info("Finished running migrations in alembic env")
except Exception as e:
log.error(e)

with context.begin_transaction():
context.run_migrations()

log.info("Starting up migration env")

if context.is_offline_mode():
run_migrations_offline()
Expand Down
4 changes: 4 additions & 0 deletions backend/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ distribution = false
dev = "fastapi dev app/server.py"
precommit = "pre-commit run"
precommit-install = "pre-commit install"
dc-down = "docker-compose down -v"
dc-up = "docker-compose up -d"
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

FYI, Matt mentioned that we don't really need docker containers other than the database container. So even if you just do pdm run dev + docker compose up -d db for only the database container, that is enough. Maybe something to look into in another PR? If we don't need them we can just remove the other containers.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah that's a good idea, I'll look into that for another PR

docker-db = {composite = ["dc-down", "dc-up"]}
db-dev = {composite = ["docker-db", "dev"]}
revision = "alembic revision --autogenerate"
upgrade = "alembic upgrade head"

Expand Down