diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 38d1023d..a30e9ec9 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -3,40 +3,30 @@
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
{
"name": "CopWatch",
-
// Update the 'dockerComposeFile' list if you have more compose files or use different names.
// The .devcontainer/docker-compose.yml file contains any overrides you need/want to make.
"dockerComposeFile": [
"../docker-compose.yml",
"docker-compose.yml"
],
-
// The 'service' property is the name of the service for the container that VS Code should
// use. Update this value and .devcontainer/docker-compose.yml to the real service name.
"service": "django",
-
// The optional 'workspaceFolder' property is the path VS Code should open by default when
// connected. This is typically a file mount in .devcontainer/docker-compose.yml
"workspaceFolder": "/code",
-
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
-
// Uncomment the next line if you want start specific services in your Docker Compose config.
// "runServices": [],
-
// Uncomment the next line if you want to keep your containers running after VS Code shuts down.
"shutdownAction": "stopCompose",
-
// Uncomment the next line to run commands after the container is created - for example installing curl.
"postCreateCommand": "sudo chown appuser /home/appuser/.cache /code/frontend/node_modules /code/venv /code/public /var/run/docker.sock",
-
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "appuser",
-
// A command to run each time the container is successfully started.
"postStartCommand": "git config --global --add safe.directory ${containerWorkspaceFolder}",
-
"customizations": {
"vscode": {
"extensions": [
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
index 1561ffe0..f8c09276 100755
--- a/.github/workflows/deploy.yaml
+++ b/.github/workflows/deploy.yaml
@@ -24,7 +24,7 @@ jobs:
echo "ENV_URL=https://nccopwatch.org/" >> $GITHUB_ENV
- uses: actions/setup-python@v4
with:
- python-version: '3.10'
+ python-version: '3.12'
cache: 'pip'
cache-dependency-path: 'requirements/*/*.txt'
- name: Install dependencies
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index 5ab5f730..c0e1a074 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -28,7 +28,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
- python-version: '3.10'
+ python-version: '3.12'
cache: 'pip'
cache-dependency-path: 'requirements/*/*.txt'
- name: Install dependencies
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ecf8bd74..4b116952 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,7 +3,7 @@ repos:
rev: 24.10.0
hooks:
- id: black
- language_version: python3.10
+ language_version: python3.12
exclude: migrations
- repo: https://github.com/PyCQA/flake8
rev: 7.1.1
diff --git a/Dockerfile b/Dockerfile
index ade43c2c..8703d0a2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,7 +7,7 @@ RUN npm install --silent
COPY frontend/ /code/
RUN npm run build
-FROM python:3.10-slim-bullseye AS base
+FROM python:3.12-slim-bullseye AS base
# Create a group and user to run our app
ARG APP_USER=appuser
@@ -95,7 +95,7 @@ ENTRYPOINT ["/code/docker-entrypoint.sh"]
# Start uWSGI
CMD ["newrelic-admin", "run-program", "uwsgi", "--single-interpreter", "--enable-threads", "--show-config"]
-FROM python:3.10-slim-bullseye AS dev
+FROM python:3.12-slim-bullseye AS dev
ARG USERNAME=appuser
ARG USER_UID=1000
diff --git a/Makefile b/Makefile
index 2ffca190..d7a9a229 100644
--- a/Makefile
+++ b/Makefile
@@ -9,7 +9,7 @@ update_requirements:
pip-compile --output-file=requirements/base/base.txt requirements/base/base.in
pip-compile --output-file=requirements/test/test.txt requirements/test/test.in
pip-compile --output-file=requirements/dev/dev.txt requirements/dev/dev.in
- pip-compile --upgrade --output-file=requirements/deploy/deploy.txt requirements/deploy/deploy.in
+ pip-compile --output-file=requirements/deploy/deploy.txt requirements/deploy/deploy.in
install_requirements:
@echo 'Installing pip-tools...'
diff --git a/docs/conf.py b/docs/conf.py
index 0fc87980..9c81fb56 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
#
# NC Traffic Stops documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 6 15:13:52 2014.
diff --git a/docs/dev-setup.rst b/docs/dev-setup.rst
index 8093c7e9..76c6a272 100755
--- a/docs/dev-setup.rst
+++ b/docs/dev-setup.rst
@@ -5,7 +5,7 @@ Below you will find basic setup and deployment instructions for the NC Traffic
Stops project. To begin you should have the following applications installed on
your local development system:
-- Python 3.10
+- Python 3.12
- NodeJS >= 12.6.0
- `pip >= 8 or so `_
- Postgres >= 16
@@ -85,8 +85,8 @@ To use ``psql`` locally, make sure you have the following env variables loaded
To setup your local environment you should create a virtualenv and install the
necessary requirements::
- $ which python3.11 # make sure you have Python 3.11 installed
- $ mkvirtualenv --python=`which python3.11` traffic-stops
+ $ which python3.12 # make sure you have Python 3.12 installed
+ $ mkvirtualenv --python=`which python3.12` traffic-stops
(traffic-stops)$ pip install -U pip
(traffic-stops)$ make setup
diff --git a/nc/admin.py b/nc/admin.py
index eae8686f..55db7913 100644
--- a/nc/admin.py
+++ b/nc/admin.py
@@ -1,16 +1,18 @@
-from ckeditor.widgets import CKEditorWidget
from django import forms
from django.contrib import admin
+from django_ckeditor_5.widgets import CKEditor5Widget
from nc.models import Agency, Resource, ResourceFile, StopSummary
+@admin.register(Agency)
class AgencyAdmin(admin.ModelAdmin):
list_display = ("name", "id", "census_profile_id")
search_fields = ("name",)
ordering = ("id",)
+@admin.register(StopSummary)
class StopSummaryAdmin(admin.ModelAdmin):
list_display = (
"id",
@@ -56,14 +58,15 @@ class InlineResourceFile(admin.StackedInline):
class ResourceForm(forms.ModelForm):
# https://django-ckeditor.readthedocs.io/en/latest/#widget
- title = forms.CharField(widget=CKEditorWidget())
- description = forms.CharField(widget=CKEditorWidget())
+ title = forms.CharField(widget=CKEditor5Widget())
+ description = forms.CharField(widget=CKEditor5Widget())
class Meta:
model = Resource
fields = "__all__"
+@admin.register(Resource)
class ResourceAdmin(admin.ModelAdmin):
fields = (
"agencies",
@@ -93,8 +96,3 @@ def save_model(self, request, obj, form, change):
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
form.instance.agencies.set(form.cleaned_data["agencies"], clear=True)
-
-
-admin.site.register(Agency, AgencyAdmin)
-admin.site.register(StopSummary, StopSummaryAdmin)
-admin.site.register(Resource, ResourceAdmin)
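Note on the nc/admin.py hunks above: the @admin.register decorators added at the top of the module take over for the admin.site.register(...) calls deleted at the bottom; both forms register the same ModelAdmin on the default admin site. A minimal sketch using names from this file:

from django.contrib import admin
from nc.models import Agency


@admin.register(Agency)  # registers AgencyAdmin on the default admin site
class AgencyAdmin(admin.ModelAdmin):
    list_display = ("name", "id", "census_profile_id")


# ...which is equivalent to the removed module-level call:
# admin.site.register(Agency, AgencyAdmin)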
diff --git a/nc/data/download_from_nc.py b/nc/data/download_from_nc.py
index e0877cc9..be11b7a9 100755
--- a/nc/data/download_from_nc.py
+++ b/nc/data/download_from_nc.py
@@ -31,18 +31,18 @@ def nc_download_and_unzip_data(destination, prefix="state-"):
# make sure destination exists or create a temporary directory
if not destination:
destination = tempfile.mkdtemp(prefix=prefix)
- logger.debug("Created temp directory {}".format(destination))
+ logger.debug(f"Created temp directory {destination}")
else:
if not os.path.exists(destination):
os.makedirs(destination)
- logger.info("Created {}".format(destination))
+ logger.info(f"Created {destination}")
zip_basename = date.today().strftime("NC_STOPS_Extract_%Y_%m_%d.zip")
zip_filename = os.path.join(destination, zip_basename)
# don't re-download data if raw data file already exists
if os.path.exists(zip_filename):
- logger.debug("{} exists, skipping download".format(zip_filename))
+ logger.debug(f"{zip_filename} exists, skipping download")
else:
- logger.debug("Downloading data to {}".format(zip_filename))
+ logger.debug(f"Downloading data to {zip_filename}")
nc_data_site = settings.NC_FTP_HOST
nc_data_file = "STOPS_Extract.zip"
nc_data_directory = "/TSTOPextract"
@@ -52,11 +52,11 @@ def nc_download_and_unzip_data(destination, prefix="state-"):
listing = ftps.retrlines("LIST", show_ftp_listing)
line = listing.split("\n")[0]
if not line.startswith("226 "): # server's "Transfer complete" message
- raise ValueError("Expected 226 response from ftp server, got %r" % listing)
+ raise ValueError(f"Expected 226 response from ftp server, got {listing!r}")
logger.info('Downloading "%s"...', nc_data_file)
with open(zip_filename, "wb") as f:
- ftps.retrbinary("RETR %s" % nc_data_file, f.write)
- logger.info('File written to "%s"' % zip_filename)
+ ftps.retrbinary(f"RETR {nc_data_file}", f.write)
+ logger.info(f'File written to "{zip_filename}"')
unzip_data(destination, zip_path=zip_filename)
return destination
diff --git a/nc/data/importer.py b/nc/data/importer.py
index 95dba374..c02e3343 100755
--- a/nc/data/importer.py
+++ b/nc/data/importer.py
@@ -159,8 +159,8 @@ def to_standard_csv(input_path, output_path):
quoting=csv.QUOTE_MINIMAL,
skipinitialspace=False,
)
- with open(input_path, "rt") as input:
- with open(output_path, "wt") as output:
+ with open(input_path) as input:
+ with open(output_path, "w") as output:
reader = csv.reader(input, dialect="nc_data_in")
writer = csv.writer(output, dialect="nc_data_out")
headings_written = False
@@ -186,20 +186,20 @@ def convert_to_csv(destination):
continue
csv_path = data_path.replace(".txt", ".csv")
if os.path.exists(csv_path):
- logger.info("{} already exists, skipping csv conversion".format(csv_path))
+ logger.info(f"{csv_path} already exists, skipping csv conversion")
continue
- logger.info("Converting {} > {}".format(data_path, csv_path))
+ logger.info(f"Converting {data_path} > {csv_path}")
# Edit source data .txt file in-place to remove NUL bytes
# (only seen in Stop.txt)
- call([r"sed -i 's/\x0//g' {}".format(data_path)], shell=True)
+ call([rf"sed -i 's/\x0//g' {data_path}"], shell=True)
to_standard_csv(data_path, csv_path)
data_count = line_count(data_path)
csv_count = line_count(csv_path)
if data_count == (csv_count - 1):
- logger.debug("CSV line count matches original data file: {}".format(data_count))
+ logger.debug(f"CSV line count matches original data file: {data_count}")
else:
- logger.error("DAT {}".format(data_count))
- logger.error("CSV {}".format(csv_count))
+ logger.error(f"DAT {data_count}")
+ logger.error(f"CSV {csv_count}")
def update_nc_agencies(nc_csv_path, destination):
diff --git a/nc/migrations/0001_initial.py b/nc/migrations/0001_initial.py
index 7cc41e04..1abd666c 100644
--- a/nc/migrations/0001_initial.py
+++ b/nc/migrations/0001_initial.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/nc/migrations/0002_agency_census_profile_id.py b/nc/migrations/0002_agency_census_profile_id.py
index 90f6dbd0..051fc2e0 100644
--- a/nc/migrations/0002_agency_census_profile_id.py
+++ b/nc/migrations/0002_agency_census_profile_id.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/nc/migrations/0003_auto_20180115_1141.py b/nc/migrations/0003_auto_20180115_1141.py
index 79723388..ec3b4f90 100644
--- a/nc/migrations/0003_auto_20180115_1141.py
+++ b/nc/migrations/0003_auto_20180115_1141.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/nc/models.py b/nc/models.py
index 0edd641b..4201b775 100755
--- a/nc/models.py
+++ b/nc/models.py
@@ -207,7 +207,7 @@ class Agency(models.Model):
census_profile_id = models.CharField(max_length=16, blank=True, default="")
last_reported_stop = models.DateField(null=True)
- class Meta(object):
+ class Meta:
verbose_name_plural = "Agencies"
def __str__(self):
diff --git a/nc/prime_cache.py b/nc/prime_cache.py
index 3d9b941c..85bd6eeb 100755
--- a/nc/prime_cache.py
+++ b/nc/prime_cache.py
@@ -1,8 +1,8 @@
import logging
import time
+from collections.abc import Generator
from contextlib import contextmanager
-from typing import Generator
import boto3
import httpx
diff --git a/nc/tests/api/test_arrests.py b/nc/tests/api/test_arrests.py
index 9d9cd266..7314b60a 100644
--- a/nc/tests/api/test_arrests.py
+++ b/nc/tests/api/test_arrests.py
@@ -20,7 +20,7 @@ def reverse_querystring(
"""
base_url = reverse(view, urlconf=urlconf, args=args, kwargs=kwargs, current_app=current_app)
if query_kwargs:
- return "{}?{}".format(base_url, urlencode(query_kwargs))
+ return f"{base_url}?{urlencode(query_kwargs)}"
return base_url
diff --git a/nc/tests/api/test_basic_search.py b/nc/tests/api/test_basic_search.py
index fde14f26..c4986b7b 100755
--- a/nc/tests/api/test_basic_search.py
+++ b/nc/tests/api/test_basic_search.py
@@ -8,7 +8,7 @@
pytestmark = pytest.mark.django_db
-RACE_VALUES = set([v[0] for v in RACE_CHOICES])
+RACE_VALUES = {v[0] for v in RACE_CHOICES}
def test_no_agency(client, search_url):
diff --git a/nc/tests/api/test_timezones.py b/nc/tests/api/test_timezones.py
index 877a0d90..7cde7b8e 100755
--- a/nc/tests/api/test_timezones.py
+++ b/nc/tests/api/test_timezones.py
@@ -30,7 +30,7 @@ def test_stop_date_after_august_excludes_july_stop(client, search_url, durham, j
data={"agency": durham.pk, "stop_date_after": dt.date(2020, 8, 1)},
format="json",
)
- stop_ids = set([stop["stop_id"] for stop in response.data["results"]])
+ stop_ids = {stop["stop_id"] for stop in response.data["results"]}
assert july_person.stop.stop_id not in stop_ids
@@ -41,7 +41,7 @@ def test_stop_date_after_august_includes_august_stop(client, search_url, durham,
data={"agency": durham.pk, "stop_date_after": dt.date(2020, 8, 1)},
format="json",
)
- stop_ids = set([stop["stop_id"] for stop in response.data["results"]])
+ stop_ids = {stop["stop_id"] for stop in response.data["results"]}
assert {august_person.stop.stop_id} == stop_ids
assert august_person.stop.date == response.data["results"][0]["date"]
@@ -53,6 +53,6 @@ def test_stop_date_after_july_includes_both(client, search_url, durham, july_per
data={"agency": durham.pk, "stop_date_after": dt.date(2020, 7, 1)},
format="json",
)
- stop_ids = set([stop["stop_id"] for stop in response.data["results"]])
+ stop_ids = {stop["stop_id"] for stop in response.data["results"]}
assert july_person.stop.stop_id in stop_ids
assert august_person.stop.stop_id in stop_ids
diff --git a/nc/tests/factories.py b/nc/tests/factories.py
index f84abeb3..b9b753ea 100755
--- a/nc/tests/factories.py
+++ b/nc/tests/factories.py
@@ -19,14 +19,14 @@ def refresh_view(obj, create, extracted, **kwargs):
class AgencyFactory(ViewRefreshFactory):
- class Meta(object):
+ class Meta:
model = models.Agency
name = factory.Sequence(lambda n: "Agency %03d" % n)
class PersonFactory(ViewRefreshFactory):
- class Meta(object):
+ class Meta:
model = models.Person
person_id = factory.Sequence(lambda x: x)
@@ -38,7 +38,7 @@ class Meta(object):
class StopFactory(ViewRefreshFactory):
- class Meta(object):
+ class Meta:
model = models.Stop
stop_id = factory.Sequence(lambda x: x)
@@ -61,7 +61,7 @@ def year(self, create, extracted, **kwargs):
class SearchFactory(ViewRefreshFactory):
- class Meta(object):
+ class Meta:
model = models.Search
search_id = factory.Sequence(lambda x: x)
@@ -71,7 +71,7 @@ class Meta(object):
class ContrabandFactory(ViewRefreshFactory):
- class Meta(object):
+ class Meta:
model = models.Contraband
contraband_id = factory.Sequence(lambda x: x)
diff --git a/nc/tests/test_api.py b/nc/tests/test_api.py
index 8e33ac58..aa20b17c 100755
--- a/nc/tests/test_api.py
+++ b/nc/tests/test_api.py
@@ -134,7 +134,7 @@ def test_officer_stops_count(self):
ethnicity="H", stop__agency=agency, stop__year=2017, stop__officer_id=p1.stop.officer_id
)
url = reverse("nc:agency-api-stops", args=[agency.pk])
- url = "{}?officer={}".format(url, p1.stop.officer_id)
+ url = f"{url}?officer={p1.stop.officer_id}"
response = self.client.get(url, format="json")
response_data = response.json()
self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/nc/urls.py b/nc/urls.py
index 6244d3be..23a3bebf 100755
--- a/nc/urls.py
+++ b/nc/urls.py
@@ -1,5 +1,4 @@
-from django.conf.urls import include
-from django.urls import path, re_path
+from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
from rest_framework.routers import DefaultRouter
@@ -13,7 +12,7 @@
urlpatterns = [ # noqa
- re_path(r"^api/", include(router.urls)),
+ path("api/", include(router.urls)),
path("api/about/contact/", csrf_exempt(views.ContactView.as_view()), name="contact-form"),
path(
"api/agency//year-range/",
diff --git a/requirements/base/base.in b/requirements/base/base.in
index 1e749128..bd1135a1 100755
--- a/requirements/base/base.in
+++ b/requirements/base/base.in
@@ -1,33 +1,34 @@
# base requirements.in
-django==3.2.25
-celery==5.4.0
-census==0.8.22
+django==4.2.18
+celery
+census
us
dealer
boto
-boto3==1.35.76
-botocore==1.35.76
-click==8.1.7
+boto3
+botocore==1.36.15
+click
# django-cache-machine is no longer used, remains for legacy migrations
-django-ckeditor==6.7.0
-django-click==2.3.0
+django-ckeditor-5
+django-click
django-crispy-forms
django-dotenv
django-extensions
django-filter
django-memoize
-django-pgviews-redux==0.8.0
+
+django-pgviews-redux
django-redis
-django-storages==1.13.2
-djangorestframework==3.12.4
+django-storages
+djangorestframework
dj-database-url
-drf-extensions==0.7.1
-psycopg2==2.9.9
-brotli==1.1.0
+drf-extensions
+psycopg2
+brotli
httpx[http2]
-requests==2.32.3
-urllib3==2.2.1
+requests
+urllib3
six
whitenoise
-pandas==2.2.2
-vine==5.1.0
+pandas
+vine
diff --git a/requirements/base/base.txt b/requirements/base/base.txt
index d9646c0a..52ad9a4b 100755
--- a/requirements/base/base.txt
+++ b/requirements/base/base.txt
@@ -2,21 +2,23 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
-# pip-compile --output-file=requirements/base/base.txt requirements/base/base.in
+# pip-compile --output-file=requirements/base/base.txt --strip-extras requirements/base/base.in
#
-amqp==5.2.0
+amqp==5.3.1
# via kombu
-anyio==4.6.2.post1
+anyio==4.8.0
# via httpx
-asgiref==3.5.2
+asgiref==3.8.1
# via django
-billiard==4.2.0
+async-timeout==5.0.1
+ # via redis
+billiard==4.2.1
# via celery
boto==2.49.0
# via -r requirements/base/base.in
-boto3==1.35.76
+boto3==1.36.15
# via -r requirements/base/base.in
-botocore==1.35.76
+botocore==1.36.15
# via
# -r requirements/base/base.in
# boto3
@@ -25,16 +27,16 @@ brotli==1.1.0
# via -r requirements/base/base.in
celery==5.4.0
# via -r requirements/base/base.in
-census==0.8.22
+census==0.8.23
# via -r requirements/base/base.in
-certifi==2020.6.20
+certifi==2025.1.31
# via
# httpcore
# httpx
# requests
-charset-normalizer==3.0.1
+charset-normalizer==3.4.1
# via requests
-click==8.1.7
+click==8.1.8
# via
# -r requirements/base/base.in
# celery
@@ -50,41 +52,41 @@ click-repl==0.3.0
# via celery
dealer==2.1.0
# via -r requirements/base/base.in
-dj-database-url==0.5.0
+dj-database-url==2.3.0
# via -r requirements/base/base.in
-django==3.2.25
+django==4.2.18
# via
# -r requirements/base/base.in
- # django-ckeditor
+ # dj-database-url
+ # django-ckeditor-5
+ # django-crispy-forms
+ # django-extensions
# django-filter
- # django-js-asset
# django-memoize
# django-redis
# django-storages
# djangorestframework
-django-ckeditor==6.7.0
+django-ckeditor-5==0.2.17
# via -r requirements/base/base.in
-django-click==2.3.0
+django-click==2.4.0
# via -r requirements/base/base.in
-django-crispy-forms==1.10.0
+django-crispy-forms==2.3
# via -r requirements/base/base.in
django-dotenv==1.4.2
# via -r requirements/base/base.in
-django-extensions==3.0.1
+django-extensions==3.2.3
# via -r requirements/base/base.in
-django-filter==2.4.0
+django-filter==24.3
# via -r requirements/base/base.in
-django-js-asset==2.1.0
- # via django-ckeditor
django-memoize==2.3.1
# via -r requirements/base/base.in
-django-pgviews-redux==0.8.0
+django-pgviews-redux==0.11.0
# via -r requirements/base/base.in
-django-redis==4.12.1
+django-redis==5.4.0
# via -r requirements/base/base.in
-django-storages==1.13.2
+django-storages==1.14.4
# via -r requirements/base/base.in
-djangorestframework==3.12.4
+djangorestframework==3.15.2
# via
# -r requirements/base/base.in
# drf-extensions
@@ -94,77 +96,78 @@ exceptiongroup==1.2.2
# via anyio
h11==0.14.0
# via httpcore
-h2==4.1.0
+h2==4.2.0
# via httpx
-hpack==4.0.0
+hpack==4.1.0
# via h2
-httpcore==1.0.6
+httpcore==1.0.7
# via httpx
-httpx[http2]==0.27.2
+httpx==0.28.1
# via -r requirements/base/base.in
-hyperframe==6.0.1
+hyperframe==6.1.0
# via h2
-idna==2.10
+idna==3.10
# via
# anyio
# httpx
# requests
-jellyfish==0.6.1
+jellyfish==1.1.3
# via us
jmespath==1.0.1
# via
# boto3
# botocore
-kombu==5.3.7
+kombu==5.4.2
# via celery
-numpy==2.0.0
+numpy==2.2.2
# via pandas
-pandas==2.2.2
+pandas==2.2.3
# via -r requirements/base/base.in
-prompt-toolkit==3.0.47
+pillow==11.1.0
+ # via django-ckeditor-5
+prompt-toolkit==3.0.50
# via click-repl
-psycopg2==2.9.9
+psycopg2==2.9.10
# via -r requirements/base/base.in
python-dateutil==2.9.0.post0
# via
# botocore
# celery
# pandas
-pytz==2022.1
- # via
- # django
- # pandas
-redis==3.5.3
+pytz==2025.1
+ # via pandas
+redis==5.2.1
# via django-redis
requests==2.32.3
# via
# -r requirements/base/base.in
# census
-s3transfer==0.10.1
+s3transfer==0.11.2
# via boto3
-six==1.15.0
+six==1.17.0
# via
# -r requirements/base/base.in
- # django-extensions
# python-dateutil
sniffio==1.3.1
- # via
- # anyio
- # httpx
-sqlparse==0.3.1
+ # via anyio
+sqlparse==0.5.3
# via django
typing-extensions==4.12.2
- # via anyio
-tzdata==2024.1
+ # via
+ # anyio
+ # asgiref
+ # dj-database-url
+tzdata==2025.1
# via
# celery
+ # kombu
# pandas
-urllib3==2.2.1
+urllib3==2.3.0
# via
# -r requirements/base/base.in
# botocore
# requests
-us==2.0.2
+us==3.2.0
# via -r requirements/base/base.in
vine==5.1.0
# via
@@ -174,5 +177,5 @@ vine==5.1.0
# kombu
wcwidth==0.2.13
# via prompt-toolkit
-whitenoise==5.1.0
+whitenoise==6.9.0
# via -r requirements/base/base.in
diff --git a/requirements/deploy/deploy.txt b/requirements/deploy/deploy.txt
index 593cb790..79575dfc 100644
--- a/requirements/deploy/deploy.txt
+++ b/requirements/deploy/deploy.txt
@@ -2,19 +2,19 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
-# pip-compile --output-file=requirements/deploy/deploy.txt requirements/deploy/deploy.in
+# pip-compile --output-file=requirements/deploy/deploy.txt --strip-extras requirements/deploy/deploy.in
#
-certifi==2020.6.20
+certifi==2025.1.31
# via
# -c requirements/deploy/../base/base.txt
# sentry-sdk
-newrelic==10.4.0
+newrelic==10.6.0
# via -r requirements/deploy/deploy.in
python3-memcached==1.51
# via -r requirements/deploy/deploy.in
sentry-sdk==2.20.0
# via -r requirements/deploy/deploy.in
-urllib3==2.2.1
+urllib3==2.3.0
# via
# -c requirements/deploy/../base/base.txt
# sentry-sdk
diff --git a/requirements/dev/dev.in b/requirements/dev/dev.in
index 8bcc8b17..c8cbf5cb 100644
--- a/requirements/dev/dev.in
+++ b/requirements/dev/dev.in
@@ -12,7 +12,7 @@ cffi
Jinja2
openshift
kubernetes
-kubernetes-validate==1.30
+kubernetes-validate
referencing
jsonschema
@@ -24,7 +24,7 @@ sphinx-autobuild
rstcheck
# AWS tools
-awscli==1.36.17
+awscli==1.37.15
django-debug-toolbar
diff --git a/requirements/dev/dev.txt b/requirements/dev/dev.txt
index afb876db..96e4a579 100755
--- a/requirements/dev/dev.txt
+++ b/requirements/dev/dev.txt
@@ -2,52 +2,52 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
-# pip-compile --output-file=requirements/dev/dev.txt requirements/dev/dev.in
+# pip-compile --output-file=requirements/dev/dev.txt --strip-extras requirements/dev/dev.in
#
alabaster==0.7.16
# via sphinx
annotated-types==0.7.0
# via pydantic
-ansible==10.5.0
+ansible==10.7.0
# via
# -r requirements/dev/dev.in
# invoke-kubesae
-ansible-core==2.17.5
+ansible-core==2.17.8
# via ansible
-anyio==4.6.2.post1
+anyio==4.8.0
# via
# -c requirements/dev/../base/base.txt
# starlette
# watchfiles
appnope==0.1.4
# via -r requirements/dev/dev.in
-asgiref==3.5.2
+asgiref==3.8.1
# via
# -c requirements/dev/../base/base.txt
# django
-asttokens==2.4.1
+asttokens==3.0.0
# via stack-data
-attrs==24.2.0
+attrs==25.1.0
# via
# jsonschema
# referencing
-awscli==1.36.17
+awscli==1.37.15
# via -r requirements/dev/dev.in
-babel==2.16.0
+babel==2.17.0
# via sphinx
-boto3==1.35.76
+boto3==1.36.15
# via
# -c requirements/dev/../base/base.txt
# invoke-kubesae
-botocore==1.35.76
+botocore==1.36.15
# via
# -c requirements/dev/../base/base.txt
# awscli
# boto3
# s3transfer
-cachetools==5.5.0
+cachetools==5.5.1
# via google-auth
-certifi==2020.6.20
+certifi==2025.1.31
# via
# -c requirements/dev/../base/base.txt
# kubernetes
@@ -58,11 +58,11 @@ cffi==1.17.1
# cryptography
cfn-flip==1.3.0
# via troposphere
-charset-normalizer==3.0.1
+charset-normalizer==3.4.1
# via
# -c requirements/dev/../base/base.txt
# requests
-click==8.1.7
+click==8.1.8
# via
# -c requirements/dev/../base/base.txt
# -c requirements/dev/../test/test.txt
@@ -74,17 +74,17 @@ colorama==0.4.6
# awscli
# invoke-kubesae
# sphinx-autobuild
-cryptography==43.0.3
+cryptography==44.0.1
# via
# -r requirements/dev/dev.in
# ansible-core
decorator==5.1.1
# via ipython
-django==3.2.25
+django==4.2.18
# via
# -c requirements/dev/../base/base.txt
# django-debug-toolbar
-django-debug-toolbar==4.3.0
+django-debug-toolbar==5.0.1
# via -r requirements/dev/dev.in
docutils==0.16
# via
@@ -99,34 +99,34 @@ exceptiongroup==1.2.2
# -c requirements/dev/../test/test.txt
# anyio
# ipython
-executing==2.1.0
+executing==2.2.0
# via stack-data
-google-auth==2.35.0
+google-auth==2.38.0
# via kubernetes
h11==0.14.0
# via
# -c requirements/dev/../base/base.txt
# uvicorn
-idna==2.10
+idna==3.10
# via
# -c requirements/dev/../base/base.txt
# anyio
# requests
imagesize==1.4.1
# via sphinx
-importlib-resources==6.4.5
+importlib-resources==6.5.2
# via kubernetes-validate
invoke==2.2.0
# via invoke-kubesae
invoke-kubesae==0.1.0
# via -r requirements/dev/dev.in
-ipython==8.29.0
+ipython==8.32.0
# via -r requirements/dev/dev.in
-jedi==0.19.1
+jedi==0.19.2
# via
# ipython
# pudb
-jinja2==3.1.4
+jinja2==3.1.5
# via
# -r requirements/dev/dev.in
# ansible-core
@@ -142,11 +142,11 @@ jsonschema==4.23.0
# kubernetes-validate
jsonschema-specifications==2024.10.1
# via jsonschema
-kubernetes==31.0.0
+kubernetes==32.0.0
# via
# -r requirements/dev/dev.in
# openshift
-kubernetes-validate==1.30.0
+kubernetes-validate==1.32.0
# via -r requirements/dev/dev.in
markdown-it-py==3.0.0
# via rich
@@ -162,7 +162,7 @@ oauthlib==3.2.2
# requests-oauthlib
openshift==0.13.2
# via -r requirements/dev/dev.in
-packaging==24.1
+packaging==24.2
# via
# -c requirements/dev/../test/test.txt
# ansible-core
@@ -173,7 +173,7 @@ parso==0.8.4
# via jedi
pexpect==4.9.0
# via ipython
-prompt-toolkit==3.0.47
+prompt-toolkit==3.0.50
# via
# -c requirements/dev/../base/base.txt
# ipython
@@ -191,11 +191,11 @@ pyasn1-modules==0.4.1
# via google-auth
pycparser==2.22
# via cffi
-pydantic==2.9.2
+pydantic==2.10.6
# via rstcheck-core
-pydantic-core==2.23.4
+pydantic-core==2.27.2
# via pydantic
-pygments==2.18.0
+pygments==2.19.1
# via
# ipython
# pudb
@@ -204,15 +204,10 @@ pygments==2.18.0
python-dateutil==2.9.0.post0
# via
# -c requirements/dev/../base/base.txt
- # -c requirements/dev/../test/test.txt
# botocore
# kubernetes
python-string-utils==1.0.0
# via openshift
-pytz==2022.1
- # via
- # -c requirements/dev/../base/base.txt
- # django
pyyaml==6.0.2
# via
# -c requirements/dev/../test/test.txt
@@ -221,7 +216,7 @@ pyyaml==6.0.2
# cfn-flip
# kubernetes
# kubernetes-validate
-referencing==0.35.1
+referencing==0.36.2
# via
# -r requirements/dev/dev.in
# jsonschema
@@ -237,9 +232,9 @@ requests-oauthlib==2.0.0
# via kubernetes
resolvelib==1.0.1
# via ansible-core
-rich==13.9.3
+rich==13.9.4
# via typer
-rpds-py==0.20.0
+rpds-py==0.22.3
# via
# jsonschema
# referencing
@@ -251,18 +246,16 @@ rstcheck==6.2.4
# via -r requirements/dev/dev.in
rstcheck-core==1.2.1
# via rstcheck
-s3transfer==0.10.1
+s3transfer==0.11.2
# via
# -c requirements/dev/../base/base.txt
# awscli
# boto3
shellingham==1.5.4
# via typer
-six==1.15.0
+six==1.17.0
# via
# -c requirements/dev/../base/base.txt
- # -c requirements/dev/../test/test.txt
- # asttokens
# cfn-flip
# kubernetes
# openshift
@@ -291,37 +284,39 @@ sphinxcontrib-qthelp==2.0.0
# via sphinx
sphinxcontrib-serializinghtml==2.0.0
# via sphinx
-sqlparse==0.3.1
+sqlparse==0.5.3
# via
# -c requirements/dev/../base/base.txt
# django
# django-debug-toolbar
stack-data==0.6.3
# via ipython
-starlette==0.41.2
+starlette==0.45.3
# via sphinx-autobuild
traitlets==5.14.3
# via
# ipython
# matplotlib-inline
-troposphere==4.8.3
+troposphere==4.9.0
# via -r requirements/dev/dev.in
-typer==0.12.5
+typer==0.15.1
# via rstcheck
typing-extensions==4.12.2
# via
# -c requirements/dev/../base/base.txt
# -c requirements/dev/../test/test.txt
# anyio
+ # asgiref
# ipython
# kubernetes-validate
# pydantic
# pydantic-core
+ # referencing
# rich
# typer
# urwid
# uvicorn
-urllib3==2.2.1
+urllib3==2.3.0
# via
# -c requirements/dev/../base/base.txt
# botocore
@@ -333,9 +328,9 @@ urwid==2.6.16
# urwid-readline
urwid-readline==0.15.1
# via pudb
-uvicorn==0.32.0
+uvicorn==0.34.0
# via sphinx-autobuild
-watchfiles==0.24.0
+watchfiles==1.0.4
# via sphinx-autobuild
wcwidth==0.2.13
# via
@@ -344,7 +339,7 @@ wcwidth==0.2.13
# urwid
websocket-client==1.8.0
# via kubernetes
-websockets==13.1
+websockets==14.2
# via sphinx-autobuild
-wheel==0.44.0
+wheel==0.45.1
# via -r requirements/dev/dev.in
diff --git a/requirements/test/test.in b/requirements/test/test.in
index 7dac4448..559f2544 100755
--- a/requirements/test/test.in
+++ b/requirements/test/test.in
@@ -12,3 +12,4 @@ coverage
flake8
black
pre-commit
+identify
diff --git a/requirements/test/test.txt b/requirements/test/test.txt
index 819238de..12f5eb7b 100755
--- a/requirements/test/test.txt
+++ b/requirements/test/test.txt
@@ -2,17 +2,17 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
-# pip-compile --output-file=requirements/test/test.txt requirements/test/test.in
+# pip-compile --output-file=requirements/test/test.txt --strip-extras requirements/test/test.in
#
-black==24.10.0
+black==25.1.0
# via -r requirements/test/test.in
cfgv==3.4.0
# via pre-commit
-click==8.1.7
+click==8.1.8
# via
# -c requirements/test/../base/base.txt
# black
-coverage[toml]==7.6.4
+coverage==7.6.12
# via
# -r requirements/test/test.in
# pytest-cov
@@ -22,19 +22,21 @@ exceptiongroup==1.2.2
# via
# -c requirements/test/../base/base.txt
# pytest
-factory-boy==3.3.1
+factory-boy==3.3.3
# via -r requirements/test/test.in
-faker==30.8.1
+faker==36.1.0
# via factory-boy
-filelock==3.16.1
+filelock==3.17.0
# via virtualenv
flake8==7.1.1
# via -r requirements/test/test.in
-identify==2.6.1
- # via pre-commit
+identify==2.6.7
+ # via
+ # -r requirements/test/test.in
+ # pre-commit
iniconfig==2.0.0
# via pytest
-isort==5.13.2
+isort==6.0.0
# via -r requirements/test/test.in
mccabe==0.7.0
# via flake8
@@ -42,7 +44,7 @@ mypy-extensions==1.0.0
# via black
nodeenv==1.9.1
# via pre-commit
-packaging==24.1
+packaging==24.2
# via
# black
# pytest
@@ -54,33 +56,25 @@ platformdirs==4.3.6
# virtualenv
pluggy==1.5.0
# via pytest
-pre-commit==4.0.1
+pre-commit==4.1.0
# via -r requirements/test/test.in
pycodestyle==2.12.1
# via flake8
pyflakes==3.2.0
# via flake8
-pytest==8.3.3
+pytest==8.3.4
# via
# -r requirements/test/test.in
# pytest-cov
# pytest-django
-pytest-cov==5.0.0
+pytest-cov==6.0.0
# via -r requirements/test/test.in
-pytest-django==4.9.0
+pytest-django==4.10.0
# via -r requirements/test/test.in
-python-dateutil==2.9.0.post0
- # via
- # -c requirements/test/../base/base.txt
- # faker
pyyaml==6.0.2
# via
# -r requirements/test/test.in
# pre-commit
-six==1.15.0
- # via
- # -c requirements/test/../base/base.txt
- # python-dateutil
tomli==2.2.1
# via
# black
@@ -90,6 +84,9 @@ typing-extensions==4.12.2
# via
# -c requirements/test/../base/base.txt
# black
+tzdata==2025.1
+ # via
+ # -c requirements/test/../base/base.txt
# faker
-virtualenv==20.27.0
+virtualenv==20.29.2
# via pre-commit
diff --git a/traffic_stops/__init__.py b/traffic_stops/__init__.py
index d13e9513..b6fc8176 100644
--- a/traffic_stops/__init__.py
+++ b/traffic_stops/__init__.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
diff --git a/traffic_stops/celery.py b/traffic_stops/celery.py
index c7e0e0fc..df76dff2 100644
--- a/traffic_stops/celery.py
+++ b/traffic_stops/celery.py
@@ -18,4 +18,4 @@
@app.task(bind=True)
def debug_task(self):
- print("Request: {0!r}".format(self.request))
+ print(f"Request: {self.request!r}")
diff --git a/traffic_stops/routers.py b/traffic_stops/routers.py
index a690a5a8..dc6af565 100644
--- a/traffic_stops/routers.py
+++ b/traffic_stops/routers.py
@@ -5,14 +5,14 @@
logger = logging.getLogger(__name__)
-class StateDatasetRouter(object):
+class StateDatasetRouter:
"""Read/write from speciic State databases"""
def _db_name(self, model):
- return "traffic_stops_{}".format(model._meta.app_label)
+ return f"traffic_stops_{model._meta.app_label}"
def _db_name_from_label(self, app_label):
- return "traffic_stops_{}".format(app_label)
+ return f"traffic_stops_{app_label}"
def db_for_read(self, model, **hints):
"""Return state DB if model's app name is a database"""
@@ -21,7 +21,7 @@ def db_for_read(self, model, **hints):
name = state_db
else:
name = "default"
- logger.debug("db_for_read({}): {}".format(state_db, name))
+ logger.debug(f"db_for_read({state_db}): {name}")
return name
def db_for_write(self, model, **hints):
@@ -31,7 +31,7 @@ def db_for_write(self, model, **hints):
name = state_db
else:
name = "default"
- logger.debug("db_for_write({}): {}".format(state_db, name))
+ logger.debug(f"db_for_write({state_db}): {name}")
return name
def allow_migrate(self, db, app_label, model_name=None, **hints):
@@ -50,5 +50,5 @@ def allow_migrate(self, db, app_label, model_name=None, **hints):
ret = True
else:
ret = False
- logger.debug("allow_syncdb({}, {} {}): {}".format(db, app_label, model_name, ret))
+ logger.debug(f"allow_syncdb({db}, {app_label} {model_name}): {ret}")
return ret
diff --git a/traffic_stops/settings/base.py b/traffic_stops/settings/base.py
index cd506c8c..a884c6c4 100755
--- a/traffic_stops/settings/base.py
+++ b/traffic_stops/settings/base.py
@@ -20,7 +20,7 @@
DATABASES = {
"default": {
- "ENGINE": "django.db.backends.postgresql_psycopg2",
+ "ENGINE": "django.db.backends.postgresql",
"NAME": "traffic_stops",
"USER": "",
"PASSWORD": "",
@@ -28,7 +28,7 @@
"PORT": "",
},
"traffic_stops_nc": {
- "ENGINE": "django.db.backends.postgresql_psycopg2",
+ "ENGINE": "django.db.backends.postgresql",
"NAME": "traffic_stops_nc",
"USER": "",
"PASSWORD": "",
@@ -92,7 +92,6 @@ def __init__(self, tz_name=None):
USE_I18N = True
-USE_L10N = True
USE_TZ = True
@@ -117,9 +116,14 @@ def __init__(self, tz_name=None):
MEDIA_STORAGE_BUCKET_NAME = os.getenv("MEDIA_STORAGE_BUCKET_NAME", "")
MEDIA_LOCATION = os.getenv("MEDIA_LOCATION", "")
MEDIA_S3_CUSTOM_DOMAIN = os.getenv("MEDIA_S3_CUSTOM_DOMAIN", "")
-DEFAULT_FILE_STORAGE = os.getenv(
- "DEFAULT_FILE_STORAGE", "django.core.files.storage.FileSystemStorage"
-)
+STORAGES = {
+ "default": {
+ "BACKEND": os.getenv("DEFAULT_FILE_STORAGE", "django.core.files.storage.FileSystemStorage")
+ },
+ "staticfiles": {
+ "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
+ },
+}
AWS_DEFAULT_ACL = os.getenv("AWS_DEFAULT_ACL")
AWS_S3_SIGNATURE_VERSION = os.getenv("AWS_S3_SIGNATURE_VERSION", "s3v4")
AWS_S3_REGION_NAME = os.getenv("AWS_S3_REGION_NAME", "us-east-2")
@@ -185,7 +189,7 @@ def __init__(self, tz_name=None):
"django_filters",
"rest_framework",
"django_pgviews",
- "ckeditor",
+ "django_ckeditor_5",
# Custom apps
"tsdata",
"nc",
@@ -352,4 +356,140 @@ def __init__(self, tz_name=None):
# Contact us Email
CONTACT_US_EMAILS = os.getenv("CONTACT_US_EMAILS", "forwardjustice-team@caktusgroup.com").split(":")
-CKEDITOR_BASEPATH = "/static/ckeditor/ckeditor/"
+customColorPalette = [
+ {"color": "hsl(4, 90%, 58%)", "label": "Red"},
+ {"color": "hsl(340, 82%, 52%)", "label": "Pink"},
+ {"color": "hsl(291, 64%, 42%)", "label": "Purple"},
+ {"color": "hsl(262, 52%, 47%)", "label": "Deep Purple"},
+ {"color": "hsl(231, 48%, 48%)", "label": "Indigo"},
+ {"color": "hsl(207, 90%, 54%)", "label": "Blue"},
+]
+CKEDITOR_5_CONFIGS = {
+ "default": {
+ "toolbar": [
+ "heading",
+ "|",
+ "bold",
+ "italic",
+ "link",
+ "bulletedList",
+ "numberedList",
+ "blockQuote",
+ "imageUpload",
+ ],
+ },
+ "extends": {
+ "blockToolbar": [
+ "paragraph",
+ "heading1",
+ "heading2",
+ "heading3",
+ "|",
+ "bulletedList",
+ "numberedList",
+ "|",
+ "blockQuote",
+ ],
+ "toolbar": [
+ "heading",
+ "|",
+ "outdent",
+ "indent",
+ "|",
+ "bold",
+ "italic",
+ "link",
+ "underline",
+ "strikethrough",
+ "code",
+ "subscript",
+ "superscript",
+ "highlight",
+ "|",
+ "codeBlock",
+ "sourceEditing",
+ "insertImage",
+ "bulletedList",
+ "numberedList",
+ "todoList",
+ "|",
+ "blockQuote",
+ "imageUpload",
+ "|",
+ "fontSize",
+ "fontFamily",
+ "fontColor",
+ "fontBackgroundColor",
+ "mediaEmbed",
+ "removeFormat",
+ "insertTable",
+ ],
+ "image": {
+ "toolbar": [
+ "imageTextAlternative",
+ "|",
+ "imageStyle:alignLeft",
+ "imageStyle:alignRight",
+ "imageStyle:alignCenter",
+ "imageStyle:side",
+ "|",
+ ],
+ "styles": [
+ "full",
+ "side",
+ "alignLeft",
+ "alignRight",
+ "alignCenter",
+ ],
+ },
+ "table": {
+ "contentToolbar": [
+ "tableColumn",
+ "tableRow",
+ "mergeTableCells",
+ "tableProperties",
+ "tableCellProperties",
+ ],
+ "tableProperties": {
+ "borderColors": customColorPalette,
+ "backgroundColors": customColorPalette,
+ },
+ "tableCellProperties": {
+ "borderColors": customColorPalette,
+ "backgroundColors": customColorPalette,
+ },
+ },
+ "heading": {
+ "options": [
+ {"model": "paragraph", "title": "Paragraph", "class": "ck-heading_paragraph"},
+ {
+ "model": "heading1",
+ "view": "h1",
+ "title": "Heading 1",
+ "class": "ck-heading_heading1",
+ },
+ {
+ "model": "heading2",
+ "view": "h2",
+ "title": "Heading 2",
+ "class": "ck-heading_heading2",
+ },
+ {
+ "model": "heading3",
+ "view": "h3",
+ "title": "Heading 3",
+ "class": "ck-heading_heading3",
+ },
+ ]
+ },
+ },
+ "list": {
+ "properties": {
+ "styles": "true",
+ "startIndex": "true",
+ "reversed": "true",
+ }
+ },
+}
+
+CKEDITOR_5_FILE_STORAGE = "traffic_stops.storages.MediaBoto3Storage"
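The STORAGES dict introduced above is the Django 4.2 replacement for the DEFAULT_FILE_STORAGE and STATICFILES_STORAGE settings, which are deprecated in 4.2 and removed in 5.1. A minimal sketch of how the configured backends are resolved at runtime, assuming a configured Django >= 4.2 project; storages and default_storage are the stock django.core.files.storage names, everything else mirrors the keys defined in this diff:

from django.core.files.storage import default_storage, storages

media_storage = storages["default"]        # backend named by the DEFAULT_FILE_STORAGE env var
static_storage = storages["staticfiles"]   # ManifestStaticFilesStorage per the block above
print(type(media_storage).__name__, type(static_storage).__name__)
# default_storage is a lazy alias for storages["default"], so existing call sites keep working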
diff --git a/traffic_stops/settings/deploy.py b/traffic_stops/settings/deploy.py
index 8cb95269..b371af08 100755
--- a/traffic_stops/settings/deploy.py
+++ b/traffic_stops/settings/deploy.py
@@ -31,7 +31,12 @@
"whitenoise.middleware.WhiteNoiseMiddleware",
] + MIDDLEWARE
-STATICFILES_STORAGE = "django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
+STORAGES["default"] = {
+ "BACKEND": "django.core.files.storage.FileSystemStorage",
+}
+STORAGES["staticfiles"] = {
+ "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
+}
EMAIL_HOST = os.getenv("EMAIL_HOST", "localhost")
EMAIL_HOST_USER = os.getenv("EMAIL_HOST_USER", "")
@@ -47,7 +52,7 @@
else:
default_smtp_port = 25
EMAIL_PORT = os.getenv("EMAIL_PORT", default_smtp_port)
-EMAIL_SUBJECT_PREFIX = "[trafficstops %s] " % ENVIRONMENT.title()
+EMAIL_SUBJECT_PREFIX = f"[trafficstops {ENVIRONMENT.title()}] "
DEFAULT_FROM_EMAIL = f"noreply@{os.getenv('DOMAIN', os.environ)}"
SERVER_EMAIL = DEFAULT_FROM_EMAIL
diff --git a/traffic_stops/urls.py b/traffic_stops/urls.py
index 910de10b..7cd59686 100755
--- a/traffic_stops/urls.py
+++ b/traffic_stops/urls.py
@@ -1,8 +1,7 @@
from django.conf import settings
-from django.conf.urls import include
from django.conf.urls.static import static
from django.contrib import admin
-from django.urls import path, re_path
+from django.urls import include, path, re_path
from .views import index
@@ -10,8 +9,9 @@
urlpatterns = [ # noqa
- re_path(r"^", include(("nc.urls", "nc"), namespace="nc"), name="home"),
- re_path(r"^admin/", admin.site.urls),
+ path("", include(("nc.urls", "nc"), namespace="nc"), name="home"),
+ path("admin/", admin.site.urls),
+ path("ckeditor5/", include("django_ckeditor_5.urls")),
]
if settings.DEBUG:
diff --git a/tsdata/__init__.py b/tsdata/__init__.py
index 7fc45367..e69de29b 100644
--- a/tsdata/__init__.py
+++ b/tsdata/__init__.py
@@ -1 +0,0 @@
-default_app_config = "tsdata.apps.TSDataConfig"
diff --git a/tsdata/acs.py b/tsdata/acs.py
index ceed6e67..7db1579c 100755
--- a/tsdata/acs.py
+++ b/tsdata/acs.py
@@ -49,7 +49,7 @@
}
-class ACS(object):
+class ACS:
"""Base class to call ACS API and normalize output"""
source = "ACS 5-Year Data (2017-2021)"
@@ -131,7 +131,7 @@ def call_api(self):
return self.api.acs5.state_place(self.variables, self.fips, census.ALL)
def get(self):
- df = super(ACSStatePlaces, self).get()
+ df = super().get()
# ignore Census Designated Places (CDP)
return df[~df.location.str.contains("CDP")]
diff --git a/tsdata/admin.py b/tsdata/admin.py
index c38a2255..b38480dc 100755
--- a/tsdata/admin.py
+++ b/tsdata/admin.py
@@ -4,6 +4,7 @@
from tsdata.tasks import import_dataset
+@admin.register(Dataset)
class DatasetAdmin(admin.ModelAdmin):
list_display = ("id", "name", "state", "date_received", "destination")
list_filter = ("state",)
@@ -12,17 +13,17 @@ class DatasetAdmin(admin.ModelAdmin):
date_hierarchy = "date_received"
actions = ["import_dataset"]
+ @admin.action(description="Import selected dataset")
def import_dataset(self, request, queryset):
if queryset.count() > 1:
self.message_user(request, "Please select one dataset at a time", level=messages.ERROR)
return
import_dataset.delay(queryset[0].pk)
- msg = "{} successfully queued for import.".format(queryset[0].name)
+ msg = f"{queryset[0].name} successfully queued for import."
self.message_user(request, msg)
- import_dataset.short_description = "Import selected dataset"
-
+@admin.register(Import)
class ImportAdmin(admin.ModelAdmin):
list_display = ("id", "dataset", "date_started", "date_finished", "successful")
list_filter = ("successful",)
@@ -31,6 +32,7 @@ class ImportAdmin(admin.ModelAdmin):
ordering = ("-date_started",)
+@admin.register(CensusProfile)
class CensusProfileAdmin(admin.ModelAdmin):
list_display = ("id", "location", "state", "geography", "total", "source")
list_filter = ("state", "geography", "source")
@@ -48,14 +50,9 @@ def has_delete_permission(self, request, obj=None):
return False
+@admin.register(StateFacts)
class StateFactsAdmin(admin.ModelAdmin):
inlines = (TopAgencyFactsInline,)
def has_delete_permission(self, request, obj=None):
return False
-
-
-admin.site.register(Dataset, DatasetAdmin)
-admin.site.register(Import, ImportAdmin)
-admin.site.register(CensusProfile, CensusProfileAdmin)
-admin.site.register(StateFacts, StateFactsAdmin)
diff --git a/tsdata/dataset_facts.py b/tsdata/dataset_facts.py
index 857f2896..7fec4d2b 100755
--- a/tsdata/dataset_facts.py
+++ b/tsdata/dataset_facts.py
@@ -80,10 +80,10 @@ def compute_dataset_facts(Agency, Stop, state_key, Search=None, override_start_d
state_facts = StateFacts.objects.get(state_key=state_key)
facts = [
- "Timeframe: %s - %s" % (first_stop_time_str, last_stop_time_str),
- "Stops: {:,}".format(state_facts.total_stops),
- "Searches: {:,}".format(state_facts.total_searches),
- "Agencies: {:,}".format(state_facts.total_agencies),
+ f"Timeframe: {first_stop_time_str} - {last_stop_time_str}",
+ f"Stops: {state_facts.total_stops:,}",
+ f"Searches: {state_facts.total_searches:,}",
+ f"Agencies: {state_facts.total_agencies:,}",
"",
"Top 5:",
]
@@ -91,7 +91,7 @@ def compute_dataset_facts(Agency, Stop, state_key, Search=None, override_start_d
top_agencies = Agency.objects.annotate(num_stops=Count("stops")).order_by("-num_stops")[:5]
rank = 1
for agency in top_agencies:
- facts.append("Id {}: {} {:,}".format(agency.id, agency.name, agency.num_stops))
+ facts.append(f"Id {agency.id}: {agency.name} {agency.num_stops:,}")
TopAgencyFacts.objects.filter(state_facts=state_facts, rank=rank).update(
agency_id=agency.id,
stops=agency.num_stops,
diff --git a/tsdata/management/commands/import_census.py b/tsdata/management/commands/import_census.py
index 0029039b..f5af939e 100755
--- a/tsdata/management/commands/import_census.py
+++ b/tsdata/management/commands/import_census.py
@@ -53,7 +53,7 @@ def handle(self, *args, **options):
else:
r = requests.get(options["url"])
if r.status_code != 200:
- raise CommandError("Failed to access {}".format(options["url"]))
+ raise CommandError(f"Failed to access {options['url']}")
data = r.json()
if options["output"]:
print(json.dumps(data, indent=options["indent"]))
diff --git a/tsdata/migrations/0001_initial.py b/tsdata/migrations/0001_initial.py
index 69b44d7f..1f403f2e 100644
--- a/tsdata/migrations/0001_initial.py
+++ b/tsdata/migrations/0001_initial.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/tsdata/migrations/0002_censusprofile.py b/tsdata/migrations/0002_censusprofile.py
index 36c549b6..4b181215 100644
--- a/tsdata/migrations/0002_censusprofile.py
+++ b/tsdata/migrations/0002_censusprofile.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/tsdata/migrations/0003_auto_20160808_1823.py b/tsdata/migrations/0003_auto_20160808_1823.py
index 0e63bc11..9ca7b3b1 100644
--- a/tsdata/migrations/0003_auto_20160808_1823.py
+++ b/tsdata/migrations/0003_auto_20160808_1823.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/tsdata/migrations/0004_auto_20161004_0743.py b/tsdata/migrations/0004_auto_20161004_0743.py
index f55b7482..3454af2e 100644
--- a/tsdata/migrations/0004_auto_20161004_0743.py
+++ b/tsdata/migrations/0004_auto_20161004_0743.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/tsdata/migrations/0005_auto_20170314_1359.py b/tsdata/migrations/0005_auto_20170314_1359.py
index 393ee44d..26c0a757 100644
--- a/tsdata/migrations/0005_auto_20170314_1359.py
+++ b/tsdata/migrations/0005_auto_20170314_1359.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
import django.core.validators
from django.db import migrations, models
@@ -46,6 +43,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='topagencyfacts',
- unique_together=set([('state_facts', 'rank')]),
+ unique_together={('state_facts', 'rank')},
),
]
diff --git a/tsdata/migrations/0006_create_statefacts.py b/tsdata/migrations/0006_create_statefacts.py
index cad4522d..82cf1341 100644
--- a/tsdata/migrations/0006_create_statefacts.py
+++ b/tsdata/migrations/0006_create_statefacts.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.conf import settings
from django.db import migrations, models
diff --git a/tsdata/migrations/0007_auto_20170316_1215.py b/tsdata/migrations/0007_auto_20170316_1215.py
index f099e068..4d5dcf2e 100644
--- a/tsdata/migrations/0007_auto_20170316_1215.py
+++ b/tsdata/migrations/0007_auto_20170316_1215.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/tsdata/models.py b/tsdata/models.py
index 4d569649..b0581c05 100755
--- a/tsdata/models.py
+++ b/tsdata/models.py
@@ -32,7 +32,7 @@ class Dataset(models.Model):
report_email_2 = models.EmailField(blank=True)
def __str__(self):
- return "{}: {}".format(self.get_state_display(), self.name)
+ return f"{self.get_state_display()}: {self.name}"
@property
def agency_model(self):
@@ -53,7 +53,7 @@ class Import(models.Model):
successful = models.BooleanField(default=False)
def __str__(self):
- return "Import of {}".format(self.dataset)
+ return f"Import of {self.dataset}"
class CensusProfile(models.Model):
@@ -99,7 +99,7 @@ class StateFacts(models.Model):
end_date = models.CharField(max_length=20, default="")
def __str__(self):
- return "Facts for state %s" % self.state_key
+ return f"Facts for state {self.state_key}"
class Meta:
verbose_name_plural = "state facts"
@@ -113,7 +113,7 @@ class TopAgencyFacts(models.Model):
name = models.CharField(max_length=255, default="")
def __str__(self):
- return "Facts for state %s agency %s" % (self.state_facts.state_key, self.name)
+ return f"Facts for state {self.state_facts.state_key} agency {self.name}"
class Meta:
unique_together = (("state_facts", "rank"),)
diff --git a/tsdata/sql.py b/tsdata/sql.py
index 040f32ba..7516fec5 100644
--- a/tsdata/sql.py
+++ b/tsdata/sql.py
@@ -36,7 +36,7 @@ def get_sql_statements(cursor, select_sql):
cursor.execute(select_sql, params=None)
sql = ""
for row in cursor.fetchall():
- sql += "{}\n".format(row[0])
+ sql += f"{row[0]}\n"
return sql
diff --git a/tsdata/tasks.py b/tsdata/tasks.py
index 3c64c94a..78165897 100755
--- a/tsdata/tasks.py
+++ b/tsdata/tasks.py
@@ -22,10 +22,10 @@
@app.task
def import_dataset(dataset_id):
"""Execute a state dataset import process"""
- logger.info("Received Dataset ID: {}".format(dataset_id))
+ logger.info(f"Received Dataset ID: {dataset_id}")
dataset = Dataset.objects.get(pk=dataset_id)
run = Import.objects.create(dataset=dataset)
- logger.info("Starting {} import".format(dataset.state))
+ logger.info(f"Starting {dataset.state} import")
state_import = RUN_MAP[run.dataset.state]
report_emails = [email for email in [dataset.report_email_1, dataset.report_email_2] if email]
try:
@@ -41,7 +41,7 @@ def import_dataset(dataset_id):
if report_emails:
send_mail(
"Import completed successfully",
- "Import of %s completed successfully" % dataset,
+ f"Import of {dataset} completed successfully",
settings.DEFAULT_FROM_EMAIL,
report_emails,
)
@@ -83,7 +83,7 @@ def compliance_report(dataset_id):
if not qs:
send_mail(
- "{} Compliance Report, {}".format(dataset.state.upper(), now.date().isoformat()),
+ f"{dataset.state.upper()} Compliance Report, {now.date().isoformat()}",
"All agencies have reported within the last 90 days.",
settings.DEFAULT_FROM_EMAIL,
settings.COMPLIANCE_REPORT_LIST,
@@ -98,7 +98,7 @@ def compliance_report(dataset_id):
writer.writerows(filter(lambda r: r["last_reported_stop"] is None, qs))
message = EmailMessage(
- "{} Compliance Report, {}".format(dataset.state.upper(), now.date().isoformat()),
+ f"{dataset.state.upper()} Compliance Report, {now.date().isoformat()}",
"Attached are the agencies out of compliance in the most recent data import.",
settings.DEFAULT_FROM_EMAIL,
settings.COMPLIANCE_REPORT_LIST,
diff --git a/tsdata/tests/factories.py b/tsdata/tests/factories.py
index 6419c3eb..82ccf0dc 100755
--- a/tsdata/tests/factories.py
+++ b/tsdata/tests/factories.py
@@ -10,7 +10,7 @@
class DatasetFactory(factory.django.DjangoModelFactory):
state = factory.Iterator(models.STATE_CHOICES, getter=lambda c: c[0])
name = factory.Sequence(lambda n: "Dataset %d" % n)
- url = factory.LazyAttribute(lambda obj: "https://example.com/%s" % obj.state)
+ url = factory.LazyAttribute(lambda obj: f"https://example.com/{obj.state}")
date_received = factory.fuzzy.FuzzyDateTime(
datetime.datetime(2008, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)
)
@@ -20,7 +20,7 @@ class Meta:
class CensusProfileFactory(factory.django.DjangoModelFactory):
- class Meta(object):
+ class Meta:
model = models.CensusProfile
id = factory.Sequence(lambda n: "16000US%07d" % n)
diff --git a/tsdata/tests/test_utils.py b/tsdata/tests/test_utils.py
index 997ebd90..dc336887 100755
--- a/tsdata/tests/test_utils.py
+++ b/tsdata/tests/test_utils.py
@@ -85,7 +85,7 @@ def test_line_count(self):
expected_num_lines = 2345
with tempfile.NamedTemporaryFile(mode="w") as t:
for i in range(expected_num_lines):
- print("line %s" % i, file=t)
+ print(f"line {i}", file=t)
t.flush()
self.assertEqual(expected_num_lines, line_count(t.name))
diff --git a/tsdata/utils.py b/tsdata/utils.py
index f9954c5a..992bbe45 100644
--- a/tsdata/utils.py
+++ b/tsdata/utils.py
@@ -18,7 +18,7 @@ def call(cmd, shell=False):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
stdout, stderr = p.communicate()
if p.returncode != 0:
- raise IOError(stderr)
+ raise OSError(stderr)
if stderr:
logger.error(stderr.decode("utf-8"))
return stdout
@@ -76,10 +76,10 @@ def unzip_data(destination, url=None, zip_path=None):
first_file_in_zip = get_datafile_path(None, destination, zip_path=zip_path)
if os.path.exists(first_file_in_zip):
- logger.debug("{} exists, skipping extract".format(first_file_in_zip))
+ logger.debug(f"{first_file_in_zip} exists, skipping extract")
else:
archive = zipfile.ZipFile(zip_path)
- logger.debug("Extracting archive into {}".format(destination))
+ logger.debug(f"Extracting archive into {destination}")
archive.extractall(path=destination)
logger.debug("Extraction complete")
@@ -89,17 +89,17 @@ def download_and_unzip_data(url, destination, prefix="state-"):
# make sure destination exists or create a temporary directory
if not destination:
destination = tempfile.mkdtemp(prefix=prefix)
- logger.debug("Created temp directory {}".format(destination))
+ logger.debug(f"Created temp directory {destination}")
else:
if not os.path.exists(destination):
os.makedirs(destination)
- logger.info("Created {}".format(destination))
+ logger.info(f"Created {destination}")
zip_filename = get_zipfile_path(url, destination)
# don't re-download data if raw data file already exists
if os.path.exists(zip_filename):
- logger.debug("{} exists, skipping download".format(zip_filename))
+ logger.debug(f"{zip_filename} exists, skipping download")
else:
- logger.debug("Downloading data to {}".format(zip_filename))
+ logger.debug(f"Downloading data to {zip_filename}")
response = requests.get(url, stream=True)
# XXX check status code here; e.g., if permissions haven't been granted
# for a file being downloaded from S3 a 403 will be returned
@@ -112,9 +112,7 @@ def download_and_unzip_data(url, destination, prefix="state-"):
downloaded += len(chunk)
now = time.clock()
if (now - start) >= 5:
- logger.debug(
- "{0:.2g}% downloaded".format(downloaded / content_length * 100)
- )
+ logger.debug(f"{downloaded / content_length * 100:.2g}% downloaded")
start = now
f.write(chunk)
f.flush()
@@ -124,7 +122,7 @@ def download_and_unzip_data(url, destination, prefix="state-"):
return destination
-class GroupedData(object):
+class GroupedData:
"""Data structure to build and flatten nested dictionaries"""
def __init__(self, by, defaults=None):
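One caveat in tsdata/utils.py that the interpreter bump does not address: the unchanged download_and_unzip_data hunk above still calls time.clock(), which was removed in Python 3.8, so the progress logging raises AttributeError on the old 3.10 image as well as the new 3.12 one whenever that branch runs. A minimal sketch of the same timing pattern with time.monotonic(), using a stand-in iterable in place of the streamed response chunks:

import time

start = time.monotonic()  # time.clock() no longer exists on Python >= 3.8
downloaded, content_length = 0, 4096
for chunk in (b"x" * 1024 for _ in range(4)):  # stand-in for response.iter_content(...)
    downloaded += len(chunk)
    now = time.monotonic()
    if (now - start) >= 5:
        print(f"{downloaded / content_length * 100:.2g}% downloaded")
        start = now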