diff --git a/.flake8 b/.flake8 deleted file mode 100644 index c6a6d95..0000000 --- a/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -max-line-length = 120 -per-file-ignores = - jdhapi/views/submit_abstract.py: E501 -ignore = E402, F401, F403 \ No newline at end of file diff --git a/jdh/settings.py b/jdh/settings.py index 618c657..00366aa 100644 --- a/jdh/settings.py +++ b/jdh/settings.py @@ -10,9 +10,10 @@ https://docs.djangoproject.com/en/3.1/ref/settings/ """ -from pathlib import Path import os import sys +from pathlib import Path + from .base import get_env_variable # Build paths inside the project like this: BASE_DIR / 'subdir'. @@ -292,6 +293,8 @@ FACEBOOK_JDH_PAGE_ID = get_env_variable("FACEBOOK_JDH_PAGE_ID") FACEBOOK_JDH_ACCESS_TOKEN = get_env_variable("FACEBOOK_JDH_ACCESS_TOKEN") -#OJS API -OJS_API_KEY_TOKEN= get_env_variable("OJS_API_KEY_TOKEN", "default") -OJS_API_URL = get_env_variable("OJS_API_URL", "http://ojs.journalofdigitalhistory.org") \ No newline at end of file +# OJS API +OJS_API_KEY_TOKEN = get_env_variable("OJS_API_KEY_TOKEN", "default") +OJS_API_URL = get_env_variable("OJS_API_URL", "http://ojs.journalofdigitalhistory.org") + +COPY_EDITOR_ADDRESS = get_env_variable("COPY_EDITOR_ADDRESS", "") diff --git a/jdhapi/admin.py b/jdhapi/admin.py index 5ceae1e..f7f6a2b 100644 --- a/jdhapi/admin.py +++ b/jdhapi/admin.py @@ -11,10 +11,10 @@ from .tasks import ( save_article_fingerprint, save_article_specific_content, - save_citation, save_libraries, save_references, ) +from .utils.articles import save_citation from import_export.admin import ExportActionMixin from django.utils.html import format_html @@ -37,7 +37,7 @@ def save_notebook_specific_cell(modeladmin, request, queryset): def save_article_citation(modeladmin, request, queryset): for article in queryset: - save_citation.delay(article_id=article.pk) + save_citation(article_id=article.pk) save_article_citation.short_description = "3: Generate citation" diff --git a/jdhapi/models/article.py 
b/jdhapi/models/article.py index 517a3ed..1a4ec33 100644 --- a/jdhapi/models/article.py +++ b/jdhapi/models/article.py @@ -1,8 +1,8 @@ import logging + from django.db import models from model_utils import FieldTracker - logger = logging.getLogger(__name__) @@ -35,7 +35,6 @@ class Article(models.Model): Methods: get_kernel_language(): Returns the kernel language based on the 'tool' tag. __str__(): Returns the title of the abstract. - send_email_if_peer_review(): Sends an email with a PDF attachment if the article is in peer review status. """ class Status(models.TextChoices): @@ -55,7 +54,14 @@ class Status(models.TextChoices): "DESIGN_REVIEW", "Design review", ) - PUBLISHED = "PUBLISHED", "Published" + COPY_EDITING = ( + "COPY_EDITING", + "Copy editing", + ) + PUBLISHED = ( + "PUBLISHED", + "Published", + ) class CopyrightType(models.TextChoices): DRAFT = ( @@ -130,7 +136,7 @@ class RepositoryType(models.TextChoices): blank=True, null=True, help_text="Url to find here https://data.journalofdigitalhistory.org/", - ) # New field for Dataverse URL + ) publication_date = models.DateTimeField(blank=True, null=True) copyright_type = models.CharField( max_length=15, diff --git a/jdhapi/signals.py b/jdhapi/signals.py index 067eab1..4135c1f 100644 --- a/jdhapi/signals.py +++ b/jdhapi/signals.py @@ -1,11 +1,10 @@ import requests -import logging from django.core.exceptions import ValidationError -from django.db.models.signals import post_save, pre_save +from django.db.models.signals import pre_save from django.dispatch import receiver + from jdhapi.models import Article from jdhapi.utils.articles import convert_string_to_base64 -from jdhapi.utils.run_github_action import trigger_workflow @receiver(pre_save, sender=Article) @@ -37,5 +36,5 @@ def check_notebook_url(notebook_url, repository_url): if instance.notebook_url and check_notebook_url( instance.notebook_url, instance.repository_url ): raise ValidationError("Notebook url is not correct") diff --git a/jdhapi/tasks.py 
b/jdhapi/tasks.py index ee2f907..825ff93 100644 --- a/jdhapi/tasks.py +++ b/jdhapi/tasks.py @@ -8,7 +8,6 @@ from jdhapi.utils.articles import ( get_notebook_stats, get_notebook_specifics_tags, - get_citation, generate_tags, generate_narrative_tags, get_notebook_references_fulltext, @@ -75,18 +74,6 @@ def save_article_specific_content(article_id): article.save() -@shared_task -def save_citation(article_id): - logger.info(f"save_article_citation:{article_id}") - try: - article = Article.objects.get(pk=article_id) - except Article.DoesNotExist: - logger.error(f"save_article_citation:{article_id} not found") - citation = get_citation(raw_url=article.notebook_ipython_url, article=article) - article.citation = citation - article.save() - - @shared_task def save_libraries(article_id): logger.info(f"save_article_libraries:{article_id}") diff --git a/jdhapi/urls.py b/jdhapi/urls.py index 7f8cda6..5e0c838 100644 --- a/jdhapi/urls.py +++ b/jdhapi/urls.py @@ -25,17 +25,17 @@ ), path("api/abstracts/submit", views.submit_abstract, name="submit-abstract"), path("api/articles/", views.ArticleList.as_view(), name="article-list"), - path("api/articles//status/", views.ArticleStatus.as_view(), name='article-status'), - path( - "api/articles/status", - views.update_article_status, - name="article-change-status", - ), + path("api/articles//status", views.ArticleStatus.as_view(), name='article-status'), path( "api/articles//", views.ArticleDetail.as_view(), name="article-detail", ), + path( + "api/articles/status", + views.update_article_status, + name="article-change-status", + ), path("api/articles/advance", views.AdvanceArticleList.as_view(), name="advance-article-list"), path( "api/articles/bluesky", @@ -51,6 +51,8 @@ path("api/articles/ojs/submissions", views.get_count_submission_from_ojs, name="count-submission-from-ojs"), path("api/articles/ojs/submission", views.send_article_to_ojs, name="articles-send-to-ojs"), path("api/articles/tweet", views.get_tweet_md_file, 
name="articles-tweet"), + path("api/articles/docx", views.get_docx, name="article-docx"), + path("api/articles/docx/email", views.send_docx_email, name="article-docx-email"), path("api/authors/", views.AuthorList.as_view(), name="author-list"), path("api/authors//", views.AuthorDetail.as_view(), name="author-detail"), path( diff --git a/jdhapi/utils/articles.py b/jdhapi/utils/articles.py index f8ecef1..c12fda5 100644 --- a/jdhapi/utils/articles.py +++ b/jdhapi/utils/articles.py @@ -13,7 +13,7 @@ from django.utils.html import strip_tags from jdhapi.utils.doi import get_doi_url_formatted_jdh -from jdhapi.models import Author, Tag +from jdhapi.models import Article, Author, Tag from jdhseo.utils import getReferencesFromJupyterNotebook from requests.exceptions import HTTPError @@ -217,6 +217,18 @@ def get_citation(raw_url, article): } +def save_citation(article_id): + logger.info(f"save_article_citation:{article_id}") + try: + article = Article.objects.get(pk=article_id) + except Article.DoesNotExist: + logger.error(f"save_article_citation:{article_id} not found") + return + citation = get_citation(raw_url=article.notebook_ipython_url, article=article) + article.citation = citation + article.save() + + def get_raw_from_github( repository_url, file_type, host="https://raw.githubusercontent.com" ): diff --git a/jdhapi/utils/github_action.py b/jdhapi/utils/github_action.py new file mode 100644 index 0000000..4bfa7e8 --- /dev/null +++ b/jdhapi/utils/github_action.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python3 +import logging +import requests +import time +from datetime import datetime, timezone +from urllib.parse import urlparse +from jdh.settings import GITHUB_ACCESS_TOKEN + +logger = logging.getLogger(__name__) + + +def trigger_workflow(repo_url, workflow_filename, token=None, ref="main"): + """ + :param repo_url: GitHub repository link + :param workflow_filename: Filename of the workflow in .github/workflows (e.g. 
"hello-world.yml") + :param token: GitHub access token with repo permissions (optional, will use env variable if not provided) + :param ref: Git ref (branch or tag) to run the workflow on + """ + + token = _get_github_token(token) + owner, repo = _parse_owner_repo(repo_url) + + logger.info("[trigger_workflow] - Trigger workflow '%s' on ref '%s' for %s/%s", workflow_filename, ref, owner, repo) + + url = f"https://api.github.com/repos/{owner}/{repo}/actions/workflows/{workflow_filename}/dispatches" + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github+json", + } + payload = {"ref": ref} + try: + res = requests.post(url, json=payload, headers=headers, timeout=10) + if res.status_code == 204: + logger.info( + "Workflow '%s' dispatched on ref '%s' for %s/%s.", + workflow_filename, + ref, + owner, + repo, + ) + else: + logger.error( + "Failed to dispatch workflow '%s' (%s): %s", + workflow_filename, + res.status_code, + res.text, + ) + res.raise_for_status() + except requests.RequestException as e: + logger.error("Workflow dispatch failed: %s", e) + raise requests.RequestException(f"Workflow dispatch failed: {e}") from e + + +def trigger_workflow_and_wait( + repo_url, + workflow_filename, + token=None, + ref="main", + timeout_seconds=600, + poll_interval_seconds=5, +): + """ + :param repo_url: GitHub repository link + :param workflow_filename: Filename of the workflow in .github/workflows (e.g. 
"hello-world.yml") + :param token: GitHub access token with repo permissions (optional, will use env variable if not provided) + :param ref: Git ref (branch or tag) to run the workflow on + :param timeout_seconds: Maximum time to wait for workflow completion + :param poll_interval_seconds: Time to wait between polling for workflow status + """ + + token = _get_github_token(token) + owner, repo = _parse_owner_repo(repo_url) + started_at = datetime.now(timezone.utc) + + logger.info("[trigger_workflow_and_wait] - Trigger workflow and wait for '%s' on ref '%s' for %s/%s", workflow_filename, ref, owner, repo) + + trigger_workflow(repo_url, workflow_filename, token=token, ref=ref) + + runs_url = ( + f"https://api.github.com/repos/{owner}/{repo}/actions/workflows/" + f"{workflow_filename}/runs" + ) + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github+json", + } + + deadline = time.time() + timeout_seconds + run_id = None + + while time.time() < deadline: + try: + res = requests.get(runs_url, headers=headers, timeout=10) + res.raise_for_status() + data = res.json() + except requests.RequestException as e: + logger.error("Failed to list workflow runs: %s", e) + raise requests.RequestException(f"Failed to list workflow runs: {e}") from e + + for run in data.get("workflow_runs", []): + created_at = _parse_github_datetime(run.get("created_at")) + if not created_at: + continue + if ( + run.get("event") == "workflow_dispatch" + and run.get("head_branch") == ref + and created_at >= started_at + ): + run_id = run.get("id") + status = run.get("status") + conclusion = run.get("conclusion") + + if status == "completed": + if conclusion == "success": + logger.info( + "Workflow '%s' completed successfully for %s/%s.", + workflow_filename, + owner, + repo, + ) + return + raise RuntimeError( + f"Workflow '{workflow_filename}' interrupted: {conclusion}" + ) + break + + if run_id is None: + logger.info("Waiting for workflow run to start...") + + 
time.sleep(poll_interval_seconds) + + raise TimeoutError( + f"Workflow '{workflow_filename}' did not complete within {timeout_seconds}s" + ) + + +def _parse_owner_repo(repo_url): + """ + Retrieve owner and repository name from a github repository url + + :param repo_url: GitHub repository link + :return: Return a tuple of (owner name, repository name) + """ + logger.info("[_parse_owner_repo] - Retrieve owner and repository name from a github repository url") + + parsed = urlparse(repo_url) + path = parsed.path.lstrip("/") + + if path.endswith(".git"): + path = path[:-4] + + parts = path.split("/") + + if len(parts) < 2: + raise ValueError(f"Invalid repository URL: '{repo_url}'") + + owner = parts[0] + repo = parts[1] + + return owner, repo + + +def _get_github_token(token): + """ + Return the provided GitHub access token or fall back to the environment variable. + + :param token: GitHub access token (optional) + :return: GitHub access token + :raises ValueError: If no token is provided and none is set in the environment + """ + logger.info("[_get_github_token] - Retrieve github access token") + + resolved = token or GITHUB_ACCESS_TOKEN + if not resolved: + raise ValueError( + "No GitHub access token provided and GITHUB_ACCESS_TOKEN is not set." + ) + return resolved + + + +def _parse_github_datetime(value): + """ + Parse a GitHub datetime string into a timezone-aware datetime object. 
+ GitHub datetime strings are in ISO 8601 format: "YYYY-MM-DDTHH:MM:SSZ" + + :param value: GitHub datetime string + :return: Timezone-aware datetime object or None if parsing fails + """ + if not value: + return None + try: + return datetime.fromisoformat(value.replace("Z", "+00:00")) + except (ValueError, AttributeError): + logger.error("Failed to parse GitHub datetime value: '%s'", value) + return None \ No newline at end of file diff --git a/jdhapi/utils/run_github_action.py b/jdhapi/utils/run_github_action.py deleted file mode 100644 index 5dc1ea4..0000000 --- a/jdhapi/utils/run_github_action.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 -import os -import sys -import requests -from pathlib import Path -from urllib.parse import urlparse - - -def trigger_workflow(repo_url, workflow_filename, token=None, ref="main"): - """ - :param owner: GitHub username or organization - :param repo: Repository name - :param workflow_filename: Filename of the workflow in .github/workflows (e.g. 
"hello-world.yml") - :param ref: Git ref (branch or tag) to run the workflow on - """ - if not token: - from jdh.settings import GITHUB_ACCESS_TOKEN - - token = GITHUB_ACCESS_TOKEN - - parsed = urlparse(repo_url) - path = parsed.path.lstrip("/") - - if path.endswith(".git"): - path = path[:-4] - - parts = path.split("/") - if len(parts) >= 2: - owner = parts[0] - repo = parts[1] - - url = f"https://api.github.com/repos/{owner}/{repo}/actions/workflows/{workflow_filename}/dispatches" - headers = { - "Authorization": f"Bearer {token}", - "Accept": "application/vnd.github+json", - } - payload = {"ref": ref} - resp = requests.post(url, json=payload, headers=headers) - if resp.status_code == 204: - print(f"Workflow '{workflow_filename}' dispatched on ref '{ref}'.") - else: - print(f"Failed to dispatch workflow: {resp.status_code}") - print(resp.json()) - resp.raise_for_status() \ No newline at end of file diff --git a/jdhapi/views/articles/__init__.py b/jdhapi/views/articles/__init__.py index 532f04b..6a28d02 100644 --- a/jdhapi/views/articles/__init__.py +++ b/jdhapi/views/articles/__init__.py @@ -3,3 +3,4 @@ from .ojs import * from .social_media import * from .update_article import * +from .copy_editing import * diff --git a/jdhapi/views/articles/articles.py b/jdhapi/views/articles/articles.py index 81c6837..f21e74e 100644 --- a/jdhapi/views/articles/articles.py +++ b/jdhapi/views/articles/articles.py @@ -4,7 +4,7 @@ from rest_framework import generics, filters from rest_framework.permissions import BasePermission from django.shortcuts import get_object_or_404 -from jdhapi.views.articles.status_handlers import TechnicalReviewHandler +from jdhapi.views.articles.status_handlers import * from rest_framework.permissions import IsAdminUser from rest_framework.response import Response from rest_framework.views import APIView @@ -94,6 +94,9 @@ class ArticleStatus(APIView): permission_classes = [IsAdminUser] STATUS_HANDLERS = { 'TECHNICAL_REVIEW': TechnicalReviewHandler(), + 
'COPY_EDITING': CopyEditingHandler(), + 'PEER_REVIEW': PeerReviewHandler(), + 'PUBLISHED' : PublishedHandler() } def patch(self, request, abstract__pid): diff --git a/jdhapi/views/articles/copy_editing.py b/jdhapi/views/articles/copy_editing.py new file mode 100644 index 0000000..e9dff18 --- /dev/null +++ b/jdhapi/views/articles/copy_editing.py @@ -0,0 +1,209 @@ +import logging +import requests +from django.conf import settings +from django.core.mail import EmailMessage +from django.http import HttpResponse +from jdhapi.models import Article +from jdhapi.utils.github_action import trigger_workflow_and_wait +from rest_framework.decorators import ( + api_view, + permission_classes, +) +from rest_framework.permissions import IsAdminUser +from rest_framework.response import Response + +logger = logging.getLogger(__name__) + +COPY_EDITOR_ADDRESS = settings.COPY_EDITOR_ADDRESS + +@api_view(["GET"]) +@permission_classes([IsAdminUser]) +def get_docx(request): + """ + GET api/articles/docx + + Helper function to get the docx file from the request. + Needs a pid in the request query parameters. 
+ """ + logger.info("GET api/articles/docx") + + branch_name = "pandoc" + pid = request.GET.get("pid") + + if not pid: + return Response({"error": "Article PID is required."}, status=400) + + try: + workflow_error = ensure_pandoc_workflow(pid) + if workflow_error: + return workflow_error + + docx_bytes = fetch_docx_bytes(pid, branch_name) + return HttpResponse( + docx_bytes, + content_type="application/vnd.openxmlformats-officedocument.wordprocessingml.document", + headers={"Content-Disposition": f'attachment; filename="article_{pid}.docx"'}, + status=200 + ) + except FileNotFoundError as e: + return Response({"error": str(e)}, status=404) + except ValueError as e: + return Response({"error": str(e)}, status=502) + except requests.exceptions.RequestException as e: + return Response( + {"error": "Failed to get article.docx", "details": str(e)}, status=500 + ) + + +@api_view(["POST"]) +@permission_classes([IsAdminUser]) +def send_docx_email(request): + """ + POST api/articles/docx/email + + Send the docx as an email attachment. 
+ :params pid: the article PID + :params body: the email body to send to copy editor + :params branch_name: the branch name where the docx file is located, by default "pandoc" + """ + logger.info("POST api/articles/docx/email") + + branch_name = "pandoc" + pid = request.data.get("pid") + body = request.data.get("body") + + if not pid: + return Response({"error": "Article PID is required."}, status=400) + + try: + workflow_error = ensure_pandoc_workflow(pid) + if workflow_error: + return workflow_error + + docx_bytes = fetch_docx_bytes(pid, branch_name) + send_email_copy_editor(pid, docx_bytes, body) + return Response({"message": f"Docx sent successfully by email for article: {pid}"}, status=200) + + except FileNotFoundError as e: + return Response({"error": str(e)}, status=404) + except ValueError as e: + return Response({"error": str(e)}, status=502) + except requests.exceptions.RequestException as e: + return Response( + {"error": "Failed to get article.docx", "details": str(e)}, status=500 + ) + except Exception as e: + return Response({"error": "Failed to send email", "details": str(e)}, status=502) + + +def fetch_docx_bytes(pid, branch_name): + """ + Helper function to fetch the docx + :params pid: the article PID + :params branch_name: the branch name where the docx file is located + """ + logger.info("[fetch_docx_bytes] Fetch the docx document for the article with PID '%s'", pid) + + url = f"https://api.github.com/repos/jdh-observer/{pid}/contents/article.docx?ref={branch_name}" + headers = {"Authorization": f"Bearer {settings.GITHUB_ACCESS_TOKEN}"} + + response = requests.get(url, headers=headers) + + if response.status_code == 200: + data = response.json() + download_url = data.get("download_url") + + if not download_url: + raise ValueError("Download URL not available for the file.") + + file_response = requests.get(download_url) + file_response.raise_for_status() + + return file_response.content + + if response.status_code == 404: + raise 
FileNotFoundError(f"article.docx file not found for article ID '{pid}'.") + + raise ValueError("Unexpected error occurred while contacting GitHub API.") + + +def send_email_copy_editor(pid, docx_bytes, body): + """ + Helper function to send the email to the copy editor + :params pid: the article PID + :params docx_bytes: the content of the docx file in bytes + """ + logger.info("[send_email_copy_editor] Sending docx by email for PID '%s'", pid) + + filename = f"article_{pid}.docx" + message = EmailMessage( + subject="Article to review for copy editing", + body=body, + from_email="jdh.admin@uni.lu", + to=[COPY_EDITOR_ADDRESS], + ) + message.attach( + filename, + docx_bytes, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + ) + message.send(fail_silently=False) + + +def run_pandoc_workflow(repository_url): + """ + Helper function to run the pandoc workflow + :params repository_url: the article GitHub repository URL + """ + logger.info("[run_pandoc_workflow] Running pandoc workflow for this repository: '%s'", repository_url) + + try: + logger.debug( + "run_pandoc_workflow wait repo=%s", + repository_url, + ) + trigger_workflow_and_wait( + repository_url, + workflow_filename="pandoc.yml", + ) + logger.debug("Pandoc workflow completed repo=%s", repository_url) + except Exception as e: + logger.error("run_pandoc_workflow failed: %s", e) + return Response( + {"error": "Failed to run pandoc workflow", "details": str(e)}, + status=502, + ) + + +def ensure_pandoc_workflow(pid): + """ + Helper function to ensure the pandoc workflow will be executed + :params pid: the article PID + """ + logger.info("[ensure_pandoc_workflow] Starting pandoc workflow for article with PID '%s'", pid) + + try: + article = Article.objects.get(abstract__pid=pid) + except Article.DoesNotExist: + return Response( + {"error": f"Article not found for PID '{pid}'."}, status=404 + ) + + if not article.repository_url: + return Response( + {"error": 
f"repository_url is missing for PID '{pid}'."}, + status=400, + ) + try: + logger.debug( + "Run pandoc workflow and wait for completion pid=%s, repo=%s", + pid, + article.repository_url, + ) + workflow_error = run_pandoc_workflow(article.repository_url) + if workflow_error is not None: return workflow_error + except Exception as e: + return Response( + {"error": "Failed to run pandoc workflow", "details": str(e)}, + status=502, + ) \ No newline at end of file diff --git a/jdhapi/views/articles/status_handlers.py b/jdhapi/views/articles/status_handlers.py index cd998cb..58f572b 100644 --- a/jdhapi/views/articles/status_handlers.py +++ b/jdhapi/views/articles/status_handlers.py @@ -1,13 +1,70 @@ +import logging +from django.utils import timezone +from jdhapi.utils.articles import save_citation +from rest_framework import status from rest_framework.response import Response -from jdhapi.models import Article + +logger = logging.getLogger(__name__) class StatusHandler: def handle(self, article, request): raise NotImplementedError class TechnicalReviewHandler(StatusHandler): - def handle(self, article, request): + def handle(self, article, request): + logger.info("Setting status TECHNICAL_REVIEW pid=%s", article.abstract.pid) article.status = article.Status.TECHNICAL_REVIEW article.save() return Response({"status": "TECHNICAL_REVIEW set", "article pid": article.abstract.pid}) +class CopyEditingHandler(StatusHandler): + def handle(self, article, request): + logger.info("Starting COPY_EDITING flow pid=%s", article.abstract.pid) + + article.status = article.Status.COPY_EDITING + article.save() + logger.info("Set status COPY_EDITING pid=%s", article.abstract.pid) + return Response({"status": "COPY_EDITING set", "article pid": article.abstract.pid}) + + +class PeerReviewHandler(StatusHandler): + def handle(self, article, request): + logger.info("Setting status PEER_REVIEW pid=%s", article.abstract.pid) + article.status = 
article.Status.PEER_REVIEW + article.save() + return Response({"status": "PEER_REVIEW set", "article pid": article.abstract.pid}) + + +class PublishedHandler(StatusHandler): + def handle(self, article, request): + logger.info("Setting status PUBLISHED pid=%s", article.abstract.pid) + # control on the DOI field mandatory + if not article.doi: + return Response( + {"error": "Doi is mandatory if published"}, + status=status.HTTP_400_BAD_REQUEST, + ) + # quick synchronous validation before scheduling + article_data = article.data if isinstance(article.data, dict) else {} + if not article_data.get("title"): + return Response( + {"error": "Article data title is mandatory if published"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # run save_citation synchronously; publish only on success + try: + save_citation(article_id=article.pk) + except Exception as exc: + logger.exception("save_citation failed pid=%s", article.abstract.pid) + return Response( + {"error": "save_citation failed", "details": str(exc)}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # set the publication_date to now + article.publication_date = timezone.now() + article.status = article.Status.PUBLISHED + article.save() + return Response({"status": "PUBLISHED set", "article pid": article.abstract.pid}) + \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..d40d8b4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,4 @@ +[tool.ruff] +line-length = 88 +select = ["F", "E", "I", "RUF"] +fix = true