Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions simple_backend/src/task_tracker/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
.env
6 changes: 6 additions & 0 deletions simple_backend/src/task_tracker/database.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"8263106097683147186": {
"status": "ON_REVIEW",
"descr": "\u0421\u043f\u043b\u044f\u0441\u0430\u0442\u044c \u0434\u0436\u0438\u0433\u0443"
}
}
161 changes: 155 additions & 6 deletions simple_backend/src/task_tracker/main.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,168 @@
from fastapi import FastAPI
from pydantic import BaseModel
from enum import Enum
from dotenv import load_dotenv
from openai import OpenAI
from abc import ABC, abstractmethod
import json
import random
import requests
import os

class Status(Enum):
    """Lifecycle states a task may be in, stored as their string names."""
    OPEN = "OPEN"
    IN_PROCESS = "IN_PROCESS"
    ON_REVIEW = "ON_REVIEW"
    CLOSED = "CLOSED"

class Task(BaseModel):
    """Request/response schema for a single tracked task."""
    status: Status
    descr: str
    # Filled in server-side with the LLM's solving hint; empty on input.
    ai_solve_idea: str = ""


class BaseHTTPClient(ABC):
    """Tiny abstract HTTP client.

    Holds a base URL plus default headers and exposes verb-specific
    helpers that all funnel into the subclass-provided ``_send_request``.
    """

    def __init__(self, base_url: str, headers: dict):
        self.base_url = base_url
        self.headers = headers

    @abstractmethod
    def _send_request(self, method: str, endpoint: str, **kwargs):
        """Perform the actual HTTP exchange; implemented by subclasses."""

    def get(self, endpoint: str, params: dict = None):
        """Issue a GET request with optional query parameters."""
        return self._send_request("GET", endpoint, params=params)

    def post(self, endpoint: str, json: dict = None):
        """Issue a POST request with an optional JSON body."""
        return self._send_request("POST", endpoint, json=json)

    def patch(self, endpoint: str, json: dict = None):
        """Issue a PATCH request with an optional JSON body."""
        return self._send_request("PATCH", endpoint, json=json)

class GistClient(BaseHTTPClient):
    """GitHub REST client authenticated with a personal access token."""

    def __init__(self, token: str):
        super().__init__(
            "https://api.github.com",
            {
                "Accept": "application/vnd.github+json",
                "Authorization": f"Bearer {token}",
                "X-GitHub-Api-Version": "2022-11-28",
            },
        )

    def _send_request(self, method: str, endpoint: str, **kwargs):
        """Fire the request against api.github.com and return parsed JSON.

        Raises ``requests.HTTPError`` on any non-2xx response.
        """
        url = f"{self.base_url}{endpoint}"
        resp = requests.request(method, url, headers=self.headers, **kwargs)
        resp.raise_for_status()
        return resp.json()

class OpenRouterClient(BaseHTTPClient):
    """HTTP client for the OpenRouter chat-completions API."""

    def __init__(self, api_key: str, base_url: str):
        super().__init__(base_url, {"Authorization": f"Bearer {api_key}"})

    def _send_request(self, method: str, endpoint: str, **kwargs):
        """Send the request to OpenRouter and return the decoded JSON body.

        Raises ``requests.HTTPError`` on any non-2xx response.
        """
        resp = requests.request(
            method,
            f"{self.base_url}{endpoint}",
            headers=self.headers,
            **kwargs,
        )
        resp.raise_for_status()
        return resp.json()

    def create_chat_completion(self, model: str, messages: list):
        """POST a chat-completion request for ``model`` with ``messages``."""
        payload = {"model": model, "messages": messages}
        return self.post("/chat/completions", json=payload)

class Storage:
    """Task persistence backed by a single file inside a GitHub gist."""

    def __init__(self, client: GistClient, gist_id: str, filename: str):
        self.client = client
        self.gist_id = gist_id
        self.filename = filename

    def get_tasks_from_database(self):
        """Fetch the gist and deserialize the task file into a dict."""
        gist = self.client.get(f"/gists/{self.gist_id}")
        raw = gist['files'][self.filename]['content']
        return json.loads(raw)

    def dump_tasks_to_database(self, tasks):
        """Serialize ``tasks`` and overwrite the task file in the gist.

        NOTE(review): this is a whole-file read-modify-write, so concurrent
        writers can clobber each other — see readme.md for the discussion.
        """
        payload = {"files": {self.filename: {"content": json.dumps(tasks)}}}
        self.client.patch(f"/gists/{self.gist_id}", json=payload)


# --- Application wiring (runs once at import time; order matters) ---
app = FastAPI()
# Load variables from .env into the environment BEFORE any os.getenv below.
load_dotenv()

# GitHub client authenticated with a personal access token (TOKEN).
gist_client = GistClient(os.getenv('TOKEN'))
# LLM client used to generate solving hints for new tasks.
openrouter_client = OpenRouterClient(
    api_key = os.getenv('OPENROUTER_API_KEY'),
    base_url="https://openrouter.ai/api/v1"
)
# Tasks live in one JSON file inside the configured gist.
storage = Storage(gist_client, os.getenv('GIST_ID'), os.getenv('GIST_FILENAME'))


@app.get("/tasks")
def get_tasks():
    """Return every stored task, keyed by its string task id."""
    # Dropped the stray `pass` left over from the removed stub.
    return storage.get_tasks_from_database()


@app.post("/tasks")
def create_task(task: Task):
    """Create a task, ask the LLM for a solving hint, and persist it.

    Returns a confirmation string containing the generated task id.
    """
    # Removed the duplicated stub `def create_task(task): pass` that the
    # diff left above the real definition.
    task_data = task.model_dump(mode="json")

    tasks = storage.get_tasks_from_database()

    # Uniform 63-bit random id. `abs(hash(random.randbytes(32)))` was both
    # per-process salted (PYTHONHASHSEED) and collision-folding; getrandbits
    # draws the id directly. Re-roll on the (unlikely) collision — keys in
    # the stored JSON are strings, hence str() for the membership test.
    task_id = random.getrandbits(63)
    while str(task_id) in tasks:
        task_id = random.getrandbits(63)

    completion = openrouter_client.create_chat_completion(
        model="deepseek/deepseek-r1:free",
        messages=[
            {
                "role": "system",
                "content": "You are an assistaint. User will send you task, and you must give user an advice about how to solve this task. Answer in russian."
            },
            {
                "role": "user",
                "content": task_data['descr']
            }
        ]
    )
    task_data['ai_solve_idea'] = completion['choices'][0]['message']['content']
    tasks[task_id] = task_data

    storage.dump_tasks_to_database(tasks)

    return f"Task created with id {task_id}"

@app.put("/tasks/{task_id}")
def update_task(task_id: int, new_status: Status):
    """Set the status of an existing task.

    Returns a human-readable result string; an unknown id is reported
    in the response rather than raised, matching the other endpoints.
    """
    # Removed the duplicated stub `def update_task(task_id: int): pass`
    # that the diff left above the real definition.
    tasks = storage.get_tasks_from_database()
    try:
        # Stored JSON keys are strings, so look up by the stringified id.
        tasks[str(task_id)]['status'] = new_status.value
    except KeyError:
        return "No task with this ID"

    storage.dump_tasks_to_database(tasks)

    # Fixed typo: "succesfully" -> "successfully".
    return "Task status changed successfully"

@app.delete("/tasks/{task_id}")
def delete_task(task_id: int):
    """Remove a task by id and persist the remaining tasks.

    Returns a human-readable result string; an unknown id is reported
    in the response rather than raised.
    """
    # Dropped the stray `pass` left over from the removed stub.
    tasks = storage.get_tasks_from_database()
    try:
        # Stored JSON keys are strings, so pop by the stringified id.
        tasks.pop(str(task_id))
    except KeyError:
        return "No task with this ID"
    storage.dump_tasks_to_database(tasks)
    # Fixed typo: "succesfully" -> "successfully".
    return "Task deleted successfully"
8 changes: 8 additions & 0 deletions simple_backend/src/task_tracker/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
Состояние гонки -- оно же race condition -- может возникать в данном случае, если несколько клиентов одновременно или почти одновременно отправляют заявки в gist, либо если один клиент посылает запросы с очень высокой частотой.
Это может привести к откату изменений, внесённых одними клиентами, запросом, сделанным другим клиентом.
Это возникает из-за того, что работа с JSON-файлом устроена как загрузка и последующая перезапись всего файла целиком.
Это можно решить, отказавшись от json в пользу СУБД
Если использование json необходимо, следует сделать промежуточный исполняемый файл, который будет лежать на одном сервере с json, хранить очередь заявок на внесение изменений и перезаписывать файл согласно этой очереди.
Можно также попробовать внедрить блокировку по ETag

Помимо этого, потенциально могут возникать коллизии хэшей, однако их вероятность незначительна при объёме задач, измеряемом миллионами. Риски становятся существенными только при миллиардах задач.