Commit 968b0dd

💶 accounting: add export sales data + graph
1 parent 0aaac44 commit 968b0dd

4 files changed (+245, -63 lines)

README.md

Lines changed: 1 addition & 1 deletion

@@ -45,7 +45,7 @@ fastapi dev main.py
 | ---------------------- | -------------------------------------------------------- | -------------------- |
 | Conversation Cleanup | Auto-delete conversations after x months of inactivity | ✅ Done |
 | Ad Refresh | Automatic refreshing of listings | ⚒️ Under Refactoring |
-| Sales Analytics | Export transactions, generate graphs & statistics | 📝 To Be Done |
+| Sales Analytics | Export transactions, generate graphs & statistics | Done |
 | Annual Reports | Yearly compatible reporting system | 📝 To Be Done |
 | Favorite Messages | Quick-copy system for 5 favorite messages | 📝 To Be Done |
 | Publication Menu | Database-linked posting system with search functionality | 📝 To Be Done |

backend/routers/accounting.py

Lines changed: 236 additions & 55 deletions

@@ -1,12 +1,10 @@
 import datetime
-import re
-from fastapi import HTTPException, APIRouter, Depends
+from fastapi import APIRouter, Depends
 from pydantic import BaseModel
-from sqlmodel import Session, select
+from sqlmodel import Session
 from constants import API_URL
 from utils import execute_request, get_vinted_headers
-from config.models import User, get_session
-from fastapi import APIRouter, Depends
+from config.models import get_session
 
 router = APIRouter(
     prefix="/accounting",
@@ -21,81 +19,248 @@ class ExportSalesData(BaseModel):
     period: str
 
 
+def get_transactions_data(year, month, headers):
+    page = 1
+    url = f"{API_URL}wallet/invoices/{year}/{month}?page={page}"
+    response = execute_request("GET", url, headers)
+
+    data = response.json()
+    total_records = data["invoice_lines_pagination"]["total_records"]
+    total_pages = data["invoice_lines_pagination"]["total_pages"]
+
+    if total_records == 0:
+        return {
+            "bought_data": [],
+            "sold_data": [],
+        }
+
+    bought_data = []
+    sold_data = []
+
+    while page <= total_pages:
+        url = f"{API_URL}wallet/invoices/{year}/{month}?page={page}"
+        response = execute_request("GET", url, headers)
+        data = response.json()
+
+        for line in data["invoice_lines"]:
+            amount = float(line["amount"]["amount"])
+            if line.get("entity_type") == "payout":
+                continue
+            if line["type"] == "debit":
+                sold_data.append(amount)
+            else:
+                bought_data.append(amount)
+
+        page += 1
+
+    return {
+        "bought_data": bought_data,
+        "sold_data": sold_data,
+    }
+
+
+def get_labels(start_year, start_month, headers):
+    url = f"{API_URL}wallet/invoices/current"
+    response = execute_request("GET", url, headers)
+    data = response.json()
+    history = data["history"]
+
+    labels = [
+        f"{item['title']} {str(item['year'])[-2:]}"
+        for item in history
+        if (item["year"] > start_year)
+        or (item["year"] == start_year and item["month"] >= start_month)
+    ]
+
+    labels.reverse()
+    return labels
+
+
+def get_join_date(headers):
+    url = f"{API_URL}wallet/invoices/current"
+    response = execute_request("GET", url, headers)
+    data = response.json()
+    history = data["history"]
+    return {
+        "year": history[-1]["year"],
+        "month": history[-1]["month"],
+        "title": history[-1]["title"],
+    }
+
+
+def get_months_to_fetch(period, headers):
+    """Generate list of (year, month) tuples based on period"""
+    now = datetime.datetime.now()
+    current_year = now.year
+    current_month = now.month
+
+    months_to_fetch = []
+
+    if period == "sinceBegin":
+        join_date = get_join_date(headers)
+        start_year = join_date["year"]
+        start_month = join_date["month"]
+    elif period == "ytd":
+        start_year = current_year
+        start_month = 1
+    elif period == "last1M":
+        if current_month == 1:
+            start_year = current_year - 1
+            start_month = 12
+        else:
+            start_year = current_year
+            start_month = current_month - 1
+    elif period == "last3M":
+        for i in range(3, 0, -1):
+            if current_month - i <= 0:
+                year = current_year - 1
+                month = 12 + (current_month - i)
+            else:
+                year = current_year
+                month = current_month - i
+            months_to_fetch.append((year, month))
+        return months_to_fetch
+    elif period == "last6M":
+        for i in range(6, 0, -1):
+            if current_month - i <= 0:
+                year = current_year - 1
+                month = 12 + (current_month - i)
+            else:
+                year = current_year
+                month = current_month - i
+            months_to_fetch.append((year, month))
+        return months_to_fetch
+    elif period == "last1Y":
+        for i in range(12, 0, -1):
+            if current_month - i <= 0:
+                year = current_year - 1
+                month = 12 + (current_month - i)
+            else:
+                year = current_year
+                month = current_month - i
+            months_to_fetch.append((year, month))
+        return months_to_fetch
+    else:
+        return [(current_year, current_month)]
+
+    # Generate months from start to current
+    year = start_year
+    month = start_month
+
+    while (year < current_year) or (year == current_year and month <= current_month):
+        months_to_fetch.append((year, month))
+        month += 1
+        if month > 12:
+            month = 1
+            year += 1
+
+    return months_to_fetch
+
+
 @router.post("/export-sales-data")
 def export_sales_data(
     export_sales_data: ExportSalesData,
-    session: Session = Depends(get_session),
+    headers: dict = Depends(get_vinted_headers),
 ):
-    # todo: implement api call to get real data
-
-    articles_bought_prices_data = [
-        [12.50, 8.90, 15.00, 22.30, 5.50],
-        [18.75, 11.20, 25.80, 9.90, 14.60, 33.40],
-        [7.30, 19.95, 16.75, 12.10, 28.50],
-        [21.40, 13.85, 9.60, 17.20, 11.95, 24.90, 8.75],
-        [15.60, 20.30, 12.40, 18.90, 26.75, 14.50],
-        [19.80, 8.40, 23.70, 16.30, 10.95, 29.60],
-        [13.20, 17.95, 11.80, 22.40, 15.90, 25.30, 120.33],
-    ]
+    # Get months to fetch based on period
+    months_to_fetch = get_months_to_fetch(export_sales_data.period, headers)
+
+    articles_bought_prices_data = []
+    articles_sold_prices_data = []
+
+    for year, month in months_to_fetch:
+        data = get_transactions_data(year, month, headers)
+        articles_bought_prices_data.append(data["bought_data"])
+        articles_sold_prices_data.append(data["sold_data"])
 
-    articles_sold_prices_data = [
-        [18.90, 14.50, 23.00, 31.50, 12.90],
-        [27.40, 19.80, 38.50, 16.75, 22.90, 45.60],
-        [13.70, 29.50, 24.90, 19.40, 42.80],
-        [32.10, 21.75, 16.20, 26.90, 19.95, 37.50, 15.40],
-        [24.80, 31.90, 19.60, 28.50, 39.75, 23.30],
-        [29.70, 15.90, 36.40, 25.80, 18.95, 44.20],
-        [21.60, 27.50, 19.30, 34.80, 24.90, 38.90, 17.80],
-    ]
     total_articles_bought = sum(
         len(articles_bought) for articles_bought in articles_bought_prices_data
     )
     total_articles_sold = sum(
         len(articles_sold) for articles_sold in articles_sold_prices_data
     )
 
+    # Handle empty data cases
+    if not articles_bought_prices_data or not articles_sold_prices_data:
+        return {
+            "labels": [],
+            "turnover_data": [],
+            "gross_profit_data": [],
+            "total_turnover": 0,
+            "maximum_turnover": 0,
+            "minimum_turnover": 0,
+            "average_turnover": 0,
+            "total_gross_profit": 0,
+            "maximum_gross_profit": 0,
+            "minimum_gross_profit": 0,
+            "average_gross_profit": 0,
+            "articles_bought_prices_data": articles_bought_prices_data,
+            "total_articles_bought": total_articles_bought,
+            "average_article_bought_price": 0,
+            "average_nb_articles_bought": 0,
+            "most_expensive_article_bought": 0,
+            "least_expensive_article_bought": 0,
+            "articles_sold_prices_data": articles_sold_prices_data,
+            "total_articles_sold": total_articles_sold,
+            "average_article_sold_price": 0,
+            "average_nb_article_sold": 0,
+            "most_expensive_article_sold": 0,
+            "least_expensive_article_sold": 0,
+        }
+
     turnover_data = [sum(articles_sold) for articles_sold in articles_sold_prices_data]
     gross_profit_data = [
         sum(articles_sold) - sum(articles_bought)
         for articles_bought, articles_sold in zip(
             articles_bought_prices_data, articles_sold_prices_data
         )
     ]
-    labels = [
-        "Jan 24",
-        "Fév 24",
-        "Mar 24",
-        "Avr 24",
-        "Mai 24",
-        "Jun 24",
-        "Jul 24",
-    ]
 
     data = {
-        "labels": labels,
+        "labels": get_labels(months_to_fetch[0][0], months_to_fetch[0][1], headers),
         "turnover_data": turnover_data,
         "gross_profit_data": gross_profit_data,
         "total_turnover": sum(turnover_data),
-        "maximum_turnover": max(turnover_data),
-        "minimum_turnover": min(turnover_data),
-        "average_turnover": sum(turnover_data) / len(turnover_data),
+        "maximum_turnover": max(turnover_data) if turnover_data else 0,
+        "minimum_turnover": min(turnover_data) if turnover_data else 0,
+        "average_turnover": (
+            sum(turnover_data) / len(turnover_data) if turnover_data else 0
+        ),
         "total_gross_profit": sum(gross_profit_data),
-        "maximum_gross_profit": max(gross_profit_data),
-        "minimum_gross_profit": min(gross_profit_data),
-        "average_gross_profit": sum(gross_profit_data) / len(gross_profit_data),
+        "maximum_gross_profit": max(gross_profit_data) if gross_profit_data else 0,
+        "minimum_gross_profit": min(gross_profit_data) if gross_profit_data else 0,
+        "average_gross_profit": (
+            sum(gross_profit_data) / len(gross_profit_data) if gross_profit_data else 0
+        ),
         "articles_bought_prices_data": articles_bought_prices_data,
         "total_articles_bought": total_articles_bought,
-        "average_article_bought_price": sum(
-            sum(articles_bought) for articles_bought in articles_bought_prices_data
-        )
-        / total_articles_bought,
+        "average_article_bought_price": abs(
+            sum(sum(articles_bought) for articles_bought in articles_bought_prices_data)
+            / total_articles_bought
+        ),
         "average_nb_articles_bought": total_articles_bought
         / len(articles_bought_prices_data),
-        "most_expensive_article_bought": max(
-            max(articles_bought) for articles_bought in articles_bought_prices_data
+        "most_expensive_article_bought": (
+            abs(
+                max(
+                    max(articles_bought)
+                    for articles_bought in articles_bought_prices_data
+                    if articles_bought
+                )
+            )
+            if articles_bought_prices_data
+            else 0
         ),
-        "least_expensive_article_bought": min(
-            min(articles_bought) for articles_bought in articles_bought_prices_data
+        "least_expensive_article_bought": (
+            abs(
+                min(
+                    min(articles_bought)
+                    for articles_bought in articles_bought_prices_data
+                    if articles_bought
+                )
+            )
+            if articles_bought_prices_data
+            else 0
         ),
         "articles_sold_prices_data": articles_sold_prices_data,
         "total_articles_sold": sum(
@@ -105,12 +270,28 @@ def export_sales_data(
             sum(articles_sold) for articles_sold in articles_sold_prices_data
         )
        / total_articles_sold,
-        "average_nb_article_sold": total_articles_sold / len(articles_sold_prices_data),
-        "most_expensive_article_sold": max(
-            max(articles_sold) for articles_sold in articles_sold_prices_data
+        "average_nb_article_sold": (
+            total_articles_sold / len(articles_sold_prices_data)
+            if articles_sold_prices_data
+            else 0
+        ),
+        "most_expensive_article_sold": (
+            max(
+                max(articles_sold)
+                for articles_sold in articles_sold_prices_data
+                if articles_sold
+            )
+            if articles_sold_prices_data
+            else 0
         ),
-        "least_expensive_article_sold": min(
-            min(articles_sold) for articles_sold in articles_sold_prices_data
+        "least_expensive_article_sold": (
+            min(
+                min(articles_sold)
+                for articles_sold in articles_sold_prices_data
+                if articles_sold
+            )
+            if articles_sold_prices_data
+            else 0
         ),
     }

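The new endpoint takes a single "period" field; the handler recognizes "sinceBegin", "ytd", "last1M", "last3M", "last6M" and "last1Y", and anything else falls back to the current month only. Below is a minimal sketch of how the route could be exercised against a local dev server; the base URL, the port, and the assumption that the router is mounted without an extra prefix are not part of this commit.

# Hypothetical smoke check for POST /accounting/export-sales-data.
# Assumes `fastapi dev main.py` is serving on http://localhost:8000 and that
# valid Vinted credentials are already stored, so get_vinted_headers succeeds.
import httpx


def fetch_sales_export(period: str) -> dict:
    response = httpx.post(
        "http://localhost:8000/accounting/export-sales-data",
        json={"period": period},  # body matches the ExportSalesData model
        timeout=60,  # each month triggers paginated wallet/invoices calls
    )
    response.raise_for_status()
    return response.json()


if __name__ == "__main__":
    data = fetch_sales_export("last3M")
    # labels come from get_labels(); turnover_data and gross_profit_data are per-month lists
    print(data["labels"], data["total_turnover"], data["total_gross_profit"])

Note that get_months_to_fetch returns early for the rolling windows ("last3M", "last6M", "last1Y") and only reaches the month-by-month while loop for "sinceBegin", "ytd" and "last1M".
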
backend/utils.py

Lines changed: 1 addition & 0 deletions

@@ -102,5 +102,6 @@ def refresh_access_token(headers: dict, session: Session):
     session = next(get_session())
     auth = session.exec(select(User).order_by(User.id.desc())).first()
     auth.vinted_access_token = response.json()["access_token"]
+    auth.vinted_refresh_token = response.json()["refresh_token"]
     session.add(auth)
     session.commit()

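The added line matters if the token endpoint rotates refresh tokens: storing only the new access token would leave a stale refresh token for the next refresh cycle. A minimal, self-contained sketch of the same persistence pattern, with a hypothetical TokenStore standing in for the project's User row and SQLModel session:

# Hypothetical illustration of refresh-token rotation persistence;
# TokenStore and apply_refresh_response are not part of the repository.
from dataclasses import dataclass


@dataclass
class TokenStore:
    access_token: str
    refresh_token: str


def apply_refresh_response(store: TokenStore, payload: dict) -> TokenStore:
    # Persist both tokens: once a new refresh token is issued, the old one
    # may no longer be accepted on the next refresh.
    store.access_token = payload["access_token"]
    store.refresh_token = payload["refresh_token"]
    return store
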
frontend/src/routes/accounting/+page.svelte

Lines changed: 7 additions & 7 deletions

@@ -106,20 +106,20 @@
     data: {
       labels: [],
       datasets: [
-        {
-          label: "Chiffre d'affaires (€)",
-          data: [],
-          borderColor: "#059669",
-          backgroundColor: "#10b981",
-          order: 1,
-        },
         {
           label: "Bénéfice brut (€)",
           borderColor: "#2563eb",
           data: [],
           backgroundColor: "#3b82f6",
           order: 0,
         },
+        {
+          label: "Chiffre d'affaires (€)",
+          data: [],
+          borderColor: "#059669",
+          backgroundColor: "#10b981",
+          order: 1,
+        },
       ],
     },
     options: {
