Skip to content

Commit 7071d36

Browse files
authored
Add files via upload
1 parent d227cf4 commit 7071d36

File tree

5 files changed

+668
-154
lines changed

5 files changed

+668
-154
lines changed

src/services/ai_modal_engine.py

Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
from google import genai
2+
from google.genai import types
3+
import traceback
4+
from ..config import db, get_user_keys
5+
6+
class AiModalEngine:
    """Gemini-backed chat engine for the QuantVAT AI trading-journal auditor.

    Conversation history and the CSV trading context are persisted per user
    under the Firestore document ``users/{uid}`` (fields ``ai_history`` and
    ``ai_context``) so a chat survives across stateless requests.

    Both public methods are error-boundaries: they never raise, returning a
    human-readable error string instead (callers render the return value
    directly in the UI).
    """

    @staticmethod
    def _get_client(api_key):
        """Build a Gemini client bound to the user's own API key."""
        return genai.Client(api_key=api_key)

    @staticmethod
    def initialize_firebase_session(uid, context):
        """Start a fresh audit session seeded with the user's trading ledger.

        Args:
            uid: Firebase user id; used to look up the Gemini key and to
                persist the opening turn of the conversation.
            context: The trading ledger in CSV form, injected verbatim into
                the system instruction.

        Returns:
            The model's opening analysis text, or an ``Error:`` /
            ``System Error:`` string on failure.
        """
        try:
            keys = get_user_keys(uid)
            api_key = keys.get('gemini_key')
            if not api_key:
                return "Error: No API Key found in Settings."

            client = AiModalEngine._get_client(api_key)

            # Persona — system instruction framing the model as a veteran
            # risk manager. (Fix: header keyword was misspelled "PARSONA",
            # which weakens the prompt; corrected to "PERSONA".)
            instruction = f"""
PERSONA:
You are the QuantVAT AI Trading Journal Auditor, a senior Risk Manager and Trading Psychologist with 50 years trading experience like a Market Wizard.
Speak with veteran authority. Tone is blunt but constructive.

MANDATE:
1. Analyze the 'WHY' behind execution based on the provided logs.
2. STRUCTURE:
- ## 📊 OVERVIEW: 2-sentence performance reality check.
- ## 🚩 RED FLAGS: Top 2 execution errors (FOMO, sizing, fear, etc).
- ## 💡 THE REMEDY: One specific, actionable rule for the next session.
3. FORMAT: Use bold text for emphasis.
4. NO TABLES: Use bullet points only.
5. INTERACTION: End with a provocative question about a specific trade.

TRADING LEDGER (CSV FORMAT):
{context}
"""

            prompt = "Analyze my execution performance based on the CSV data above. End with: 'I have analyzed your data. Ready for audit.'"

            response = client.models.generate_content(
                model='gemini-3-flash-preview',
                contents=prompt,
                config=types.GenerateContentConfig(
                    system_instruction=instruction
                )
            )

            # Seed the stored history with the first user/model turn so
            # continue_firebase_chat can replay the conversation verbatim.
            history = [
                {"role": "user", "parts": [{"text": prompt}]},
                {"role": "model", "parts": [{"text": response.text}]}
            ]

            db.collection('users').document(uid).set({
                "ai_history": history,
                "ai_context": context
            }, merge=True)

            return response.text
        except Exception as e:
            # Broad catch by design: this is a user-facing boundary. Log the
            # full traceback server-side and surface a readable message.
            print(f"AI Init Error: {traceback.format_exc()}")
            return f"System Error: {str(e)}"

    @staticmethod
    def continue_firebase_chat(uid, prompt):
        """Continue an existing audit chat, replaying stored history.

        Args:
            uid: Firebase user id whose ``ai_history``/``ai_context`` are read
                and updated.
            prompt: The user's next message.

        Returns:
            The model's reply text, or an ``Error:`` / ``Auditor Error:``
            string on failure.
        """
        try:
            user_doc = db.collection('users').document(uid).get()
            data = user_doc.to_dict() if user_doc.exists else {}
            history = data.get("ai_history", [])
            context = data.get("ai_context", "")

            api_key = get_user_keys(uid).get('gemini_key')
            if not api_key:
                return "Error: API Key missing."

            client = AiModalEngine._get_client(api_key)

            # Robust mapping: stored parts may be dicts ({"text": ...}) or
            # plain strings from older records — normalize either shape.
            contents = []
            for h in history:
                p = h['parts'][0]
                text_content = p['text'] if isinstance(p, dict) else str(p)
                contents.append(types.Content(
                    role=h['role'],
                    parts=[types.Part.from_text(text=text_content)]
                ))

            contents.append(types.Content(role="user", parts=[types.Part.from_text(text=prompt)]))

            # Re-inject a condensed persona + the CSV context on every turn,
            # since system instructions are not part of the stored history.
            # (Fix: keyword was misspelled "PARSONA"; corrected to "PERSONA".)
            instruction = f"PERSONA: QuantVAT AI Trading Journal Auditor. Senior Risk Manager.\nDATA:\n{context}"

            response = client.models.generate_content(
                model='gemini-3-flash-preview',
                contents=contents,
                config=types.GenerateContentConfig(
                    system_instruction=instruction
                )
            )

            # Append the new turn and sync the full history back to Firestore.
            history.append({"role": "user", "parts": [{"text": prompt}]})
            history.append({"role": "model", "parts": [{"text": response.text}]})
            db.collection('users').document(uid).set({"ai_history": history}, merge=True)

            return response.text
        except Exception as e:
            # Same boundary pattern as initialize_firebase_session: log the
            # traceback, return a readable error for the UI.
            print(f"AI Chat Error: {traceback.format_exc()}")
            return f"Auditor Error: {str(e)}"

src/services/analysis.py

Lines changed: 35 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727

2828
ORIGINAL_MATCHED_HEADERS = ["Ticker", "Spot MrktCap", "Spot Volume", "Spot VTMR", "Futures Volume", "Futures VTMR", "OISS", "Funding Rate"]
2929
ORIGINAL_FUTURES_HEADERS = ["Ticker", "Market Cap", "Volume", "VTMR", "OISS", "Funding Rate"]
30-
ORIGINAL_SPOT_HEADERS = ["Ticker", "Market Cap", "Volume", "Spot VTMR"]
30+
ORIGINAL_SPOT_HEADERS = ["Ticker", "MarketCap", "Volume", "VTMR"]
3131

3232
class FileScanner:
3333
"""Locates the latest Spot and Futures data files in the USER directory."""
@@ -79,40 +79,44 @@ class DataProcessor:
7979
def load_spot(path: Path) -> pd.DataFrame:
8080
print(f" Parsing Spot File: {path.name}")
8181
try:
82+
# Explicit UTF-8 for Unicode preservation
8283
if path.suffix == '.html':
83-
df = pd.read_html(path)[0]
84+
df = pd.read_html(str(path), encoding='utf-8')[0]
8485
else:
85-
df = pd.read_csv(path)
86+
df = pd.read_csv(path, encoding='utf-8')
8687
df.columns = [c.lower().replace(' ', '_') for c in df.columns]
8788

8889
col_map = {
8990
'ticker': 'ticker',
9091
'symbol': 'ticker',
91-
'spot_vtmr': 'spot_flip',
92-
'flipping_multiple': 'spot_flip',
93-
'market_cap': 'spot_mc',
94-
'marketcap': 'spot_mc',
95-
'volume_24h': 'spot_vol',
96-
'volume': 'spot_vol'
92+
'vtmr': 'vtmr', # <--- Ensures VTMR isn't blank
93+
'spot_vtmr': 'vtmr',
94+
'flipping_multiple': 'vtmr',
95+
'market_cap': 'market_cap',
96+
'marketcap': 'market_cap',
97+
'volume_24h': 'volume',
98+
'volume': 'volume'
9799
}
98100

99101
df = df.rename(columns=col_map, errors='ignore')
100102

101-
# Normalize ticker column
103+
# Normalize ticker column (Find it if it's missing)
102104
if 'ticker' not in df.columns:
103105
for col in df.columns:
104106
if 'sym' in col or 'tick' in col or 'tok' in col:
105107
df = df.rename(columns={col: 'ticker'})
106108
break
107109

110+
# Unicode-safe cleaning (Protects Chinese characters)
108111
if 'ticker' in df.columns:
109-
df['ticker'] = df['ticker'].apply(lambda x: re.sub(r'[^A-Z0-9]', '', str(x).upper()))
112+
df['ticker'] = df['ticker'].apply(lambda x: str(x).strip().upper())
113+
110114
print(f" Extracted {len(df)} spot tokens")
111115
return df
112116
except Exception as e:
113117
print(f" Spot File Error: {e}")
114118
return pd.DataFrame()
115-
119+
116120
@staticmethod
117121
def _generate_table_html(title: str, df: pd.DataFrame, headers: List[str], df_cols: List[str]) -> str:
118122
if df.empty:
@@ -123,6 +127,7 @@ def _generate_table_html(title: str, df: pd.DataFrame, headers: List[str], df_co
123127
df_display[m] = ""
124128
df_display = df_display[df_cols]
125129
df_display.columns = headers
130+
# escape=False is critical for rendering ticker links
126131
table_html = df_display.to_html(index=False, classes='table', escape=False)
127132
return f'<div class="table-container"><h2>{title}</h2>{table_html}</div>'
128133

@@ -139,46 +144,47 @@ def generate_html_report(futures_df: pd.DataFrame, spot_df: pd.DataFrame) -> Opt
139144
try:
140145
if 'vtmr' in valid_futures.columns:
141146
valid_futures = valid_futures[valid_futures['vtmr'] >= 0.50]
142-
valid_futures['vtmr_display'] = valid_futures['vtmr'].apply(lambda x: f"{x:.1f}x")
147+
valid_futures['vtmr_display'] = valid_futures['vtmr'].apply(lambda x: f"{x:.2f}x")
143148
except Exception as e:
144149
print(f" Futures high-quality filtering error: {e}")
145150
valid_futures['vtmr_display'] = valid_futures['vtmr']
146151

147-
# Create the 3 main datasets: Overlap, Futures-Only, Spot-Only
152+
# Suffix-based merge to prevent blank column mapping issues
148153
merged = pd.merge(spot_df, valid_futures, on='ticker', how='inner', suffixes=('_spot', '_fut'))
149-
if 'vtmr' in merged.columns:
150-
merged = merged.sort_values('vtmr', ascending=False)
154+
if 'vtmr_fut' in merged.columns:
155+
merged = merged.sort_values('vtmr_fut', ascending=False)
151156

152157
futures_only = valid_futures[~valid_futures['ticker'].isin(spot_df['ticker'])].copy()
153158
if 'vtmr' in futures_only.columns:
154159
futures_only = futures_only.sort_values('vtmr', ascending=False)
155160

156161
spot_only = spot_df[~spot_df['ticker'].isin(merged['ticker'])].copy()
157162

158-
if 'spot_flip' in spot_only.columns:
163+
if 'vtmr' in spot_only.columns:
159164
try:
160165
spot_only = spot_only.copy()
161-
spot_only.loc[:, 'flip_numeric'] = spot_only['spot_flip'].astype(str).str.replace('x', '', case=False).astype(float)
166+
spot_only.loc[:, 'flip_numeric'] = spot_only['vtmr'].astype(str).str.replace('x', '', case=False).astype(float)
162167
spot_only = spot_only[spot_only['flip_numeric'] >= 0.50]
163168
spot_only = spot_only.drop(columns=['flip_numeric'])
164169
except Exception as e:
165170
print(f" Spot filtering error: {e}")
166171

167-
if 'spot_flip' in spot_only.columns:
172+
if 'vtmr' in spot_only.columns:
168173
try:
169174
spot_only = spot_only.copy()
170-
spot_only.loc[:, 'sort_val'] = spot_only['spot_flip'].astype(str).str.replace('x', '', case=False).astype(float)
175+
spot_only.loc[:, 'sort_val'] = spot_only['vtmr'].astype(str).str.replace('x', '', case=False).astype(float)
171176
spot_only = spot_only.sort_values('sort_val', ascending=False).drop(columns=['sort_val'])
172177
except Exception:
173178
pass
174179

175-
merged_cols = ['ticker', 'spot_mc', 'spot_vol', 'spot_flip', 'volume', 'vtmr_display', 'oiss', 'funding']
180+
merged_cols = ['ticker', 'market_cap_spot', 'volume_spot', 'vtmr_spot', 'volume_fut', 'vtmr_display', 'oiss', 'funding']
176181
futures_cols = ['ticker', 'market_cap', 'volume', 'vtmr_display', 'oiss', 'funding']
182+
spot_cols = ['ticker', 'market_cap', 'volume', 'vtmr']
177183

178184
html_content = ""
179185
html_content += DataProcessor._generate_table_html("Tokens in Both Futures & Spot Markets", merged, ORIGINAL_MATCHED_HEADERS, merged_cols)
180186
html_content += DataProcessor._generate_table_html("Remaining Futures-Only Tokens", futures_only, ORIGINAL_FUTURES_HEADERS, futures_cols)
181-
html_content += DataProcessor._generate_table_html("Remaining Spot-Only Tokens", spot_only, ORIGINAL_SPOT_HEADERS, ['ticker', 'spot_mc', 'spot_vol', 'spot_flip'])
187+
html_content += DataProcessor._generate_table_html("Remaining Spot-Only Tokens", spot_only, ORIGINAL_SPOT_HEADERS, spot_cols)
182188
current_time = now_str("%d-%m-%Y %H:%M:%S")
183189

184190
cheat_sheet_pdf_footer = """
@@ -212,14 +218,14 @@ def generate_html_report(futures_df: pd.DataFrame, spot_df: pd.DataFrame) -> Opt
212218
<h2 style="color: #2c3e50; border-bottom: 2px solid #3498db; padding-bottom: 5px; margin-top: 20px;">Remaining Spot Only Tokens</h2>
213219
<p>Remember those remaining spot only tokens because there is plenty opportunity there too. So, check them out. Don't fade on them.</p>
214220
<h2 style="color: #2c3e50; border-bottom: 2px solid #3498db; padding-bottom: 5px; margin-top: 20px;">Disclaimer</h2>
215-
<small>This analysis was generated by you using the <strong>Crypto Volume Analysis Toolkit</strong> by <strong>@heisbuba</strong>. It empowers your market research but does not replace your due diligence. Verify the data, back your own instincts, and trade entirely at your own risk.</small>
221+
<small>This analysis was generated by you using the <strong>QuantVAT</strong> by <strong>@heisbuba</strong>. It empowers your market research but does not replace your due diligence. Verify the data, back your own instincts, and trade entirely at your own risk.</small>
216222
</div>
217223
"""
218224
html = f"""
219225
<!DOCTYPE html>
220226
<html>
221227
<head>
222-
<title>Crypto Volume-driven Data Analysis Report</title>
228+
<title>Quantitative Crypto Volume-driven Data Analysis Report</title>
223229
<meta charset="UTF-8">
224230
<style>{ORIGINAL_HTML_STYLE}</style>
225231
</head>
@@ -232,7 +238,7 @@ def generate_html_report(futures_df: pd.DataFrame, spot_df: pd.DataFrame) -> Opt
232238
{html_content}
233239
{cheat_sheet_pdf_footer}
234240
<div class="footer">
235-
<p>Generated by Crypto Volume Analysis Toolkit 4.0 | By (@heisbuba)</p>
241+
<p>Generated by QuantVAT | By (@heisbuba)</p>
236242
</div>
237243
</body>
238244
</html>
@@ -241,25 +247,23 @@ def generate_html_report(futures_df: pd.DataFrame, spot_df: pd.DataFrame) -> Opt
241247

242248
def crypto_analysis_v4(user_keys, user_id) -> None:
243249
"""Main execution flow for Advanced Analysis."""
244-
print(" ADVANCED CRYPTO VOLUME ANALYSIS v4.0")
250+
print(" ADVANCED QUANT CRYPTO VOLUME ANALYSIS")
245251
print(" Scanning for Futures PDF and Spot CSV/HTML files")
246252
print(" " + "=" * 50)
247253

248-
# 1. Find Files
254+
# Find Files
249255
spot_file, futures_file = FileScanner.find_files(user_id)
250256
if not spot_file or not futures_file:
251257
print(" Required files not found.")
252258
raise FileNotFoundError(" You Need CoinAlyze Futures PDF and Spot Market Data. Kindly Generate Spot Data And Upload Futures PDF First.")
253259

254-
# 2. Parse Files
260+
# Parse Files
255261
futures_df = PDFParser.extract(futures_file)
256262
spot_df = DataProcessor.load_spot(spot_file)
257263

258-
# 3. Generate HTML
259264
html_content = DataProcessor.generate_html_report(futures_df, spot_df)
260-
261265
if html_content:
262-
# 4. Create PDF
266+
# Create PDF
263267
pdf_path = convert_html_to_pdf(html_content, user_id)
264268

265269
print(" 🧹 Cleaning up source files after analysis...")

0 commit comments

Comments
 (0)