app.py
import asyncio
import json

from flask import Flask, request, jsonify
from flask_cors import CORS

from backend import AIMODEL, search_urls, create_crawler

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the frontend

model = AIMODEL()
def clean_response(raw_response):
    """Extract the JSON object embedded in the model's raw text response."""
    # Slice from the first '{' to the last '}' to drop any surrounding chatter.
    json_start = raw_response.find("{")
    json_end = raw_response.rfind("}") + 1
    data = json.loads(raw_response[json_start:json_end])
    # Normalize string booleans ("true"/"false") to real booleans.
    if data.get("bool") == "true":
        data["bool"] = True
    elif data.get("bool") == "false":
        data["bool"] = False
    return data
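# Worked example (hypothetical model output, not from the original file): given
#   raw_response = 'Sure! Here is the verdict: {"bool": "true", "reason": "ok"}'
# clean_response() slices out the {...} block, parses it, and normalizes the
# string boolean, returning {'bool': True, 'reason': 'ok'}.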
@app.route("/analyze", methods=["POST"])
def analyze():
    user_input = request.json.get("query")
    print("Received query:", user_input)
    if not user_input:
        return jsonify({"error": "No query provided"}), 400

    # If the input looks like a URL, scrape it first and derive search
    # queries from it; otherwise treat the raw text as the query itself.
    if any(kwd in user_input for kwd in ["http", "www", "://"]):
        target = asyncio.run(create_crawler(user_input))[user_input]
        print("Target content:", target)
        query = model.generate_search_queries(user_input)
    else:
        query = user_input
        target = query
    print("Generated query:", query)

    # Search for relevant URLs and scrape the top results.
    urls = search_urls(query)
    scraped_content = {}
    for url in urls[:4]:
        scraped_content.update(asyncio.run(create_crawler(url)))
    print("Done scraping.")

    # Combine the scraped pages and record which URLs were used.
    combined_content = "\n".join(scraped_content.values())
    combined_content += f"\n\n\nThese are the URLs which were used for the analysis: {urls}"

    analysis_result = clean_response(model.analyse_url(query, combined_content, target))
    print("Done analyzing:", analysis_result)
    return jsonify(analysis_result)
if __name__ == "__main__":
    app.run(port=5000, debug=True)
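# Usage sketch (hypothetical, not part of the original file): with the server
# running locally on port 5000, the /analyze endpoint can be exercised from a
# separate script or shell, assuming the `requests` package is installed.
#
#   import requests
#   resp = requests.post(
#       "http://127.0.0.1:5000/analyze",
#       json={"query": "https://example.com/article"},
#   )
#   print(resp.json())  # parsed result from clean_response(), e.g. {'bool': True, ...}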