Skip to content

Commit d7de150

Browse files
committed
added frontend and caddy
1 parent 4679ab5 commit d7de150

35 files changed

+221
-64
lines changed

create_fastapi_project/templates/__init__.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -35,8 +35,9 @@ def install_template(root: str, template: ITemplate, app_name: str):
3535

3636
# Add pyproject.toml file and install packages
3737
app_folder: str = "app"
38-
if template == ITemplate.full:
39-
app_folder = "backend/app"
38+
if template == ITemplate.full or template == ITemplate.langchain_basic:
39+
# TODO: CHECK PATHS IN MACOS AND WINDOWS | (os.path.join)
40+
app_folder = "backend\\app"
4041

4142
poetry_path = os.path.join(root, app_folder)
4243
has_pyproject = add_configuration_to_pyproject(poetry_path)

create_fastapi_project/templates/basic/README.md

+1-2
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ This is a FastAPI project initialized using [`create-fastapi-project`](https://g
66

77
The commands in this documentation can be customized in the **Makefile**. The project can be started with or without Docker.
88

9+
This project uses poetry; if you don't have it installed, you can follow the instructions in the [Poetry Documentation](https://python-poetry.org/docs/#installation).
910

1011
- Run the server (Recommended using docker):
1112

@@ -20,7 +21,6 @@ make run-dev
2021
make run-prod
2122
```
2223

23-
2424
- Run the server without docker:
2525

2626
First, make sure you have all packages installed:
@@ -68,5 +68,4 @@ Running `pip install create-fastapi-project@latest` (with no arguments) launches
6868

6969
By choosing `create-fastapi-project`, you streamline your initial project setup, leverage reliable patterns, and enjoy the convenience of a tool tailored for FastAPI development.
7070

71-
7271
We love ❤️ [FastAPI](https://fastapi.tiangolo.com/) and its ecosystem. You can check out the [create-fastapi-project GitHub repository](https://github.com/allient/create-fastapi-project) - your feedback and contributions are welcome!
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,11 @@
11
PROJECT_NAME=
22
OPENAI_API_KEY=
33
UNSPLASH_API_KEY=
4-
SERP_API_KEY=
4+
SERP_API_KEY=
5+
6+
#############################################
7+
# Caddy variables
8+
#############################################
9+
EXT_ENDPOINT1=127.0.0.1
10+
LOCAL_1=localhost
11+
LOCAL_2=127.0.0.1

create_fastapi_project/templates/langchain_basic/README.md

+2
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,8 @@ This is a FastAPI project initialized using [`create-fastapi-project`](https://g
2929

3030
The commands in this documentation can be customized in the **Makefile**. The project can be started with or without Docker.
3131

32+
This project uses poetry; if you don't have it installed, you can follow the instructions in the [Poetry Documentation](https://python-poetry.org/docs/#installation).
33+
3234
- Run the server (Recommended using docker):
3335

3436
```bash

create_fastapi_project/templates/langchain_basic/app/app/api/v1/endpoints/chat.py renamed to create_fastapi_project/templates/langchain_basic/backend/app/app/api/v1/endpoints/chat.py

+56-50
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
IChatResponse,
33
IUserMessage,
44
)
5+
import logging
56
from app.utils.adaptive_cards.cards import create_adaptive_card
67
from app.utils.callback import (
78
CustomAsyncCallbackHandler,
@@ -84,55 +85,60 @@ async def websocket_endpoint(websocket: WebSocket):
8485
@router.websocket("/tools")
8586
async def websocket_endpoint(websocket: WebSocket):
8687
await websocket.accept()
87-
while True:
88-
data = await websocket.receive_json()
89-
user_message = data["message"]
90-
user_message_card = create_adaptive_card(user_message)
9188

92-
resp = IChatResponse(
93-
sender="you",
94-
message=user_message_card.to_dict(),
95-
type="start",
96-
message_id=str(uuid7()),
97-
id=str(uuid7()),
98-
)
99-
100-
await websocket.send_json(resp.dict())
101-
message_id: str = str(uuid7())
102-
custom_handler = CustomFinalStreamingStdOutCallbackHandler(
103-
websocket, message_id=message_id
104-
)
105-
106-
tools = [
107-
GeneralKnowledgeTool(),
108-
PokemonSearchTool(),
109-
ImageSearchTool(),
110-
YoutubeSearchTool(),
111-
GeneralWeatherTool(),
112-
]
113-
114-
llm = ChatOpenAI(
115-
streaming=True,
116-
temperature=0,
117-
)
118-
119-
agent = ZeroShotAgent.from_llm_and_tools(
120-
llm=llm,
121-
tools=tools,
122-
prefix=zero_agent_prompt.prefix,
123-
suffix=zero_agent_prompt.suffix,
124-
format_instructions=zero_agent_prompt.format_instructions,
125-
input_variables=zero_agent_prompt.input_variables,
126-
)
127-
# TODO: We should use this
128-
# * max_execution_time=1,
129-
# early_stopping_method="generate",
130-
agent_executor = AgentExecutor.from_agent_and_tools(
131-
agent=agent,
132-
tools=tools,
133-
verbose=False,
134-
handle_parsing_errors=True,
135-
memory=memory,
136-
)
89+
while True:
90+
try:
91+
data = await websocket.receive_json()
92+
user_message = data["message"]
93+
user_message_card = create_adaptive_card(user_message)
94+
95+
resp = IChatResponse(
96+
sender="you",
97+
message=user_message_card.to_dict(),
98+
type="start",
99+
message_id=str(uuid7()),
100+
id=str(uuid7()),
101+
)
102+
103+
await websocket.send_json(resp.dict())
104+
message_id: str = str(uuid7())
105+
custom_handler = CustomFinalStreamingStdOutCallbackHandler(
106+
websocket, message_id=message_id
107+
)
108+
109+
tools = [
110+
GeneralKnowledgeTool(),
111+
PokemonSearchTool(),
112+
ImageSearchTool(),
113+
YoutubeSearchTool(),
114+
GeneralWeatherTool(),
115+
]
137116

138-
await agent_executor.arun(input=user_message, callbacks=[custom_handler])
117+
llm = ChatOpenAI(
118+
streaming=True,
119+
temperature=0,
120+
)
121+
122+
agent = ZeroShotAgent.from_llm_and_tools(
123+
llm=llm,
124+
tools=tools,
125+
prefix=zero_agent_prompt.prefix,
126+
suffix=zero_agent_prompt.suffix,
127+
format_instructions=zero_agent_prompt.format_instructions,
128+
input_variables=zero_agent_prompt.input_variables,
129+
)
130+
# TODO: We should use this
131+
# * max_execution_time=1,
132+
# early_stopping_method="generate",
133+
agent_executor = AgentExecutor.from_agent_and_tools(
134+
agent=agent,
135+
tools=tools,
136+
verbose=False,
137+
handle_parsing_errors=True,
138+
memory=memory,
139+
)
140+
141+
await agent_executor.arun(input=user_message, callbacks=[custom_handler])
142+
except WebSocketDisconnect:
143+
logging.info("websocket disconnect")
144+
break

create_fastapi_project/templates/langchain_basic/app/app/utils/adaptive_cards/cards.py renamed to create_fastapi_project/templates/langchain_basic/backend/app/app/utils/adaptive_cards/cards.py

+2
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ def custom_media(anwser):
2323
if url_image.endswith(")."):
2424
url_image = url_image[:-2]
2525
media = Image(url=url_image)
26+
return None
2627
return ICreateMediaAC(media_object=media, media_type="image", url=url_image)
2728

2829
regex_audio = r"\b(https?|ftp):\/\/[^\s/$.?#].[^\s]*\.(mp3|wav|ogg)\b"
@@ -69,6 +70,7 @@ def custom_media(anwser):
6970
media = Image(url=photo)
7071
list_media_element.append(media)
7172
body_container_images = ImageSet(images=list_media_element)
73+
return None
7274
return ICreateMediaAC(
7375
media_object=body_container_images, media_type="image", url=""
7476
)

create_fastapi_project/templates/langchain_basic/app/app/utils/prompt_zero.py renamed to create_fastapi_project/templates/langchain_basic/backend/app/app/utils/prompt_zero.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ class IZeroPrompt(BaseModel):
99
input_variables: list[str]
1010

1111

12-
PREFIX = """Answer the following questions as best and complete you can because you are a female health and wellness coach called Alita. You have access to the following tools:"""
12+
PREFIX = """Answer the following questions as best and complete you can. You have access to the following tools:"""
1313
FORMAT_INSTRUCTIONS = """Use the following format:
1414
1515
Question: the input question you must answer
@@ -20,7 +20,7 @@ class IZeroPrompt(BaseModel):
2020
... (this Thought/Action/Action Input/Observation can repeat N times)
2121
Thought: I now know the final answer
2222
Final Answer: the final answer to the original input question"""
23-
SUFFIX = """When answering, if you find link the formart is the next [title](link).
23+
SUFFIX = """When answering, your answers should be with markdown format.
2424
2525
Question: {input}
2626
Thought:{agent_scratchpad}"""

create_fastapi_project/templates/langchain_basic/app/app/utils/tools.py renamed to create_fastapi_project/templates/langchain_basic/backend/app/app/utils/tools.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ async def _arun(self, query: str, run_manager: Optional[Any] = None) -> str:
105105
image_url = result["urls"]["small"]
106106
images_urls.append(image_url)
107107
image_list_string = "\n".join(
108-
[f"{i+1}. [Image {i+1}]({url})" for i, url in enumerate(images_urls)]
108+
[f"{i+1}. ![Image {i+1}]({url})" for i, url in enumerate(images_urls)]
109109
)
110110
return image_list_string
111111

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
{
2+
3+
}
4+
5+
fastapi.{$EXT_ENDPOINT1}:80, fastapi.{$LOCAL_1}:80, fastapi.{$LOCAL_2}:80, :80 {
6+
reverse_proxy fastapi_server:8000
7+
}
8+
9+
frontend.{$EXT_ENDPOINT1}:80, frontend.{$LOCAL_1}:80, frontend.{$LOCAL_2}:80 {
10+
reverse_proxy streamlit_frontend:8501
11+
}
12+
13+
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,49 @@
1-
version: '3.8'
1+
version: "3.8"
22

33
services:
44
fastapi_server:
5-
build: .
5+
container_name: fastapi_server
6+
build: ./backend
67
restart: always
78
command: "sh -c 'uvicorn app.main:app --reload --workers 3 --host 0.0.0.0 --port 8000'"
89
volumes:
9-
- ./app:/code
10+
- ./backend/app:/code
1011
expose:
1112
- 8000
1213
ports:
13-
- 8000:8000
14+
- 8000:8000
1415
env_file: ".env"
1516

17+
streamlit_frontend:
18+
container_name: streamlit_frontend
19+
build: ./frontend
20+
restart: always
21+
# command: "sh -c 'streamlit run app.py --server.port 8501'"
22+
volumes:
23+
- ./frontend:/code
24+
expose:
25+
- 8501
26+
ports:
27+
- 8501:8501
28+
env_file: ".env"
29+
30+
caddy_reverse_proxy:
31+
container_name: caddy_reverse_proxy
32+
image: caddy:alpine
33+
restart: always
34+
ports:
35+
- 80:80
36+
- 443:443
37+
environment:
38+
- EXT_ENDPOINT1=${EXT_ENDPOINT1}
39+
- LOCAL_1=${LOCAL_1}
40+
- LOCAL_2=${LOCAL_2}
41+
volumes:
42+
- ./caddy/Caddyfile:/etc/caddy/Caddyfile
43+
- ./static:/code/static
44+
- caddy_data:/data
45+
- caddy_config:/config
46+
47+
volumes:
48+
caddy_data:
49+
caddy_config:

create_fastapi_project/templates/langchain_basic/docker-compose.yml

+1-2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
version: '3.8'
1+
version: "3.8"
22

33
services:
44
fastapi_server:
@@ -12,4 +12,3 @@ services:
1212
ports:
1313
- 8000:8000
1414
env_file: ".env"
15-
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
# app/Dockerfile
2+
3+
FROM python:3.9-slim
4+
5+
WORKDIR /app
6+
7+
RUN apt-get update && apt-get install -y \
8+
build-essential \
9+
curl \
10+
software-properties-common \
11+
git \
12+
&& rm -rf /var/lib/apt/lists/*
13+
14+
15+
# Copy the app folder into the container's working directory
16+
COPY app /app
17+
18+
RUN pip3 install -r /app/requirements.txt
19+
20+
EXPOSE 8501
21+
22+
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
23+
24+
ENTRYPOINT ["streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
streamlit
2+
websockets
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
import streamlit as st
2+
import asyncio
3+
import websockets
4+
import json
5+
6+
7+
async def retrieve_bot_response(text):
8+
async with websockets.connect(
9+
"ws://fastapi_server:8000/api/v1/chat/tools"
10+
) as websocket:
11+
message_data = {"message": text}
12+
json_data = json.dumps(message_data)
13+
14+
await websocket.send(json_data)
15+
counter = 0
16+
with st.empty():
17+
stream_data = ""
18+
try:
19+
while True:
20+
counter += 1
21+
response = await asyncio.wait_for(websocket.recv(), timeout=20)
22+
response = json.loads(response)
23+
24+
if response["sender"] == "bot":
25+
stream_data = (
26+
response["message"]["body"][0]["items"][0]["text"]
27+
if counter != 2
28+
else ""
29+
)
30+
st.markdown(stream_data)
31+
32+
if response["type"] == "end":
33+
break
34+
st.markdown(stream_data)
35+
except asyncio.TimeoutError:
36+
st.warning("Connection timed out. Closing the connection.")
37+
38+
return stream_data
39+
40+
41+
st.title("Simple chat")
42+
43+
# Initialize chat history
44+
if "messages" not in st.session_state:
45+
st.session_state.messages = []
46+
47+
# Display chat messages from history on app rerun
48+
for message in st.session_state.messages:
49+
with st.chat_message(message["role"]):
50+
st.markdown(message["content"])
51+
52+
# Accept user input
53+
if prompt := st.chat_input("What is up?"):
54+
# Add user message to chat history
55+
st.session_state.messages.append({"role": "user", "content": prompt})
56+
# Display user message in chat message container
57+
with st.chat_message("user"):
58+
st.markdown(prompt)
59+
60+
# Display assistant response in chat message container
61+
with st.chat_message("assistant"):
62+
message_placeholder = st.empty()
63+
full_response = asyncio.new_event_loop().run_until_complete(
64+
retrieve_bot_response(prompt)
65+
)
66+
67+
# Add assistant response to chat history
68+
st.session_state.messages.append({"role": "assistant", "content": full_response})

0 commit comments

Comments
 (0)