Skip to content

Commit 3f272b3

Browse files
committed
Update tutorials to run tests
1 parent 2c62c0a commit 3f272b3

5 files changed

+60
-73
lines changed

index.toml

+3-2
Original file line numberDiff line numberDiff line change
@@ -359,6 +359,7 @@ completion_time = "15 min"
359359
created_at = 2024-02-13
360360
haystack_2 = true
361361
dependencies = ["datasets>=2.6.1", "sentence-transformers>=2.2.0", "accelerate"]
362+
needs_gpu = true
362363

363364
[[tutorial]]
364365
title = "Build an Extractive QA Pipeline"
@@ -382,7 +383,7 @@ aliases = []
382383
completion_time = "15 min"
383384
created_at = 2024-02-12
384385
haystack_2 = true
385-
dependencies = ["pydantic<1.10.10", "datasets>=2.6.1"]
386+
dependencies = ["pydantic<1.10.10", "datasets>=2.6.1", "deepeval-haystack", "ragas-haystack", "uptrain-haystack"]
386387

387388
[[tutorial]]
388389
title = "Building Fallbacks to Websearch with Conditional Routing"
@@ -431,5 +432,5 @@ aliases = []
431432
completion_time = "20 min"
432433
created_at = 2024-03-05
433434
haystack_2 = true
434-
dependencies = ["sentence-transformers>=2.2.0"]
435+
dependencies = ["sentence-transformers>=2.2.0", "gradio"]
435436
featured = true

tutorials/34_Extractive_QA_Pipeline.ipynb

+3-1
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,9 @@
6161
},
6262
"outputs": [],
6363
"source": [
64-
"!pip install haystack-ai accelerate sentence-transformers datasets"
64+
"%%bash\n",
65+
"\n",
66+
"pip install haystack-ai accelerate \"sentence-transformers>=2.2.0\" \"datasets>=2.6.1\""
6567
]
6668
},
6769
{

tutorials/36_Building_Fallbacks_with_Conditional_Routing.ipynb

+2-5
Original file line numberDiff line numberDiff line change
@@ -352,7 +352,7 @@
352352
"source": [
353353
"### Visualize the Pipeline\n",
354354
"\n",
355-
"To understand how you formed this pipeline with conditional routing, use [draw()](https://docs.haystack.deepset.ai/v2.0/docs/drawing-pipeline-graphs) method of the pipeline. If you're running this notebook on Google Colab, the generated file will be saved in \"Files\" section on the sidebar or you can call `Image.open()`:"
355+
"To understand how you formed this pipeline with conditional routing, use the `show()` method of the pipeline:"
356356
]
357357
},
358358
{
@@ -368,10 +368,7 @@
368368
},
369369
"outputs": [],
370370
"source": [
371-
"from PIL import Image\n",
372-
"\n",
373-
"pipe.draw(\"pipe.png\")\n",
374-
"Image.open(\"pipe.png\")"
371+
"pipe.show()"
375372
]
376373
},
377374
{

tutorials/39_Embedding_Metadata_for_Improved_Retrieval.ipynb

+1-3
Original file line numberDiff line numberDiff line change
@@ -184,9 +184,7 @@
184184
" document_cleaner = DocumentCleaner()\n",
185185
" document_splitter = DocumentSplitter(split_by=\"sentence\", split_length=2)\n",
186186
" document_embedder = SentenceTransformersDocumentEmbedder(\n",
187-
" model=\"thenlper/gte-large\",\n",
188-
" meta_fields_to_embed=metadata_fields_to_embed,\n",
189-
" device=ComponentDevice.from_str(\"cuda:0\"),\n",
187+
" model=\"thenlper/gte-large\", meta_fields_to_embed=metadata_fields_to_embed\n",
190188
" )\n",
191189
" document_writer = DocumentWriter(document_store=document_store, policy=DuplicatePolicy.OVERWRITE)\n",
192190
"\n",

tutorials/40_Building_Chat_Application_with_Function_Calling.ipynb

+51-62
Original file line numberDiff line numberDiff line change
@@ -1043,7 +1043,7 @@
10431043
"\n",
10441044
"As you notice above, OpenAI Chat Completions API does not call the function; instead, the model generates JSON that you can use to call the function in your code. That's why, to build an end-to-end chat application, you need to check if the OpenAI response contains `tool_calls` for every message. If so, you need to call the corresponding function with the provided arguments and send the function response back to OpenAI. Otherwise, append both user and assistant messages to the `messages` list to have a regular conversation with the model. \n",
10451045
"\n",
1046-
"Run the code cell below and use the input box to interact with the chat application that has access to two tools you've created above. \n",
1046+
"To build a nice UI for your application, you can use [Gradio](https://www.gradio.app/), which comes with a chat interface. Install `gradio`, run the code cell below and use the input box to interact with the chat application that has access to the two tools you've created above. \n",
10471047
"\n",
10481048
"Example queries you can try:\n",
10491049
"* \"***What is the capital of Sweden?***\": A basic query without any function calls\n",
@@ -1057,7 +1057,18 @@
10571057
},
10581058
{
10591059
"cell_type": "code",
1060-
"execution_count": 24,
1060+
"execution_count": null,
1061+
"metadata": {},
1062+
"outputs": [],
1063+
"source": [
1064+
"%%bash\n",
1065+
"\n",
1066+
"pip install gradio"
1067+
]
1068+
},
1069+
{
1070+
"cell_type": "code",
1071+
"execution_count": null,
10611072
"metadata": {
10621073
"colab": {
10631074
"base_uri": "https://localhost:8080/",
@@ -1079,85 +1090,63 @@
10791090
"id": "sK_JeKZLhXcy",
10801091
"outputId": "b0c8c776-4151-44a0-9acd-b12f53119af8"
10811092
},
1082-
"outputs": [
1083-
{
1084-
"name": "stdout",
1085-
"output_type": "stream",
1086-
"text": [
1087-
"ENTER YOUR MESSAGE 👇 INFO: Type 'exit' or 'quit' to stop\n",
1088-
"Can you tell me where Giorgio lives?\n"
1089-
]
1090-
},
1091-
{
1092-
"data": {
1093-
"application/vnd.jupyter.widget-view+json": {
1094-
"model_id": "235721f8c1c14ce3885fba24638bc27f",
1095-
"version_major": 2,
1096-
"version_minor": 0
1097-
},
1098-
"text/plain": [
1099-
"Batches: 0%| | 0/1 [00:00<?, ?it/s]"
1100-
]
1101-
},
1102-
"metadata": {},
1103-
"output_type": "display_data"
1104-
},
1105-
{
1106-
"name": "stdout",
1107-
"output_type": "stream",
1108-
"text": [
1109-
"Giorgio lives in Rome.ENTER YOUR MESSAGE 👇 INFO: Type 'exit' or 'quit' to stop\n",
1110-
"What's the weather like there?\n",
1111-
"The weather in Rome is sunny with a temperature of 14°C.ENTER YOUR MESSAGE 👇 INFO: Type 'exit' or 'quit' to stop\n",
1112-
"exit\n"
1113-
]
1114-
}
1115-
],
1093+
"outputs": [],
11161094
"source": [
1095+
"import gradio as gr\n",
11171096
"import json\n",
11181097
"\n",
1119-
"from haystack.dataclasses import ChatMessage, ChatRole\n",
1098+
"from haystack.dataclasses import ChatMessage\n",
11201099
"from haystack.components.generators.chat import OpenAIChatGenerator\n",
1121-
"from haystack.components.generators.utils import print_streaming_chunk\n",
11221100
"\n",
1123-
"chat_generator = OpenAIChatGenerator(model=\"gpt-3.5-turbo\", streaming_callback=print_streaming_chunk)\n",
1101+
"chat_generator = OpenAIChatGenerator(model=\"gpt-3.5-turbo\")\n",
11241102
"response = None\n",
11251103
"messages = [\n",
11261104
" ChatMessage.from_system(\n",
11271105
" \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"\n",
11281106
" )\n",
11291107
"]\n",
11301108
"\n",
1131-
"while True:\n",
1132-
" # if OpenAI response is a tool call\n",
1133-
" if response and response[\"replies\"][0].meta[\"finish_reason\"] == \"tool_calls\":\n",
1134-
" function_calls = json.loads(response[\"replies\"][0].content)\n",
11351109
"\n",
1136-
" for function_call in function_calls:\n",
1137-
" ## Parse function calling information\n",
1138-
" function_name = function_call[\"function\"][\"name\"]\n",
1139-
" function_args = json.loads(function_call[\"function\"][\"arguments\"])\n",
1110+
"def chatbot_with_fc(message, history):\n",
1111+
" messages.append(ChatMessage.from_user(message))\n",
1112+
" response = chat_generator.run(messages=messages, generation_kwargs={\"tools\": tools})\n",
11401113
"\n",
1141-
" ## Find the correspoding function and call it with the given arguments\n",
1142-
" function_to_call = available_functions[function_name]\n",
1143-
" function_response = function_to_call(**function_args)\n",
1114+
" while True:\n",
1115+
" # if OpenAI response is a tool call\n",
1116+
" if response and response[\"replies\"][0].meta[\"finish_reason\"] == \"tool_calls\":\n",
1117+
" function_calls = json.loads(response[\"replies\"][0].content)\n",
1118+
" print(response[\"replies\"][0])\n",
1119+
" for function_call in function_calls:\n",
1120+
" ## Parse function calling information\n",
1121+
" function_name = function_call[\"function\"][\"name\"]\n",
1122+
" function_args = json.loads(function_call[\"function\"][\"arguments\"])\n",
11441123
"\n",
1145-
" ## Append function response to the messages list using `ChatMessage.from_function`\n",
1146-
" messages.append(ChatMessage.from_function(content=json.dumps(function_response), name=function_name))\n",
1124+
" ## Find the correspoding function and call it with the given arguments\n",
1125+
" function_to_call = available_functions[function_name]\n",
1126+
" function_response = function_to_call(**function_args)\n",
11471127
"\n",
1148-
" # Regular Conversation\n",
1149-
" else:\n",
1150-
" # Append assistant messages to the messages list\n",
1151-
" if not messages[-1].is_from(ChatRole.SYSTEM):\n",
1152-
" messages.append(response[\"replies\"][0])\n",
1128+
" ## Append function response to the messages list using `ChatMessage.from_function`\n",
1129+
" messages.append(ChatMessage.from_function(content=json.dumps(function_response), name=function_name))\n",
1130+
" response = chat_generator.run(messages=messages, generation_kwargs={\"tools\": tools})\n",
11531131
"\n",
1154-
" user_input = input(\"ENTER YOUR MESSAGE 👇 INFO: Type 'exit' or 'quit' to stop\\n\")\n",
1155-
" if user_input.lower() == \"exit\" or user_input.lower() == \"quit\":\n",
1156-
" break\n",
1132+
" # Regular Conversation\n",
11571133
" else:\n",
1158-
" messages.append(ChatMessage.from_user(user_input))\n",
1134+
" messages.append(response[\"replies\"][0])\n",
1135+
" break\n",
1136+
" return response[\"replies\"][0].content\n",
1137+
"\n",
11591138
"\n",
1160-
" response = chat_generator.run(messages=messages, generation_kwargs={\"tools\": tools})"
1139+
"demo = gr.ChatInterface(\n",
1140+
" fn=chatbot_with_fc,\n",
1141+
" examples=[\n",
1142+
" \"Can you tell me where Giorgio lives?\",\n",
1143+
" \"What's the weather like in Madrid?\",\n",
1144+
" \"Who lives in London?\",\n",
1145+
" \"What's the weather like where Mark lives?\",\n",
1146+
" ],\n",
1147+
" title=\"Ask me about weather or where people live!\",\n",
1148+
")\n",
1149+
"demo.launch()"
11611150
]
11621151
},
11631152
{

0 commit comments

Comments
 (0)