|
31 | 31 | "\n", |
32 | 32 | "### 2. Connect to Amazon services\n", |
33 | 33 | "\n", |
34 | | - "For this sample notebook, configure `mem0ai` with [Amazon Neptune Analytics](https://docs.aws.amazon.com/neptune-analytics/latest/userguide/what-is-neptune-analytics.html) as the graph store, [Amazon OpenSearch Serverless](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-overview.html) as the vector store, and [Amazon Bedrock](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) for generating embeddings.\n", |
| 34 | + "For this sample notebook, configure `mem0ai` with [Amazon Neptune Analytics](https://docs.aws.amazon.com/neptune-analytics/latest/userguide/what-is-neptune-analytics.html) as the vector and graph store, and [Amazon Bedrock](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) for generating embeddings.\n", |
35 | 35 | "\n", |
36 | 36 | "Use the following guide for setup details: [Setup AWS Bedrock, AOSS, and Neptune](https://docs.mem0.ai/examples/aws_example#aws-bedrock-and-aoss)\n", |
37 | 37 | "\n", |
| 38 | + "The Neptune Analytics instance must be created with the same vector dimension that the embedding model produces. See: https://docs.aws.amazon.com/neptune-analytics/latest/userguide/vector-index.html\n", |
| 39 | + "\n", |
38 | 40 | "Your configuration should look similar to:\n", |
39 | 41 | "\n", |
40 | 42 | "```python\n", |
41 | 43 | "config = {\n", |
42 | 44 | " \"embedder\": {\n", |
43 | 45 | " \"provider\": \"aws_bedrock\",\n", |
44 | 46 | " \"config\": {\n", |
45 | | - " \"model\": \"amazon.titan-embed-text-v2:0\"\n", |
| 47 | + " \"model\": \"amazon.titan-embed-text-v2:0\",\n", |
| 48 | + " \"embedding_dims\": 1024\n", |
46 | 49 | " }\n", |
47 | 50 | " },\n", |
48 | 51 | " \"llm\": {\n", |
|
54 | 57 | " }\n", |
55 | 58 | " },\n", |
56 | 59 | " \"vector_store\": {\n", |
57 | | - " \"provider\": \"opensearch\",\n", |
| 60 | + " \"provider\": \"neptune\",\n", |
58 | 61 | " \"config\": {\n", |
59 | | - " \"collection_name\": \"mem0\",\n", |
60 | | - " \"host\": \"your-opensearch-domain.us-west-2.es.amazonaws.com\",\n", |
61 | | - " \"port\": 443,\n", |
62 | | - " \"http_auth\": auth,\n", |
63 | | - " \"connection_class\": RequestsHttpConnection,\n", |
64 | | - " \"pool_maxsize\": 20,\n", |
65 | | - " \"use_ssl\": True,\n", |
66 | | - " \"verify_certs\": True,\n", |
67 | | - " \"embedding_model_dims\": 1024,\n", |
68 | | - " }\n", |
| 62 | + "            \"endpoint\": \"neptune-graph://my-graph-identifier\",\n", |
| 63 | + " },\n", |
69 | 64 | " },\n", |
70 | 65 | " \"graph_store\": {\n", |
71 | 66 | " \"provider\": \"neptune\",\n", |
|
96 | 91 | "import os\n", |
97 | 92 | "import logging\n", |
98 | 93 | "import sys\n", |
99 | | - "import boto3\n", |
100 | | - "from opensearchpy import RequestsHttpConnection, AWSV4SignerAuth\n", |
101 | 94 | "from dotenv import load_dotenv\n", |
102 | 95 | "\n", |
103 | 96 | "load_dotenv()\n", |
104 | 97 | "\n", |
105 | | - "logging.getLogger(\"mem0.graphs.neptune.main\").setLevel(logging.DEBUG)\n", |
106 | | - "logging.getLogger(\"mem0.graphs.neptune.base\").setLevel(logging.DEBUG)\n", |
| 98 | + "logging.getLogger(\"mem0.graphs.neptune.main\").setLevel(logging.INFO)\n", |
| 99 | + "logging.getLogger(\"mem0.graphs.neptune.base\").setLevel(logging.INFO)\n", |
107 | 100 | "logger = logging.getLogger(__name__)\n", |
108 | 101 | "logger.setLevel(logging.DEBUG)\n", |
109 | 102 | "\n", |
|
120 | 113 | "source": [ |
121 | 114 | "Setup the Mem0 configuration using:\n", |
122 | 115 | "- Amazon Bedrock as the embedder\n", |
123 | | - "- Amazon Neptune Analytics instance as a graph store\n", |
124 | | - "- OpenSearch as the vector store" |
| 116 | + "- Amazon Neptune Analytics instance as a vector and graph store"
125 | 117 | ] |
126 | 118 | }, |
127 | 119 | { |
|
136 | 128 | "\n", |
137 | 129 | "graph_identifier = os.environ.get(\"GRAPH_ID\")\n", |
138 | 130 | "\n", |
139 | | - "opensearch_host = os.environ.get(\"OS_HOST\")\n", |
140 | | - "opensearch_post = os.environ.get(\"OS_PORT\")\n", |
141 | | - "\n", |
142 | | - "credentials = boto3.Session().get_credentials()\n", |
143 | | - "region = os.environ.get(\"AWS_REGION\")\n", |
144 | | - "auth = AWSV4SignerAuth(credentials, region)\n", |
145 | | - "\n", |
146 | 131 | "config = {\n", |
147 | 132 | " \"embedder\": {\n", |
148 | 133 | " \"provider\": \"aws_bedrock\",\n", |
149 | 134 | " \"config\": {\n", |
150 | 135 | " \"model\": bedrock_embedder_model,\n", |
| 136 | + " \"embedding_dims\": embedding_model_dims\n", |
151 | 137 | " }\n", |
152 | 138 | " },\n", |
153 | 139 | " \"llm\": {\n", |
|
159 | 145 | " }\n", |
160 | 146 | " },\n", |
161 | 147 | " \"vector_store\": {\n", |
162 | | - " \"provider\": \"opensearch\",\n", |
| 148 | + " \"provider\": \"neptune\",\n", |
163 | 149 | " \"config\": {\n", |
164 | | - " \"collection_name\": \"mem0ai_vector_store\",\n", |
165 | | - " \"host\": opensearch_host,\n", |
166 | | - " \"port\": opensearch_post,\n", |
167 | | - " \"http_auth\": auth,\n", |
168 | | - " \"embedding_model_dims\": embedding_model_dims,\n", |
169 | | - " \"use_ssl\": True,\n", |
170 | | - " \"verify_certs\": True,\n", |
171 | | - " \"connection_class\": RequestsHttpConnection,\n", |
| 150 | + " \"endpoint\": f\"neptune-graph://{graph_identifier}\",\n", |
172 | 151 | " },\n", |
173 | 152 | " },\n", |
174 | 153 | " \"graph_store\": {\n", |
|
431 | 410 | "source": [ |
432 | 411 | "## Conclusion\n", |
433 | 412 | "\n", |
434 | | - "In this example we demonstrated how an AWS tech stack can be used to store and retrieve memory context. Bedrock LLM models can be used to interpret given conversations. OpenSearch can store text chunks with vector embeddings. Neptune Analytics can store the text chunks in a graph format with relationship entities." |
| 413 | + "In this example we demonstrated how an AWS tech stack can be used to store and retrieve memory context. Bedrock LLMs can be used to interpret given conversations. Neptune Analytics can store the text chunks, along with their vector embeddings, in a graph format with relationship entities."
435 | 414 | ] |
436 | 415 | } |
437 | 416 | ], |
438 | 417 | "metadata": { |
439 | 418 | "kernelspec": { |
440 | | - "display_name": ".venv", |
| 419 | + "display_name": "Python 3 (ipykernel)", |
441 | 420 | "language": "python", |
442 | 421 | "name": "python3" |
443 | 422 | }, |
|
451 | 430 | "name": "python", |
452 | 431 | "nbconvert_exporter": "python", |
453 | 432 | "pygments_lexer": "ipython3", |
454 | | - "version": "3.13.2" |
| 433 | + "version": "3.13.5" |
455 | 434 | } |
456 | 435 | }, |
457 | 436 | "nbformat": 4, |
458 | | - "nbformat_minor": 2 |
| 437 | + "nbformat_minor": 4 |
459 | 438 | } |
0 commit comments