Skip to content

Commit 961d231

Browse files
committed
Adds missing tags and import catch statement
1 parent 59edd9a commit 961d231

File tree

4 files changed

+24
-3
lines changed

4 files changed

+24
-3
lines changed

contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/__init__.py

+18-3
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,17 @@
99
5. what this doesn't do is create embeddings -- but that would be easy to extend.
1010
"""
1111

12+
import logging
1213
import re
1314

14-
import requests
15-
from langchain import text_splitter
16-
from langchain_core import documents
15+
logger = logging.getLogger(__name__)
16+
17+
from hamilton import contrib
18+
19+
with contrib.catch_import_errors(__name__, __file__, logger):
20+
import requests
21+
from langchain import text_splitter
22+
from langchain_core import documents
1723

1824
from hamilton.htypes import Collect, Parallelizable
1925

@@ -160,3 +166,12 @@ def collect_chunked_url_text(url_result: Collect[dict]) -> list:
160166
.build()
161167
)
162168
dr.display_all_functions("dag.png")
169+
result = dr.execute(
170+
["collect_chunked_url_text"],
171+
inputs={"chunk_size": 256, "chunk_overlap": 32},
172+
)
173+
# do something with the result...
174+
import pprint
175+
176+
for chunk in result["collect_chunked_url_text"]:
177+
pprint.pprint(chunk)
Loading

contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/tags.json

+3
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,8 @@
33
"use_case_tags": ["data processing", "document chunking", "chunking", "langchain"],
44
"secondary_tags": {
55
"language": "English"
6+
},
7+
"driver_tags": {
8+
"executor": "multithreading"
69
}
710
}

contrib/hamilton/contrib/user/elijahbenizzy/parallel_load_dataframes_s3/tags.json

+3
Original file line numberDiff line numberDiff line change
@@ -4,5 +4,8 @@
44
"secondary_tags": {
55
"dataframe-library" : "pandas",
66
"cloud-provider": "aws"
7+
},
8+
"driver_tags": {
9+
"executor": "multithreading"
710
}
811
}

0 commit comments

Comments (0)