
Commit 445d387

Clean up notebook for spaCy to Spark NLP
1 parent 9cf4d84 commit 445d387

1 file changed: +5 −77 lines


examples/python/annotation/text/english/SpacyToAnnotation_Tutorial.ipynb

+5 −77
@@ -115,73 +115,6 @@
     "! cat ./multi_doc_tokens.json"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/"
-    },
-    "id": "FzG-UiGS3O5S",
-    "outputId": "670f8619-0ab3-4082-cffc-e9ee265fb683"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Mounted at /content/drive\n"
-     ]
-    }
-   ],
-   "source": [
-    "from google.colab import drive\n",
-    "drive.mount('/content/drive')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "id": "mw_IvCKa3QlD"
-   },
-   "outputs": [],
-   "source": [
-    "!cp drive/MyDrive/JSL/sparknlp/spark_nlp-4.3.0-py2.py3-none-any.whl .\n",
-    "!cp drive/MyDrive/JSL/sparknlp/sparknlp.jar ."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/"
-    },
-    "id": "4SVtLznZXe6K",
-    "outputId": "0a2ac5ed-c0f7-44b1-d078-3e9f3dbfdb53"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m281.4/281.4 MB\u001B[0m \u001B[31m4.8 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
-      "\u001B[?25h Preparing metadata (setup.py) ... \u001B[?25l\u001B[?25hdone\n",
-      "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m199.0/199.0 KB\u001B[0m \u001B[31m14.3 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
-      "\u001B[?25h Building wheel for pyspark (setup.py) ... \u001B[?25l\u001B[?25hdone\n",
-      "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
-      "Processing ./spark_nlp-4.3.0-py2.py3-none-any.whl\n",
-      "Installing collected packages: spark-nlp\n",
-      "Successfully installed spark-nlp-4.3.0\n"
-     ]
-    }
-   ],
-   "source": [
-    "! pip install --upgrade -q pyspark==3.2.1\n",
-    "! pip install spark_nlp-4.3.0-py2.py3-none-any.whl"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -204,17 +137,12 @@
      "base_uri": "https://localhost:8080/"
     },
     "id": "HL7dLz15XTGr",
-    "outputId": "1cb63f4c-e59f-49dc-8cc1-b5dea82989f8"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Spark NLP version 4.3.0\n"
-     ]
+    "outputId": "1cb63f4c-e59f-49dc-8cc1-b5dea82989f8",
+    "pycharm": {
+     "is_executing": true
     }
-   ],
+   },
+   "outputs": [],
    "source": [
     "import sparknlp\n",
     "from sparknlp.base import *\n",
