
Commit 622606a
Author: Adi
Parent: de0cdc3

fix jdbc issue by adding packages mysql:mysql-connector

1 file changed: getting-started/spark-jdbc.ipynb (19 additions, 38 deletions)
@@ -75,7 +75,9 @@
 "%env DB_Name = \"\" # Database|Schema Name\n",
 "%env DB_TABLE = \"\" # Table Name\n",
 "%env DB_USER = \"\" # Database User Name\n",
-"%env DB_PASSWORD = \"\" # Database User's Password"
+"%env DB_PASSWORD = \"\" # Database User's Password\n",
+"\n",
+"os.environ[\"PYSPARK_SUBMIT_ARGS\"] = \"--packages mysql:mysql-connector-java:5.1.39 pyspark-shell\""
 ]
 },
 {
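
Note: the added line sets PYSPARK_SUBMIT_ARGS so that spark-submit resolves the MySQL JDBC connector from Maven when the PySpark shell starts. It has to be set before the SparkSession is created, and it assumes os has already been imported in the notebook. A minimal sketch of that ordering (the app name is taken from the config dump below; everything else is illustrative, not part of this commit):

import os

# Must run before the SparkSession is created, otherwise --packages is ignored.
os.environ["PYSPARK_SUBMIT_ARGS"] = (
    "--packages mysql:mysql-connector-java:5.1.39 pyspark-shell"
)

from pyspark.sql import SparkSession

spark = SparkSession.builder \
    .appName("Spark JDBC to Databases - ipynb") \
    .getOrCreate()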
@@ -156,33 +158,36 @@
 "text": [
 "[('spark.sql.catalogImplementation', 'in-memory'),\n",
 " ('spark.driver.extraLibraryPath', '/hadoop/etc/hadoop'),\n",
-" ('spark.jars',\n",
-" 'file:///spark/v3io-libs/v3io-hcfs_2.11.jar,file:///spark/v3io-libs/v3io-spark2-object-dataframe_2.11.jar,file:///spark/v3io-libs/v3io-spark2-streaming_2.11.jar'),\n",
+" ('spark.app.id', 'app-20190704070308-0001'),\n",
 " ('spark.executor.memory', '2G'),\n",
-" ('spark.repl.local.jars',\n",
-" 'file:///spark/v3io-libs/v3io-hcfs_2.11.jar,file:///spark/v3io-libs/v3io-spark2-object-dataframe_2.11.jar,file:///spark/v3io-libs/v3io-spark2-streaming_2.11.jar'),\n",
 " ('spark.executor.id', 'driver'),\n",
-" ('spark.driver.port', '41461'),\n",
+" ('spark.jars',\n",
+" 'file:///spark/v3io-libs/v3io-hcfs_2.11.jar,file:///spark/v3io-libs/v3io-spark2-object-dataframe_2.11.jar,file:///spark/v3io-libs/v3io-spark2-streaming_2.11.jar,file:///igz/.ivy2/jars/mysql_mysql-connector-java-5.1.39.jar'),\n",
 " ('spark.cores.max', '4'),\n",
-" ('spark.master', 'spark://spark-9nv9ola1rl-3qgje-master:7077'),\n",
+" ('spark.executorEnv.V3IO_ACCESS_KEY', 'bb79fffa-7582-4fd2-9347-a350335801fc'),\n",
 " ('spark.driver.extraClassPath',\n",
 " '/spark/3rd_party/mysql-connector-java-8.0.13.jar'),\n",
 " ('spark.executor.extraJavaOptions', '\"-Dsun.zip.disableMemoryMapping=true\"'),\n",
+" ('spark.driver.port', '33751'),\n",
+" ('spark.driver.host', '10.233.92.91'),\n",
 " ('spark.executor.extraLibraryPath', '/hadoop/etc/hadoop'),\n",
+" ('spark.submit.pyFiles',\n",
+" '/igz/.ivy2/jars/mysql_mysql-connector-java-5.1.39.jar'),\n",
 " ('spark.app.name', 'Spark JDBC to Databases - ipynb'),\n",
-" ('spark.driver.host', '10.233.92.90'),\n",
+" ('spark.repl.local.jars',\n",
+" 'file:///spark/v3io-libs/v3io-hcfs_2.11.jar,file:///spark/v3io-libs/v3io-spark2-object-dataframe_2.11.jar,file:///spark/v3io-libs/v3io-spark2-streaming_2.11.jar,file:///igz/.ivy2/jars/mysql_mysql-connector-java-5.1.39.jar'),\n",
 " ('spark.rdd.compress', 'True'),\n",
-" ('spark.executorEnv.V3IO_ACCESS_KEY', '07934877-3b89-4f88-b08e-a6ea1fb1092b'),\n",
-" ('spark.app.id', 'app-20190430214617-0002'),\n",
 " ('spark.serializer.objectStreamReset', '100'),\n",
+" ('spark.files',\n",
+" 'file:///igz/.ivy2/jars/mysql_mysql-connector-java-5.1.39.jar'),\n",
 " ('spark.executor.cores', '1'),\n",
 " ('spark.executor.extraClassPath',\n",
 " '/spark/3rd_party/mysql-connector-java-8.0.13.jar'),\n",
 " ('spark.submit.deployMode', 'client'),\n",
-" ('spark.submit.pyFiles', '/igz/java/libs/v3io-py.zip'),\n",
 " ('spark.driver.extraJavaOptions', '\"-Dsun.zip.disableMemoryMapping=true\"'),\n",
 " ('spark.ui.showConsoleProgress', 'true'),\n",
-" ('spark.executorEnv.V3IO_USERNAME', 'iguazio')]\n"
+" ('spark.executorEnv.V3IO_USERNAME', 'iguazio'),\n",
+" ('spark.master', 'spark://spark-jddcm4iwas-qxw13-master:7077')]\n"
 ]
 }
 ],
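
The refreshed output above is what confirms the fix: the mysql_mysql-connector-java-5.1.39.jar resolved by --packages now appears under spark.jars, spark.repl.local.jars, spark.files, and spark.submit.pyFiles. A listing like this can be reproduced with a snippet along these lines, assuming an active SparkSession named spark:

# Dump the effective Spark configuration to verify that the MySQL connector
# jar was picked up from --packages.
for key, value in sorted(spark.sparkContext.getConf().getAll()):
    print((key, value))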
@@ -279,33 +284,9 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": null,
 "metadata": {},
-"outputs": [
-{
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"+--------+------+-------------------+\n",
-"|fruit_id| name| variety|\n",
-"+--------+------+-------------------+\n",
-"| 1| Apple| Red Delicious|\n",
-"| 2| Pear| Comice|\n",
-"| 3|Orange| Navel|\n",
-"| 4| Pear| Bartlett|\n",
-"| 5|Orange| Blood|\n",
-"| 6| Apple|Cox's Orange Pippin|\n",
-"| 7| Apple| Granny Smith|\n",
-"| 8| Pear| Anjou|\n",
-"| 9|Orange| Valencia|\n",
-"| 10|Banana| Plantain|\n",
-"| 11|Banana| Burro|\n",
-"| 12|Banana| Cavendish|\n",
-"+--------+------+-------------------+\n",
-"\n"
-]
-}
-],
+"outputs": [],
 "source": [
 "dfMySQL = spark.read \\\n",
 " .format(\"jdbc\") \\\n",
