|
190 | 190 | " print('CV',cv_loss / cv_steps)\n", |
191 | 191 | "\n", |
192 | 192 | " def save_model(self, step):\n", |
193 | | - " self.saver.save(self.sess, os.path.join(self.models_dir, 'prod2vec_skip_gram'), global_step=step)\n", |
| 193 | + " self.saver.save(self.sess, os.path.join(self.models_dir, 'prod2vec-skip-gram'), global_step=step)\n", |
194 | 194 | "\n", |
195 | 195 | " def load_model(self, path):\n", |
196 | 196 | " self.saver = tf.train.import_meta_graph(self.models_dir + path)\n", |
|
335 | 335 | }, |
336 | 336 | { |
337 | 337 | "cell_type": "code", |
338 | | - "execution_count": 35, |
| 338 | + "execution_count": null, |
339 | 339 | "metadata": {}, |
340 | | - "outputs": [ |
341 | | - { |
342 | | - "ename": "NotFoundError", |
343 | | - "evalue": "/Users/orz/projects/iguazio/tutorials/demos/location_based_recommendation/models; No such file or directory", |
344 | | - "output_type": "error", |
345 | | - "traceback": [ |
346 | | - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", |
347 | | - "\u001b[0;31mNotFoundError\u001b[0m Traceback (most recent call last)", |
348 | | - "\u001b[0;32m<ipython-input-35-488b1ef660e6>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreset_default_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mProduct2VecSkipGram\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdf_train\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdf_cv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproducts\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproduct_id\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'/trained/prod2vec_skip_gram-80000.meta'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m120001\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m20000\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdf_cv\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m//\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrates\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", |
349 | | - "\u001b[0;32m<ipython-input-23-070b0fe01d04>\u001b[0m in \u001b[0;36mload_model\u001b[0;34m(self, path)\u001b[0m\n\u001b[1;32m 64\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mload_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 65\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msaver\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mimport_meta_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodels_dir\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 66\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msaver\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrestore\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msess\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlatest_checkpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodels_dir\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 67\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 68\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mbuild_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", |
350 | | - "\u001b[0;32m~/.pythonlibs/lib/python3.6/site-packages/tensorflow/python/training/checkpoint_management.py\u001b[0m in \u001b[0;36mlatest_checkpoint\u001b[0;34m(checkpoint_dir, latest_filename)\u001b[0m\n\u001b[1;32m 341\u001b[0m v1_path = _prefix_to_checkpoint_path(ckpt.model_checkpoint_path,\n\u001b[1;32m 342\u001b[0m saver_pb2.SaverDef.V1)\n\u001b[0;32m--> 343\u001b[0;31m if file_io.get_matching_files(v2_path) or file_io.get_matching_files(\n\u001b[0m\u001b[1;32m 344\u001b[0m v1_path):\n\u001b[1;32m 345\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mckpt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel_checkpoint_path\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", |
351 | | - "\u001b[0;32m~/.pythonlibs/lib/python3.6/site-packages/tensorflow/python/lib/io/file_io.py\u001b[0m in \u001b[0;36mget_matching_files\u001b[0;34m(filename)\u001b[0m\n\u001b[1;32m 359\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mIf\u001b[0m \u001b[0mthere\u001b[0m \u001b[0mare\u001b[0m \u001b[0mfilesystem\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0mdirectory\u001b[0m \u001b[0mlisting\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 360\u001b[0m \"\"\"\n\u001b[0;32m--> 361\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mget_matching_files_v2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilename\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 362\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 363\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", |
352 | | - "\u001b[0;32m~/.pythonlibs/lib/python3.6/site-packages/tensorflow/python/lib/io/file_io.py\u001b[0m in \u001b[0;36mget_matching_files_v2\u001b[0;34m(pattern)\u001b[0m\n\u001b[1;32m 387\u001b[0m \u001b[0;31m# Convert the filenames to string from bytes.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 388\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_str_any\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmatching_filename\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 389\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0msingle_filename\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mpattern\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 390\u001b[0m for matching_filename in pywrap_tensorflow.GetMatchingFiles(\n\u001b[1;32m 391\u001b[0m compat.as_bytes(single_filename), status)\n", |
353 | | - "\u001b[0;32m~/.pythonlibs/lib/python3.6/site-packages/tensorflow/python/framework/errors_impl.py\u001b[0m in \u001b[0;36m__exit__\u001b[0;34m(self, type_arg, value_arg, traceback_arg)\u001b[0m\n\u001b[1;32m 526\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 527\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mc_api\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_Message\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstatus\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstatus\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 528\u001b[0;31m c_api.TF_GetCode(self.status.status))\n\u001b[0m\u001b[1;32m 529\u001b[0m \u001b[0;31m# Delete the underlying status object from memory otherwise it stays alive\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 530\u001b[0m \u001b[0;31m# as there is a reference to status from this from the traceback due to\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", |
354 | | - "\u001b[0;31mNotFoundError\u001b[0m: /Users/orz/projects/iguazio/tutorials/demos/location_based_recommendation/models; No such file or directory" |
355 | | - ] |
356 | | - } |
357 | | - ], |
| 340 | + "outputs": [], |
358 | 341 | "source": [ |
359 | 342 | "tf.reset_default_graph()\n", |
360 | 343 | "model = Product2VecSkipGram(df_train, df_cv, batch_size, 1, 1, np.max(products.product_id) + 1)\n", |
361 | | - "model.load_model('/trained/prod2vec_skip_gram-80000.meta')\n", |
| 344 | + "model.load_model(os.path.join(MODELS_DIR, 'trained', 'prod2vec-skip-gram-80000.meta'))\n", |
362 | 345 | "model.train(120001, 20000, len(df_cv) // batch_size, rates)" |
363 | 346 | ] |
364 | 347 | }, |
|
377 | 360 | "source": [ |
378 | 361 | "tf.reset_default_graph()\n", |
379 | 362 | "model = Product2VecSkipGram(df_train, df_cv, len(products), 1, 1, np.max(products.product_id) + 1)\n", |
380 | | - "model.load_model('prod2vec_skip_gram-80000.meta')\n", |
| 363 | + "model.load_model(os.path.join(MODELS_DIR, 'trained', 'prod2vec-skip_gram-80000.meta'))\n", |
381 | 364 | "embd = model.predict(products.product_id.values)\n", |
382 | 365 | "products = pd.concat([products, pd.DataFrame(embd)], axis=1)\n", |
383 | 366 | "products.to_pickle(DATA_DIR + 'product_embeddings.pkl')" |
|
388 | 371 | "execution_count": null, |
389 | 372 | "metadata": {}, |
390 | 373 | "outputs": [], |
391 | | - "source": [ |
392 | | - "tutorials/demos/location_based_recommendation/models/trained/prod2vec_skip_gram-80000.meta" |
393 | | - ] |
| 374 | + "source": [] |
394 | 375 | } |
395 | 376 | ], |
396 | 377 | "metadata": { |
|
413 | 394 | } |
414 | 395 | }, |
415 | 396 | "nbformat": 4, |
416 | | - "nbformat_minor": 2 |
| 397 | + "nbformat_minor": 4 |
417 | 398 | } |