diff --git a/academic-events/part_1_data_engineering.ipynb b/academic-events/part_1_data_engineering.ipynb
index dcf5900..da6d98e 100644
--- a/academic-events/part_1_data_engineering.ipynb
+++ b/academic-events/part_1_data_engineering.ipynb
@@ -454,7 +454,7 @@
 " # If time zones are unknown comment out this line and revert to UTC.\n",
 " parameters_dict.update(active__tz=timezone)\n",
 " \n",
- " search_results = phq.events.search(parameters_dict).iter_all()\n",
+ " search_results = phq.events.search(**parameters_dict).iter_all()\n",
 "\n",
 " search_results = [result.to_dict() for result in search_results]\n",
 "\n",
diff --git a/academic-events/part_2_data_exploration.ipynb b/academic-events/part_2_data_exploration.ipynb
index 9a0845c..c037d64 100644
--- a/academic-events/part_2_data_exploration.ipynb
+++ b/academic-events/part_2_data_exploration.ipynb
@@ -176,7 +176,7 @@
 " lat_long[0],\n",
 " lat_long[1])) \n",
 "\n",
- " search_results = phq.events.search(parameters_dict).iter_all()\n",
+ " search_results = phq.events.search(**parameters_dict).iter_all()\n",
 "\n",
 " search_results = [result.to_dict() for result in search_results]\n",
 "\n",
diff --git a/academic-events/part_3_feature_engineering.ipynb b/academic-events/part_3_feature_engineering.ipynb
index 405d5a8..a0f056f 100644
--- a/academic-events/part_3_feature_engineering.ipynb
+++ b/academic-events/part_3_feature_engineering.ipynb
@@ -202,7 +202,7 @@
 " parameters_dict.update(limit=500)\n",
 " parameters_dict.update(place__scope=place_id)\n",
 "\n",
- " search_results = phq.events.search(parameters_dict).iter_all()\n",
+ " search_results = phq.events.search(**parameters_dict).iter_all()\n",
 "\n",
 " search_results = [result.to_dict() for result in search_results]\n",
 "\n",
@@ -268,7 +268,7 @@
 " lat_long[0],\n",
 " lat_long[1])) \n",
 "\n",
- " search_results = phq.events.search(parameters_dict).iter_all()\n",
+ " search_results = phq.events.search(**parameters_dict).iter_all()\n",
 "\n",
 " search_results = [result.to_dict() for result in search_results]\n",
 "\n",
diff --git a/attended-events/part_1_data_engineering.ipynb b/attended-events/part_1_data_engineering.ipynb
index 3486e6a..544d19d 100644
--- a/attended-events/part_1_data_engineering.ipynb
+++ b/attended-events/part_1_data_engineering.ipynb
@@ -70,7 +70,7 @@
 },
 {
 "cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -397,15 +397,15 @@
 },
 {
 "cell_type": "code",
- "execution_count": 15,
+ "execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
 "results = []\n",
 "\n",
 "# Iterating through all the events that match our criteria and adding them to our results\n",
- "for event in phq.events.search(parameters).iter_all():\n",
- " results.append(event.to_dict())\n",
+ "for event in phq.events.search(**parameters).iter_all():\n",
+ " results.append(event.model_dump())\n",
 "\n",
 "# Converting the results to a DataFrame\n",
 "event_df = pd.DataFrame(results)"
diff --git a/attended-events/part_2_data_exploration.ipynb b/attended-events/part_2_data_exploration.ipynb
index becc9a5..b5800d4 100644
--- a/attended-events/part_2_data_exploration.ipynb
+++ b/attended-events/part_2_data_exploration.ipynb
@@ -177,8 +177,8 @@
 "\n",
 " # Iterating through all the events that match our criteria and\n",
 " # adding them to our result_list.\n",
- " for event in phq.events.search(params).iter_all():\n",
- " result_list.append(event.to_dict())\n",
+ " for event in phq.events.search(**params).iter_all():\n",
+ " result_list.append(event.model_dump())\n",
 "\n",
 " event_df = pd.DataFrame(result_list)\n",
 "\n",
diff --git a/attended-events/part_3_feature_engineering.ipynb b/attended-events/part_3_feature_engineering.ipynb
index b29c8f1..740c349 100644
--- a/attended-events/part_3_feature_engineering.ipynb
+++ b/attended-events/part_3_feature_engineering.ipynb
@@ -241,8 +241,8 @@
 "\n",
 " # Iterating through all the events that match our criteria and\n",
 " # adding them to our result_list\n",
- " for event in phq.events.search(params).iter_all():\n",
- " result_list.append(event.to_dict())\n",
+ " for event in phq.events.search(**params).iter_all():\n",
+ " result_list.append(event.model_dump())\n",
 "\n",
 " event_df = pd.DataFrame(result_list)\n",
 "\n",
diff --git a/demand-surge/demand-surge-notebook.ipynb b/demand-surge/demand-surge-notebook.ipynb
index 46101c6..bbcd676 100644
--- a/demand-surge/demand-surge-notebook.ipynb
+++ b/demand-surge/demand-surge-notebook.ipynb
@@ -1143,8 +1143,8 @@
 "\n",
 " results = []\n",
 "\n",
- " for event in client.events.search(parameters).iter_all():\n",
- " results.append(event.to_dict())\n",
+ " for event in client.events.search(**parameters).iter_all():\n",
+ " results.append(event.model_dump())\n",
 "\n",
 " df = pd.DataFrame(results)\n",
 " df[\"category\"] = df[\"category\"].str.replace(\"-\", \"_\") # clean values\n",
diff --git a/live-tv-events/part_1_data_engineering.ipynb b/live-tv-events/part_1_data_engineering.ipynb
index f86ce77..9b17945 100644
--- a/live-tv-events/part_1_data_engineering.ipynb
+++ b/live-tv-events/part_1_data_engineering.ipynb
@@ -467,7 +467,7 @@
 " #parameters_dict.update(location__place_id=place_id) # uncomment/comment as required. \n",
 " parameters_dict.update(location__origin=lat_long) # uncomment/comment as required. \n",
 " \n",
- " search_results = phq.broadcasts.search(parameters_dict).iter_all()\n",
+ " search_results = phq.broadcasts.search(**parameters_dict).iter_all()\n",
 "\n",
 " search_results = [result.to_dict() for result in search_results]\n",
 "\n",
diff --git a/severe-weather-events/part_1_data_engineering.ipynb b/severe-weather-events/part_1_data_engineering.ipynb
index 0069b4d..0c17da8 100644
--- a/severe-weather-events/part_1_data_engineering.ipynb
+++ b/severe-weather-events/part_1_data_engineering.ipynb
@@ -328,8 +328,8 @@
 "results = []\n",
 "\n",
 "# Iterating through all the events that match our criteria and adding them to our results\n",
- "for event in phq.events.search(parameters).iter_all():\n",
- " results.append(event.to_dict())\n",
+ "for event in phq.events.search(**parameters).iter_all():\n",
+ " results.append(event.model_dump())\n",
 "\n",
 "# Converting the results to a DataFrame\n",
 "event_df = pd.DataFrame(results)"
diff --git a/severe-weather-events/part_3_feature_engineering.ipynb b/severe-weather-events/part_3_feature_engineering.ipynb
index 504fb6d..61f4f7c 100644
--- a/severe-weather-events/part_3_feature_engineering.ipynb
+++ b/severe-weather-events/part_3_feature_engineering.ipynb
@@ -248,8 +248,8 @@
 "\n",
 " # Iterating through all the events that match our criteria and\n",
 " # adding them to our result_list\n",
- " for event in phq.events.search(params).iter_all():\n",
- " result_list.append(event.to_dict())\n",
+ " for event in phq.events.search(**params).iter_all():\n",
+ " result_list.append(event.model_dump())\n",
 "\n",
 " event_df = pd.DataFrame(result_list)\n",
 " # Selecting the target fields\n",
diff --git a/unattended-events/part_1_data_engineering.ipynb b/unattended-events/part_1_data_engineering.ipynb
index 56e62ea..3d4b7fd 100644
--- a/unattended-events/part_1_data_engineering.ipynb
+++ b/unattended-events/part_1_data_engineering.ipynb
@@ -344,8 +344,8 @@
 "results = []\n",
 "\n",
 "# Iterating through all the events that match our criteria and adding them to our results\n",
- "for event in phq.events.search(parameters).iter_all():\n",
- " results.append(event.to_dict())\n",
+ "for event in phq.events.search(**parameters).iter_all():\n",
+ " results.append(event.model_dump())\n",
 "\n",
 "# Converting the results to a DataFrame\n",
 "event_df = pd.DataFrame(results)"
diff --git a/unattended-events/part_2_data_exploration.ipynb b/unattended-events/part_2_data_exploration.ipynb
index e3906e1..ea0b09d 100644
--- a/unattended-events/part_2_data_exploration.ipynb
+++ b/unattended-events/part_2_data_exploration.ipynb
@@ -159,8 +159,8 @@
 "\n",
 " # Iterating through all the events that match our criteria and\n",
 " # adding them to our result_list\n",
- " for event in phq.events.search(params).iter_all():\n",
- " result_list.append(event.to_dict())\n",
+ " for event in phq.events.search(**params).iter_all():\n",
+ " result_list.append(event.model_dump())\n",
 "\n",
 " event_df = pd.DataFrame(result_list)\n",
 " # Selecting the target fields\n",
@@ -19004,8 +19004,8 @@
 " result_list = []\n",
 "\n",
 " # Iterating through all the events that match our criteria and adding them to our result_list\n",
- " for event in phq.events.search(parameters_dict).iter_all():\n",
- " result_list.append(event.to_dict())\n",
+ " for event in phq.events.search(**parameters_dict).iter_all():\n",
+ " result_list.append(event.model_dump())\n",
 "\n",
 " if len(result_list) > 0:\n",
 " state_holiday_indicator[state] = 1\n",
@@ -19131,7 +19131,7 @@
 "\n",
 "# Iterating through all the events that match our criteria and adding them to our results\n",
- "for event in phq.events.search(mont_parameters).iter_all():\n",
- " mont_results.append(event.to_dict())\n",
+ "for event in phq.events.search(**mont_parameters).iter_all():\n",
+ " mont_results.append(event.model_dump())\n",
 "\n",
 "# Converting the results to a DataFrame\n",
 "montana_school_holiday_df = pd.DataFrame(mont_results)\n",
diff --git a/unattended-events/part_3_feature_engineering.ipynb b/unattended-events/part_3_feature_engineering.ipynb
index 1863a83..76bb867 100644
--- a/unattended-events/part_3_feature_engineering.ipynb
+++ b/unattended-events/part_3_feature_engineering.ipynb
@@ -223,8 +223,8 @@
 "\n",
 " # Iterating through all the events that match our criteria and\n",
 " # adding them to our result_list\n",
- " for event in phq.events.search(params).iter_all():\n",
- " result_list.append(event.to_dict())\n",
+ " for event in phq.events.search(**params).iter_all():\n",
+ " result_list.append(event.model_dump())\n",
 "\n",
 " event_df = pd.DataFrame(result_list)\n",
 " # Selecting the target fields\n",
diff --git a/venues/venues-example.ipynb b/venues/venues-example.ipynb
index bf203b5..492257f 100644
--- a/venues/venues-example.ipynb
+++ b/venues/venues-example.ipynb
@@ -428,8 +428,8 @@
 "results = []\n",
 "\n",
 "# Iterating through all the events that match our criteria and adding them to our results\n",
- "for event in phq.events.search(parameters).iter_all():\n",
- " results.append(event.to_dict())\n",
+ "for event in phq.events.search(**parameters).iter_all():\n",
+ " results.append(event.model_dump())\n",
 "\n",
 "# Converting the results to a DataFrame\n",
 "event_df = pd.DataFrame(results)\n"
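
Note: the hunks above all apply the same PredictHQ Python SDK pattern, where the parameter dict is unpacked into keyword arguments for events.search() / broadcasts.search() and each result is serialised with Pydantic's model_dump() instead of to_dict(). The snippet below is a minimal sketch of that pattern in isolation, not a cell taken from these notebooks: it assumes the predicthq package is installed, ACCESS_TOKEN stands in for a real token, and the filter values are illustrative placeholders.

    # Minimal sketch of the updated search pattern (illustrative values only).
    import pandas as pd
    from predicthq import Client

    phq = Client(access_token="ACCESS_TOKEN")  # placeholder token

    parameters = {
        "category": "severe-weather",  # example filters, not taken from the diff
        "active__gte": "2024-01-01",
        "active__lte": "2024-12-31",
        "limit": 500,
    }

    # Parameters are unpacked as keyword arguments; each Pydantic result is
    # converted to a plain dict with model_dump() before building the DataFrame.
    results = [event.model_dump() for event in phq.events.search(**parameters).iter_all()]
    event_df = pd.DataFrame(results)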