
Commit 8dcca61

feat(parsers): add grid alerts IESO parser (#8225)
* feat(parsers): add grid alerts IESO parser
* fetch only current notices from the Emergency and the non-emergency (= day 0 advisory) notice feeds
1 parent 1f47042 commit 8dcca61

2 files changed: +103 -3 lines

config/zones/CA-ON.yaml

Lines changed: 1 addition & 0 deletions
@@ -339,6 +339,7 @@ parsers:
   productionCapacity: CA_ON.fetch_production_capacity
   consumptionForecast: CA_ON.fetch_consumption_forecast
   productionPerModeForecast: CA_ON.fetch_wind_solar_forecasts
+  gridAlerts: CA_ON.fetch_grid_alerts
 region: Americas
 sources:
   ? "Mallia, E., Lewis, G. \"Life cycle greenhouse gas emissions of electricity generation\

parsers/CA_ON.py

Lines changed: 102 additions & 3 deletions
@@ -5,15 +5,21 @@
 from typing import Any
 from xml.etree import ElementTree

+from bs4 import BeautifulSoup
 from requests import Session

 from electricitymap.contrib.lib.models.event_lists import (
     ExchangeList,
+    GridAlertList,
     PriceList,
     ProductionBreakdownList,
     TotalConsumptionList,
 )
-from electricitymap.contrib.lib.models.events import EventSourceType, ProductionMix
+from electricitymap.contrib.lib.models.events import (
+    EventSourceType,
+    GridAlertType,
+    ProductionMix,
+)
 from electricitymap.contrib.lib.types import ZoneKey

 # Some notes about timestamps:
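The fetch_grid_alerts function added in the next hunk reads the two "current notices" feeds through IESO's generic table-data API. A rough sketch of the payload shape it assumes, inferred from how the code indexes TableRows and Cells (the field positions are assumptions and the sample values are made up, not real IESO data):

    # Illustrative only: assumed shape of the JSON returned by both notice endpoints.
    example_payload = [
        {
            "TableRows": [
                {
                    "Cells": [
                        {"Value": "Action"},  # Cells[0]: notice type ("Alert", "Warning" or "Action")
                        {"Value": '<a href="/path/to/notice" title="Notice subject">Notice subject</a>'},  # Cells[1]: HTML with link and title
                        {"Value": "..."},  # Cells[2]: not read by the parser
                        {"Value": "Mon, 13 Jan 2025 14:00"},  # Cells[3]: "Updated At", parsed as "%a, %d %b %Y %H:%M"
                    ]
                }
            ]
        }
    ]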
@@ -486,6 +492,97 @@ def fetch_wind_solar_forecasts(
     return production_list.to_list()


+def fetch_grid_alerts(
+    zone_key: ZoneKey = ZONE_KEY,
+    session: Session | None = None,
+    target_datetime: datetime | None = None,
+    logger: Logger = getLogger(__name__),
+) -> list[dict[str, Any]]:
+    """
+    Requests the grid alerts for IESO.
+    For current emergency advisory notices: https://www.ieso.ca/Sector-Participants/RSS-Feeds/Emergency-Advisory-Notices
+    For current day 0 advisory notices (non-emergency): https://www.ieso.ca/Sector-Participants/RSS-Feeds/Day-0-Advisory-Notices-Summary
+    For archive: https://www.ieso.ca/Sector-Participants/RSS-Feeds/Advisory-Notices-Archive
+    Url for archive table json: https://www.ieso.ca/ieso/api/table/data?source=958805C0C35B4BF3BB6A6EA429850C06&ctx=CF5ADB2D-E44E-4899-AFAF-E16C97A7A36F
+    """
+    session = session or Session()
+
+    urls = [
+        # Current Emergency Advisory Notices
+        "https://www.ieso.ca/ieso/api/table/data?source=E6324F9AEEA84B658C34E78C92E0BCEF&ctx=3435D509-B2FC-4B5D-824A-A21A8BC45822",
+        # Current Day 0 Advisory Notices
+        "https://www.ieso.ca/ieso/api/table/data?source=92EEB31A6F184529B467FD465EE8EFA2&ctx=DEA30159-2D04-48E5-9358-2B4FBAC28B83",
+    ]
+
+    grid_alert_list = GridAlertList(logger)
+
+    for url in urls:
+        # Fetch the data from the URL
+        response = session.get(url)
+        json_data = response.json()
+        entries = json_data[0]["TableRows"]  # stored in a table
+
+        # Record events in grid_alert_list
+        for notice in entries:
+            alert_type = notice["Cells"][0]["Value"]
+
+            # "Alert", "Warning" --> "Informational", "Action" --> "Action" based on https://www.ieso.ca/Sector-Participants/RSS-Feeds/Day-0-Advisory-Notices-Summary
+            if alert_type == "Alert" or alert_type == "Warning":
+                alert_type = GridAlertType.informational
+            elif alert_type == "Action":
+                alert_type = GridAlertType.action
+            else:
+                alert_type = GridAlertType.undefined
+
+            notice_content = notice["Cells"][1]["Value"]
+
+            # Take the "Updated At" (the latest update)
+            issued_at = notice["Cells"][3]["Value"]
+            issued_at_dt = datetime.strptime(issued_at, "%a, %d %b %Y %H:%M").replace(
+                tzinfo=TIMEZONE
+            )
+
+            soup = BeautifulSoup(notice_content, "html.parser")
+            link = soup.find_all("a")[0]["href"]
+            subject = soup.find_all("a")[0]["title"]
+
+            # Extract the body
+            url2 = "https://www.ieso.ca" + link
+            response2 = session.get(url2)
+            soup2 = BeautifulSoup(response2.text, "html.parser")
+
+            # Find start and end times before the table
+            start_time = soup2.find_all("p")[0].get_text(strip=True)
+            start_time_dt = datetime.strptime(
+                start_time, "Start: %a, %d %b %Y %H:%M EST"
+            ).replace(tzinfo=TIMEZONE)
+            end_time = soup2.find_all("p")[1].get_text(strip=True)
+            end_time_dt = datetime.strptime(
+                end_time, "End: %a, %d %b %Y %H:%M EST"
+            ).replace(tzinfo=TIMEZONE)
+
+            # Find all <tr> rows
+            rows = soup2.find_all("tr")
+
+            # Extract the 2nd <td> (description) from each row
+            for row in rows:
+                tds = row.find_all("td")
+                if len(tds) >= 2:
+                    description = tds[1].get_text(separator=" ", strip=True)
+
+                    grid_alert_list.append(
+                        zoneKey=zone_key,
+                        locationRegion=None,
+                        source=SOURCE,
+                        alertType=alert_type,
+                        message=subject + "\n" + description,
+                        issuedTime=issued_at_dt,
+                        startTime=start_time_dt,  # if None, it defaults to issuedTime
+                        endTime=end_time_dt,
+                    )
+    return grid_alert_list.to_list()
+
+
 if __name__ == "__main__":
     """Main method, never used by the Electricity Map backend, but handy for testing."""
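A quick check of the timestamp handling in the hunk above: the notice detail pages are expected to carry "Start:"/"End:" lines in the fixed format parsed by strptime, and the result is localized with the module's TIMEZONE. A minimal sketch (the sample string is made up, and TIMEZONE is assumed here to stand in for CA_ON.py's Ontario timezone):

    from datetime import datetime
    from zoneinfo import ZoneInfo

    TIMEZONE = ZoneInfo("America/Toronto")  # assumption: stand-in for CA_ON.py's TIMEZONE

    start_time = "Start: Mon, 13 Jan 2025 14:00 EST"
    start_time_dt = datetime.strptime(
        start_time, "Start: %a, %d %b %Y %H:%M EST"
    ).replace(tzinfo=TIMEZONE)
    print(start_time_dt)  # 2025-01-13 14:00:00-05:00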

@@ -578,5 +675,7 @@ def fetch_wind_solar_forecasts(
     # print("Requesting fetch_wind_solar_forecasts")
     # pprint(fetch_wind_solar_forecasts())

-    print("Requesting fetch_consumption_forecast")
-    pprint(fetch_consumption_forecast())
+    # print("Requesting fetch_consumption_forecast")
+    # pprint(fetch_consumption_forecast())
+
+    pprint(fetch_grid_alerts())
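With the __main__ hook switched over as above, the new parser can be exercised locally. A minimal sketch, assuming the repository root is on the Python path (the call hits the live IESO feeds):

    from pprint import pprint

    from parsers.CA_ON import fetch_grid_alerts

    # Expect an empty list when no current notices are posted.
    pprint(fetch_grid_alerts())

Running python parsers/CA_ON.py from the repository root should print the same output via the __main__ block shown above.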
