|
5 | 5 | from typing import Any |
6 | 6 | from xml.etree import ElementTree |
7 | 7 |
|
| 8 | +from bs4 import BeautifulSoup |
8 | 9 | from requests import Session |
9 | 10 |
|
10 | 11 | from electricitymap.contrib.lib.models.event_lists import ( |
11 | 12 | ExchangeList, |
| 13 | + GridAlertList, |
12 | 14 | PriceList, |
13 | 15 | ProductionBreakdownList, |
14 | 16 | TotalConsumptionList, |
15 | 17 | ) |
16 | | -from electricitymap.contrib.lib.models.events import EventSourceType, ProductionMix |
| 18 | +from electricitymap.contrib.lib.models.events import ( |
| 19 | + EventSourceType, |
| 20 | + GridAlertType, |
| 21 | + ProductionMix, |
| 22 | +) |
17 | 23 | from electricitymap.contrib.lib.types import ZoneKey |
18 | 24 |
|
19 | 25 | # Some notes about timestamps: |
@@ -486,6 +492,97 @@ def fetch_wind_solar_forecasts( |
486 | 492 | return production_list.to_list() |
487 | 493 |
|
488 | 494 |
|
def fetch_grid_alerts(
    zone_key: ZoneKey = ZONE_KEY,
    session: Session | None = None,
    target_datetime: datetime | None = None,
    logger: Logger = getLogger(__name__),
) -> list[dict[str, Any]]:
    """
    Requests the grid alerts for IESO.

    For current emergency advisory notices: https://www.ieso.ca/Sector-Participants/RSS-Feeds/Emergency-Advisory-Notices
    For current day 0 advisory notices (non-emergency): https://www.ieso.ca/Sector-Participants/RSS-Feeds/Day-0-Advisory-Notices-Summary
    For archive: in https://www.ieso.ca/Sector-Participants/RSS-Feeds/Advisory-Notices-Archive
    Url for archive table json: https://www.ieso.ca/ieso/api/table/data?source=958805C0C35B4BF3BB6A6EA429850C06&ctx=CF5ADB2D-E44E-4899-AFAF-E16C97A7A36F
    """
    session = session or Session()

    urls = [
        # Current Emergency Advisory Notices
        "https://www.ieso.ca/ieso/api/table/data?source=E6324F9AEEA84B658C34E78C92E0BCEF&ctx=3435D509-B2FC-4B5D-824A-A21A8BC45822",
        # Current Day 0 Advisory Notices
        "https://www.ieso.ca/ieso/api/table/data?source=92EEB31A6F184529B467FD465EE8EFA2&ctx=DEA30159-2D04-48E5-9358-2B4FBAC28B83",
    ]

    grid_alert_list = GridAlertList(logger)

    for url in urls:
        # Fetch the data from the URL; fail loudly on HTTP errors instead of
        # surfacing a confusing JSON decode error downstream.
        response = session.get(url)
        response.raise_for_status()
        json_data = response.json()
        entries = json_data[0]["TableRows"]  # notices are stored in a table

        # Record events in grid_alert_list
        for notice in entries:
            raw_type = notice["Cells"][0]["Value"]

            # "Alert", "Warning" --> "Informational", "Action" --> "Action" based on
            # https://www.ieso.ca/Sector-Participants/RSS-Feeds/Day-0-Advisory-Notices-Summary
            if raw_type in ("Alert", "Warning"):
                alert_type = GridAlertType.informational
            elif raw_type == "Action":
                alert_type = GridAlertType.action
            else:
                alert_type = GridAlertType.undefined

            notice_content = notice["Cells"][1]["Value"]

            # Take the "Updated At" (the latest update).
            # NOTE(review): assumes feed timestamps are in local Ontario time — confirm.
            issued_at = notice["Cells"][3]["Value"]
            issued_at_dt = datetime.strptime(issued_at, "%a, %d %b %Y %H:%M").replace(
                tzinfo=TIMEZONE
            )

            soup = BeautifulSoup(notice_content, "html.parser")
            anchors = soup.find_all("a")
            if not anchors:
                # Fix: the original indexed [0] unconditionally and crashed with
                # IndexError on a notice that carries no detail link.
                logger.warning(
                    "%s: skipping grid alert notice without a detail link", zone_key
                )
                continue
            link = anchors[0]["href"]
            subject = anchors[0]["title"]

            # Extract the body from the notice's detail page.
            detail_response = session.get("https://www.ieso.ca" + link)
            detail_response.raise_for_status()
            detail_soup = BeautifulSoup(detail_response.text, "html.parser")

            # Find start and end times in the first two <p> tags before the table.
            # NOTE(review): the literal "EST" in the format string will fail to parse
            # if the site emits "EDT" during daylight saving time — confirm.
            start_time = detail_soup.find_all("p")[0].get_text(strip=True)
            start_time_dt = datetime.strptime(
                start_time, "Start: %a, %d %b %Y %H:%M EST"
            ).replace(tzinfo=TIMEZONE)
            end_time = detail_soup.find_all("p")[1].get_text(strip=True)
            end_time_dt = datetime.strptime(
                end_time, "End: %a, %d %b %Y %H:%M EST"
            ).replace(tzinfo=TIMEZONE)

            # Extract the 2nd <td> (description) from each row.
            # Fix: the original overwrote `description` on every row (keeping only
            # the last one) and raised NameError when no row had two cells; now all
            # row descriptions are collected and joined.
            descriptions = []
            for row in detail_soup.find_all("tr"):
                tds = row.find_all("td")
                if len(tds) >= 2:
                    descriptions.append(tds[1].get_text(separator=" ", strip=True))
            description = "\n".join(descriptions)

            grid_alert_list.append(
                zoneKey=zone_key,
                locationRegion=None,
                source=SOURCE,
                alertType=alert_type,
                message=subject + "\n" + description,
                issuedTime=issued_at_dt,
                startTime=start_time_dt,  # if None, it defaults to issuedTime
                endTime=end_time_dt,
            )
    return grid_alert_list.to_list()
| 584 | + |
| 585 | + |
489 | 586 | if __name__ == "__main__": |
490 | 587 | """Main method, never used by the Electricity Map backend, but handy for testing.""" |
491 | 588 |
|
@@ -578,5 +675,7 @@ def fetch_wind_solar_forecasts( |
578 | 675 | # print("Requesting fetch_wind_solar_forecasts") |
579 | 676 | # pprint(fetch_wind_solar_forecasts()) |
580 | 677 |
|
581 | | - print("Requesting fetch_consumption_forecast") |
582 | | - pprint(fetch_consumption_forecast()) |
| 678 | + # print("Requesting fetch_consumption_forecast") |
| 679 | + # pprint(fetch_consumption_forecast()) |
| 680 | + |
| 681 | + pprint(fetch_grid_alerts()) |
0 commit comments