Skip to content

Commit fd7d3ac

Browse files
committed
Added resource for historic-generation-mix
1 parent 109d381 commit fd7d3ac

File tree

6 files changed

+80
-13
lines changed

6 files changed

+80
-13
lines changed

README.md

+4
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,13 @@ r: bytes = client.query(date_col=date_col, start_date=start_date, end_date=end_d
2525

2626
## Tested reports
2727

28+
### Queryable via NG's API
2829
* `historic-day-ahead-demand-forecast`
2930
* `day-ahead-demand-forecast`
3031
* `historic-2day-ahead-demand-forecast`
3132
* `2day-ahead-demand-forecast`
3233
* `historic-day-ahead-wind-forecast`
3334
* `day-ahead-wind-forecast`
35+
36+
### Download of files
37+
* `historic-generation-mix`

pyngeso/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
# Public package surface: the NgEso client and the package version.
from .pyngeso import NgEso

__version__ = "0.2.2"

pyngeso/pyngeso.py

+44-10
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,48 @@
11
import logging
2-
from typing import Optional, List
2+
from typing import Optional, List, Literal
33
from datetime import datetime, timedelta
44
import json
55
import re
66

77
import requests
88

99
from .configure_logging import setup_logger
10-
from .resources import resource_ids
10+
from .resources import api_resource_ids, file_resource_ids
1111
from .exceptions import UnsuccessfulRequest
1212

1313
logger = setup_logger(logging.getLogger("PyNgEso"))
1414

1515

1616
class NgEso:
17+
"""
18+
A class for fetching data from the National Grid ESO data portal.
19+
20+
Args:
21+
resource_id (str): id for the resource when using the ESO API functionality
22+
resource (str): name of the resource when using the ESO API functionality
23+
Returns:
24+
25+
"""
1726
def __init__(
    self,
    resource: str,
    backend: Literal['api', 'file'] = "api"
):
    # Record what was requested and which fetch mechanism to use.
    self.resource = resource
    self.backend = backend
    # Resolve the portal identifiers immediately so that an unknown
    # resource name fails at construction time, not at fetch time.
    self.resource_id, self.dataset_id, self.filename = self.set_resource_info()
35+
36+
def set_resource_info(self) -> "tuple[str, Optional[str], Optional[str]]":
    """Resolve the portal identifiers for `self.resource`.

    Returns:
        (resource_id, dataset_id, filename): `dataset_id` and `filename`
        are None for the "api" backend, which only needs a resource id.

    Raises:
        KeyError: if `self.resource` is not a known resource for the
            selected backend (previously this surfaced as an opaque
            AttributeError from chained `.get()` calls on None).
    """
    dataset_id = None
    filename = None
    if self.backend == "api":
        # API resources are keyed by a single id.
        resource_id = api_resource_ids[self.resource]["id"]
    else:
        # File downloads need dataset id, resource id and filename
        # to build the download url; look the entry up once.
        info = file_resource_ids[self.resource]
        dataset_id = info["dataset_id"]
        resource_id = info["resource_id"]
        filename = info["filename"]
    return resource_id, dataset_id, filename
2546

2647
def query(
2748
self,
@@ -120,19 +141,22 @@ def validate_date_range(start_date: str, end_date: str) -> None:
120141
end_date >= start_date
121142
), "end_date should be the same of greater than start_date"
122143

def _check_for_errors(self, r: requests.Response) -> None:
    """Validate a query response: HTTP transport first, then the JSON body."""
    # Raise on transport-level failure (non-200 status or empty body).
    self._check_request_errors(r)

    # The portal reports query-level failure inside the JSON payload.
    body: dict = json.loads(r.content)
    success = body.get("success")
    if not success:
        logger.error(f"Request failed: {body.get('error')}")
135153

154+
@staticmethod
def _check_request_errors(r: requests.Response) -> None:
    """Raise UnsuccessfulRequest unless `r` is a 200 response with a body."""
    status_code = r.status_code
    bad_response = status_code != 200 or r.content is None
    if bad_response:
        raise UnsuccessfulRequest(f"status_code={status_code}:{r.content}")
159+
136160
@staticmethod
137161
def _missing_data(r: requests.Response) -> None:
138162
"""
@@ -144,3 +168,13 @@ def _missing_data(r: requests.Response) -> None:
144168
query = rb.get("query")
145169
if not records:
146170
logger.warning(f"{query}: No data found")
171+
172+
def download_file(self, timeout: float = 60.0) -> bytes:
    """Download the resource file from the ESO data portal.

    Args:
        timeout (float): seconds to wait for the server before aborting.
            Added because `requests.get` without a timeout can hang
            indefinitely; default keeps the call backward-compatible.

    Returns:
        bytes: raw content of the downloaded file (e.g. a csv).

    Raises:
        UnsuccessfulRequest: if the response is not a 200 with a body.
        requests.exceptions.Timeout: if the server does not respond
            within `timeout` seconds.
    """
    url = (
        f"https://data.nationalgrideso.com/backend/dataset/{self.dataset_id}/"
        f"resource/{self.resource_id}/download/{self.filename}"
    )
    r = requests.get(url, timeout=timeout)
    self._check_request_errors(r)

    return r.content

pyngeso/resources.py

+10-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from typing import Dict
22

3-
resource_ids: Dict[str, Dict[str, str]] = {
3+
api_resource_ids: Dict[str, Dict[str, str]] = {
44
"historic-day-ahead-demand-forecast": {
55
"id": "9847e7bb-986e-49be-8138-717b25933fbb",
66
"url": "https://data.nationalgrideso.com/demand/1-day-ahead-demand-forecast/r/historic_day_ahead_demand_forecasts"
@@ -26,3 +26,12 @@
2626
"url": "https://data.nationalgrideso.com/demand/day-ahead-wind-forecast/r/day_ahead_wind_forecast"
2727
}
2828
}
29+
30+
# Resources that are only available as direct file downloads (no API
# endpoint). Each entry maps a resource name to the identifiers needed
# to build the portal's download url.
file_resource_ids: Dict[str, Dict[str, str]] = {
    "historic-generation-mix": {
        "dataset_id": "88313ae5-94e4-4ddc-a790-593554d8c6b9",
        "resource_id": "f93d1835-75bc-43e5-84ad-12472b180a98",
        "filename": "df_fuel_ckan.csv",
        "url": "https://data.nationalgrideso.com/carbon-intensity1/historic-generation-mix",
    },
}

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "pyngeso"
3-
version = "0.2.1"
3+
version = "0.2.2"
44
description = "Simple python wrapper for the National Grid ESO Portal"
55
authors = ["atsangarides <[email protected]>"]
66
license = "MIT"

tests/test_pyngeso.py

+20
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import logging
22
import json
3+
import csv
34

45
import pytest
56

@@ -70,3 +71,22 @@ def test_historic_day_ahead_wind_forecast():
7071
assert len(records) > 0
7172
unique_target_dates = set([record.get(date_col) for record in records])
7273
assert len(unique_target_dates) == 1
74+
75+
76+
@pytest.mark.vcr
def test_historic_generation_mix():
    """Download the historic generation mix file and sanity-check the csv."""
    client = NgEso("historic-generation-mix", "file")
    r = client.download_file()

    # the file backend should hand back raw bytes
    assert isinstance(r, bytes)

    # the bytes should decode and parse as a well-formed csv
    decoded_content = r.decode('utf-8')
    rows = csv.reader(decoded_content.splitlines(), delimiter=',')
    headers_row = next(rows)
    first_row = next(rows)

    assert "DATETIME" in headers_row
    assert "2009-01-01 00:00:00" in first_row
    assert len(headers_row) == len(first_row)
92+

0 commit comments

Comments
 (0)