forked from SanPen/ESIOS
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathESIOS.py
More file actions
327 lines (258 loc) · 11.4 KB
/
ESIOS.py
File metadata and controls
327 lines (258 loc) · 11.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
"""
ESIOS: API to access the Spanish electricity market data in pandas format
Copyright 2016 Santiago Peñate Vera <santiago.penate.vera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import datetime
import json
import pickle
import urllib
import urllib.request

import numpy as np
import pandas as pd
class PandasDataBase:
    """
    This class saves the downloaded data locally and expends it incrementally upon download calls from esios
    """

    def __init__(self):
        # Stub constructor: the local-cache behaviour is not implemented yet;
        # the bare print mirrors the original placeholder.
        print()
class ESIOS(object):
    """
    Wrapper around the ESIOS (REE) REST API that downloads Spanish
    electricity market data series as pandas DataFrames.
    """

    def __init__(self, token):
        """
        Class constructor
        :param token: string given by the SIOS to you when asked to: Consultas Sios <consultasios@ree.es>
        """
        # The token is unique: You should ask for yours to: Consultas Sios <consultasios@ree.es>
        if token is None:
            print('The token is unique: You should ask for yours to: Consultas Sios <consultasios@ree.es>')

        self.token = token

        self.allowed_geo_id = [3, 8741]  # España y Peninsula

        # standard format of a date for a query
        self.dateformat = '%Y-%m-%dT%H:%M:%S'

        # dictionary of available series
        self.__offer_indicators_list = list()
        self.__analysis_indicators_list = list()
        self.__indicators_name__ = dict()
        self.available_series = dict()

        # NOTE: this performs network I/O (or reads the local pickle cache)
        print('Getting the indicators...')
        self.available_series = self.get_indicators()

    def __get_headers__(self):
        """
        Prepares the CURL headers
        :return: dictionary of HTTP headers, including the authorization token
        """
        # Prepare the arguments of the call
        headers = dict()
        headers['Accept'] = 'application/json; application/vnd.esios-api-v1+json'
        headers['Content-Type'] = 'application/json'
        headers['Host'] = 'api.esios.ree.es'
        headers['Authorization'] = 'Token token=\"' + self.token + '\"'
        headers['Cookie'] = ''
        return headers

    def __download_json__(self, url):
        """
        Perform a GET request against the ESIOS API and decode the JSON answer.
        :param url: full request URL
        :return: parsed JSON as a dictionary
        """
        # The original code had a bare 'except:' falling back to
        # response.readall(), a Python 3.3-era method; read() is the
        # supported call on every maintained Python version.
        req = urllib.request.Request(url, headers=self.__get_headers__())
        with urllib.request.urlopen(req) as response:
            json_data = response.read().decode('utf-8')
        return json.loads(json_data)

    def __fill_indicators__(self, result):
        """
        Build a {name: id} dictionary from an API answer and register every
        name in self.__indicators_name__.
        :param result: parsed JSON answer containing an 'indicators' list
        :return: (dict of name -> id, list of [name, id] pairs)
        """
        indicators = dict()
        pairs = list()
        for entry in result['indicators']:
            name = entry['name']
            id_ = entry['id']
            indicators[name] = id_
            self.__indicators_name__[id_] = name
            pairs.append([name, id_])
        return indicators, pairs

    def get_indicators(self):
        """
        Get the indicators and their name.
        The indicators are the indices assigned to the available data series.
        The result is cached in 'indicators.pickle' so the API is only
        queried the first time.
        :return: dictionary {category: {indicator name: indicator id}}
        """
        fname = 'indicators.pickle'
        import os
        if os.path.exists(fname):
            # read the existing indicators file
            with open(fname, "rb") as input_file:
                (all_indicators,
                 self.__indicators_name__,
                 self.__offer_indicators_list,
                 self.__analysis_indicators_list) = pickle.load(input_file)
        else:
            # create the indicators file querying the info to ESIOS
            all_indicators = dict()
            self.__indicators_name__ = dict()

            # indicators of the offer curves
            result = self.__download_json__('https://api.esios.ree.es/offer_indicators')
            indicators, self.__offer_indicators_list = self.__fill_indicators__(result)
            all_indicators[u'indicadores de curvas de oferta'] = indicators

            # analysis indicators
            result = self.__download_json__('https://api.esios.ree.es/indicators')
            indicators, self.__analysis_indicators_list = self.__fill_indicators__(result)
            all_indicators[u'indicadores de análisis '] = indicators

            # save the indictators
            with open(fname, "wb") as output_file:
                dta = [all_indicators, self.__indicators_name__, self.__offer_indicators_list, self.__analysis_indicators_list]
                pickle.dump(dta, output_file)

        return all_indicators

    def get_names(self, indicators_list):
        """
        Get a list of names of the given indicator indices
        :param indicators_list: iterable of indicator ids
        :return: numpy array of names (object dtype)
        """
        # np.object was removed in NumPy 1.24; the builtin 'object' is the
        # equivalent dtype
        names = [self.__indicators_name__[i] for i in indicators_list]
        return np.array(names, dtype=object)

    def save_indicators_table(self, fname='indicadores.xlsx'):
        """
        Saves the list of indicators in an excel file for easy consultation
        :param fname: path of the Excel file to write
        :return: None
        """
        data = self.__offer_indicators_list + self.__analysis_indicators_list
        df = pd.DataFrame(data=data, columns=['Nombre', 'Indicador'])
        df.to_excel(fname)

    def __get_query_json__(self, indicator, start_str, end_str):
        """
        Get a JSON series
        :param indicator: series indicator id, already converted to string
        :param start_str: start date, already formatted with self.dateformat
        :param end_str: end date, already formatted with self.dateformat
        :return: parsed JSON answer
        """
        # This is how the URL is built
        # https://www.esios.ree.es/es/analisis/1293?vis=2&start_date=21-06-2016T00%3A00&end_date=21-06-2016T23%3A50&compare_start_date=20-06-2016T00%3A00&groupby=minutes10&compare_indicators=545,544#JSON
        url = ('https://api.esios.ree.es/indicators/' + indicator
               + '?start_date=' + start_str + '&end_date=' + end_str)
        return self.__download_json__(url)

    def get_data(self, indicator, start, end):
        """
        Download a single series as a DataFrame indexed by datetime_utc.
        :param indicator: Series indicator (int or str)
        :param start: Start date (datetime or pre-formatted string)
        :param end: End date (datetime or pre-formatted string)
        :return: DataFrame, or None when the answer contains no values
        """
        # check types: Pass to string for the url
        # isinstance also accepts datetime subclasses such as pd.Timestamp
        if isinstance(start, datetime.datetime):
            start_str = start.strftime(self.dateformat)
        else:
            start_str = start

        if isinstance(end, datetime.datetime):
            end_str = end.strftime(self.dateformat)
        else:
            end_str = end

        if isinstance(indicator, int):
            indicator = str(indicator)

        # get the JSON data
        result = self.__get_query_json__(indicator, start_str, end_str)

        # transform the data
        d = result['indicator']['values']  # list of per-timestamp dictionaries

        if len(d) > 0:
            hdr = list(d[0].keys())  # headers
            # np.object was removed in NumPy 1.24; use the builtin 'object'
            data = np.empty((len(d), len(hdr)), dtype=object)
            for i in range(len(d)):  # iterate the data entries
                for j, h in enumerate(hdr):  # iterate the headers
                    data[i, j] = d[i][h]

            df = pd.DataFrame(data=data, columns=hdr)  # make the DataFrame
            df['datetime_utc'] = pd.to_datetime(df['datetime_utc'])  # convert to datetime
            df = df.set_index('datetime_utc')  # Set the index column
            # 'del df.index.name' is unsupported in modern pandas; assigning
            # None is the documented way to clear the index name
            df.index.name = None
            return df
        else:
            return None

    def get_multiple_series(self, indicators, start, end):
        """
        Get multiple series data
        :param indicators: List of indicator ids
        :param start: Start date
        :param end: End date
        :return: (list of DataFrames, list of series names)
        """
        df_list = list()
        names = list()
        for indicator in indicators:
            name = self.__indicators_name__[indicator]
            names.append(name)
            print('Parsing ' + name)

            # download the series in a DataFrame
            df_new = self.get_data(indicator, start, end)

            if df_new is not None:
                # the default name for the series is 'value' we must change it
                df_new.rename(columns={'value': name}, inplace=True)
                # save: the with-statement closes the handle even on error
                with open(str(indicator) + ".pkl", "wb") as file_handler:
                    pickle.dump(df_new, file_handler)

            df_list.append(df_new)

        return df_list, names

    def merge_series(self, df_list, names, pandas_sampling_interval='1H'):
        """
        Merge a list of separately downloaded DataFrames into a single one
        :param df_list: List of ESIOS downloaded DataFrames
        :param names: list with the names of the main series of each DataFrame
        :param pandas_sampling_interval: Pandas interval for resampling
               (kept for backwards compatibility; resampling is currently
               disabled in the body)
        :return: Merged DataFrame
        """
        merged_df = None
        print('merging')
        for df, name in zip(df_list, names):
            if df is not None:
                if name == 'Precio mercado SPOT Diario':
                    df = df[df.geo_id == 3]  # pick spain only

                dfp = df[[name]].astype(float)  # .resample(pandas_sampling_interval).pad()

                if merged_df is None:
                    merged_df = dfp
                else:
                    merged_df = merged_df.join(dfp)
            else:
                print(name, ': The dataFrame is None')

        return merged_df