Skip to content

Commit 9fb36cc

Browse files
committed
Added slicing and indexing
1 parent 9631112 commit 9fb36cc

File tree

2 files changed

+31
-30
lines changed

2 files changed

+31
-30
lines changed

finviz/save_data.py

+6-9
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,7 @@ def export_to_csv(headers, data):
2121
with open('screener_results.csv', 'w', newline='') as output_file:
2222
dict_writer = csv.DictWriter(output_file, headers)
2323
dict_writer.writeheader()
24-
25-
for n in data:
26-
dict_writer.writerows(n)
24+
dict_writer.writerows(data)
2725

2826

2927
def export_to_db(headers, data):
@@ -44,15 +42,14 @@ def export_to_db(headers, data):
4442
.format(tn=table_name, fl=field_list[:-2]))
4543

4644
inserts = ""
47-
for data in data:
45+
for row in data:
4846

49-
for level in data:
47+
insert_fields = "("
48+
for field, value in row.items():
5049

51-
insert_fields = "("
52-
for field, value in level.items():
53-
insert_fields += "\"" + value + "\", "
50+
insert_fields += "\"" + value + "\", "
5451

55-
inserts += insert_fields[:-2] + "), "
52+
inserts += insert_fields[:-2] + "), "
5653

5754
insert_lines = inserts[:-2]
5855

finviz/screener.py

+25-21
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,7 @@
77
import finviz.scraper_functions as scrape
88

99
# TODO > Add unittests
10-
# TODO > Make self.data list of dicts, not lists of lists of dicts
11-
# TODO > Implement __add__, __slice__, __iter__, __getitem__
10+
# TODO > Implement __add__ function
1211

1312

1413
class Screener(object):
@@ -30,7 +29,7 @@ def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', t
3029
:type signal: str
3130
:param table: table type eg.: 'Performance'
3231
:type table: str
33-
:var self.data: pages containing data about each row inside a dictionary
32+
:var self.data: list of dictionaries containing row data
3433
:type self.data: list
3534
"""
3635

@@ -79,7 +78,7 @@ def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', t
7978
else:
8079
self._rows = rows
8180

82-
self._headers = self.__get_table_headers()
81+
self.headers = self.__get_table_headers()
8382
self.data = self.__search_screener()
8483

8584
def __repr__(self):
@@ -101,11 +100,10 @@ def __str__(self):
101100
""" Returns a string containing readable representation of a table. """
102101

103102
table_string = ''
104-
table_list = [self._headers]
103+
table_list = [self.headers]
105104

106-
for page in self.data:
107-
for row in page:
108-
table_list.append([str(row[col] or '') for col in self._headers])
105+
for row in self.data:
106+
table_list.append([row[col] or '' for col in self.headers])
109107

110108
col_size = [max(map(len, col)) for col in zip(*table_list)]
111109
format_str = ' | '.join(["{{:<{}}}".format(i) for i in col_size])
@@ -122,21 +120,23 @@ def __len__(self):
122120
return int(self._rows)
123121

124122
def __getitem__(self, position):
125-
return self.data
123+
""" Returns a dictionary containting specific row data. """
124+
125+
return self.data[position]
126126

127127
def to_sqlite(self):
128128
""" Exports the generated table into a SQLite database, located in the user's current directory. """
129129

130-
export_to_db(self._headers, self.data)
130+
export_to_db(self.headers, self.data)
131131

132132
def to_csv(self):
133133
""" Exports the generated table into a CSV file, located in the user's current directory. """
134134

135-
export_to_csv(self._headers, self.data)
135+
export_to_csv(self.headers, self.data)
136136

137137
def get_charts(self, period='d', size='l', chart_type='c', ta='1'):
138138
"""
139-
Downloads the charts of tickers shown by the table.
139+
Downloads the charts of all tickers shown by the table.
140140
141141
:param period: table period eg. : 'd', 'w' or 'm' for daily, weekly and monthly periods
142142
:type period: str
@@ -158,15 +158,14 @@ def get_charts(self, period='d', size='l', chart_type='c', ta='1'):
158158
base_url = 'https://finviz.com/chart.ashx?' + urlencode(payload)
159159
chart_urls = []
160160

161-
for page in self.data:
162-
for row in page:
163-
chart_urls.append(base_url + '&t={}'.format(row.get('Ticker')))
161+
for row in self.data:
162+
chart_urls.append(base_url + '&t={}'.format(row.get('Ticker')))
164163

165164
async_connector = Connector(scrape.download_chart_image, chart_urls)
166165
async_connector.run_connector()
167166

168167
def __get_table_headers(self):
169-
""" Private function used to return the table headers. """
168+
""" Private function used to return table headers. """
170169

171170
first_row = self._page_content.cssselect('tr[valign="middle"]')
172171

@@ -182,7 +181,7 @@ def __get_table_headers(self):
182181
return headers
183182

184183
def __get_table_data(self, page=None, url=None):
185-
""" Private function used to return the table data from a single page. """
184+
""" Private function used to return table data from a single page. """
186185

187186
def scrape_row(line):
188187

@@ -203,21 +202,26 @@ def scrape_row(line):
203202
for row in all_rows:
204203

205204
if int(row[0].text) == self._rows:
206-
values = dict(zip(self._headers, scrape_row(row)))
205+
values = dict(zip(self.headers, scrape_row(row)))
207206
data_sets.append(values)
208207
break
209208

210209
else:
211-
values = dict(zip(self._headers, scrape_row(row)))
210+
values = dict(zip(self.headers, scrape_row(row)))
212211
data_sets.append(values)
213212

214213
return data_sets
215214

216215
def __search_screener(self):
217-
""" Private function used to return the data from the FinViz screener. """
216+
""" Private function used to return data from the FinViz screener. """
218217

219218
page_urls = scrape.get_page_urls(self._page_content, self._rows, self._url)
220219
async_connector = Connector(self.__get_table_data, page_urls)
221-
data = async_connector.run_connector()
220+
pages_data = async_connector.run_connector()
221+
222+
data = []
223+
for page in pages_data:
224+
for row in page:
225+
data.append(row)
222226

223227
return data

0 commit comments

Comments (0)