Skip to content

Commit 9631112

Browse files
committed
Added documentation for classes and methods
1 parent 4c7a2ac commit 9631112

File tree

3 files changed

+38
-10
lines changed

3 files changed

+38
-10
lines changed

finviz/error_handling.py

+14
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
class NoResults(Exception):
    """Raised when a screener query comes back with zero results.

    The offending query string is kept on the instance so callers can
    inspect or re-display it when handling the error.
    """

    def __init__(self, query):
        # Preserve the query for programmatic access by handlers.
        self.query = query
        message = "No results found for query: {}".format(query)
        super().__init__(message)
7+
8+
9+
class InvalidTableType(Exception):
    """Raised when an unrecognized table type is requested.

    The invalid value is stored on the instance so handlers can report
    exactly what the caller passed in.
    """

    def __init__(self, arg):
        # Keep the bad value available to exception handlers.
        self.arg = arg
        message = "Invalid table type called: {}".format(arg)
        super().__init__(message)

finviz/scraper_functions.py

+5-1
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,12 @@ def get_total_rows(page_content):
66
""" Returns the total number of rows(results). """
77

88
total_element = page_content.cssselect('td[width="140"]')
9+
total_number = etree.tostring(total_element[0]).decode('utf-8').split('</b>')[1].split()[0]
910

10-
return int(etree.tostring(total_element[0]).decode("utf-8").split('</b>')[1].split(' ')[0])
11+
try:
12+
return int(total_number)
13+
except ValueError:
14+
return 0
1115

1216

1317
def get_page_urls(page_content, rows, url):

finviz/screener.py

+19-9
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
from finviz.request_functions import Connector, http_request
2+
from finviz.error_handling import NoResults, InvalidTableType
23
from .save_data import export_to_db, export_to_csv
34
from urllib.parse import urlencode
45
from lxml import html
56
from lxml import etree
67
import finviz.scraper_functions as scrape
78

8-
# TODO > Add __add__ and __slice__(?) methods to the Screener class
9-
# TODO > Create Error class
109
# TODO > Add unittests
11-
# TODO > Improve performance
10+
# TODO > Make self.data list of dicts, not lists of lists of dicts
11+
# TODO > Implement __add__, __slice__, __iter__, __getitem__
1212

1313

1414
class Screener(object):
@@ -56,23 +56,30 @@ def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', t
5656

5757
self._order = order
5858
self._signal = signal
59-
self._table = self._table_types[table]
59+
60+
try:
61+
self._table = self._table_types[table]
62+
except KeyError:
63+
raise InvalidTableType(table)
64+
6065
self._page_unparsed, self._url = http_request('https://finviz.com/screener.ashx', payload={
6166
'v': self._table,
6267
't': ','.join(self._tickers),
6368
'f': ','.join(self._filters),
6469
'o': self._order,
6570
's': self._signal
6671
})
67-
6872
self._page_content = html.fromstring(self._page_unparsed)
69-
self._headers = self.__get_table_headers()
73+
self._total_rows = scrape.get_total_rows(self._page_content)
7074

71-
if rows is None:
72-
self._rows = scrape.get_total_rows(self._page_content)
75+
if self._total_rows == 0:
76+
raise NoResults(self._url.split('?')[1])
77+
elif rows is None or rows > self._total_rows:
78+
self._rows = self._total_rows
7379
else:
7480
self._rows = rows
7581

82+
self._headers = self.__get_table_headers()
7683
self.data = self.__search_screener()
7784

7885
def __repr__(self):
@@ -114,6 +121,9 @@ def __len__(self):
114121

115122
return int(self._rows)
116123

124+
def __getitem__(self, position):
    """Return the row(s) of screener data at *position*.

    Supports both integer indexing and slicing, delegating to the
    underlying ``self.data`` list.

    :param position: integer index or slice into the result set
    :raises IndexError: if an integer *position* is out of range
    """
    # Bug fix: the original returned the entire data set and ignored
    # `position`, so `screener[0]` and `screener[1]` were identical.
    # Delegating to the list gives the expected sequence semantics.
    return self.data[position]
126+
117127
def to_sqlite(self):
118128
""" Exports the generated table into a SQLite database, located in the user's current directory. """
119129

@@ -192,7 +202,7 @@ def scrape_row(line):
192202

193203
for row in all_rows:
194204

195-
if int(row[0].text) is self._rows:
205+
if int(row[0].text) == self._rows:
196206
values = dict(zip(self._headers, scrape_row(row)))
197207
data_sets.append(values)
198208
break

0 commit comments

Comments
 (0)