@@ -1,14 +1,14 @@
 from finviz.request_functions import Connector, http_request
+from finviz.error_handling import NoResults, InvalidTableType
 from .save_data import export_to_db, export_to_csv
 from urllib.parse import urlencode
 from lxml import html
 from lxml import etree
 import finviz.scraper_functions as scrape

-# TODO > Add __add__ and __slice__(?) methods to the Screener class
-# TODO > Create Error class
 # TODO > Add unittests
-# TODO > Improve performance
+# TODO > Make self.data list of dicts, not lists of lists of dicts
+# TODO > Implement __add__, __slice__, __iter__, __getitem__


 class Screener(object):
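
The new import pulls NoResults and InvalidTableType from a finviz.error_handling module that is not shown in this diff. A minimal sketch of what that module could contain, inferred from how the constructor in the next hunk raises the two exceptions; only the class names are confirmed by the import, and the message wording is an assumption:

# Hypothetical finviz/error_handling.py -- class names taken from the import
# above, constructor arguments matching how __init__ raises them, and message
# text assumed for illustration.

class NoResults(Exception):
    """ Raised when a screener query matches zero rows. """

    def __init__(self, query):
        super(NoResults, self).__init__('No results found for query: {}'.format(query))


class InvalidTableType(Exception):
    """ Raised when the requested table is not a key of Screener._table_types. """

    def __init__(self, table):
        super(InvalidTableType, self).__init__('Invalid table type: {}'.format(table))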

@@ -56,23 +56,30 @@ def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', t

         self._order = order
         self._signal = signal
-        self._table = self._table_types[table]
+
+        try:
+            self._table = self._table_types[table]
+        except KeyError:
+            raise InvalidTableType(table)
+
         self._page_unparsed, self._url = http_request('https://finviz.com/screener.ashx', payload={
             'v': self._table,
             't': ','.join(self._tickers),
             'f': ','.join(self._filters),
             'o': self._order,
             's': self._signal
         })
-
         self._page_content = html.fromstring(self._page_unparsed)
-        self._headers = self.__get_table_headers()
+        self._total_rows = scrape.get_total_rows(self._page_content)

-        if rows is None:
-            self._rows = scrape.get_total_rows(self._page_content)
+        if self._total_rows == 0:
+            raise NoResults(self._url.split('?')[1])
+        elif rows is None or rows > self._total_rows:
+            self._rows = self._total_rows
         else:
             self._rows = rows

+        self._headers = self.__get_table_headers()
         self.data = self.__search_screener()

     def __repr__(self):
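
Taken together, the constructor changes above validate the table type up front, fail fast on an empty result set, and clamp the requested row count to the total Finviz reports. A usage sketch, assuming the module path finviz.screener and treating the filter string 'exch_nasd' and table name 'Performance' as illustrative values not confirmed by this diff:

from finviz.screener import Screener
from finviz.error_handling import NoResults, InvalidTableType

try:
    # A rows argument larger than the matched total is now clamped to
    # self._total_rows instead of driving the scraper past the last page.
    stocks = Screener(filters=['exch_nasd'], rows=10000, table='Performance')
except NoResults as err:
    print('Empty result set: {}'.format(err))      # query matched zero rows
except InvalidTableType as err:
    print('Unknown table type: {}'.format(err))    # key missing from _table_types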

@@ -114,6 +121,9 @@ def __len__(self):

         return int(self._rows)

+    def __getitem__(self, position):
+        return self.data[position]
+
     def to_sqlite(self):
         """ Exports the generated table into a SQLite database, located in the user's current directory. """

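
The new __getitem__ makes a Screener instance subscriptable by delegating to self.data. Note the TODO at the top of the file: self.data is currently a list of lists of dicts, so until it is flattened an index most likely selects a page-level list rather than a single row. A sketch under that assumption, reusing the stocks instance from above, with 'Ticker' as an assumed header name:

page = stocks[0]                 # equivalent to stocks.data[0]
first_row = page[0]              # a dict produced by zip(self._headers, ...)
print(first_row.get('Ticker'))   # header names come from __get_table_headers()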

@@ -192,7 +202,7 @@ def scrape_row(line):

         for row in all_rows:

-            if int(row[0].text) is self._rows:
+            if int(row[0].text) == self._rows:
                 values = dict(zip(self._headers, scrape_row(row)))
                 data_sets.append(values)
                 break
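
The switch from is to == in the row-number check fixes a genuine bug rather than a style nit: is tests object identity, and CPython caches only small integers (-5 through 256), so the old comparison could evaluate to False even when both sides held the same value once more than 256 rows were requested. A quick demonstration; the identity result is a CPython implementation detail, not a language guarantee:

a = int('300')    # parsed at runtime, so CPython allocates a fresh int object
b = 300
print(a == b)     # True  -- value equality, which the loop needs
print(a is b)     # False -- identity; only accidentally True for small ints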