-from .save_data import export_to_db, select_from_db, export_to_csv
+from finviz.request_functions import Connector, http_request
+from .save_data import export_to_db, export_to_csv
 from urllib.parse import urlencode
 from lxml import html
 from lxml import etree
-import finviz.request_functions as send
 import finviz.scraper_functions as scrape
@@ -11,45 +11,53 @@ class Screener(object):

     def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', table='Overview'):

         if tickers is None:
-            self.tickers = []
+            self._tickers = []
         else:
-            self.tickers = tickers
+            self._tickers = tickers

         if filters is None:
-            self.filters = []
+            self._filters = []
         else:
-            self.filters = filters
-
-        self.rows = rows
-        self.order = order
-        self.signal = signal
-        self.table = table
-        self.page_content = None
-        self.url = None
-        self.headers = None
-        self.page_urls = None
-        self.data = None
+            self._filters = filters

-        self.__search_screener()
+        self._table_types = {
+            'Overview': '110',
+            'Valuation': '120',
+            'Ownership': '130',
+            'Performance': '140',
+            'Custom': '150',
+            'Financial': '160',
+            'Technical': '170'
+        }
-    def to_sqlite(self):
+        self._page_unparsed, self._url = http_request('https://finviz.com/screener.ashx', payload={
+            'v': self._table_types[table],
+            't': ','.join(self._tickers),
+            'f': ','.join(self._filters),
+            'o': order,
+            's': signal
+        })

-        export_to_db(self.headers, self.data)
+        self._page_content = html.fromstring(self._page_unparsed)
+        self._headers = self.__get_table_headers()

-    def display_db(self):
+        if rows is None:
+            self._rows = scrape.get_total_rows(self._page_content)
+        else:
+            self._rows = rows

-        select_from_db()
+        self.data = None
+        self.__search_screener()

-    def to_csv(self):
+    def to_sqlite(self):
+        export_to_db(self._headers, self.data)

-        export_to_csv(self.headers, self.data)
+    def to_csv(self):
+        export_to_csv(self._headers, self.data)
-    def get_charts(self, period='d', size='l', chart_type='c', ta=None, save_to=None):
+    def get_charts(self, period='d', size='l', chart_type='c', ta='1'):

-        if ta is True or None:  # Charts include TA by default
-            ta = '1'
-        else:
-            ta = '0'
+        """ Asynchronously downloads charts of tickers displayed by the screener. """

         payload = {
             'ty': chart_type,
@@ -65,13 +73,14 @@ def get_charts(self, period='d', size='l', chart_type='c', ta=None, save_to=None
         for row in page:
             chart_urls.append(base_url + '&t={}'.format(row.get('Ticker')))

-        async_connector = send.Connector(scrape.download_image, chart_urls)
-        async_connector.directory = save_to
+        async_connector = Connector(scrape.download_image, chart_urls)
         async_connector.run_connector()
     def __get_table_headers(self):

-        first_row = self.page_content.cssselect('tr[valign="middle"]')
+        """ Scrapes the table headers from the initial page. """
+
+        first_row = self._page_content.cssselect('tr[valign="middle"]')

         headers = []
         for table_content in first_row[0]:
@@ -82,10 +91,12 @@ def __get_table_headers(self):
             else:
                 headers.append(table_content.text)

-        self.headers = headers
+        return headers
     def __get_table_data(self, page=None, url=None):

+        """ Returns the data from each row of the table inside a dictionary. """
+
         def parse_row(line):

             row_data = []
@@ -104,46 +115,21 @@ def parse_row(line):

         for row in all_rows:

-            if int(row[0].text) is self.rows:
-                values = dict(zip(self.headers, parse_row(row)))
+            if int(row[0].text) is self._rows:
+                values = dict(zip(self._headers, parse_row(row)))
                 data_sets.append(values)
                 break

             else:
-                values = dict(zip(self.headers, parse_row(row)))
+                values = dict(zip(self._headers, parse_row(row)))
                 data_sets.append(values)

         return data_sets
     def __search_screener(self):

-        table = {
-            'Overview': '110',
-            'Valuation': '120',
-            'Ownership': '130',
-            'Performance': '140',
-            'Custom': '150',
-            'Financial': '160',
-            'Technical': '170'
-        }
-
-        payload = {
-            'v': table[self.table],
-            't': ','.join(self.tickers),
-            'f': ','.join(self.filters),
-            'o': self.order,
-            's': self.signal
-        }
-
-        self.page_content, self.url = send.http_request('https://finviz.com/screener.ashx', payload)
-        self.page_content = html.fromstring(self.page_content.text)  # Parses the page with the default lxml parser
-
-        self.__get_table_headers()
-
-        if self.rows is None:
-            self.rows = scrape.get_total_rows(self.page_content)
-
-        self.page_urls = scrape.get_page_urls(self.page_content, self.rows, self.url)
+        """ Saves data from the FinViz screener. """

-        async_connector = send.Connector(self.__get_table_data, self.page_urls)
+        page_urls = scrape.get_page_urls(self._page_content, self._rows, self._url)
+        async_connector = Connector(self.__get_table_data, page_urls)
         self.data = async_connector.run_connector()
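Usage note: after this commit, the constructor itself issues the HTTP request, scrapes the headers, and runs __search_screener(), so callers only build the object and export. A minimal caller sketch, assuming the class is importable as finviz.screener.Screener; the filter codes below are illustrative FinViz screener codes, not part of this diff:

# Hypothetical usage of the refactored Screener. The import path and the
# 'exch_nasd' / 'idx_sp500' filter codes are assumptions for illustration.
from finviz.screener import Screener

# Instantiation now performs the request and scrape up front,
# so .data is already populated once the constructor returns.
stocks = Screener(filters=['exch_nasd', 'idx_sp500'], table='Performance')

stocks.to_csv()     # calls export_to_csv(self._headers, self.data)
stocks.to_sqlite()  # calls export_to_db(self._headers, self.data)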