Skip to content

Commit 292abaf

Browse files
committed
PyPI release and support for downloading charts
1 parent 8955a64 commit 292abaf

8 files changed

+80
-48
lines changed

.idea/sqldialects.xml

-6
This file was deleted.

finviz/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
from finviz.screener import Screener
1+
from finviz.screener import Screener

finviz/request_functions.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -30,16 +30,16 @@ async def __http_request__async(self, url, session):
3030
async with session.get(url) as response:
3131
page_html = await response.read()
3232

33-
return self.scrape_function(page_html)
33+
return self.scrape_function(page_html, url)
3434

3535
async def __async_scraper(self):
3636

37-
tasks = []
37+
async_tasks = []
3838
async with aiohttp.ClientSession() as session:
3939
for n in self.tasks:
40-
tasks.append(self.__http_request__async(n, session))
40+
async_tasks.append(self.__http_request__async(n, session))
4141

42-
self.data = await asyncio.gather(*tasks)
42+
self.data = await asyncio.gather(*async_tasks)
4343

4444
def run_connector(self):
4545

finviz/save_data.py

+7-10
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,29 @@
11
import csv
2-
import os
32
import sqlite3
43
import re
54

5+
66
def create_connection():
7-
7+
88
sqlite_file = "../screener.sqlite"
9-
9+
1010
try:
1111
conn = sqlite3.connect(sqlite_file)
1212
return conn
1313
except sqlite3.Error as error:
1414
raise ("An error has occurred while connecting to the database: ", error.args[0])
1515

16-
def export_to_csv(headers, data, directory):
1716

18-
with open(directory + '/screener_results.csv', 'w', newline='') as output_file:
17+
def export_to_csv(headers, data):
18+
19+
with open('/screener_results.csv', 'w', newline='') as output_file:
1920
dict_writer = csv.DictWriter(output_file, headers)
2021
dict_writer.writeheader()
2122

2223
for n in data:
2324
dict_writer.writerows(n)
2425

26+
2527
def export_to_db(headers, data):
2628

2729
field_list = ""
@@ -71,8 +73,3 @@ def select_from_db():
7173

7274
for row in rows:
7375
print(row)
74-
75-
rows = c.fetchall()
76-
77-
for row in rows:
78-
print(row)

finviz/scraper_functions.py

+12
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from lxml import etree
2+
import os
23

34

45
def get_total_rows(page_content):
@@ -27,3 +28,14 @@ def get_page_urls(page_content, rows, url):
2728
urls.append(url + '&r={}'.format(str(sequence)))
2829

2930
return urls
31+
32+
33+
def download_image(page_content, url):
34+
35+
file_name = url.split('t=')[1] + '.jpg'
36+
37+
if not os.path.exists('charts'):
38+
os.mkdir('charts')
39+
40+
with open('charts/' + file_name, 'wb') as handle:
41+
handle.write(page_content)

finviz/screener.py

+27-27
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
from .save_data import export_to_db, select_from_db, export_to_csv
2+
from urllib.parse import urlencode
23
from lxml import html
34
from lxml import etree
45
import finviz.request_functions as send
56
import finviz.scraper_functions as scrape
6-
from os import getcwd
7+
78

89
class Screener(object):
910

10-
def __init__(self, tickers=None, filters=None, order='', rows=None, signal='', table='Overview'):
11+
def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', table='Overview'):
1112

1213
if tickers is None:
1314
self.tickers = []
@@ -31,43 +32,42 @@ def __init__(self, tickers=None, filters=None, order='', rows=None, signal='', t
3132

3233
self.__search_screener()
3334

34-
def to_csv(self, directory=None):
35-
36-
if directory is None:
37-
directory = getcwd()
38-
39-
export_to_csv(self.headers, self.data, directory)
40-
4135
def to_sqlite(self):
36+
4237
export_to_db(self.headers, self.data)
4338

4439
def display_db(self):
45-
select_from_db()
4640

47-
def __get_total_rows(self):
41+
select_from_db()
4842

49-
total_element = self.page_content.cssselect('td[width="140"]')
50-
self.rows = int(etree.tostring(total_element[0]).decode("utf-8").split('</b>')[1].split(' ')[0])
43+
def to_csv(self):
5144

52-
def __get_page_urls(self):
45+
export_to_csv(self.headers, self.data)
5346

54-
try:
55-
total_pages = int([i.text.split('/')[1] for i in self.page_content.cssselect('option[value="1"]')][0])
56-
except IndexError: # No results found
57-
return None
47+
def get_charts(self, period='d', size='l', chart_type='c', ta=None, save_to=None):
5848

59-
urls = []
49+
if ta is True or None: # Charts include TA by default
50+
ta = '1'
51+
else:
52+
ta = '0'
6053

61-
for page_number in range(1, total_pages + 1):
54+
payload = {
55+
'ty': chart_type,
56+
'ta': ta,
57+
'p': period,
58+
's': size
59+
}
6260

63-
sequence = 1 + (page_number - 1) * 20
61+
base_url = 'https://finviz.com/chart.ashx?' + urlencode(payload)
62+
chart_urls = []
6463

65-
if sequence - 20 <= self.rows < sequence:
66-
break
67-
else:
68-
urls.append(self.url + '&r={}'.format(str(sequence)))
64+
for page in self.data:
65+
for row in page:
66+
chart_urls.append(base_url + '&t={}'.format(row.get('Ticker')))
6967

70-
self.page_urls = urls
68+
async_connector = send.Connector(scrape.download_image, chart_urls)
69+
async_connector.directory = save_to
70+
async_connector.run_connector()
7171

7272
def __get_table_headers(self):
7373

@@ -84,7 +84,7 @@ def __get_table_headers(self):
8484

8585
self.headers = headers
8686

87-
def __get_table_data(self, page=None):
87+
def __get_table_data(self, page=None, url=None):
8888

8989
def parse_row(line):
9090

setup.cfg

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Inside of setup.cfg
2+
[metadata]
3+
description-file = README.md

setup.py

+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
from distutils.core import setup
2+
setup(
3+
name = 'finviz',
4+
packages = ['finviz'],
5+
version = '1.1',
6+
license='MIT',
7+
description = 'Unofficial API for finviz.com',
8+
author = 'Mario Stoev',
9+
author_email = '[email protected]', # Type in your E-Mail
10+
url = 'https://github.com/mariostoev/finviz', # Provide either the link to your github or to your website
11+
download_url = 'https://github.com/user/reponame/archive/v_01.tar.gz', # I explain this later on
12+
keywords = ['finviz', 'api', 'screener', 'finviz api', 'charts', 'scraper'], # Keywords that define your package best
13+
install_requires=[ # I get to this in a second
14+
'lxml',
15+
'requests',
16+
'aiohttp',
17+
'urllib3'
18+
],
19+
classifiers=[
20+
'Development Status :: 5 - Production/Stable',
21+
'Intended Audience :: Developers',
22+
'Topic :: Software Development :: Build Tools',
23+
'License :: OSI Approved :: MIT License',
24+
'Programming Language :: Python :: 3.6',
25+
],
26+
)

0 commit comments

Comments
 (0)