Skip to content

Commit 07c5c93

Browse files
authored
Merge pull request #8 from alisaraa/aa-implement-a-sqlite-db
Added SQLite support
2 parents 18fed91 + f115048 commit 07c5c93

File tree

2 files changed

+102
-7
lines changed

2 files changed

+102
-7
lines changed

finviz/save_data.py

+67
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,17 @@
11
import csv
2+
import os
3+
import sqlite3
4+
import re
25

6+
def create_connection(sqlite_file="../screener.sqlite"):
    """Open a connection to the screener SQLite database.

    Args:
        sqlite_file: Path to the database file. Defaults to the previously
            hard-coded location so existing callers are unaffected.

    Returns:
        sqlite3.Connection: an open connection to the database.

    Raises:
        RuntimeError: if the connection cannot be established, chained to
            the underlying sqlite3.Error.
    """
    try:
        return sqlite3.connect(sqlite_file)
    except sqlite3.Error as error:
        # The original code did `raise ("message", error.args[0])`, which
        # raises a TypeError in Python 3 (only BaseException subclasses may
        # be raised). Raise a real exception and chain the sqlite3 error.
        raise RuntimeError(
            "An error has occurred while connecting to the database: "
            + str(error)
        ) from error
315

416
def export_to_csv(headers, data, directory):
517

@@ -9,3 +21,58 @@ def export_to_csv(headers, data, directory):
921

1022
for n in data:
1123
dict_writer.writerows(n)
24+
25+
def export_to_db(headers, data):
    """Store screener rows in the ``screener_results`` table of the local DB.

    Creates the table (all TEXT columns) if it does not exist, then inserts
    every row. Insert failures are reported and swallowed (best effort), as
    in the original implementation.

    Args:
        headers: iterable of column names; punctuation and spaces are
            stripped so each becomes a valid SQLite identifier.
        data: list of pages, each page a list of dicts mapping a header to
            its cell value (values are inserted in dict order).
    """
    table_name = "screener_results"  # name of the table to be created

    # Sanitize the header names into SQLite column identifiers.
    columns = [re.sub(r'[^\w\s]', '', field).replace(" ", "") for field in headers]

    conn = create_connection()
    try:
        c = conn.cursor()
        c.execute("CREATE TABLE IF NOT EXISTS {tn} ({fl})".format(
            tn=table_name,
            fl=", ".join(col + " TEXT" for col in columns)))

        # Flatten the pages of row-dicts into value tuples. The original
        # built one giant INSERT by string-concatenating quoted values,
        # which broke on values containing '"' and was open to SQL
        # injection; use '?' placeholders with executemany instead.
        rows = [tuple(row.values()) for page in data for row in page]
        placeholders = ", ".join("?" for _ in columns)
        try:
            c.executemany(
                "INSERT INTO {tn} VALUES ({ph})".format(tn=table_name,
                                                        ph=placeholders),
                rows)
        except sqlite3.Error as error:
            # Preserve the original best-effort behaviour: report and continue.
            print("An error has occurred", error.args[0])

        conn.commit()
    finally:
        # The original leaked the connection if an unexpected error escaped.
        conn.close()
62+
63+
64+
def select_from_db():
    """Print every row currently stored in the ``screener_results`` table."""
    conn = create_connection()
    try:
        c = conn.cursor()
        c.execute("SELECT * FROM screener_results")
        # The original fetched and printed twice; the second fetchall() on
        # an exhausted cursor returned an empty list, so printing once
        # preserves the observable output exactly.
        for row in c.fetchall():
            print(row)
    finally:
        # The original never closed the connection; release it here.
        conn.close()

finviz/screener.py

+35-7
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
1+
from .save_data import export_to_db, select_from_db, export_to_csv
12
from lxml import html
23
from lxml import etree
34
import finviz.request_functions as send
45
import finviz.scraper_functions as scrape
5-
6+
from os import getcwd
67

78
class Screener(object):
89

9-
def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', table='Overview'):
10+
def __init__(self, tickers=None, filters=None, order='', rows=None, signal='', table='Overview'):
1011

1112
if tickers is None:
1213
self.tickers = []
@@ -32,15 +33,42 @@ def __init__(self, tickers=None, filters=None, rows=None, order='', signal='', t
3233

3334
def to_csv(self, directory=None):
    """Write the screener results to a CSV file.

    Args:
        directory: destination directory; defaults to the current working
            directory when omitted.
    """
    target = getcwd() if directory is None else directory
    export_to_csv(self.headers, self.data, target)
4340

41+
def to_sqlite(self):
    """Persist the screener headers and result rows to the SQLite database."""
    export_to_db(self.headers, self.data)
43+
44+
def display_db(self):
    """Print the rows currently stored in the SQLite database."""
    select_from_db()
46+
47+
def __get_total_rows(self):
    """Parse the total result count from the page and store it in self.rows.

    The count sits in a td[width="140"] cell, immediately after a </b> tag
    and before the next space.
    """
    cells = self.page_content.cssselect('td[width="140"]')
    markup = etree.tostring(cells[0]).decode("utf-8")
    count_text = markup.split('</b>')[1].split(' ')[0]
    self.rows = int(count_text)
51+
52+
def __get_page_urls(self):
    """Build the list of result-page URLs and store it in self.page_urls.

    Returns None early (leaving self.page_urls untouched) when the page
    shows no results. Each page holds 20 rows, so the '&r=' offset starts
    at 1 and advances in steps of 20.
    """
    options = self.page_content.cssselect('option[value="1"]')
    try:
        # The option text looks like "Page 1/N"; N is the page count.
        page_count = int(options[0].text.split('/')[1])
    except IndexError:  # no results found
        return None

    urls = []
    sequence = 1  # row offset of the page about to be linked
    for _ in range(page_count):
        # Stop once the previous page already covered the last row.
        if sequence - 20 <= self.rows < sequence:
            break
        urls.append(self.url + '&r={}'.format(sequence))
        sequence += 20

    self.page_urls = urls
71+
4472
def __get_table_headers(self):
4573

4674
first_row = self.page_content.cssselect('tr[valign="middle"]')

0 commit comments

Comments
 (0)