43 changes: 20 additions & 23 deletions tests/context.py
@@ -5,8 +5,13 @@
import sys
import os
import yfinance
# from requests_ratelimiter import LimiterSession
# from pyrate_limiter import Duration, RequestRate, Limiter

from pyrate_limiter import Duration, RequestRate, Limiter
from requests_ratelimiter import LimiterSession
from requests_cache import CacheMixin, SQLiteCache
from requests_ratelimiter import LimiterMixin
from requests import Session
from pyrate_limiter import MemoryQueueBucket

_parent_dp = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
_src_dp = _parent_dp
@@ -25,24 +30,16 @@
import shutil
shutil.rmtree(testing_cache_dirpath)

# Since switching to curl_cffi, the requests_ratelimiter|cache won't work.
session_gbl = None

# # Setup a session to only rate-limit
# history_rate = RequestRate(1, Duration.SECOND)
# limiter = Limiter(history_rate)
# session_gbl = LimiterSession(limiter=limiter)

# # Use this instead if you also want caching:
# from requests_cache import CacheMixin, SQLiteCache
# from requests_ratelimiter import LimiterMixin
# from requests import Session
# from pyrate_limiter import MemoryQueueBucket
# class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
# pass
# cache_fp = os.path.join(testing_cache_dirpath, "unittests-cache")
# session_gbl = CachedLimiterSession(
# limiter=limiter,
# bucket_class=MemoryQueueBucket,
# backend=SQLiteCache(cache_fp, expire_after=_dt.timedelta(hours=1)),
# )
# Set up a session that only rate-limits
history_rate = RequestRate(1, Duration.SECOND)
limiter = Limiter(history_rate)
session_gbl = LimiterSession(limiter=limiter)
# Also enable caching (this replaces the rate-limit-only session above):
class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
pass
cache_fp = os.path.join(testing_cache_dirpath, "unittests-cache")
session_gbl = CachedLimiterSession(
limiter=limiter,
bucket_class=MemoryQueueBucket,
backend=SQLiteCache(cache_fp, expire_after=_dt.timedelta(hours=1)),
)
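
For context, a test module would consume this shared session roughly as in the sketch below; the import path matches tests/context.py, while the ticker and period are illustrative only.

# Sketch: reusing the shared rate-limited, cached session in a test (ticker/period illustrative)
from tests.context import yfinance as yf, session_gbl

dat = yf.Ticker("AMD", session=session_gbl)
df = dat.history(period="1mo")
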
150 changes: 150 additions & 0 deletions tests/test_data.py
@@ -0,0 +1,150 @@
"""
Tests for YfData sessions
"""

from tests.context import yfinance as yf
from yfinance.exceptions import YFDataException

import curl_cffi
import requests
from curl_adapter import CurlCffiAdapter

import unittest
from unittest.mock import patch
from requests.exceptions import HTTPError

class TestData(unittest.TestCase):
session = None

@classmethod
def setUpClass(cls):
# yf.enable_debug_mode()
# cls.session = session_gbl
cls.ticker = 'AMD'

# @classmethod
# def tearDownClass(cls):
# if cls.session is not None:
# cls.session.close()

def test_curlCffi(self):
session = curl_cffi.requests.Session(impersonate="chrome")

dat = yf.Ticker(self.ticker, session=session)

df = dat.history(period='1mo')
self.assertIsNotNone(df)
self.assertGreater(len(df), 1)

df = dat.history(period='1mo')
self.assertIsNotNone(df)
self.assertGreater(len(df), 1)

def test_requestsWithCurl(self):
session = requests.Session()
session.mount("http://", CurlCffiAdapter())
session.mount("https://", CurlCffiAdapter())

dat = yf.Ticker(self.ticker, session=session)

df = dat.history(period='1mo')
self.assertIsNotNone(df)
self.assertGreater(len(df), 1)

df = dat.history(period='1mo')
self.assertIsNotNone(df)
self.assertGreater(len(df), 1)

def test_cookie_strat_switch(self):
session = curl_cffi.requests.Session(impersonate="chrome")
dat = yf.Ticker(self.ticker, session=session)
dat._data._set_cookie_strategy('csrf')
dat._data._set_cookie_strategy('basic')
dat._data._set_cookie_strategy('csrf')
dat._data._set_cookie_strategy('basic')

session = requests.Session()
session.mount("http://", CurlCffiAdapter())
session.mount("https://", CurlCffiAdapter())
dat = yf.Ticker(self.ticker, session=session)
dat._data._n_strategy_flips = 0
dat._data._set_cookie_strategy('csrf')
dat._data._set_cookie_strategy('basic')
dat._data._set_cookie_strategy('csrf')
dat._data._set_cookie_strategy('basic')

def test_cookie_csrf_strategy(self):
session = curl_cffi.requests.Session(impersonate="chrome")
dat = yf.Ticker(self.ticker, session=session)
dat._data._set_cookie_strategy('csrf')
dat.history(period='1mo')

session = requests.Session()
session.mount("http://", CurlCffiAdapter())
session.mount("https://", CurlCffiAdapter())
dat = yf.Ticker(self.ticker, session=session)
dat._data._n_strategy_flips = 0
dat._data._set_cookie_strategy('csrf')
dat.history(period='1mo')

def test_requestsWithoutCurlRaise(self):
session = requests.Session()

# One of these functions below should raise this exception:
with self.assertRaises(YFDataException) as context:
dat = yf.Ticker(self.ticker, session=session)
dat.history(period='1mo')

self.assertIn("curl", str(context.exception).lower()) # Optional: check message content

def test_requestsWithCurlAndRateLimiter(self):
ReqSession = requests.Session
from requests_ratelimiter import LimiterMixin
from pyrate_limiter import Duration, RequestRate, Limiter
class LimiterSession(LimiterMixin, ReqSession):
"""Session class with cURL adapter and rate-limiting."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.mount("http://", CurlCffiAdapter())
self.mount("https://", CurlCffiAdapter())
limiter = Limiter(RequestRate(1, 5 * Duration.SECOND))
session = LimiterSession(limiter=limiter)

dat = yf.Ticker(self.ticker, session=session)

df = dat.history(period='1mo')
self.assertIsNotNone(df)
self.assertGreater(len(df), 1)

df = dat.history(period='1mo')
self.assertIsNotNone(df)
self.assertGreater(len(df), 1)

class TestDataWithBlock(unittest.TestCase):
def setUp(self):
self.blocked_url_fragment = "https://fc.yahoo.com"  # substring matched against outgoing request URLs

def send_with_block_check(self, original_send, request, **kwargs):
if self.blocked_url_fragment in request.url:
raise HTTPError(f"Blocked URL: {request.url}")
return original_send(request, **kwargs)

def test_requestsWithCurl_blocked_url(self):
# Create real session with adapters
session = requests.Session()
session.mount("http://", CurlCffiAdapter())
session.mount("https://", CurlCffiAdapter())

# Save unpatched version of send
real_send = requests.sessions.Session.send

def send_with_block_check(self_obj, request, **kwargs):
if self.blocked_url_fragment in request.url:
raise HTTPError(f"Blocked URL: {request.url}")
return real_send(self_obj, request, **kwargs)

yf.enable_debug_mode()
with patch('requests.sessions.Session.send', new=send_with_block_check):
dat = yf.Ticker('AAPL', session=session)
df = dat.history(period='1mo')
print(df.shape)
6 changes: 3 additions & 3 deletions yfinance/cache.py
@@ -372,9 +372,9 @@ def lookup(self, strategy):
return None

try:
data = _CookieSchema.get(_CookieSchema.strategy == strategy)
cookie = _pkl.loads(data.cookie_bytes)
return {'cookie':cookie, 'age':_datetime.datetime.now()-data.fetch_date}
row = _CookieSchema.get(_CookieSchema.strategy == strategy)
cookie = _pkl.loads(row.cookie_bytes)
return {'data':cookie, 'age':_datetime.datetime.now()-row.fetch_date}
except _CookieSchema.DoesNotExist:
return None

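Since lookup() now returns the pickled cookie under 'data' (previously 'cookie'), a caller would read the result roughly as sketched below; the cookie_cache variable name is hypothetical, only the returned keys come from this diff.

# Hypothetical caller of the updated lookup() (variable name illustrative)
entry = cookie_cache.lookup('csrf')
if entry is not None:
    cookie = entry['data']   # pickled cookie object; key was 'cookie' before this change
    age = entry['age']       # datetime.timedelta since the cookie was fetched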