|
3 | 3 | """ |
4 | 4 |
|
5 | 5 | import asyncio |
| 6 | +import time |
| 7 | +import concurrent.futures |
6 | 8 |
|
7 | 9 | import pytest |
8 | 10 |
|
@@ -92,3 +94,140 @@ def inc(x: int): |
92 | 94 |
|
93 | 95 | result = asyncio.run(async_inc(41)) |
94 | 96 | assert result == 42 |
| 97 | + |
| 98 | + |
| 99 | +# ──────────────────────────────────────────────────────────────────────────── |
| 100 | +# Thread Pool Tests |
| 101 | +# ──────────────────────────────────────────────────────────────────────────── |
def test_thread_pool_lazy_initialization(func_hub):
    """The executor must not exist until the property is first read."""
    # Before any access, the private slot holds nothing.
    assert func_hub._thread_pool is None

    # The first read of the property allocates the executor.
    first = func_hub.thread_pool
    assert isinstance(first, concurrent.futures.ThreadPoolExecutor)
    assert func_hub._thread_pool is not None

    # The property memoizes: a second read yields the identical object.
    assert func_hub.thread_pool is first
| 115 | + |
| 116 | + |
@pytest.mark.asyncio
async def test_sync_function_execution_with_thread_pool(func_hub):
    """A registered sync tool must run on a worker thread, not the test thread.

    The async wrapper produced by ``func_hub.tool`` is expected to offload the
    blocking body to the hub's thread pool; we verify this by comparing the
    thread id observed inside the tool with the test's own thread id.
    """
    # Single local import shared by the closure below (the original imported
    # `threading` twice, once inside the tool and once in the test body).
    import threading

    execution_info = {
        "thread_id": None,
        "main_thread_id": threading.current_thread().ident,
    }

    @func_hub.tool("test sync function")
    def blocking_function(duration: float):
        """Simulate blocking operation."""
        execution_info["thread_id"] = threading.current_thread().ident
        time.sleep(duration)
        return f"completed in thread {execution_info['thread_id']}"

    # Invoke the async wrapper the hub stored for this tool.
    _, async_func = func_hub.func_dict["blocking_function"]
    result = await async_func(0.1)

    # The body must have run, and on a different thread than the test's.
    assert execution_info["thread_id"] is not None
    assert execution_info["thread_id"] != execution_info["main_thread_id"]
    assert "completed in thread" in result
| 142 | + |
| 143 | + |
@pytest.mark.asyncio
async def test_sync_function_with_kwargs_in_thread_pool(func_hub):
    """A sync tool called with keyword arguments runs correctly via the pool."""

    @func_hub.tool("test function with kwargs")
    def function_with_kwargs(a: int, b: int, multiplier: float = 1.0):
        """Function that uses kwargs."""
        time.sleep(0.01)  # brief pause standing in for real work
        return (a + b) * multiplier

    # Look up the async wrapper the hub registered, then call it with a
    # mix of positional and keyword arguments.
    _, wrapped = func_hub.func_dict["function_with_kwargs"]
    outcome = await wrapped(2, 3, multiplier=2.0)

    assert outcome == 10.0  # (2 + 3) * 2.0
| 158 | + |
| 159 | + |
@pytest.mark.asyncio
async def test_cleanup_shuts_down_thread_pool(func_hub):
    """cleanup() must dispose of an initialized thread pool."""
    # Touch the property so the executor actually exists.
    executor = func_hub.thread_pool
    assert isinstance(executor, concurrent.futures.ThreadPoolExecutor)
    assert func_hub._thread_pool is not None

    # After cleanup the private reference must be cleared.
    await func_hub.cleanup()
    assert func_hub._thread_pool is None
| 175 | + |
| 176 | + |
@pytest.mark.asyncio
async def test_multiple_cleanup_calls_safe(func_hub):
    """Repeated cleanup() calls must be idempotent and raise nothing."""
    # Force the pool into existence before tearing it down.
    func_hub.thread_pool

    # The first call shuts the pool down; the second sees no pool and
    # must simply leave the state untouched.
    for _ in range(2):
        await func_hub.cleanup()
        assert func_hub._thread_pool is None
| 190 | + |
| 191 | + |
@pytest.mark.asyncio
async def test_cleanup_without_thread_pool_initialization(func_hub):
    """cleanup() on a hub whose pool was never created is a harmless no-op."""
    # The fixture starts with no executor allocated.
    assert func_hub._thread_pool is None

    # Cleaning up a pristine hub must not raise and must not allocate.
    await func_hub.cleanup()
    assert func_hub._thread_pool is None
| 201 | + |
| 202 | + |
@pytest.mark.asyncio
async def test_concurrent_sync_function_execution(func_hub):
    """Several sync tools awaited together must overlap in the thread pool.

    Three tasks sleeping 0.1/0.15/0.05 s would take ~0.3 s run sequentially;
    executed concurrently they should finish in roughly the longest single
    sleep. Fixes from the original: the dead ``results = []`` assignment
    (immediately clobbered by ``asyncio.gather``) is removed, and the elapsed
    time is measured with the monotonic ``time.perf_counter`` instead of the
    wall-clock ``time.time``.
    """

    @func_hub.tool("concurrent task")
    def concurrent_task(task_id: int, duration: float):
        """Simulate concurrent blocking operation."""
        time.sleep(duration)
        return f"task_{task_id}_completed"

    # Launch three invocations of the same tool at once.
    _, async_func = func_hub.func_dict["concurrent_task"]
    tasks = [
        async_func(1, 0.1),
        async_func(2, 0.15),
        async_func(3, 0.05),
    ]

    # perf_counter is monotonic and unaffected by system clock adjustments.
    start_time = time.perf_counter()
    results = await asyncio.gather(*tasks)
    total_time = time.perf_counter() - start_time

    # All three results came back.
    assert len(results) == 3
    assert "task_1_completed" in results
    assert "task_2_completed" in results
    assert "task_3_completed" in results

    # Sequential execution would need ~0.3 s; concurrency caps it near 0.15 s.
    assert total_time < 0.3
0 commit comments