Skip to content

Commit 9482180

Browse files
committed
fix: housekeeping
1 parent 289947b commit 9482180

File tree

9 files changed

+21
-27
lines changed

9 files changed

+21
-27
lines changed

.github/workflows/publish.yaml

+1-5
Original file line numberDiff line numberDiff line change
@@ -13,11 +13,7 @@ permissions:
1313

1414
jobs:
1515
tests:
16-
name: "Run tests"
17-
runs-on: ubuntu-latest
18-
steps:
19-
- name: Test
20-
uses: ./.github/workflows/test.yaml
16+
uses: ./.github/workflows/test.yaml
2117
version:
2218
runs-on: ubuntu-latest
2319
outputs:

.github/workflows/test.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ on:
44
pull_request:
55
paths:
66
- '**.py'
7-
workflow_dispatch: ~
7+
workflow_call:
88

99
permissions:
1010
contents: write

docs/reference.md

+8-8
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ Your function with additional attribute `openai_schema`
4949

5050
```python
5151
async def stream_to_log(
52-
response: Union[Iterator[OAIResponse], AsyncIterator[OAIResponse]]
52+
response: Union[Iterator[OAIResponse], AsyncIterator[OAIResponse]]
5353
) -> List[OAIResponse]
5454
```
5555

@@ -77,7 +77,7 @@ This is useful for debugging, when you first save the stream to an array and the
7777

7878
**Arguments**:
7979

80-
- `log`:
80+
- `log`:
8181

8282
<a id="stream_processing"></a>
8383

@@ -134,7 +134,7 @@ The difference between the current dictionary and the previous one
134134
async def process_response(
135135
response: OAIResponse,
136136
content_func: Optional[Callable[[AsyncGenerator[str, None]],
137-
Awaitable[None]]] = None,
137+
Awaitable[None]]] = None,
138138
funcs: Optional[List[Callable[[], Awaitable[None]]]] = None,
139139
self: Optional = None) -> Tuple[Set[str], Dict[str, Any]]
140140
```
@@ -177,7 +177,7 @@ function in the func attribute).
177177

178178
**Arguments**:
179179

180-
- `func`:
180+
- `func`:
181181

182182
<a id="fn_dispatcher.dispatch_yielded_functions_with_args"></a>
183183

@@ -198,7 +198,7 @@ Dispatches function calls from a generator that yields function names and argume
198198
- `gen`: The generator that yields function names and arguments
199199
- `funcs`: The functions to dispatch to
200200
- `dict_preprocessor`: A function that takes a function name and a dictionary of arguments and returns a new
201-
dictionary of arguments
201+
dictionary of arguments
202202
- `self`: An optional self argument to pass to the functions
203203

204204
**Returns**:
@@ -267,9 +267,9 @@ Called when the parsing was terminated
267267

268268
```python
269269
async def process_struct_response(
270-
response: OAIResponse,
271-
handler: BaseHandler,
272-
output_serialization: OutputSerialization = "json"
270+
response: OAIResponse,
271+
handler: BaseHandler,
272+
output_serialization: OutputSerialization = "json"
273273
) -> Tuple[Optional[Union[TModel, Terminate]], Dict[str, Any]]
274274
```
275275

openai_streaming/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
from .stream_processing import process_response
21
from .decorator import openai_streaming_function
2+
from .stream_processing import process_response

openai_streaming/struct/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
from .handler import process_struct_response, Terminate, BaseHandler
1+
from .handler import process_struct_response, Terminate, BaseHandler

openai_streaming/struct/yaml_parser.py

+1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from typing import List, Dict, Tuple, Generator, Optional
2+
23
from json_streamer import Parser, ParseState
34

45

pyproject.toml

+4-4
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,11 @@ build-backend = "setuptools.build_meta"
66
name = "openai-streaming"
77
version = "0.0.0-dev"
88
description = "Work with OpenAI's streaming API at ease, with Python generators"
9-
authors = [{name = "Almog Baku", email = "[email protected]"}]
10-
license = {text = "MIT"}
9+
authors = [{ name = "Almog Baku", email = "[email protected]" }]
10+
license = { text = "MIT" }
1111
readme = "README.md"
1212
keywords = ["openai", "gpt", "llm", "streaming", "stream", "generator"]
13-
dependencies= [
13+
dependencies = [
1414
"openai>=1.14.0,<2.0.0",
1515
"json-streamer>=0.1.0,<0.2.0",
1616
"pydantic>=2.0.2,<3.0.0",
@@ -19,7 +19,7 @@ dependencies= [
1919
requires-python = ">=3.9"
2020

2121
[project.optional-dependencies]
22-
yaml = ["PyYAML>6.0.0<7.0.0"]
22+
yaml = ["PyYAML>6.0.0,<7.0.0"]
2323

2424
[project.urls]
2525
"Homepage" = "https://github.com/AlmogBaku/openai-streaming"

tests/example_struct.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,11 @@
1+
import asyncio
12
import os
23
from time import sleep
4+
from typing import Optional, List
35

46
from openai import AsyncOpenAI
5-
import asyncio
6-
77
from pydantic import BaseModel
88

9-
from typing import Optional, List
109
from openai_streaming.struct import BaseHandler, process_struct_response, Terminate
1110

1211
# Initialize OpenAI Client

tests/test_with_struct.py

+2-4
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,13 @@
11
import json
22
import unittest
33
from os.path import dirname
4-
5-
import openai
4+
from typing import Dict, Generator, Optional, List
65
from unittest.mock import patch, AsyncMock
76

7+
import openai
88
from openai import BaseModel
99
from openai.types.chat import ChatCompletionChunk
1010

11-
from typing import Dict, Generator, Optional, List
12-
1311
from openai_streaming.struct import Terminate, BaseHandler, process_struct_response
1412

1513
openai.api_key = '...'

0 commit comments

Comments (0)