Commit 5c64d9b

DX-1821: Deprecate Llms (#40)
* feat: deprecate QStash LLM
* fix: bump version to 2.1.0
* fix: fmt
* fix: tests
* fix: change deprecation header name
* feat: rm all api fields (Breaking Change)
* fix: model name
* feat: remove events apis in favor of logs
* fix: rm duplicate example
* fixes
* fix: use log instead of logs
1 parent 0815cbb commit 5c64d9b
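
In practical terms, the breaking change drops the `client.chat.*` completion helpers and the Upstash-hosted LLM examples; publishing to an LLM now goes through `publish_json` with an explicit third-party provider. A minimal sketch of the post-3.0.0 call, mirroring the updated examples/llm.py and the removed README example (the token, API key, and callback URL are placeholders):

```python
from qstash import QStash
from qstash.chat import openai  # provider helper used in the updated example

client = QStash("<QSTASH_TOKEN>")

# Enqueue a chat request for an external LLM provider; QStash delivers the
# provider's response to the callback URL rather than returning it inline.
res = client.message.publish_json(
    api={"name": "llm", "provider": openai("<OPENAI_API_KEY>")},
    body={
        "model": "gpt-4.1",
        "messages": [{"role": "user", "content": "What is the capital of Turkey?"}],
    },
    callback="https://example-cb.com",
)

print(res.message_id)
```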

17 files changed, +86 -1864 lines changed

README.md

Lines changed: 0 additions & 69 deletions
@@ -75,75 +75,6 @@ receiver.verify(
 )
 ```

-#### Create Chat Completions
-
-```python
-from qstash import QStash
-from qstash.chat import upstash
-
-client = QStash("<QSTASH_TOKEN>")
-
-res = client.chat.create(
-    model="meta-llama/Meta-Llama-3-8B-Instruct",
-    provider=upstash(),
-    messages=[
-        {
-            "role": "user",
-            "content": "What is the capital of Turkey?",
-        }
-    ],
-)
-
-print(res.choices[0].message.content)
-```
-
-#### Create Chat Completions Using Custom Providers
-
-```python
-from qstash import QStash
-from qstash.chat import openai
-
-client = QStash("<QSTASH_TOKEN>")
-
-res = client.chat.create(
-    model="gpt-3.5-turbo",
-    provider=openai("<OPENAI_API_KEY>"),
-    messages=[
-        {
-            "role": "user",
-            "content": "What is the capital of Turkey?",
-        }
-    ],
-)
-
-print(res.choices[0].message.content)
-```
-
-#### Publish a JSON message to LLM
-
-```python
-from qstash import QStash
-from qstash.chat import upstash
-
-client = QStash("<QSTASH_TOKEN>")
-
-res = client.message.publish_json(
-    api={"name": "llm", "provider": upstash()},
-    body={
-        "model": "meta-llama/Meta-Llama-3-8B-Instruct",
-        "messages": [
-            {
-                "role": "user",
-                "content": "What is the capital of Turkey?",
-            }
-        ],
-    },
-    callback="https://example-cb.com",
-)
-
-print(res.message_id)
-```
-
 #### Publish a JSON message to LLM Using Custom Providers

 ```python

examples/chat.py

Lines changed: 0 additions & 39 deletions
This file was deleted.

examples/llm.py

Lines changed: 3 additions & 3 deletions
@@ -6,7 +6,7 @@
 """

 from qstash import QStash
-from qstash.chat import upstash
+from qstash.chat import openai


 def main():
@@ -15,9 +15,9 @@ def main():
     )

     client.message.publish_json(
-        api={"name": "llm", "provider": upstash()},
+        api={"name": "llm", "provider": openai("<OPENAI_API_KEY>")},
         body={
-            "model": "meta-llama/Meta-Llama-3-8B-Instruct",
+            "model": "gpt-4.1",
             "messages": [
                 {
                     "role": "user",

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "qstash"
-version = "2.0.5"
+version = "3.0.0"
 description = "Python SDK for Upstash QStash"
 license = "MIT"
 authors = ["Upstash <[email protected]>"]

qstash/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -2,5 +2,5 @@
 from qstash.client import QStash
 from qstash.receiver import Receiver

-__version__ = "2.0.5"
+__version__ = "3.0.0"
 __all__ = ["QStash", "AsyncQStash", "Receiver"]
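
As a quick sanity check after upgrading, the exported version string reflects the bump above (a minimal sketch; assumes the 3.0.0 release is installed):

```python
import qstash

# __version__ is re-exported from qstash/__init__.py; prints "3.0.0" for this release.
print(qstash.__version__)
```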
