
Commit ba5beec

Merge branch 'main' of https://github.com/souradipp76/ReadMeReady into app_dev
2 parents f5ca27c + 5e2f088

7 files changed: +21 −11 lines


.github/workflows/deploy_mkdocs.yml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ jobs:
     steps:
       # Checkout the repository
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
 
       - name: Setup Pages
         uses: actions/configure-pages@v5

.github/workflows/main.yml

Lines changed: 4 additions & 4 deletions
@@ -22,7 +22,7 @@ jobs:
         os: [ubuntu-latest]
     runs-on: ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
@@ -40,7 +40,7 @@ jobs:
         os: [ubuntu-latest]
     runs-on: ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
@@ -65,7 +65,7 @@ jobs:
 #        os: [macos-latest]
 #    runs-on: ${{ matrix.os }}
 #    steps:
-#      - uses: actions/checkout@v4
+#      - uses: actions/checkout@v5
 #      - uses: actions/setup-python@v5
 #        with:
 #          python-version: ${{ matrix.python-version }}
@@ -83,7 +83,7 @@ jobs:
 #        os: [windows-latest]
 #    runs-on: ${{ matrix.os }}
 #    steps:
-#      - uses: actions/checkout@v4
+#      - uses: actions/checkout@v5
 #      - uses: actions/setup-python@v5
 #        with:
 #          python-version: ${{ matrix.python-version }}

.github/workflows/release.yml

Lines changed: 2 additions & 2 deletions
@@ -16,7 +16,7 @@ jobs:
     permissions:
       contents: write
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           # by default, it uses a depth of 1
           # this fetches all history so that we can read each commit
@@ -32,7 +32,7 @@ jobs:
     needs: release
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Set up Python
         uses: actions/setup-python@v5
         with:

.github/workflows/rename_project.yml

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ jobs:
     if: ${{ !contains (github.repository, '/python-project-template') }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           # by default, it uses a depth of 1
           # this fetches all history so that we can read each commit

Makefile

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ lint: ## Run pep8, black, mypy linters.
 	$(ENV_PREFIX)flake8 readme_ready/
 	$(ENV_PREFIX)black -l 79 --check readme_ready/
 	$(ENV_PREFIX)black -l 79 --check tests/
-	$(ENV_PREFIX)mypy --ignore-missing-imports --disable-error-code=arg-type readme_ready/
+	$(ENV_PREFIX)mypy --ignore-missing-imports --disable-error-code=arg-type --disable-error-code=assignment readme_ready/
 
 .PHONY: test
 test: lint ## Run tests and generate coverage report.
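
For context, mypy's assignment error code fires when a value is re-bound to a variable annotated with an incompatible type. Below is a minimal, hypothetical Python sketch (not taken from the repository) of the pattern that --disable-error-code=assignment now silences in the lint target:

from typing import Optional

def find_model(name: str) -> Optional[str]:
    # Hypothetical helper, for illustration only.
    return name or None

model_name: str = "llama"

# Without the new flag, mypy reports on the next line:
#   error: Incompatible types in assignment (expression has type
#   "Optional[str]", variable has type "str")  [assignment]
model_name = find_model("llama")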

readme_ready/main.py

Lines changed: 1 addition & 2 deletions
@@ -1,5 +1,4 @@
-"""CLI interface for readme_ready project.
-"""
+"""CLI interface for readme_ready project."""
 
 from urllib.parse import urlparse
 

readme_ready/utils/llm_utils.py

Lines changed: 11 additions & 0 deletions
@@ -129,6 +129,17 @@ def get_llama_chat_model(model_name: str, streaming=False, model_kwargs=None):
     )
 
 
+def get_ollama_chat_model(model_name: str, streaming=False, model_kwargs=None):
+    """Get Ollama Chat Model"""
+    from langchain_ollama import ChatOllama
+
+    return ChatOllama(
+        model=model_name,
+        temperature=model_kwargs["temperature"],
+        num_ctx=model_kwargs["max_length"],
+        disable_streaming=not streaming,
+    )
+
 def get_openai_chat_model(
     model: str, temperature=None, streaming=None, model_kwargs=None
 ) -> ChatOpenAI:
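
A minimal usage sketch of the new get_ollama_chat_model helper, assuming langchain-ollama is installed, a local Ollama server is running, and the model named below has already been pulled; the model name and kwargs are illustrative, not values from the repository:

from readme_ready.utils.llm_utils import get_ollama_chat_model

# Hypothetical example values; use any model available in your Ollama install.
chat_model = get_ollama_chat_model(
    "llama3",
    streaming=False,
    model_kwargs={"temperature": 0.2, "max_length": 4096},
)

# ChatOllama is a LangChain chat model, so it can be invoked directly.
reply = chat_model.invoke("Summarize this repository in one sentence.")
print(reply.content)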
