-
Notifications
You must be signed in to change notification settings - Fork 0
47 lines (40 loc) · 1.43 KB
/
example.yml
File metadata and controls
47 lines (40 loc) · 1.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
# Example workflow -- copy this into your repo's .github/workflows/
name: Git Hygiene

# Run on every PR open/update so commit hygiene is checked before merge.
on:
  pull_request:
    types: [opened, synchronize, reopened]

# Least-privilege token: read code, write only what's needed for PR comments.
permissions:
  contents: read
  pull-requests: write  # needed to post PR comments

jobs:
  # -- Option A: Local Ollama model (no API key needed) -----------------------
  # Runs a small LLM directly on the GitHub Actions runner via Ollama.
  # Zero cost, fully private -- no data leaves the runner.
  lint-commits-local:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Run Git Hygiene (local Ollama model)
        uses: iwillig/git-hygiene@main
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          use-local-model: "true"
          llm-model: "qwen2.5:0.5b"  # 397 MB, fast

  # -- Option B: Remote API model ---------------------------------------------
  # Uses a cloud LLM provider (OpenAI, Anthropic, etc.).
  # Faster, but requires an API key secret.
  # lint-commits-remote:
  #   runs-on: ubuntu-latest
  #   steps:
  #     - name: Checkout
  #       uses: actions/checkout@v4
  #
  #     - name: Run Git Hygiene (remote model)
  #       uses: iwillig/git-hygiene@main
  #       with:
  #         github-token: ${{ secrets.GITHUB_TOKEN }}
  #         use-local-model: "false"
  #         llm-model: "gpt-4o-mini"
  #         llm-api-base: "https://api.openai.com/v1"
  #         llm-api-key: ${{ secrets.OPENAI_API_KEY }}