Skip to content

Commit 30e2e50

Browse files
committed
ci: automatically generate doc logs
1 parent c2b23bb commit 30e2e50

File tree

2 files changed

+336
-0
lines changed

2 files changed

+336
-0
lines changed

.github/workflows/docs-logs.yml

Lines changed: 148 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,148 @@
1+
# Runs every documented code snippet via docs/gen_snippet_logs.py, captures
# the output, and opens a PR that adds the captured logs to the docs.
name: Create PR With Docs Logs

# NOTE(review): this triggers on every push to every branch; consider
# restricting to `branches: [main]` or a schedule to avoid redundant runs.
on:
  push:

jobs:
  create-docs-logs-pr:
    runs-on: ubuntu-latest
    # Integration credentials/config consumed by the documentation snippets
    # when they are executed to produce log output.
    env:
      AMAZON_OPENSEARCH_HOST: ${{ secrets.INTEG_AMAZON_OPENSEARCH_HOST }}
      AMAZON_OPENSEARCH_INDEX_NAME: ${{ secrets.INTEG_AMAZON_OPENSEARCH_INDEX_NAME }}
      AMAZON_S3_BUCKET: ${{ secrets.INTEG_AMAZON_S3_BUCKET }}
      AMAZON_S3_KEY: ${{ secrets.INTEG_AMAZON_S3_KEY }}
      AMAZON_SQS_QUEUE_URL: ${{ secrets.INTEG_AMAZON_SQS_QUEUE_URL }}
      ANTHROPIC_API_KEY: ${{ secrets.INTEG_ANTHROPIC_API_KEY }}
      ASTRA_DB_API_ENDPOINT: ${{ secrets.INTEG_ASTRA_DB_API_ENDPOINT }}
      ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.INTEG_ASTRA_DB_APPLICATION_TOKEN }}
      AWS_ACCESS_KEY_ID: ${{ secrets.INTEG_AWS_ACCESS_KEY_ID }}
      AWS_DEFAULT_REGION: ${{ secrets.INTEG_AWS_DEFAULT_REGION }}
      AWS_IOT_CORE_ENDPOINT: ${{ secrets.INTEG_AWS_IOT_CORE_ENDPOINT }}
      AWS_IOT_CORE_TOPIC: ${{ secrets.INTEG_AWS_IOT_CORE_TOPIC }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.INTEG_AWS_SECRET_ACCESS_KEY }}
      AZURE_MONGODB_COLLECTION_NAME: ${{ secrets.INTEG_AZURE_MONGODB_COLLECTION_NAME }}
      AZURE_MONGODB_DATABASE_NAME: ${{ secrets.INTEG_AZURE_MONGODB_DATABASE_NAME }}
      AZURE_MONGODB_HOST: ${{ secrets.INTEG_AZURE_MONGODB_HOST }}
      AZURE_MONGODB_INDEX_NAME: ${{ secrets.INTEG_AZURE_MONGODB_INDEX_NAME }}
      AZURE_MONGODB_PASSWORD: ${{ secrets.INTEG_AZURE_MONGODB_PASSWORD }}
      AZURE_MONGODB_USERNAME: ${{ secrets.INTEG_AZURE_MONGODB_USERNAME }}
      AZURE_MONGODB_VECTOR_PATH: ${{ secrets.INTEG_AZURE_MONGODB_VECTOR_PATH }}
      AZURE_OPENAI_35_TURBO_16K_DEPLOYMENT_ID: ${{ secrets.INTEG_OPENAI_35_TURBO_16K_DEPLOYMENT_ID }}
      AZURE_OPENAI_35_TURBO_DEPLOYMENT_ID: ${{ secrets.INTEG_OPENAI_35_TURBO_DEPLOYMENT_ID }}
      AZURE_OPENAI_4_32K_DEPLOYMENT_ID: ${{ secrets.INTEG_OPENAI_4_32K_DEPLOYMENT_ID }}
      AZURE_OPENAI_4_DEPLOYMENT_ID: ${{ secrets.INTEG_OPENAI_4_DEPLOYMENT_ID }}
      AZURE_OPENAI_API_BASE: ${{ secrets.INTEG_AZURE_OPENAI_API_BASE }}
      AZURE_OPENAI_API_KEY_1: ${{ secrets.INTEG_AZURE_OPENAI_API_KEY_1 }}
      AZURE_OPENAI_API_KEY_2: ${{ secrets.INTEG_AZURE_OPENAI_API_KEY_2 }}
      AZURE_OPENAI_API_KEY_3: ${{ secrets.INTEG_AZURE_OPENAI_API_KEY_3 }}
      AZURE_OPENAI_API_KEY_4: ${{ secrets.INTEG_AZURE_OPENAI_API_KEY_4 }}
      AZURE_OPENAI_ENDPOINT_1: ${{ vars.INTEG_AZURE_OPENAI_ENDPOINT_1 }}
      AZURE_OPENAI_ENDPOINT_2: ${{ vars.INTEG_AZURE_OPENAI_ENDPOINT_2 }}
      AZURE_OPENAI_ENDPOINT_3: ${{ vars.INTEG_AZURE_OPENAI_ENDPOINT_3 }}
      AZURE_OPENAI_ENDPOINT_4: ${{ vars.INTEG_AZURE_OPENAI_ENDPOINT_4 }}
      AZURE_OPENAI_DALL_E_3_DEPLOYMENT_ID: ${{ secrets.INTEG_AZURE_OPENAI_DALL_E_3_DEPLOYMENT_ID }}
      AZURE_OPENAI_DEV_2_API_BASE: ${{ secrets.INTEG_AZURE_OPENAI_DEV_2_API_BASE }}
      AZURE_OPENAI_DEV_2_API_KEY: ${{ secrets.INTEG_AZURE_OPENAI_DEV_2_API_KEY }}
      COHERE_API_KEY: ${{ secrets.INTEG_COHERE_API_KEY }}
      DYNAMODB_TABLE_NAME: ${{ secrets.INTEG_DYNAMODB_TABLE_NAME }}
      ELEVEN_LABS_API_KEY: ${{ secrets.INTEG_ELEVEN_LABS_API_KEY }}
      EXA_API_KEY: ${{ secrets.INTEG_EXA_API_KEY }}
      GOOGLE_API_KEY: ${{ secrets.INTEG_GOOGLE_API_KEY }}
      GOOGLE_API_SEARCH_ID: ${{ secrets.INTEG_GOOGLE_API_SEARCH_ID }}
      GOOGLE_OWNER_EMAIL: ${{ secrets.INTEG_GOOGLE_OWNER_EMAIL }}
      GT_CLOUD_API_KEY: ${{ secrets.INTEG_GT_CLOUD_API_KEY }}
      GT_CLOUD_ASSET_NAME: ${{ vars.INTEG_GT_CLOUD_ASSET_NAME }}
      GT_CLOUD_ASSISTANT_ID: ${{ vars.INTEG_GT_CLOUD_ASSISTANT_ID }}
      GT_CLOUD_BASE_URL: ${{ vars.INTEG_GT_CLOUD_BASE_URL }}
      GT_CLOUD_BUCKET_ID: ${{ vars.INTEG_GT_CLOUD_BUCKET_ID }}
      GT_CLOUD_KB_ID: ${{ vars.INTEG_GT_CLOUD_KB_ID }}
      GT_CLOUD_STRUCTURE_ID: ${{ vars.INTEG_GT_CLOUD_STRUCTURE_ID }}
      GT_CLOUD_STRUCTURE_RUN_ID: ${{ vars.INTEG_GT_CLOUD_STRUCTURE_RUN_ID }}
      GT_CLOUD_THREAD_ID: ${{ vars.INTEG_GT_CLOUD_THREAD_ID }}
      GT_CLOUD_TOOL_ID: ${{ vars.INTEG_GT_CLOUD_TOOL_ID }}
      HUGGINGFACE_HUB_ACCESS_TOKEN: ${{ secrets.INTEG_HUGGINGFACE_HUB_ACCESS_TOKEN }}
      LEONARDO_API_KEY: ${{ secrets.INTEG_LEONARDO_API_KEY }}
      LEONARDO_MODEL_ID: ${{ secrets.INTEG_LEONARDO_MODEL_ID }}
      MONGODB_COLLECTION_NAME: ${{ secrets.INTEG_MONGODB_COLLECTION_NAME }}
      MONGODB_DATABASE_NAME: ${{ secrets.INTEG_MONGODB_DATABASE_NAME }}
      MONGODB_HOST: ${{ secrets.INTEG_MONGODB_HOST }}
      MONGODB_INDEX_NAME: ${{ secrets.INTEG_MONGODB_INDEX_NAME }}
      MONGODB_PASSWORD: ${{ secrets.INTEG_MONGODB_PASSWORD }}
      MONGODB_USERNAME: ${{ secrets.INTEG_MONGODB_USERNAME }}
      MONGODB_VECTOR_PATH: ${{ secrets.INTEG_MONGODB_VECTOR_PATH }}
      MARQO_API_KEY: ${{ secrets.INTEG_MARQO_API_KEY }}
      MARQO_INDEX_NAME: ${{ secrets.INTEG_MARQO_INDEX_NAME }}
      MARQO_URL: ${{ secrets.INTEG_MARQO_URL }}
      OPENAI_API_KEY: ${{ secrets.INTEG_OPENAI_API_KEY }}
      OPENAI_ASSISTANT_ID: ${{ vars.INTEG_OPENAI_ASSISTANT_ID }}
      OPENAI_THREAD_ID: ${{ vars.INTEG_OPENAI_THREAD_ID }}
      OPENWEATHER_API_KEY: ${{ secrets.INTEG_OPENWEATHER_API_KEY }}
      PINECONE_API_KEY: ${{ secrets.INTEG_PINECONE_API_KEY }}
      PINECONE_ENVIRONMENT: ${{ secrets.INTEG_PINECONE_ENVIRONMENT }}
      PINECONE_INDEX_NAME: ${{ secrets.INTEG_PINECONE_INDEX_NAME }}
      POSTGRES_DB: ${{ secrets.INTEG_POSTGRES_DB }}
      POSTGRES_HOST: ${{ secrets.INTEG_POSTGRES_HOST }}
      POSTGRES_PASSWORD: ${{ secrets.INTEG_POSTGRES_PASSWORD }}
      POSTGRES_PORT: ${{ secrets.INTEG_POSTGRES_PORT }}
      POSTGRES_USER: ${{ secrets.INTEG_POSTGRES_USER }}
      PUSHER_APP_ID: ${{ secrets.INTEG_PUSHER_APP_ID }}
      PUSHER_CLUSTER: ${{ secrets.INTEG_PUSHER_CLUSTER }}
      PUSHER_KEY: ${{ secrets.INTEG_PUSHER_KEY }}
      PUSHER_SECRET: ${{ secrets.INTEG_PUSHER_SECRET }}
      QDRANT_CLUSTER_API_KEY: ${{ secrets.INTEG_QDRANT_CLUSTER_API_KEY }}
      QDRANT_CLUSTER_ENDPOINT: ${{ secrets.INTEG_QDRANT_CLUSTER_ENDPOINT }}
      REDIS_HOST: ${{ secrets.INTEG_REDIS_HOST }}
      REDIS_INDEX: ${{ secrets.INTEG_REDIS_INDEX }}
      REDIS_PASSWORD: ${{ secrets.INTEG_REDIS_PASSWORD }}
      REDIS_PORT: ${{ secrets.INTEG_REDIS_PORT }}
      REDSHIFT_CLUSTER_IDENTIFIER: ${{ secrets.INTEG_REDSHIFT_CLUSTER_IDENTIFIER }}
      REDSHIFT_DATABASE: ${{ secrets.INTEG_REDSHIFT_DATABASE }}
      SAGEMAKER_FALCON_ENDPOINT_NAME: ${{ secrets.INTEG_FALCON_ENDPOINT_NAME }}
      SAGEMAKER_FALCON_INFERENCE_COMPONENT_NAME: ${{ secrets.INTEG_FALCON_INFERENCE_COMPONENT_NAME }}
      SAGEMAKER_HUGGINGFACE_MODEL: ${{ secrets.INTEG_SAGEMAKER_HUGGINGFACE_MODEL }}
      # NOTE(review): the two SAGEMAKER_LLAMA_3 secrets below lack the INTEG_
      # prefix used by every other secret — confirm the secret names are
      # actually registered without the prefix before "fixing" them.
      SAGEMAKER_LLAMA_3_INSTRUCT_ENDPOINT_NAME: ${{ secrets.SAGEMAKER_LLAMA_3_INSTRUCT_ENDPOINT_NAME }}
      SAGEMAKER_LLAMA_3_INSTRUCT_INFERENCE_COMPONENT_NAME: ${{ secrets.SAGEMAKER_LLAMA_3_INSTRUCT_INFERENCE_COMPONENT_NAME }}
      SAGEMAKER_TENSORFLOW_HUB_MODEL: ${{ secrets.INTEG_SAGEMAKER_TENSORFLOW_HUB_MODEL }}
      SNOWFLAKE_ACCOUNT: ${{ secrets.INTEG_SNOWFLAKE_ACCOUNT }}
      SNOWFLAKE_DATABASE: ${{ secrets.INTEG_SNOWFLAKE_DATABASE }}
      SNOWFLAKE_PASSWORD: ${{ secrets.INTEG_SNOWFLAKE_PASSWORD }}
      SNOWFLAKE_SCHEMA: ${{ secrets.INTEG_SNOWFLAKE_SCHEMA }}
      SNOWFLAKE_USER: ${{ secrets.INTEG_SNOWFLAKE_USER }}
      SNOWFLAKE_WAREHOUSE: ${{ secrets.INTEG_SNOWFLAKE_WAREHOUSE }}
      TAVILY_API_KEY: ${{ secrets.INTEG_TAVILY_API_KEY }}
      VOYAGE_API_KEY: ${{ secrets.INTEG_VOYAGE_API_KEY }}
      WEBHOOK_URL: ${{ secrets.INTEG_WEBHOOK_URL }}
      ZENROWS_API_KEY: ${{ secrets.INTEG_ZENROWS_API_KEY }}

    # Postgres (with pgvector) backing service for snippets that need a DB.
    services:
      postgres:
        image: ankane/pgvector:v0.5.0
        env:
          POSTGRES_USER: ${{ secrets.INTEG_POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ secrets.INTEG_POSTGRES_PASSWORD }}
          POSTGRES_DB: ${{ secrets.INTEG_POSTGRES_DB }}
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Init environment
        uses: ./.github/actions/init-environment
      - name: Run code modification script
        run: |
          poetry run python docs/gen_snippet_logs.py docs/griptape-framework
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v4
        with:
          branch: "docs/logs-update"
          title: "Docs Logs Update"
          body: "This PR was automatically generated to update the code snippets logs in the documentation."
          commit-message: "chore(docs): update code snippets logs"
          add-paths: |
            docs/**/*

docs/gen_snippet_logs.py

Lines changed: 188 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,188 @@
1+
from __future__ import annotations
2+
3+
import concurrent.futures
4+
import os
5+
import re
6+
import subprocess
7+
import sys
8+
from re import Pattern
9+
10+
# Detects a Markdown code block that consists solely of a snippet-include
# directive referencing a .py file, i.e.:
# ```python
# --8<-- "path/to/file.py"
# ```
# Group 1 captures the whole `--8<-- "..."` directive line; group 2 captures
# just the quoted file path.
CODE_BLOCK_PATTERN: Pattern[str] = re.compile(
    r"""```python\n(--8<--\s+\"([^\"]+)\")\n```""",
    re.MULTILINE,
)
18+
19+
20+
def run_python_script(script_path: str) -> str:
    """Run the Python script at *script_path* and return combined stdout/stderr.

    The script is executed with the same interpreter that is running this tool
    (``sys.executable``) so the active virtualenv's dependencies are visible,
    rather than whatever ``python`` happens to resolve to on PATH.

    A non-zero exit status is not treated as a failure: whatever the script
    printed (including any traceback on stderr) becomes the returned "log"
    text. If the process cannot be spawned at all, an error message is
    returned instead of raising.
    """
    print(f"[DEBUG] Running script: {script_path}")
    try:
        result = subprocess.run(
            [sys.executable, script_path],
            capture_output=True,
            text=True,
            check=False,  # don't raise CalledProcessError if return code != 0
        )
        # Combine stdout and stderr into one log stream.
        output = result.stdout
        if result.stderr:
            output += "\n" + result.stderr
    except (OSError, subprocess.SubprocessError) as e:
        # e.g. the process could not be spawned; capture rather than crash
        # so one bad snippet doesn't abort the whole docs run.
        output = f"Error running {script_path}:\n{e}"
    return output
40+
41+
42+
def create_logs_path(original_path: str) -> str:
    """Transform a .py path into the corresponding logs path with a .txt extension.

    Example: docs/path/src/foo.py => docs/path/logs/foo.txt
    """
    parts = original_path.split("/")
    # Swap the first 'src' segment for 'logs'; if there is none, slot a
    # 'logs' directory in just before the filename.
    try:
        parts[parts.index("src")] = "logs"
    except ValueError:
        parts.insert(-1, "logs")
    # Swap the .py extension for .txt on the filename.
    stem, _ext = os.path.splitext(parts[-1])
    parts[-1] = stem + ".txt"
    return "/".join(parts)
57+
58+
59+
def process_md_file(md_path: str) -> bool:
    """Process a single Markdown file sequentially.

    For each ```python code block that contains only a `--8<-- "path.py"`
    include directive (see CODE_BLOCK_PATTERN):
      - Run the referenced script and capture its output.
      - If the output is non-empty, write it to a logs file and replace the
        snippet with a tabbed "Code"/"Logs" block.
      - If the output is empty, leave the snippet unchanged.

    Returns:
        True if the file content changed and was rewritten, otherwise False.
    """
    print(f"[DEBUG] Processing file: {md_path}")
    with open(md_path, encoding="utf-8") as f:
        original_content = f.read()

    # Iterate over matches manually with finditer so each snippet can be
    # handled independently while the surrounding text is copied verbatim.
    last_pos = 0  # end of the last region already copied into the output
    new_content_parts: list[str] = []

    for match in CODE_BLOCK_PATTERN.finditer(original_content):
        snippet_start, snippet_end = match.span()
        include_directive = match.group(1)  # the `--8<-- "..."` line only
        py_file_path = match.group(2)  # the extracted "something.py" path

        # Append everything from the last position up to this snippet.
        new_content_parts.append(original_content[last_pos:snippet_start])

        print(f"[DEBUG] Found snippet referencing: {py_file_path}")

        # Run the referenced script; trailing whitespace is noise in logs.
        log_output = run_python_script(py_file_path).rstrip()

        if not log_output:
            # Nothing to show: keep the original snippet unchanged.
            print(f"[DEBUG] Script output empty. Skipping update for {py_file_path}.")
            new_content_parts.append(original_content[snippet_start:snippet_end])
        else:
            # Non-empty output: persist the logs and swap the snippet for a
            # Code/Logs tabbed block.
            logs_path = create_logs_path(py_file_path)
            logs_dir = os.path.dirname(logs_path)
            if logs_dir:
                os.makedirs(logs_dir, exist_ok=True)

            print(f"[DEBUG] Writing log output to: {logs_path}")
            with open(logs_path, "w", encoding="utf-8") as logfile:
                logfile.write(log_output)

            # Build the tabbed block (mkdocs "content tabs" syntax); the
            # original include directive moves into the "Code" tab.
            new_block = (
                '=== "Code"\n'
                "    ```python\n"
                f"    {include_directive}\n"
                "    ```\n\n"
                '=== "Logs"\n'
                "    ```python\n"
                f'    --8<-- "{logs_path}"\n'
                "    ```"
            )
            new_content_parts.append(new_block)
            print(f"[DEBUG] Replaced snippet for {py_file_path} with Code/Logs block.")

        last_pos = snippet_end

    # Append any remaining text after the last match.
    new_content_parts.append(original_content[last_pos:])
    new_content = "".join(new_content_parts)

    if new_content != original_content:
        with open(md_path, "w", encoding="utf-8") as f:
            f.write(new_content)
        print(f"[INFO] Updated: {md_path}")
        return True

    return False
143+
144+
145+
def update_markdown_files(root_dir: str) -> None:
    """Recursively find all .md files under *root_dir* and process them.

    Files are handled in parallel (one file per thread); the snippets inside
    each file are processed sequentially by process_md_file.
    """
    # Gather every Markdown file beneath the root.
    md_files: list[str] = [
        os.path.join(subdir, filename)
        for subdir, _, files in os.walk(root_dir)
        for filename in files
        if filename.endswith(".md")
    ]

    if not md_files:
        print(f"[WARN] No Markdown files found in: {root_dir}")
        return

    updated_files = 0
    with concurrent.futures.ThreadPoolExecutor() as executor:
        pending = {executor.submit(process_md_file, path): path for path in md_files}
        for done in concurrent.futures.as_completed(pending):
            path = pending[done]
            try:
                if done.result():
                    updated_files += 1
            except Exception as e:
                # Top-level per-file boundary: report and keep going so one
                # broken file doesn't sink the whole docs run.
                print(f"[ERROR] Exception processing {path}: {e}")

    print(f"[INFO] Processed {len(md_files)} Markdown files. Updated {updated_files} files.")
174+
175+
176+
def main() -> None:
    """Entry point: accept a directory from sys.argv, defaulting to ./docs."""
    cli_args = sys.argv[1:]
    root_directory = cli_args[0] if cli_args else "./docs"

    # Fail fast with a clear message when the target isn't a directory.
    if not os.path.isdir(root_directory):
        print(f"[ERROR] {root_directory} is not a valid directory.")
        sys.exit(1)

    update_markdown_files(root_directory)
185+
186+
187+
# Allow direct invocation: `python gen_snippet_logs.py [docs_dir]`.
if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)