From 00058faae65f477044e3bb1916ef1f78e8735ad7 Mon Sep 17 00:00:00 2001 From: csunny Date: Thu, 26 Feb 2026 20:53:22 +0800 Subject: [PATCH 1/7] Refactor skill directory discovery to use recursive search with deduplication --- .../src/derisk_serve/skill/service/service.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/packages/derisk-serve/src/derisk_serve/skill/service/service.py b/packages/derisk-serve/src/derisk_serve/skill/service/service.py index b46f349d..8a3b356e 100644 --- a/packages/derisk-serve/src/derisk_serve/skill/service/service.py +++ b/packages/derisk-serve/src/derisk_serve/skill/service/service.py @@ -348,7 +348,7 @@ def _find_skill_directories(self, repo_path: str) -> List[str]: Returns: List[str]: List of skill directory paths containing SKILL.md """ - skill_dirs = [] + skill_dirs = set() # Common skill directory patterns patterns = [ @@ -363,13 +363,14 @@ def _find_skill_directories(self, repo_path: str) -> List[str]: if not os.path.isdir(search_path): continue - for entry in os.scandir(search_path): - if entry.is_dir(): - skill_md_path = os.path.join(entry.path, "SKILL.md") - if os.path.exists(skill_md_path): - skill_dirs.append(entry.path) + # Recursively search for SKILL.md in all subdirectories + for root, dirs, files in os.walk(search_path): + if "SKILL.md" in files: + skill_dirs.add(root) + # Don't go deeper into subdirectories of a skill directory + dirs[:] = [] - return skill_dirs + return list(skill_dirs) def _parse_skill_md(self, file_path: str) -> Optional[Dict[str, str]]: """Parse SKILL.md file to extract metadata. 
From 23afe2710690aa6b91c19b7b01cb67ffe7358525 Mon Sep 17 00:00:00 2001 From: csunny Date: Fri, 27 Feb 2026 23:41:27 +0800 Subject: [PATCH 2/7] cleanup(configs): remove unused config files and add derisk-openai.toml - Remove 15 unused config files (derisk-graphrag, derisk-local-*, derisk-proxy-*) - Keep derisk-docker.toml and configs/my for local debugging - Add new derisk-openai.toml based on dev-1.toml configuration --- configs/derisk-graphrag.toml | 73 --------------- configs/derisk-local-glm.toml | 37 -------- configs/derisk-local-qwen.toml | 38 -------- configs/derisk-local-vllm.toml | 39 -------- configs/derisk-openai.toml | 142 +++++++++++++++++++++++++++++ configs/derisk-proxy-aliyun.toml | 114 ----------------------- configs/derisk-proxy-claude.toml | 59 ------------ configs/derisk-proxy-deepseek.toml | 45 --------- configs/derisk-proxy-gemini.toml | 59 ------------ configs/derisk-proxy-kimi.toml | 53 ----------- configs/derisk-proxy-ollama.toml | 39 -------- configs/derisk-proxy-openai.toml | 35 ------- configs/derisk-proxy-tongyi.toml | 47 ---------- configs/derisk-siliconflow.toml | 40 -------- 14 files changed, 142 insertions(+), 678 deletions(-) delete mode 100644 configs/derisk-graphrag.toml delete mode 100644 configs/derisk-local-glm.toml delete mode 100644 configs/derisk-local-qwen.toml delete mode 100644 configs/derisk-local-vllm.toml create mode 100644 configs/derisk-openai.toml delete mode 100644 configs/derisk-proxy-aliyun.toml delete mode 100644 configs/derisk-proxy-claude.toml delete mode 100644 configs/derisk-proxy-deepseek.toml delete mode 100644 configs/derisk-proxy-gemini.toml delete mode 100644 configs/derisk-proxy-kimi.toml delete mode 100644 configs/derisk-proxy-ollama.toml delete mode 100644 configs/derisk-proxy-openai.toml delete mode 100644 configs/derisk-proxy-tongyi.toml delete mode 100644 configs/derisk-siliconflow.toml diff --git a/configs/derisk-graphrag.toml b/configs/derisk-graphrag.toml deleted file mode 100644 index 
1f8ccb41..00000000 --- a/configs/derisk-graphrag.toml +++ /dev/null @@ -1,73 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -log_level = "INFO" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "127.0.0.1" -port = 7777 - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - - -[rag] -chunk_size=1000 -chunk_overlap=0 -similarity_top_k=5 -similarity_score_threshold=0.0 -max_chunks_once_load=10 -max_threads=1 -rerank_top_k=3 - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -[rag.storage.graph] -type = "TuGraph" -host="127.0.0.1" -port=7687 -username="admin" -password="{your_password}" - -# enable_summary="True" -# community_topk=20 -# community_score_threshold=0.3 - -# triplet_graph_enabled="True" -# extract_topk=20 - -# document_graph_enabled="True" -# knowledge_graph_chunk_search_top_size=20 -# knowledge_graph_extraction_batch_size=20 - -# enable_similarity_search="True" -# knowledge_graph_embedding_batch_size=20 -# similarity_search_topk=5 -# extract_score_threshold=0.7 - -# enable_text_search="True" -# text2gql_model_enabled="True" -# text2gql_model_name="qwen2.5:latest" - - - -# Model Configurations -[models] -[[models.llms]] -name = "${env:LLM_MODEL_NAME:-gpt-4o}" -provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}" -api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}" -api_key = "${env:OPENAI_API_KEY}" - -[[models.embeddings]] -name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}" -provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}" -api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}" -api_key = "${env:OPENAI_API_KEY}" diff --git a/configs/derisk-local-glm.toml b/configs/derisk-local-glm.toml deleted file mode 100644 index dcd11d1a..00000000 --- a/configs/derisk-local-glm.toml +++ /dev/null @@ -1,37 +0,0 @@ -[system] -# Load 
language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "THUDM/glm-4-9b-chat-hf" -provider = "hf" -# If not provided, the model will be downloaded from the Hugging Face model hub -# uncomment the following line to specify the model path in the local file system -# path = "the-model-path-in-the-local-file-system" -path = "models/THUDM/glm-4-9b-chat-hf" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "hf" -# If not provided, the model will be downloaded from the Hugging Face model hub -# uncomment the following line to specify the model path in the local file system -# path = "the-model-path-in-the-local-file-system" -path = "models/BAAI/bge-large-zh-v1.5" diff --git a/configs/derisk-local-qwen.toml b/configs/derisk-local-qwen.toml deleted file mode 100644 index 28bd3796..00000000 --- a/configs/derisk-local-qwen.toml +++ /dev/null @@ -1,38 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "Qwen2.5-Coder-0.5B-Instruct" -provider = "hf" -# If not provided, the model will be downloaded from the Hugging Face model hub -# uncomment the following line to specify the model path in the local file system -# path = "the-model-path-in-the-local-file-system" -path = 
"models/Qwen2.5-Coder-0.5B-Instruct" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "hf" -# If not provided, the model will be downloaded from the Hugging Face model hub -# uncomment the following line to specify the model path in the local file system -# path = "the-model-path-in-the-local-file-system" -path = "models/BAAI/bge-large-zh-v1.5" - diff --git a/configs/derisk-local-vllm.toml b/configs/derisk-local-vllm.toml deleted file mode 100644 index 1696daa2..00000000 --- a/configs/derisk-local-vllm.toml +++ /dev/null @@ -1,39 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "DeepSeek-R1-Distill-Qwen-1.5B" -provider = "vllm" -# If not provided, the model will be downloaded from the Hugging Face model hub -# uncomment the following line to specify the model path in the local file system -# path = "the-model-path-in-the-local-file-system" -path = "models/DeepSeek-R1-Distill-Qwen-1.5B" -# dtype = "float32" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "hf" -# If not provided, the model will be downloaded from the Hugging Face model hub -# uncomment the following line to specify the model path in the local file system -# path = "the-model-path-in-the-local-file-system" -path = "/data/models/bge-large-zh-v1.5" - diff --git a/configs/derisk-openai.toml b/configs/derisk-openai.toml new file mode 100644 index 00000000..3b277786 --- /dev/null +++ b/configs/derisk-openai.toml @@ -0,0 +1,142 @@ +[system] +# Load language from environment variable(It is set by the hook) +language = "${env:DERISK_LANG:-zh}" +log_level = 
"INFO" +api_keys = [] +encrypt_key = "your_secret_key" + +# Server Configurations +[service.web] +host = "0.0.0.0" +port = 7777 +model_storage = "database" +web_url = "https://localhost:${env:WEB_SERVER_PORT:-7777}" + +[service.web.database] +type = "sqlite" +path = "pilot/meta_data/derisk.db" + +[service.web.trace] +file = "${env:TRACE_FILE_DIR:-logs}/derisk_webserver_tracer.jsonl" + +[service.model.worker] +host = "127.0.0.1" + +[rag] +chunk_size=1000 +chunk_overlap=0 +similarity_top_k=5 +similarity_score_threshold=0.0 +max_chunks_once_load=10 +max_threads=1 +rerank_top_k=3 +graph_community_summary_enabled="True" + +[rag.storage] +[rag.storage.vector] +type = "chroma" +persist_path = "pilot/data" + + +# Model Configurations +[models] + +[[models.llms]] +name = "deepseek-r1" +provider = "proxy/openai" +api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" +api_key = "sk-sp-948e72ec1a6543ee8a1017213cb42573" +backend = "deepseek-r1" + +[[models.llms]] +name = "deepseek-v3" +provider = "proxy/openai" +api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" +api_key = "sk-sp-948e72ec1a6543ee8a1017213cb42573" +backend = "deepseek-v3" + +[[models.llms]] +name = "wanx2.1-t2v-plus" +provider = "proxy/openai" +api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" +api_key = "sk-sp-948e72ec1a6543ee8a1017213cb42573" +backend = "wanx2.1-t2v-plus" + +[[models.embeddings]] +name = "text-embedding-v3" +provider = "proxy/openai" +api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings" +api_key = "sk-f482294177324066aa9d37957ef065e1" + + +[[models.rerankers]] +type = "reranker" +name = "gte-rerank-v2" +provider = "proxy/siliconflow" +api_url = "https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank" +api_key = "sk-f482294177324066aa9d37957ef065e1" +backend= "gte-rerank-v2" +text_limit = 4096 +batch_limit = 32 + +[agent.llm] +# (可选) 全局配置 +temperature = 0.5 + +[[agent.llm.provider]] +provider = 
"openai" +api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" +api_key = "sk-f482294177324066aa9d37957ef065e1" + +[[agent.llm.provider.model]] +name = "deepseek-r1" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "deepseek-v3" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "Kimi-k2" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "qwen-plus" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "qwen-vl-max" +temperature = 0.7 +max_new_tokens = 4096 + +[[agent.llm.provider.model]] +name = "glm-5" +temperature = 0.7 +max_new_tokens = 4096 + +[[serves]] +type = "file" +# Default backend for file server +default_backend = "oss" + +[[serves.backends]] +type = "oss" +endpoint = "https://oss-cn-beijing.aliyuncs.com" +region = "oss-cn-beijing" +access_key_id = "${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" +access_key_secret = "${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" +fixed_bucket = "dbgpt-test" + + +[sandbox] +type="local" +template_id="" +user_id="derisk" +agent_name="derisk" +repo_url="" +work_dir="/home/ubuntu" +skill_dir="/mnt/derisk/skills" +oss_ak="${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" +oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" +oss_endpoint="https://oss-cn-beijing.aliyuncs.com" +oss_bucket_name="dbgpt-test" diff --git a/configs/derisk-proxy-aliyun.toml b/configs/derisk-proxy-aliyun.toml deleted file mode 100644 index cb01ca99..00000000 --- a/configs/derisk-proxy-aliyun.toml +++ /dev/null @@ -1,114 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -log_level = "INFO" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "127.0.0.1" -port = "${env:WEB_SERVER_PORT:-8080}" -model_storage = "database" -web_url = 
"https://localhost:${env:WEB_SERVER_PORT:-8080}" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[service.web.trace] -file = "${env:TRACE_FILE_DIR:-logs}/derisk_webserver_tracer.jsonl" - -[service.model.worker] -host = "127.0.0.1" - -[rag] -chunk_size=1000 -chunk_overlap=0 -similarity_top_k=5 -similarity_score_threshold=0.0 -max_chunks_once_load=10 -max_threads=1 -rerank_top_k=3 -graph_community_summary_enabled="True" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - - -# Model Configurations -[models] - -[[models.llms]] -name = "deepseek-r1" -provider = "proxy/tongyi" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-xxxx" -backend = "deepseek-r1" - -[[models.llms]] -name = "deepseek-v3" -provider = "proxy/tongyi" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-xxxx" -backend = "deepseek-v3" - -[[models.llms]] -name = "Kimi-k2" -provider = "proxy/tongyi" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-xxxx" -backend = "Moonshot-Kimi-K2-Instruct" - -[[models.llms]] -name = "qwen-plus" -provider = "proxy/tongyi" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-xxxx" -backend = "qwen-plus-latest" - -[[models.llms]] -name = "qwen-vl-max" -provider = "proxy/tongyi" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-xxxx" -backend = "qwen-vl-max" - - -[[models.llms]] -name = "wanx2.1-t2v-plus" -provider = "proxy/tongyi" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-xxxx" -backend = "wanx2.1-t2v-plus" - - - -[[models.embeddings]] -name = "bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://ai.gitee.com/v1/embeddings" -api_key = "xxxx" - - -#[[serves]] -#type = "file" -## Default backend for file server -#default_backend = "oss" -# -#[[serves.backends]] -#type = "oss" -#endpoint = 
"https://oss-cn-beijing.aliyuncs.com" -#region = "oss-cn-beijing" -#access_key_id = "${env:OSS_ACCESS_KEY_ID}" -#access_key_secret = "${env:OSS_ACCESS_KEY_SECRET}" -#fixed_bucket = "${env:BUCKET_NAME}" - -[trace] -tracer_storage_cls="derisk_serve.rag.tracer.rag_flow_span.RagFlowSpanStorage" - -[mcp] -mode = "origin" -enable_mcp_gateway="True" \ No newline at end of file diff --git a/configs/derisk-proxy-claude.toml b/configs/derisk-proxy-claude.toml deleted file mode 100644 index 2497fcaf..00000000 --- a/configs/derisk-proxy-claude.toml +++ /dev/null @@ -1,59 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "claude-4-reasoner" -provider = "proxy/claude" -api_base = "https://api.anthropic.com" -api_key = "${env:ANTHROPIC_API_KEY}" -backend = "claude-opus-4-reasoner" -reasoning_model = "True" - -[[models.llms]] -name = "claude-opus-4-chat" -provider = "proxy/claude" -api_base = "https://api.openai-proxy.org/anthropic" -api_key = "${env:ANTHROPIC_API_KEY}" -backend = "claude-opus-4-chat" - -[[models.llms]] -name = "deepseek-r1" -provider = "proxy/deepseek" -api_key = "${env:DEEPSEEK_API_KEY}" -backend = "deepseek-reasoner" -reasoning_model = "True" - -[[models.llms]] -name = "deepseek-v3" -provider = "proxy/deepseek" -api_key = "${env:DEEPSEEK_API_KEY}" -backend = "deepseek-chat" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://api.siliconflow.cn/v1/embeddings" -api_key = "${env:SILICONFLOW_API_KEY}" diff 
--git a/configs/derisk-proxy-deepseek.toml b/configs/derisk-proxy-deepseek.toml deleted file mode 100644 index 0b692a0c..00000000 --- a/configs/derisk-proxy-deepseek.toml +++ /dev/null @@ -1,45 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "deepseek-r1" -provider = "proxy/deepseek" -api_key = "${env:DEEPSEEK_API_KEY}" -backend = "deepseek-reasoner" -reasoning_model = "True" - -[[models.llms]] -name = "deepseek-v3" -provider = "proxy/deepseek" -api_key = "${env:DEEPSEEK_API_KEY}" -backend = "deepseek-chat" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://api.siliconflow.cn/v1/embeddings" -api_key = "${env:SILICONFLOW_API_KEY}" - diff --git a/configs/derisk-proxy-gemini.toml b/configs/derisk-proxy-gemini.toml deleted file mode 100644 index ebe37bd9..00000000 --- a/configs/derisk-proxy-gemini.toml +++ /dev/null @@ -1,59 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "gemini-2.5-reasoner" -provider 
= "proxy/gemini" -api_base = "https://api.openai-proxy.org/google" -api_key = "${env:GEMINI_API_KEY}" -backend = "gemini-2.5-flash" -reasoning_model = "True" - -[[models.llms]] -name = "gemini-2.5-chat" -provider = "proxy/claude" -api_base = "https://api.openai-proxy.org/google" -api_key = "${env:GEMINI_API_KEY}" -backend = "gemini-2.5-flash" - -[[models.llms]] -name = "deepseek-r1" -provider = "proxy/deepseek" -api_key = "${env:DEEPSEEK_API_KEY}" -backend = "deepseek-reasoner" -reasoning_model = "True" - -[[models.llms]] -name = "deepseek-v3" -provider = "proxy/deepseek" -api_key = "${env:DEEPSEEK_API_KEY}" -backend = "deepseek-chat" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://api.siliconflow.cn/v1/embeddings" -api_key = "${env:SILICONFLOW_API_KEY}" diff --git a/configs/derisk-proxy-kimi.toml b/configs/derisk-proxy-kimi.toml deleted file mode 100644 index 349f90f7..00000000 --- a/configs/derisk-proxy-kimi.toml +++ /dev/null @@ -1,53 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "kimi-k2-thinking" -provider = "proxy/moonshot" -api_key = "${env:MOOTSHOOT_API_KEY}" -backend = "kimi-k2-thinking" -reasoning_model = "True" - -[[models.llms]] -name = "kimi-k2-0905-preview" -provider = "proxy/moonshot" -api_key = "${env:MOOTSHOOT_API_KEY}" -backend = "kimi-k2-0905-preview" -reasoning_model = "True" - -[[models.llms]] -name = "kimi-k2-turbo-preview" -provider = "proxy/moonshot" -api_key = 
"${env:MOOTSHOOT_API_KEY}" -backend = "kimi-k2-turbo-preview" -reasoning_model = "True" - -[[models.embeddings]] -name = "bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://api.siliconflow.cn/v1/embeddings" -api_key = "${env:SILICONFLOW_API_KEY}" diff --git a/configs/derisk-proxy-ollama.toml b/configs/derisk-proxy-ollama.toml deleted file mode 100644 index 28307ca7..00000000 --- a/configs/derisk-proxy-ollama.toml +++ /dev/null @@ -1,39 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "deepseek-r1:1.5b" -provider = "proxy/ollama" -api_base = "http://localhost:11434" -api_key = "" - -[[models.embeddings]] -name = "bge-m3:latest" -provider = "proxy/ollama" -api_url = "http://localhost:11434" -api_key = "" diff --git a/configs/derisk-proxy-openai.toml b/configs/derisk-proxy-openai.toml deleted file mode 100644 index 5456c349..00000000 --- a/configs/derisk-proxy-openai.toml +++ /dev/null @@ -1,35 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-en}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "127.0.0.1" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "${env:LLM_MODEL_NAME:-gpt-4o}" 
-provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}" -api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}" -api_key = "${env:OPENAI_API_KEY}" - -[[models.embeddings]] -name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}" -provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}" -api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}" -api_key = "${env:OPENAI_API_KEY}" diff --git a/configs/derisk-proxy-tongyi.toml b/configs/derisk-proxy-tongyi.toml deleted file mode 100644 index d7cd2cd0..00000000 --- a/configs/derisk-proxy-tongyi.toml +++ /dev/null @@ -1,47 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway="True" -main_app_code = "ai_sre" - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "qwq-plus" -provider = "proxy/tongyi" -api_key = "${env:TONGYI_API_KEY}" -backend = "qwq-plus" -reasoning_model = "True" - - -[[models.llms]] -name = "qwen-plus" -provider = "proxy/tongyi" -api_key = "${env:TONGYI_API_KEY}" -backend = "qwen-plus" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://api.siliconflow.cn/v1/embeddings" -api_key = "${env:SILICONFLOW_API_KEY}" - - diff --git a/configs/derisk-siliconflow.toml b/configs/derisk-siliconflow.toml deleted file mode 100644 index e0ea4852..00000000 --- a/configs/derisk-siliconflow.toml +++ /dev/null @@ -1,40 +0,0 @@ -[system] -# Load language from environment variable(It is set by the hook) -language = "${env:DERISK_LANG:-zh}" -api_keys = [] -encrypt_key = "your_secret_key" - -# Server Configurations 
-[service.web] -host = "0.0.0.0" -port = 7777 - -[service.web.database] -type = "sqlite" -path = "pilot/meta_data/derisk.db" -[service.model.worker] -host = "127.0.0.1" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] -[[models.llms]] -name = "Qwen/Qwen2.5-Coder-32B-Instruct" -provider = "proxy/siliconflow" -api_key = "${env:SILICONFLOW_API_KEY}" - -[[models.embeddings]] -name = "BAAI/bge-large-zh-v1.5" -provider = "proxy/openai" -api_url = "https://api.siliconflow.cn/v1/embeddings" -api_key = "${env:SILICONFLOW_API_KEY:123}" - -[[models.rerankers]] -type = "reranker" -name = "BAAI/bge-reranker-v2-m3" -provider = "proxy/siliconflow" -api_key = "${env:SILICONFLOW_API_KEY}" From 78d2d8a5f8b96a8da97a6b6ae6692c4f4eb36c95 Mon Sep 17 00:00:00 2001 From: csunny Date: Fri, 27 Feb 2026 23:49:01 +0800 Subject: [PATCH 3/7] security(configs): replace hardcoded secrets with placeholders - Replace hardcoded API keys with environment variable placeholders - Replace hardcoded encrypt_key with environment variable placeholder - Keep configs/my unchanged as requested --- configs/derisk-docker.toml | 51 ------- ...k-openai.toml => derisk-proxy-aliyun.toml} | 14 +- configs/derisk-proxy-openai.toml | 142 ++++++++++++++++++ 3 files changed, 149 insertions(+), 58 deletions(-) delete mode 100644 configs/derisk-docker.toml rename configs/{derisk-openai.toml => derisk-proxy-aliyun.toml} (90%) create mode 100644 configs/derisk-proxy-openai.toml diff --git a/configs/derisk-docker.toml b/configs/derisk-docker.toml deleted file mode 100644 index 4fc021ca..00000000 --- a/configs/derisk-docker.toml +++ /dev/null @@ -1,51 +0,0 @@ -# ============================================================ -# OpenDerisk Docker Deployment Configuration -# ============================================================ -# This config is optimized for Docker container deployment. -# Environment variables are injected via docker-compose .env file. 
-# ============================================================ - -[system] -language = "${env:DERISK_LANG:-en}" -api_keys = [] -encrypt_key = "${env:ENCRYPT_KEY:-your_secret_key_change_in_production}" - -# Server Configurations -[service.web] -host = "0.0.0.0" -port = 7777 -enable_mcp_gateway = "True" -main_app_code = "ai_sre" - -[service.web.database] -type = "${env:DB_TYPE:-sqlite}" -path = "pilot/meta_data/derisk.db" -# MySQL settings (used when type = "mysql") -# host = "${env:LOCAL_DB_HOST:-db}" -# port = "${env:LOCAL_DB_PORT:-3306}" -# user = "${env:LOCAL_DB_USER:-root}" -# password = "${env:LOCAL_DB_PASSWORD:-aa123456}" -# name = "${env:LOCAL_DB_NAME:-derisk}" - -# RAG Storage -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - -# Model Configurations -[models] - -# --- Primary LLM --- -[[models.llms]] -name = "${env:LLM_MODEL_NAME:-gpt-4o}" -provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}" -api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}" -api_key = "${env:OPENAI_API_KEY}" - -# --- Primary Embedding Model --- -[[models.embeddings]] -name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}" -provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}" -api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}" -api_key = "${env:OPENAI_API_KEY}" diff --git a/configs/derisk-openai.toml b/configs/derisk-proxy-aliyun.toml similarity index 90% rename from configs/derisk-openai.toml rename to configs/derisk-proxy-aliyun.toml index 3b277786..2412dfa1 100644 --- a/configs/derisk-openai.toml +++ b/configs/derisk-proxy-aliyun.toml @@ -3,7 +3,7 @@ language = "${env:DERISK_LANG:-zh}" log_level = "INFO" api_keys = [] -encrypt_key = "your_secret_key" +encrypt_key = "${ENCRYPT_KEY:-your_secret_key}" # Server Configurations [service.web] @@ -45,28 +45,28 @@ persist_path = "pilot/data" name = "deepseek-r1" provider = "proxy/openai" api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" 
-api_key = "sk-sp-948e72ec1a6543ee8a1017213cb42573" +api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" backend = "deepseek-r1" [[models.llms]] name = "deepseek-v3" provider = "proxy/openai" api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "sk-sp-948e72ec1a6543ee8a1017213cb42573" +api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" backend = "deepseek-v3" [[models.llms]] name = "wanx2.1-t2v-plus" provider = "proxy/openai" api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "sk-sp-948e72ec1a6543ee8a1017213cb42573" +api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" backend = "wanx2.1-t2v-plus" [[models.embeddings]] name = "text-embedding-v3" provider = "proxy/openai" api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings" -api_key = "sk-f482294177324066aa9d37957ef065e1" +api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" [[models.rerankers]] @@ -74,7 +74,7 @@ type = "reranker" name = "gte-rerank-v2" provider = "proxy/siliconflow" api_url = "https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank" -api_key = "sk-f482294177324066aa9d37957ef065e1" +api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" backend= "gte-rerank-v2" text_limit = 4096 batch_limit = 32 @@ -86,7 +86,7 @@ temperature = 0.5 [[agent.llm.provider]] provider = "openai" api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "sk-f482294177324066aa9d37957ef065e1" +api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" [[agent.llm.provider.model]] name = "deepseek-r1" diff --git a/configs/derisk-proxy-openai.toml b/configs/derisk-proxy-openai.toml new file mode 100644 index 00000000..2412dfa1 --- /dev/null +++ b/configs/derisk-proxy-openai.toml @@ -0,0 +1,142 @@ +[system] +# Load language from environment variable(It is set by the hook) +language = "${env:DERISK_LANG:-zh}" +log_level = "INFO" +api_keys = [] +encrypt_key = "${ENCRYPT_KEY:-your_secret_key}" + +# Server Configurations +[service.web] +host = "0.0.0.0" +port = 7777 +model_storage = 
"database" +web_url = "https://localhost:${env:WEB_SERVER_PORT:-7777}" + +[service.web.database] +type = "sqlite" +path = "pilot/meta_data/derisk.db" + +[service.web.trace] +file = "${env:TRACE_FILE_DIR:-logs}/derisk_webserver_tracer.jsonl" + +[service.model.worker] +host = "127.0.0.1" + +[rag] +chunk_size=1000 +chunk_overlap=0 +similarity_top_k=5 +similarity_score_threshold=0.0 +max_chunks_once_load=10 +max_threads=1 +rerank_top_k=3 +graph_community_summary_enabled="True" + +[rag.storage] +[rag.storage.vector] +type = "chroma" +persist_path = "pilot/data" + + +# Model Configurations +[models] + +[[models.llms]] +name = "deepseek-r1" +provider = "proxy/openai" +api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" +api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" +backend = "deepseek-r1" + +[[models.llms]] +name = "deepseek-v3" +provider = "proxy/openai" +api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" +api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" +backend = "deepseek-v3" + +[[models.llms]] +name = "wanx2.1-t2v-plus" +provider = "proxy/openai" +api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" +api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" +backend = "wanx2.1-t2v-plus" + +[[models.embeddings]] +name = "text-embedding-v3" +provider = "proxy/openai" +api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings" +api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" + + +[[models.rerankers]] +type = "reranker" +name = "gte-rerank-v2" +provider = "proxy/siliconflow" +api_url = "https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank" +api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" +backend= "gte-rerank-v2" +text_limit = 4096 +batch_limit = 32 + +[agent.llm] +# (可选) 全局配置 +temperature = 0.5 + +[[agent.llm.provider]] +provider = "openai" +api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" +api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" + +[[agent.llm.provider.model]] +name = "deepseek-r1" +temperature = 
0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "deepseek-v3" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "Kimi-k2" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "qwen-plus" +temperature = 0.7 +max_new_tokens = 4096 +[[agent.llm.provider.model]] +name = "qwen-vl-max" +temperature = 0.7 +max_new_tokens = 4096 + +[[agent.llm.provider.model]] +name = "glm-5" +temperature = 0.7 +max_new_tokens = 4096 + +[[serves]] +type = "file" +# Default backend for file server +default_backend = "oss" + +[[serves.backends]] +type = "oss" +endpoint = "https://oss-cn-beijing.aliyuncs.com" +region = "oss-cn-beijing" +access_key_id = "${env:OSS_ACCESS_KEY_ID:-xxx}" +access_key_secret = "${env:OSS_ACCESS_KEY_SECRET:-xxx}" +fixed_bucket = "dbgpt-test" + + +[sandbox] +type="local" +template_id="" +user_id="derisk" +agent_name="derisk" +repo_url="" +work_dir="/home/ubuntu" +skill_dir="/mnt/derisk/skills" +oss_ak="${env:OSS_ACCESS_KEY_ID:-xxx}" +oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xxx}" +oss_endpoint="https://oss-cn-beijing.aliyuncs.com" +oss_bucket_name="dbgpt-test" From 96b3e173e25f6aefa35fe0fed9f0da6445b9c0c5 Mon Sep 17 00:00:00 2001 From: csunny Date: Fri, 27 Feb 2026 23:54:57 +0800 Subject: [PATCH 4/7] refactor(configs): simplify derisk-proxy-openai.toml - Remove RAG configuration (rag, rag.storage sections) - Remove models configuration (llms, embeddings, rerankers) - Align structure with derisk-proxy-aliyun.toml - Keep environment variable placeholders for security --- configs/derisk-proxy-aliyun.toml | 65 ++------------------------------ configs/derisk-proxy-openai.toml | 65 ++------------------------------ 2 files changed, 8 insertions(+), 122 deletions(-) diff --git a/configs/derisk-proxy-aliyun.toml b/configs/derisk-proxy-aliyun.toml index 2412dfa1..60354283 100644 ---
a/configs/derisk-proxy-aliyun.toml +++ b/configs/derisk-proxy-aliyun.toml @@ -22,63 +22,6 @@ file = "${env:TRACE_FILE_DIR:-logs}/derisk_webserver_tracer.jsonl" [service.model.worker] host = "127.0.0.1" -[rag] -chunk_size=1000 -chunk_overlap=0 -similarity_top_k=5 -similarity_score_threshold=0.0 -max_chunks_once_load=10 -max_threads=1 -rerank_top_k=3 -graph_community_summary_enabled="True" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - - -# Model Configurations -[models] - -[[models.llms]] -name = "deepseek-r1" -provider = "proxy/openai" -api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" -backend = "deepseek-r1" - -[[models.llms]] -name = "deepseek-v3" -provider = "proxy/openai" -api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" -backend = "deepseek-v3" - -[[models.llms]] -name = "wanx2.1-t2v-plus" -provider = "proxy/openai" -api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" -backend = "wanx2.1-t2v-plus" - -[[models.embeddings]] -name = "text-embedding-v3" -provider = "proxy/openai" -api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings" -api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" - - -[[models.rerankers]] -type = "reranker" -name = "gte-rerank-v2" -provider = "proxy/siliconflow" -api_url = "https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank" -api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" -backend= "gte-rerank-v2" -text_limit = 4096 -batch_limit = 32 - [agent.llm] # (可选) 全局配置 temperature = 0.5 @@ -123,9 +66,9 @@ default_backend = "oss" type = "oss" endpoint = "https://oss-cn-beijing.aliyuncs.com" region = "oss-cn-beijing" -access_key_id = "${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" -access_key_secret = "${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" -fixed_bucket = "dbgpt-test" 
+access_key_id = "${env:OSS_ACCESS_KEY_ID:-xxx}" +access_key_secret = "${env:OSS_ACCESS_KEY_SECRET:-xxx}" +fixed_bucket = "openderisk" [sandbox] @@ -139,4 +82,4 @@ skill_dir="/mnt/derisk/skills" oss_ak="${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" oss_endpoint="https://oss-cn-beijing.aliyuncs.com" -oss_bucket_name="dbgpt-test" +oss_bucket_name="openderisk" diff --git a/configs/derisk-proxy-openai.toml b/configs/derisk-proxy-openai.toml index 2412dfa1..60354283 100644 --- a/configs/derisk-proxy-openai.toml +++ b/configs/derisk-proxy-openai.toml @@ -22,63 +22,6 @@ file = "${env:TRACE_FILE_DIR:-logs}/derisk_webserver_tracer.jsonl" [service.model.worker] host = "127.0.0.1" -[rag] -chunk_size=1000 -chunk_overlap=0 -similarity_top_k=5 -similarity_score_threshold=0.0 -max_chunks_once_load=10 -max_threads=1 -rerank_top_k=3 -graph_community_summary_enabled="True" - -[rag.storage] -[rag.storage.vector] -type = "chroma" -persist_path = "pilot/data" - - -# Model Configurations -[models] - -[[models.llms]] -name = "deepseek-r1" -provider = "proxy/openai" -api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" -backend = "deepseek-r1" - -[[models.llms]] -name = "deepseek-v3" -provider = "proxy/openai" -api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" -backend = "deepseek-v3" - -[[models.llms]] -name = "wanx2.1-t2v-plus" -provider = "proxy/openai" -api_base = "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1" -api_key = "${DASHSCOPE_API_KEY_1:-sk-...}" -backend = "wanx2.1-t2v-plus" - -[[models.embeddings]] -name = "text-embedding-v3" -provider = "proxy/openai" -api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings" -api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" - - -[[models.rerankers]] -type = "reranker" -name = "gte-rerank-v2" -provider = "proxy/siliconflow" 
-api_url = "https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank" -api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" -backend= "gte-rerank-v2" -text_limit = 4096 -batch_limit = 32 - [agent.llm] # (可选) 全局配置 temperature = 0.5 @@ -123,9 +66,9 @@ default_backend = "oss" type = "oss" endpoint = "https://oss-cn-beijing.aliyuncs.com" region = "oss-cn-beijing" -access_key_id = "${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" -access_key_secret = "${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" -fixed_bucket = "dbgpt-test" +access_key_id = "${env:OSS_ACCESS_KEY_ID:-xxx}" +access_key_secret = "${env:OSS_ACCESS_KEY_SECRET:-xxx}" +fixed_bucket = "openderisk" [sandbox] @@ -139,4 +82,4 @@ skill_dir="/mnt/derisk/skills" oss_ak="${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" oss_endpoint="https://oss-cn-beijing.aliyuncs.com" -oss_bucket_name="dbgpt-test" +oss_bucket_name="openderisk" From f373ef91d2ce6d1bba12a0c92763cdeb125b16f8 Mon Sep 17 00:00:00 2001 From: csunny Date: Fri, 27 Feb 2026 23:56:38 +0800 Subject: [PATCH 5/7] refactor(configs): update OpenAI config with correct endpoint and placeholders - Change API base URL from DashScope to OpenAI (https://api.openai.com/v1) - Update API key environment variable from DASHSCOPE_API_KEY_2 to OPENAI_API_KEY - Keep OSS placeholders consistent with aliyun config --- configs/derisk-proxy-openai.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/derisk-proxy-openai.toml b/configs/derisk-proxy-openai.toml index 60354283..f68befda 100644 --- a/configs/derisk-proxy-openai.toml +++ b/configs/derisk-proxy-openai.toml @@ -28,8 +28,8 @@ temperature = 0.5 [[agent.llm.provider]] provider = "openai" -api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1" -api_key = "${DASHSCOPE_API_KEY_2:-sk-...}" +api_base = "https://api.openai.com/v1" +api_key = "${OPENAI_API_KEY:-sk-...}" 
[[agent.llm.provider.model]] name = "deepseek-r1" From 340be197a071d0e55d80068a9ebac443cc3ed641 Mon Sep 17 00:00:00 2001 From: csunny Date: Sat, 28 Feb 2026 00:00:43 +0800 Subject: [PATCH 6/7] feat(configs): cleanup and simplify configuration files - Remove unused deployment configs (docker, bun, homebrew) - Simplify OpenAI proxy config by removing RAG and models sections - Update API endpoints to use https://api.openai.com/v1 - Replace hardcoded secrets with environment variable placeholders - Align OpenAI config structure with Aliyun config for consistency BREAKING CHANGE: Removed docker-compose and deployment configs, moved to separate deployment repository --- bun/README.md | 66 ----------- bun/bin/openderisk-server.ts | 41 ------- bun/bin/openderisk.ts | 185 ------------------------------- bun/index.ts | 1 - bun/package.json | 57 ---------- bun/scripts/postinstall.ts | 101 ----------------- configs/derisk-proxy-aliyun.toml | 4 +- configs/derisk-proxy-openai.toml | 4 +- docker-compose.yml | 139 ----------------------- docker/Dockerfile.backend | 110 ------------------ docker/Dockerfile.frontend | 59 ---------- docker/entrypoint.sh | 49 -------- docker/nginx/default.conf | 72 ------------ homebrew/README.md | 77 ------------- homebrew/openderisk.rb | 98 ---------------- 15 files changed, 4 insertions(+), 1059 deletions(-) delete mode 100644 bun/README.md delete mode 100755 bun/bin/openderisk-server.ts delete mode 100755 bun/bin/openderisk.ts delete mode 100644 bun/index.ts delete mode 100644 bun/package.json delete mode 100755 bun/scripts/postinstall.ts delete mode 100644 docker-compose.yml delete mode 100644 docker/Dockerfile.backend delete mode 100644 docker/Dockerfile.frontend delete mode 100644 docker/entrypoint.sh delete mode 100644 docker/nginx/default.conf delete mode 100644 homebrew/README.md delete mode 100644 homebrew/openderisk.rb diff --git a/bun/README.md b/bun/README.md deleted file mode 100644 index 4868770f..00000000 --- a/bun/README.md +++ 
/dev/null @@ -1,66 +0,0 @@ -# OpenDerisk Bun Package - -Fast, TypeScript-native package for OpenDeRisk AI-Native Risk Intelligence Systems. - -## Installation - -### Global installation -```bash -bun install -g openderisk -``` - -### Local installation -```bash -bun add openderisk -``` - -### Run without installation -```bash -bunx openderisk -``` - -## Usage - -```bash -# Show help -openderisk --help - -# Start OpenDerisk -openderisk - -# Start server -openderisk-server - -# Update to latest version -openderisk --update -``` - -## Features - -- 🚀 **Fast**: Built with Bun runtime for optimal performance -- 📦 **TypeScript**: Full TypeScript support -- 🔧 **Auto-setup**: Automatic uv and dependency installation -- 🔄 **Auto-update**: Built-in update mechanism - -## Requirements - -- Bun >= 1.0.0 -- Python >= 3.10 (installed automatically) -- Git - -## Why Bun? - -This package uses Bun for: -- Faster startup times -- Native TypeScript support -- Better cross-platform compatibility -- Smaller bundle size - -## Documentation - -- [OpenDerisk GitHub](https://github.com/derisk-ai/OpenDerisk) -- [Bun Documentation](https://bun.sh/docs) - -## License - -MIT diff --git a/bun/bin/openderisk-server.ts b/bun/bin/openderisk-server.ts deleted file mode 100755 index be646f7e..00000000 --- a/bun/bin/openderisk-server.ts +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env bun -/** - * OpenDerisk Server Launcher (Bun Edition) - */ - -import { spawn } from "child_process"; -import { join } from "path"; -import { existsSync } from "fs"; -import { homedir } from "os"; - -const INSTALL_DIR = process.env.OPENDERISK_INSTALL_DIR || join(homedir(), ".openderisk"); - -const colors = { - reset: "\x1b[0m", - red: "\x1b[31m", - blue: "\x1b[34m" -}; - -const log = (msg: string) => console.log(`${colors.blue}[OpenDerisk Server]${colors.reset} ${msg}`); -const error = (msg: string) => { - console.error(`${colors.red}[Error]${colors.reset} ${msg}`); - process.exit(1); -}; - -if (!existsSync(INSTALL_DIR)) { 
- error("OpenDerisk not installed. Please run: bun install -g openderisk"); -} - -log("Starting OpenDerisk Server..."); - -const uvPath = join(homedir(), ".local/bin/uv"); -const uv = existsSync(uvPath) ? uvPath : "uv"; - -const proc = spawn(uv, ["run", "derisk", "start", "webserver", ...process.argv.slice(2)], { - cwd: INSTALL_DIR, - stdio: "inherit", - env: process.env -}); - -proc.on("error", (err) => error(`Failed to start server: ${err.message}`)); -proc.on("exit", (code) => process.exit(code || 0)); diff --git a/bun/bin/openderisk.ts b/bun/bin/openderisk.ts deleted file mode 100755 index 110bf4f9..00000000 --- a/bun/bin/openderisk.ts +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env bun -/** - * OpenDerisk CLI Launcher (Bun Edition) - * - * Fast, TypeScript-native wrapper for OpenDerisk - */ - -import { spawn } from "child_process"; -import { join, dirname } from "path"; -import { existsSync, mkdirSync } from "fs"; -import { homedir } from "os"; - -const INSTALL_DIR = process.env.OPENDERISK_INSTALL_DIR || join(homedir(), ".openderisk"); -const REPO_URL = "https://github.com/derisk-ai/OpenDerisk.git"; - -// Colors -const colors = { - reset: "\x1b[0m", - red: "\x1b[31m", - green: "\x1b[32m", - yellow: "\x1b[33m", - blue: "\x1b[34m", - cyan: "\x1b[36m" -}; - -const log = (msg: string) => console.log(`${colors.blue}[OpenDerisk]${colors.reset} ${msg}`); -const warn = (msg: string) => console.log(`${colors.yellow}[Warning]${colors.reset} ${msg}`); -const error = (msg: string) => { - console.error(`${colors.red}[Error]${colors.reset} ${msg}`); - process.exit(1); -}; -const success = (msg: string) => console.log(`${colors.green}[Success]${colors.reset} ${msg}`); - -const commandExists = (cmd: string): boolean => { - try { - Bun.spawnSync(["which", cmd], { stdout: "ignore" }); - return true; - } catch { - return false; - } -}; - -const ensureUv = async (): Promise => { - if (commandExists("uv")) { - log("uv is already installed"); - return; - } - - log("Installing uv 
(Python package manager)..."); - const proc = Bun.spawn([ - "bash", "-c", - "curl -LsSf https://astral.sh/uv/install.sh | sh" - ], { stdio: ["inherit", "inherit", "inherit"] }); - - await proc.exited; - - if (proc.exitCode !== 0) { - error("Failed to install uv"); - } - success("uv installed successfully"); -}; - -const ensureRepo = async (): Promise => { - if (existsSync(join(INSTALL_DIR, ".git"))) { - log("OpenDerisk already installed, updating..."); - const proc = Bun.spawn( - ["git", "pull", "origin", "main"], - { cwd: INSTALL_DIR, stdout: "pipe", stderr: "pipe" } - ); - await proc.exited; - return; - } - - log("Installing OpenDerisk..."); - const parentDir = dirname(INSTALL_DIR); - - if (!existsSync(parentDir)) { - mkdirSync(parentDir, { recursive: true }); - } - - const proc = Bun.spawn( - ["git", "clone", "--depth", "1", REPO_URL, INSTALL_DIR], - { stdio: ["inherit", "inherit", "inherit"] } - ); - - await proc.exited; - - if (proc.exitCode !== 0) { - error("Failed to clone repository"); - } - success("OpenDerisk repository cloned"); -}; - -const installDeps = async (): Promise => { - log("Installing Python dependencies..."); - - const extras = [ - "base", "proxy_openai", "rag", "storage_chromadb", - "derisks", "storage_oss2", "client", "ext_base" - ].map(e => `--extra "${e}"`).join(" "); - - const proc = Bun.spawn( - ["bash", "-c", `uv sync --all-packages --frozen ${extras}`], - { - cwd: INSTALL_DIR, - stdio: ["inherit", "inherit", "inherit"], - env: { - ...process.env, - PATH: `${join(homedir(), ".local/bin")}:${process.env.PATH}` - } - } - ); - - await proc.exited; - - if (proc.exitCode !== 0) { - error("Failed to install dependencies"); - } - success("Dependencies installed"); -}; - -const runOpenDerisk = (args: string[]): void => { - const uvPath = join(homedir(), ".local/bin/uv"); - const uv = existsSync(uvPath) ? 
uvPath : "uv"; - - const proc = spawn(uv, ["run", "derisk", ...args], { - cwd: INSTALL_DIR, - stdio: "inherit", - env: process.env - }); - - proc.on("error", (err) => error(`Failed to start: ${err.message}`)); - proc.on("exit", (code) => process.exit(code || 0)); -}; - -const main = async (): Promise => { - const args = process.argv.slice(2); - - // Help - if (args.includes("--help") || args.includes("-h")) { - console.log(` -${colors.cyan}OpenDerisk CLI${colors.reset} - -Usage: openderisk [options] [command] - -Options: - -h, --help Show this help message - -v, --version Show version information - --update Update to latest version - -Commands: - server Start OpenDerisk server - -For more information: https://github.com/derisk-ai/OpenDerisk - `); - return; - } - - // Version - if (args.includes("--version") || args.includes("-v")) { - console.log("OpenDerisk v0.2.0 (Bun Edition)"); - return; - } - - // Update - if (args.includes("--update")) { - await ensureUv(); - await ensureRepo(); - await installDeps(); - success("OpenDerisk updated successfully!"); - return; - } - - // First run setup - if (!existsSync(INSTALL_DIR)) { - await ensureUv(); - await ensureRepo(); - await installDeps(); - } - - // Run - runOpenDerisk(args); -}; - -main().catch(error); diff --git a/bun/index.ts b/bun/index.ts deleted file mode 100644 index 2166217f..00000000 --- a/bun/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { default } from './bin/openderisk.ts'; diff --git a/bun/package.json b/bun/package.json deleted file mode 100644 index a0b80aff..00000000 --- a/bun/package.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "name": "openderisk", - "version": "0.2.0", - "description": "OpenDeRisk AI-Native Risk Intelligence Systems — Your application system risk intelligent manager", - "module": "index.ts", - "type": "module", - "bin": { - "openderisk": "./bin/openderisk.ts", - "openderisk-server": "./bin/openderisk-server.ts" - }, - "scripts": { - "postinstall": "bun run scripts/postinstall.ts", - 
"start": "bun run bin/openderisk.ts", - "server": "bun run bin/openderisk-server.ts", - "build": "bun build --target=bun ./bin/openderisk.ts --outfile=dist/openderisk", - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [ - "openderisk", - "ai", - "risk-management", - "root-cause-analysis", - "sre", - "devops", - "monitoring", - "observability" - ], - "author": "HongjunYang ", - "license": "MIT", - "homepage": "https://github.com/derisk-ai/OpenDerisk", - "repository": { - "type": "git", - "url": "git+https://github.com/derisk-ai/OpenDerisk.git" - }, - "bugs": { - "url": "https://github.com/derisk-ai/OpenDerisk/issues" - }, - "engines": { - "bun": ">=1.0.0" - }, - "os": [ - "darwin", - "linux" - ], - "cpu": [ - "x64", - "arm64" - ], - "preferGlobal": true, - "files": [ - "bin/", - "scripts/", - "README.md", - "LICENSE", - "bun.lockb" - ] -} diff --git a/bun/scripts/postinstall.ts b/bun/scripts/postinstall.ts deleted file mode 100755 index 81f492c6..00000000 --- a/bun/scripts/postinstall.ts +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env bun -/** - * Post-install script for OpenDerisk (Bun Edition) - */ - -import { spawn } from "child_process"; -import { join, dirname } from "path"; -import { existsSync, mkdirSync } from "fs"; -import { homedir } from "os"; - -const INSTALL_DIR = process.env.OPENDERISK_INSTALL_DIR || join(homedir(), ".openderisk"); -const REPO_URL = "https://github.com/derisk-ai/OpenDerisk.git"; - -const colors = { - reset: "\x1b[0m", - green: "\x1b[32m", - yellow: "\x1b[33m", - cyan: "\x1b[36m" -}; - -const log = (msg: string) => console.log(`${colors.cyan}[openderisk]${colors.reset} ${msg}`); -const success = (msg: string) => console.log(`${colors.green}[openderisk]${colors.reset} ${msg}`); -const warn = (msg: string) => console.log(`${colors.yellow}[openderisk]${colors.reset} ${msg}`); - -const commandExists = (cmd: string): boolean => { - try { - Bun.spawnSync(["which", cmd], { stdout: "ignore" }); - return true; - } catch 
{ - return false; - } -}; - -const installUv = async (): Promise => { - if (commandExists("uv")) return; - - log("Installing uv package manager..."); - const proc = Bun.spawn( - ["bash", "-c", "curl -LsSf https://astral.sh/uv/install.sh | sh"], - { stdio: ["inherit", "inherit", "inherit"] } - ); - await proc.exited; - - if (proc.exitCode !== 0) { - warn("Failed to install uv automatically"); - warn("Please install manually: https://github.com/astral-sh/uv"); - } else { - success("uv installed successfully"); - } -}; - -const cloneRepo = async (): Promise => { - if (existsSync(join(INSTALL_DIR, ".git"))) { - log("OpenDerisk already exists, skipping clone"); - return; - } - - log("Cloning OpenDerisk repository..."); - const parentDir = dirname(INSTALL_DIR); - - if (!existsSync(parentDir)) { - mkdirSync(parentDir, { recursive: true }); - } - - const proc = Bun.spawn( - ["git", "clone", "--depth", "1", REPO_URL, INSTALL_DIR], - { stdio: ["inherit", "inherit", "inherit"] } - ); - - await proc.exited; - - if (proc.exitCode !== 0) { - warn("Failed to clone repository automatically"); - warn(`You can manually clone: git clone ${REPO_URL} ${INSTALL_DIR}`); - } else { - success("Repository cloned successfully"); - } -}; - -const main = async (): Promise => { - console.log(""); - log("Setting up OpenDerisk with Bun... 🚀"); - console.log(""); - - await installUv(); - await cloneRepo(); - - console.log(""); - success("Setup complete! 🎉"); - console.log(""); - console.log("Next steps:"); - console.log(" 1. Configure API keys in:"); - console.log(` ${join(INSTALL_DIR, "configs/derisk-proxy-aliyun.toml")}`); - console.log(" 2. Run: openderisk --help"); - console.log(" 3. 
Start server: openderisk-server"); - console.log(""); - console.log("Documentation: https://github.com/derisk-ai/OpenDerisk"); - console.log(""); -}; - -main().catch(console.error); diff --git a/configs/derisk-proxy-aliyun.toml b/configs/derisk-proxy-aliyun.toml index 60354283..6173de23 100644 --- a/configs/derisk-proxy-aliyun.toml +++ b/configs/derisk-proxy-aliyun.toml @@ -79,7 +79,7 @@ agent_name="derisk" repo_url="" work_dir="/home/ubuntu" skill_dir="/mnt/derisk/skills" -oss_ak="${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" -oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" +oss_ak="${env:OSS_ACCESS_KEY_ID:-xxx}" +oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xxx}" oss_endpoint="https://oss-cn-beijing.aliyuncs.com" oss_bucket_name="openderisk" diff --git a/configs/derisk-proxy-openai.toml b/configs/derisk-proxy-openai.toml index f68befda..6c5833b1 100644 --- a/configs/derisk-proxy-openai.toml +++ b/configs/derisk-proxy-openai.toml @@ -79,7 +79,7 @@ agent_name="derisk" repo_url="" work_dir="/home/ubuntu" skill_dir="/mnt/derisk/skills" -oss_ak="${env:OSS_ACCESS_KEY_ID:-LTAI5tDkae7TM8D6ENa5xf2o}" -oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xf8O3ADZUwrfythtM43osX4CjHwXys}" +oss_ak="${env:OSS_ACCESS_KEY_ID:-xxx}" +oss_sk="${env:OSS_ACCESS_KEY_SECRET:-xxx}" oss_endpoint="https://oss-cn-beijing.aliyuncs.com" oss_bucket_name="openderisk" diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 290db4bd..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,139 +0,0 @@ -# ============================================================ -# OpenDerisk Docker Compose - Full Stack Deployment -# ============================================================ -# -# Usage: -# 1. Copy .env.template to .env and configure: -# cp .env.template .env -# -# 2. Start all services: -# docker compose up -d -# -# 3. Start with MySQL (instead of SQLite): -# DB_TYPE=mysql docker compose --profile mysql up -d -# -# 4. 
View logs: -# docker compose logs -f backend -# -# 5. Stop: -# docker compose down -# -# ============================================================ - -services: - # -------------------------------------------------------- - # Backend API Server (Python/FastAPI) - # -------------------------------------------------------- - backend: - build: - context: . - dockerfile: docker/Dockerfile.backend - container_name: derisk-backend - restart: unless-stopped - env_file: - - .env - environment: - - DERISK_CONFIG_FILE=${DERISK_CONFIG_FILE:-configs/derisk-docker.toml} - - DB_TYPE=${DB_TYPE:-sqlite} - - LOCAL_DB_HOST=${LOCAL_DB_HOST:-db} - - LOCAL_DB_PORT=${LOCAL_DB_PORT:-3306} - - LOCAL_DB_USER=${LOCAL_DB_USER:-root} - - LOCAL_DB_PASSWORD=${LOCAL_DB_PASSWORD:-aa123456} - - LOCAL_DB_NAME=${LOCAL_DB_NAME:-derisk} - ports: - - "${WEB_SERVER_PORT:-7777}:7777" - volumes: - # Persist SQLite database - - derisk-metadata:/app/pilot/meta_data - # Persist ChromaDB vector storage - - derisk-vectordata:/app/pilot/data - # Persist message data - - derisk-messages:/app/pilot/message - # Persist logs - - derisk-logs:/app/logs - # Mount custom configs (optional) - # - ./configs:/app/configs:ro - networks: - - derisknet - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:7777/api/health"] - interval: 30s - timeout: 10s - retries: 5 - start_period: 60s - - # -------------------------------------------------------- - # Frontend Web UI (Next.js + nginx) - # -------------------------------------------------------- - frontend: - build: - context: . 
- dockerfile: docker/Dockerfile.frontend - args: - # This determines the backend URL the frontend calls - # For docker-compose, use the nginx proxy path - NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL:-http://localhost:7777} - container_name: derisk-frontend - restart: unless-stopped - ports: - - "3000:3000" - depends_on: - backend: - condition: service_healthy - networks: - - derisknet - - # -------------------------------------------------------- - # MySQL Database (optional, activate with --profile mysql) - # -------------------------------------------------------- - db: - image: mysql:8.0 - container_name: derisk-mysql - profiles: - - mysql - restart: unless-stopped - environment: - MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD:-aa123456} - MYSQL_DATABASE: ${LOCAL_DB_NAME:-derisk} - MYSQL_USER: ${LOCAL_DB_USER:-derisk} - MYSQL_PASSWORD: ${LOCAL_DB_PASSWORD:-aa123456} - ports: - - "${LOCAL_DB_PORT:-3306}:3306" - volumes: - - derisk-mysql-data:/var/lib/mysql - - ./assets/schema/derisk.sql:/docker-entrypoint-initdb.d/01-schema.sql:ro - command: - - --character-set-server=utf8mb4 - - --collation-server=utf8mb4_unicode_ci - - --default-authentication-plugin=mysql_native_password - healthcheck: - test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "root", "-p${MYSQL_ROOT_PASSWORD:-aa123456}"] - interval: 10s - timeout: 5s - retries: 10 - start_period: 30s - networks: - - derisknet - -# -------------------------------------------------------- -# Named Volumes -# -------------------------------------------------------- -volumes: - derisk-metadata: - driver: local - derisk-vectordata: - driver: local - derisk-messages: - driver: local - derisk-logs: - driver: local - derisk-mysql-data: - driver: local - -# -------------------------------------------------------- -# Network -# -------------------------------------------------------- -networks: - derisknet: - driver: bridge - name: derisknet diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend 
deleted file mode 100644 index fe1c82ea..00000000 --- a/docker/Dockerfile.backend +++ /dev/null @@ -1,110 +0,0 @@ -# ============================================================ -# OpenDerisk Backend Dockerfile -# Multi-stage build: dependency install -> production image -# ============================================================ - -# ---------------------------------------------------------- -# Stage 1: Build dependencies -# ---------------------------------------------------------- -FROM python:3.11-slim AS builder - -# Install system build dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - build-essential \ - gcc \ - g++ \ - libffi-dev \ - libssl-dev \ - git \ - curl \ - && rm -rf /var/lib/apt/lists/* - -# Install uv (fast Python package manager) -COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv - -WORKDIR /app - -# Copy dependency files first for better layer caching -COPY pyproject.toml uv.lock ./ -COPY packages/derisk-core/pyproject.toml packages/derisk-core/pyproject.toml -COPY packages/derisk-ext/pyproject.toml packages/derisk-ext/pyproject.toml -COPY packages/derisk-serve/pyproject.toml packages/derisk-serve/pyproject.toml -COPY packages/derisk-client/pyproject.toml packages/derisk-client/pyproject.toml -COPY packages/derisk-app/pyproject.toml packages/derisk-app/pyproject.toml - -# Copy README files required by pyproject.toml -COPY README.md ./ -COPY packages/derisk-core/README.md packages/derisk-core/README.md -COPY packages/derisk-ext/README.md packages/derisk-ext/README.md -COPY packages/derisk-serve/README.md packages/derisk-serve/README.md -COPY packages/derisk-client/README.md packages/derisk-client/README.md -COPY packages/derisk-app/README.md packages/derisk-app/README.md - -# Copy all source packages -COPY packages/ packages/ - -# Install all dependencies using uv -RUN uv sync --all-packages --frozen \ - --extra "base" \ - --extra "proxy_openai" \ - --extra "rag" \ - --extra "storage_chromadb" \ - 
--extra "client" \ - --extra "ext_base" \ - --no-dev - -# ---------------------------------------------------------- -# Stage 2: Production image -# ---------------------------------------------------------- -FROM python:3.11-slim AS production - -# Install minimal runtime dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - libffi8 \ - libssl3 \ - curl \ - tini \ - && rm -rf /var/lib/apt/lists/* - -# Create non-root user for security -RUN groupadd -r derisk && useradd -r -g derisk -d /app -s /sbin/nologin derisk - -WORKDIR /app - -# Copy virtual environment from builder stage -COPY --from=builder /app/.venv /app/.venv - -# Copy application source code -COPY packages/ packages/ -COPY configs/ configs/ -COPY assets/ assets/ - -# Copy entrypoint script -COPY docker/entrypoint.sh /app/entrypoint.sh -RUN chmod +x /app/entrypoint.sh - -# Create necessary directories with proper permissions -RUN mkdir -p /app/pilot/meta_data /app/pilot/data /app/pilot/message /app/logs \ - && chown -R derisk:derisk /app - -# Set environment variables -ENV PATH="/app/.venv/bin:$PATH" -ENV PYTHONPATH="/app/packages/derisk-app/src:/app/packages/derisk-core/src:/app/packages/derisk-serve/src:/app/packages/derisk-ext/src:/app/packages/derisk-client/src" -ENV PYTHONDONTWRITEBYTECODE=1 -ENV PYTHONUNBUFFERED=1 - -# Expose backend API port -EXPOSE 7777 - -# Health check -HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \ - CMD curl -f http://localhost:7777/api/health || exit 1 - -# Use tini as init system for proper signal handling -ENTRYPOINT ["tini", "--"] - -# Switch to non-root user -USER derisk - -# Default command -CMD ["/app/entrypoint.sh"] diff --git a/docker/Dockerfile.frontend b/docker/Dockerfile.frontend deleted file mode 100644 index 3b695935..00000000 --- a/docker/Dockerfile.frontend +++ /dev/null @@ -1,59 +0,0 @@ -# ============================================================ -# OpenDerisk Frontend Dockerfile -# Multi-stage build: 
Node.js build -> nginx static serving -# ============================================================ - -# ---------------------------------------------------------- -# Stage 1: Build the Next.js static export -# ---------------------------------------------------------- -FROM node:20-alpine AS builder - -WORKDIR /app - -# Install yarn -RUN corepack enable && corepack prepare yarn@stable --activate - -# Copy dependency files first for layer caching -COPY web/package.json web/yarn.lock* web/package-lock.json* ./ - -# Install dependencies -RUN yarn install --frozen-lockfile || yarn install - -# Copy source code -COPY web/ . - -# Set API base URL (build-time variable) -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:7777 -ENV NEXT_PUBLIC_API_BASE_URL=${NEXT_PUBLIC_API_BASE_URL} - -# Build static export -RUN yarn build - -# ---------------------------------------------------------- -# Stage 2: Serve with nginx -# ---------------------------------------------------------- -FROM nginx:1.27-alpine AS production - -# Remove default nginx configuration -RUN rm -f /etc/nginx/conf.d/default.conf - -# Copy custom nginx configuration -COPY docker/nginx/default.conf /etc/nginx/conf.d/default.conf - -# Copy static files from builder stage -COPY --from=builder /app/out /usr/share/nginx/html - -# Create non-root user for nginx -RUN chown -R nginx:nginx /usr/share/nginx/html \ - && chown -R nginx:nginx /var/cache/nginx \ - && chown -R nginx:nginx /var/log/nginx \ - && touch /var/run/nginx.pid \ - && chown -R nginx:nginx /var/run/nginx.pid - -EXPOSE 3000 - -# Health check -HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ - CMD wget -q --spider http://localhost:3000/ || exit 1 - -CMD ["nginx", "-g", "daemon off;"] diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh deleted file mode 100644 index 57822ca6..00000000 --- a/docker/entrypoint.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -set -e - -# ============================================================ -# 
OpenDerisk Backend Entrypoint -# ============================================================ - -echo "==============================================" -echo " OpenDerisk Backend Server Starting..." -echo "==============================================" - -# Default config file -CONFIG_FILE="${DERISK_CONFIG_FILE:-configs/derisk-docker.toml}" - -echo " Config: ${CONFIG_FILE}" -echo " Python: $(python --version)" -echo "==============================================" - -# Run database migrations if using MySQL -if [ "${DB_TYPE}" = "mysql" ]; then - echo "Waiting for MySQL to be ready..." - max_retries=30 - retry_count=0 - while ! python -c " -import pymysql -pymysql.connect( - host='${LOCAL_DB_HOST:-db}', - port=${LOCAL_DB_PORT:-3306}, - user='${LOCAL_DB_USER:-root}', - password='${LOCAL_DB_PASSWORD:-aa123456}', - database='${LOCAL_DB_NAME:-derisk}' -) -print('MySQL connection successful') -" 2>/dev/null; do - retry_count=$((retry_count + 1)) - if [ $retry_count -ge $max_retries ]; then - echo "ERROR: MySQL is not available after ${max_retries} retries. Exiting." - exit 1 - fi - echo " Waiting for MySQL... (${retry_count}/${max_retries})" - sleep 2 - done - echo "MySQL is ready!" -fi - -# Start the backend server -echo "Starting OpenDerisk server..." 
-exec python packages/derisk-app/src/derisk_app/derisk_server.py \ - --config "${CONFIG_FILE}" diff --git a/docker/nginx/default.conf b/docker/nginx/default.conf deleted file mode 100644 index d80e505a..00000000 --- a/docker/nginx/default.conf +++ /dev/null @@ -1,72 +0,0 @@ -server { - listen 3000; - server_name _; - - root /usr/share/nginx/html; - index index.html; - - # Gzip compression - gzip on; - gzip_vary on; - gzip_proxied any; - gzip_comp_level 6; - gzip_types - text/plain - text/css - text/xml - text/javascript - application/json - application/javascript - application/xml - application/rss+xml - image/svg+xml; - - # Security headers - add_header X-Frame-Options "SAMEORIGIN" always; - add_header X-Content-Type-Options "nosniff" always; - add_header X-XSS-Protection "1; mode=block" always; - add_header Referrer-Policy "strict-origin-when-cross-origin" always; - - # Cache static assets aggressively - location /_next/static/ { - expires 1y; - add_header Cache-Control "public, immutable"; - } - - location /static/ { - expires 1y; - add_header Cache-Control "public, immutable"; - } - - # Proxy API requests to backend - location /api/ { - proxy_pass http://backend:7777/api/; - proxy_http_version 1.1; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection "upgrade"; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_read_timeout 300s; - proxy_send_timeout 300s; - proxy_connect_timeout 60s; - proxy_buffering off; - - # SSE support (for streaming responses) - proxy_cache off; - chunked_transfer_encoding on; - } - - # SPA fallback: serve index.html for all non-file routes - location / { - try_files $uri $uri.html $uri/ /index.html; - } - - # Deny access to hidden files - location ~ /\. 
{ - deny all; - access_log off; - log_not_found off; - } -} diff --git a/homebrew/README.md b/homebrew/README.md deleted file mode 100644 index ef7193cd..00000000 --- a/homebrew/README.md +++ /dev/null @@ -1,77 +0,0 @@ -# OpenDerisk Homebrew Tap - -Homebrew formula for OpenDeRisk AI-Native Risk Intelligence Systems. - -## Installation - -### Add the tap - -```bash -brew tap derisk-ai/openderisk -``` - -Or install directly: - -```bash -brew install derisk-ai/openderisk/openderisk -``` - -### Direct install (without tap) - -```bash -brew install --cask https://raw.githubusercontent.com/derisk-ai/OpenDerisk/main/homebrew/openderisk.rb -``` - -## Usage - -```bash -# Start CLI -openderisk - -# Start server -openderisk-server - -# Check version -openderisk --version -``` - -## Requirements - -- macOS 11+ (Big Sur or later) -- Apple Silicon or Intel Mac -- Homebrew 3.0+ - -## Uninstall - -```bash -brew uninstall openderisk -brew untap derisk-ai/openderisk -``` - -## Formula Details - -This formula: -- Installs Python 3.10+ via Homebrew -- Installs uv (Python package manager) -- Sets up all Python dependencies -- Creates `openderisk` and `openderisk-server` commands -- Configures the application in `/opt/homebrew/opt/openderisk` - -## Troubleshooting - -### Reset installation - -```bash -brew reinstall openderisk -``` - -### Check logs - -```bash -brew services logs openderisk -``` - -## Documentation - -- [OpenDerisk GitHub](https://github.com/derisk-ai/OpenDerisk) -- [Homebrew Documentation](https://docs.brew.sh/) diff --git a/homebrew/openderisk.rb b/homebrew/openderisk.rb deleted file mode 100644 index 045e47e7..00000000 --- a/homebrew/openderisk.rb +++ /dev/null @@ -1,98 +0,0 @@ -class Openderisk < Formula - desc "AI-Native Risk Intelligence Systems for application stability" - homepage "https://github.com/derisk-ai/OpenDerisk" - url "https://github.com/derisk-ai/OpenDerisk/archive/refs/tags/v0.2.0.tar.gz" - sha256 "PLACEHOLDER_SHA256" - license "MIT" - - depends_on 
"python@3.10" - depends_on "git" - depends_on "uv" - - resource "uv" do - url "https://github.com/astral-sh/uv/releases/download/0.5.0/uv-aarch64-apple-darwin.tar.gz" - sha256 "PLACEHOLDER_UV_SHA256" - end - - def install - # Install uv if not present - uv_bin = buildpath/"uv" - resource("uv").stage do - bin.install "uv" - bin.install "uvx" - end - - # Create wrapper scripts - (bin/"openderisk").write <<~EOS - #!/bin/bash - export PATH="#{bin}:${PATH}" - cd "#{libexec}" || exit 1 - exec uv run derisk "$@" - EOS - - (bin/"openderisk-server").write <<~EOS - #!/bin/bash - export PATH="#{bin}:${PATH}" - cd "#{libexec}" || exit 1 - exec uv run derisk start webserver "$@" - EOS - - chmod 0755, bin/"openderisk" - chmod 0755, bin/"openderisk-server" - - # Install Python dependencies - system "uv", "sync", "--all-packages", "--frozen", - "--extra", "base", - "--extra", "proxy_openai", - "--extra", "rag", - "--extra", "storage_chromadb", - "--extra", "derisks", - "--extra", "storage_oss2", - "--extra", "client", - "--extra", "ext_base" - - # Copy project files - libexec.install Dir["*"] - - # Create config directory - (etc/"openderisk").mkpath - end - - def post_install - ohai "OpenDerisk installed successfully!" - ohai "Next steps:" - puts " 1. Configure API keys in: #{etc}/openderisk/derisk-proxy-aliyun.toml" - puts " 2. Run: openderisk --help" - puts " 3. Start server: openderisk-server" - puts "" - puts "Documentation: https://github.com/derisk-ai/OpenDerisk" - end - - def caveats - <<~EOS - OpenDerisk Configuration: - ======================== - - 1. Copy the example config and edit your API keys: - cp #{opt_libexec}/configs/derisk-proxy-aliyun.toml ~/.config/openderisk/config.toml - - 2. Edit the config file and add your API keys: - nano ~/.config/openderisk/config.toml - - 3. Start the server: - openderisk-server - - 4. Or use the CLI: - openderisk - - Requirements: - - Python >= 3.10 (installed via Homebrew) - - API key for your LLM provider (DeepSeek, OpenAI, etc.) 
- EOS - end - - test do - system "#{bin}/openderisk", "--version" - system "#{bin}/openderisk-server", "--help" - end -end From fa3719f9521dec7d708638343fb6d79809221bba Mon Sep 17 00:00:00 2001 From: csunny Date: Sat, 28 Feb 2026 10:48:06 +0800 Subject: [PATCH 7/7] docs(readme): add installation section and update documentation - Add Install (recommended) section to README.md, README.ja.md, and README.zh.md - Include curl installation method and configuration instructions - Update package READMEs with proper documentation - Synchronize all language versions of README files --- README.ja.md | 136 +++++++--------- README.md | 152 +++++++----------- README.zh.md | 184 ++++++++++------------ assets/schema/derisk.sql | 2 +- docs/README.md | 53 ++++++- npm/README.md | 8 +- packages/derisk-app/README.md | 45 ++++++ packages/derisk-app/src/derisk_app/app.py | 2 +- packages/derisk-client/README.md | 39 ++++- packages/derisk-core/README.md | 53 ++++++- packages/derisk-ext/README.md | 66 +++++++- packages/derisk-serve/README.md | 51 +++++- web/README.md | 66 +++++--- 13 files changed, 544 insertions(+), 313 deletions(-) diff --git a/README.ja.md b/README.ja.md index 2b153ec5..86a3a939 100644 --- a/README.ja.md +++ b/README.ja.md @@ -1,6 +1,6 @@ ### OpenDeRisk -OpenDeRisk AI-Native Risk Intelligence Systems —— AIネイティブなリスクインテリジェンスシステム。アプリケーションシステムのリスクインテリジェントマネージャーとして、24時間365日の包括的で徹底的な保護を提供します。 +OpenDeRisk は AI ネイティブリスクインテリジェンスシステムです。アプリケーションシステムのリスクインテリジェントマネージャーとして、24 時間 365 日の包括的で徹底的な保護を提供します。

@@ -24,18 +24,18 @@ OpenDeRisk AI-Native Risk Intelligence Systems —— AIネイティブなリス

-[**English**](README.md) | [**简体中文**](README.zh.md) | [**日本語**](README.ja.md) | [**視頻チュートリアル**](https://www.youtube.com/watch?v=1qDIu-Jwdf0) +[**English**](README.md) | [**简体中文**](README.zh.md) | [**日本語**](README.ja.md) | [**動画チュートリアル**](https://www.youtube.com/watch?v=1qDIu-Jwdf0)
### ニュース -- [2025/10] 🔥 OpenDerisk V0.2をリリースしました. [OpenDerisk V0.2 ReleaseNote](./docs/docs/OpenDerisk_v0.2.md) +- [2025/10] 🔥 OpenDerisk V0.2 をリリースしました。[OpenDerisk V0.2 ReleaseNote](./docs/docs/OpenDerisk_v0.2.md) ### 機能特徴 1. **DeepResearch RCA:** ログ、トレース、コードの詳細な分析により、問題の根本原因を迅速に特定します。 2. **可視化された証拠チェーン:** 診断プロセスと証拠チェーンを完全に可視化し、診断を明確にして精度を迅速に判断できます。 -3. **マルチエージェント協調:** SRE-Agent、Code-Agent、ReportAgent、Vis-Agent、Data-Agentの協調作業。 -4. **オープンソースアーキテクチャ:** OpenDeRiskは完全にオープンソースのアーキテクチャで構築されており、関連フレームワークとコードをオープンソースプロジェクトですぐに使用できます。 +3. **マルチエージェント協調:** SRE-Agent、Code-Agent、ReportAgent、Vis-Agent、Data-Agent の協調作業。 +4. **オープンソースアーキテクチャ:** OpenDeRisk は完全にオープンソースのアーキテクチャで構築されており、関連フレームワークとコードをオープンソースプロジェクトですぐに使用できます。

@@ -47,118 +47,97 @@ OpenDeRisk AI-Native Risk Intelligence Systems —— AIネイティブなリス

#### 紹介文書 -- [OpenDerisk DeepWikiドキュメント](https://deepwiki.com/derisk-ai/OpenDerisk) - -このシステムはマルチエージェントアーキテクチャを採用しています。現在、コードは主に緑色でハイライトされた部分を実装しています。アラート認識はMicrosoftのオープンソース[OpenRCAデータセット](https://github.com/microsoft/OpenRCA)に基づいています。データセットのサイズは解凍後約26GBです。このデータセット上で、マルチエージェントの協調により根本原因分析と診断を実現し、Code-Agentが最終分析のために動的にコードを作成します。 +このシステムはマルチエージェントアーキテクチャを採用しています。現在、コードは主にハイライトされた部分を実装しています。アラート認識は Microsoft のオープンソース [OpenRCA データセット](https://github.com/microsoft/OpenRCA) に基づいています。データセットのサイズは解凍後約 26GB です。このデータセット上で、マルチエージェントの協調により根本原因分析と診断を実現し、Code-Agent が最終分析のために動的にコードを作成します。 #### 技術実装 -**データ層:** GitHubから大規模なOpenRCAデータセット(20GB)を取得し、ローカルで解凍して分析用に処理します。 +**データ層:** GitHub から大規模な OpenRCA データセット (20GB) を取得し、ローカルで解凍して分析用に処理します。 -**ロジック層:** マルチエージェントアーキテクチャで、SRE-Agent、Code-Agent、ReportAgent、Vis-Agent、Data-Agentが協調して詳細なDeepResearch RCA(根本原因分析)を実行します。 +**ロジック層:** マルチエージェントアーキテクチャで、SRE-Agent、Code-Agent、ReportAgent、Vis-Agent、Data-Agent が協調して詳細な DeepResearch RCA(根本原因分析)を実行します。 -**可視化層:** Visプロトコルを使用して、全体の処理フローと証拠チェーン、およびマルチロールの協調とスイッチングプロセスを動的にレンダリングします。 +**可視化層:** Vis プロトコルを使用して、全体の処理フローと証拠チェーン、およびマルチロールの協調とスイッチングプロセスを動的にレンダリングします。 -OpenDeRiskのデジタル従業員(エージェント) +OpenDeRisk のデジタル従業員(エージェント)

-### クイックスタート +### インストール(推奨) -uvのインストール +#### curl でのインストール ```shell -curl -LsSf https://astral.sh/uv/install.sh | sh +# 最新バージョンのダウンロードとインストール +curl -fsSL https://raw.githubusercontent.com/derisk-ai/OpenDerisk/main/install.sh | bash ``` -#### パッケージのインストール +#### 設定ファイル +インストール後、システムを設定する必要があります。設定ファイルを作成してください: +`~/.openderisk/derisk-proxy-aliyun.toml` を編集し、API キーを設定してください。 +#### 起動 ``` -uv sync --all-packages --frozen \ ---extra "base" \ ---extra "proxy_openai" \ ---extra "rag" \ ---extra "storage_chromadb" \ ---extra "derisks" \ ---extra "storage_oss2" \ ---extra "client" \ ---extra "ext_base" +openderisk-server ``` -#### 起動 +### ソースからのインストール(開発用) + +uv のインストール(必須): +```shell + +git clone https://github.com/derisk-ai/OpenDerisk.git + +cd OpenDerisk + +# 依存関係のインストール +sh scripts/prepare_release.sh +``` -`derisk-proxy-aliyun.toml`ファイルでAPI_KEYを設定し、次のコマンドを実行して起動します。 +#### サーバーの起動 -> 注意:デフォルトでは、OpenRCAデータセットのTelecomデータセットを使用します。リンクまたは以下のコマンドでダウンロードできます: +`derisk-proxy-aliyun.toml` で API_KEY を設定し、実行: -> gdown https://drive.google.com/uc?id=1cyOKpqyAP4fy-QiJ6a_cKuwR7D46zyVe +> 注意:デフォルトでは、OpenRCA データセットの Telecom データセットを使用します。リンクまたは以下のコマンドでダウンロードできます: +> `gdown https://drive.google.com/uc?id=1cyOKpqyAP4fy-QiJ6a_cKuwR7D46zyVe` -ダウンロード後、データセットを`pilot/datasets/`パスに移動します。 +ダウンロード後、データセットを `pilot/datasets/` パスに移動します。 起動コマンドを実行: -``` +```bash uv run python packages/derisk-app/src/derisk_app/derisk_server.py --config configs/derisk-proxy-aliyun.toml ``` #### ウェブサイトへのアクセス -ブラウザを開いて[`http://localhost:7777`](http://localhost:7777)にアクセス -

- -

+ブラウザを開いて [`http://localhost:7777`](http://localhost:7777) にアクセス + ### 使用方法 -* AI-SRE(OpenRCA) - - 注意:OpenRCAデータセット[Bankデータセット](https://drive.usercontent.google.com/download?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R&export=download&confirm=t&uuid=42621058-41af-45bf-88a6-64c00bfd2f2e)を使用しています - - 次のリンクでデータセットをダウンロードできます: - ``` - gdown https://drive.google.com/uc?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R - ``` - - データセットを`${derisk}/pilot/datasets`パスに配置します。 -* フレームグラフアシスタント - - ローカルアプリケーションサービスプロセスのフレームグラフ(Java/Python)をアシスタントにアップロードして分析と問い合わせを行います。 -* DataExpert - - メトリクス、ログ、トレース、または様々なExcelデータシートをアップロードして対話型分析を行います。 +* **AI-SRE (OpenRCA)** + - 注意: OpenRCA データセットの [Bank データセット](https://drive.usercontent.google.com/download?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R&export=download&confirm=t&uuid=42621058-41af-45bf-88a6-64c00bfd2f2e) を使用しています + - ダウンロード: `gdown https://drive.google.com/uc?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R` + - データセットを `${derisk}/pilot/datasets` パスに配置します +* **フレームグラフアシスタント** + - ローカルアプリケーションサービスプロセスのフレームグラフ (Java/Python) をアシスタントにアップロードして分析を行います +* **DataExpert** + - メトリクス、ログ、トレース、または様々な Excel データシートをアップロードして対話型分析を行います ### 高速開発 -* エージェント開発 - `derisk-ext.agent.agents`配下の実装ロジックを参照してください。 -* ツール開発 - * ローカルツール - * MCP -* その他の開発 - ドキュメント準備中... - -* DeRisk-Skills 発達 +* **エージェント開発** + - `derisk-ext.agent.agents` 配下の実装ロジックを参照してください +* **ツール開発** + * ローカルツール + * MCP (Model Context Protocol) +* **DeRisk-Skills 開発** - [derisk-skills](https://github.com/derisk-ai/derisk_skills) #### 実行結果 -下図に示すように、複数のエージェントが協調して複雑な運用診断タスクを処理するシナリオを示しています。 +下図に示すように、複数のエージェントが協調して複雑な運用診断タスクを処理するシナリオを示しています。

-### ロードマップ -- [x] 0530 V0.1バージョン:ドメイン知識とMCPサービスに基づき、異常認識→自律的意思決定→適応的実行と問題解決を実現。 - - [x] 技術リスクのためのドメイン知識エンジン - - [x] 異常認識→意思決定→実行のための大規模モデル駆動推論エンジン - - [x] 自動トラブルシューティングと修正 - -- [x] 0830 V0.2バージョン - - [x] 技術リスクのためのMCPサービスと管理 - - [x] 知識とMCPツールのカスタムバインディングサポート - - [x] 3つ以上のDevOpsドメインMCPサービスのサポート - -- [ ] 0930 V0.3バージョン - - [ ] 本番環境との統合サポート - - [ ] 本番環境デプロイメントの完全なソリューションを提供し、本番の問題診断をサポート。 - -- [ ] 1230 V0.4バージョン - - [ ] エンドツーエンドAIOpsオンラインAgentic RL - - [ ] エンドツーエンド評価機能 - -### いんよう +### 引用 このリポジトリのコードについては、以下の論文で詳細な紹介をしています。もし、あなたの研究に役立ったと思われる場合は、ぜひ引用してください。 ``` @misc{di2025openderiskindustrialframeworkaidriven, @@ -178,14 +157,15 @@ uv run python packages/derisk-app/src/derisk_app/derisk_server.py --config confi - [MetaGPT](https://github.com/FoundationAgents/MetaGPT) - [OpenRCA](https://github.com/microsoft/OpenRCA) -OpenDeRisk-AIコミュニティは、AIネイティブなリスクインテリジェンスシステムの構築に専念しています。🛡️ 私たちのコミュニティがより良いサービスを提供できることを願い、また皆様が私たちに参加してより良い未来を共に創造することを願っています。🤝 +OpenDeRisk-AI コミュニティは、AI ネイティブなリスクインテリジェンスシステムの構築に専念しています。🛡️ 私たちのコミュニティがより良いサービスを提供できることを願い、また皆様が私たちに参加してより良い未来を共に創造することを願っています。🤝 + [![Star History Chart](https://api.star-history.com/svg?repos=derisk-ai/OpenDerisk&type=Date)](https://star-history.com/#derisk-ai/OpenDerisk) ### コミュニティグループ -DingDingのネットワーキンググループに参加して、他の開発者と経験を共有しましょう! +DingTalk グループに参加して、他の開発者と経験を共有しましょう!
OpenDeRisk-AI 交流群 diff --git a/README.md b/README.md index f57b4367..cff0e6ca 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ ### OpenDeRisk -OpenDeRisk AI-Native Risk Intelligence Systems —— Your application system risk intelligent manager provides 7 * 24-hour comprehensive and in-depth protection. +OpenDeRisk is an AI-Native Risk Intelligence System designed as your application system's intelligent manager, providing 7×24 hour comprehensive and in-depth protection.

@@ -27,149 +27,116 @@ OpenDeRisk AI-Native Risk Intelligence Systems —— Your application system ri

-[**English**](README.md) | [**简体中文**](README.zh.md) | [**日本語**](README.ja.md) | [**视频教程**](https://www.youtube.com/watch?v=1qDIu-Jwdf0) +[**English**](README.md) | [**简体中文**](README.zh.md) | [**日本語**](README.ja.md) | [**Video Tutorial**](https://www.youtube.com/watch?v=1qDIu-Jwdf0)
- -### News -- [2025/10] 🔥 We released OpenDerisk v0.2. [OpenDerisk V0.2 ReleaseNote](./docs/docs/OpenDerisk_v0.2.md) - - ### Features -1. **DeepResearch RCA:** Quickly locate the root cause of issues through in-depth analysis of logs, traces, and code. -2. **Visualized Evidence Chain:** Fully visualize the diagnostic process and evidence chain, making the diagnosis clear and enabling quick judgment of accuracy. -3. **Multi-Agent Collaboration:** Collaboration among SRE-Agent, Code-Agent, ReportAgent, Vis-Agent, and Data-Agent. -4. **Open and Open-Source Architecture:** OpenDeRisk is built with a completely open and open-source architecture, allowing related frameworks and code to be used out of the box in open-source projects. +1. **DeepResearch RCA:** Quickly locate root causes through in-depth analysis of logs, traces, and code. +2. **Visualized Evidence Chain:** Fully visualize diagnostic processes and evidence chains for clear, accurate judgment. +3. **Multi-Agent Collaboration:** SRE-Agent, Code-Agent, ReportAgent, Vis-Agent, and Data-Agent working in coordination. +4. **Open-Source Architecture:** Built with a completely open architecture, enabling framework and code reuse in open-source projects.

-### Architure +### Architecture

#### Introduction - -- [OpenDerisk Documents](https://deepwiki.com/derisk-ai/OpenDerisk) - -- [OpenDerisk DeepWiki](https://deepwiki.com/derisk-ai/OpenDerisk) - - -The system adopts a multi-agent architecture. Currently, the code mainly implements the green-highlighted parts. Alert awareness is based on Microsoft's open-source [OpenRCA dataset](https://github.com/microsoft/OpenRCA). The dataset size is approximately 26GB after decompression. On this dataset, we achieve root cause analysis and diagnosis through multi-agent collaboration, where the Code-Agent dynamically writes code for final analysis. +The system employs a multi-agent architecture. Currently, the code primarily implements the highlighted components. Alert awareness is based on Microsoft's open-source [OpenRCA dataset](https://github.com/microsoft/OpenRCA). The decompressed dataset is approximately 26GB. On this dataset, we achieve root cause analysis through multi-agent collaboration, with Code-Agent dynamically writing code for final analysis. #### Technical Implementation -**Data Layer:** Pull the large-scale OpenRCA dataset (20GB) from GitHub, decompress it locally, and process it for analysis. +**Data Layer:** Pull the large-scale OpenRCA dataset (20GB) from GitHub, decompress locally, and process for analysis. -**Logic Layer:** Multi-agent architecture, with collaboration among SRE-Agent, Code-Agent, ReportAgent, Vis-Agent, and Data-Agent to perform in-depth DeepResearch RCA (Root Cause Analysis). +**Logic Layer:** Multi-agent architecture with SRE-Agent, Code-Agent, ReportAgent, Vis-Agent, and Data-Agent collaborating for deep DeepResearch RCA (Root Cause Analysis). -**Visualization Layer:** Use the Vis protocol to dynamically render the entire processing flow and evidence chain, as well as the process of multi-role collaboration and switching. 
+**Visualization Layer:** Use the Vis protocol to dynamically render the entire processing flow and evidence chain, as well as the multi-role collaboration and switching process. Digital Employees (Agents) in OpenDeRisk

-### Quick Start +### Install (recommended) -Install uv +#### Install via curl ```shell -curl -LsSf https://astral.sh/uv/install.sh | sh +# Download and install latest version +curl -fsSL https://raw.githubusercontent.com/derisk-ai/OpenDerisk/main/install.sh | bash ``` +#### Configuration File +After installation, you need to configure the system. Create a configuration file: +Edit `~/.openderisk/derisk-proxy-aliyun.toml` and set your API keys. -#### Install Packages - +#### Start ``` -uv sync --all-packages --frozen \ ---extra "base" \ ---extra "proxy_openai" \ ---extra "rag" \ ---extra "storage_chromadb" \ ---extra "derisks" \ ---extra "storage_oss2" \ ---extra "client" \ ---extra "ext_base" +openderisk-server ``` -#### Start +### From source(development) -Configure the API_KEY in the `derisk-proxy-aliyun.toml` file, then run the following command to start. +Install uv (required): +```shell +git clone https://github.com/derisk-ai/OpenDerisk.git -> Note: By default, we use the Telecom dataset from the OpenRCA dataset. You can download it via the link or the following command: +cd OpenDerisk + +# Install Dependencies +sh scripts/prepare_release.sh +``` -> gdown https://drive.google.com/uc?id=1cyOKpqyAP4fy-QiJ6a_cKuwR7D46zyVe +#### Start Server -After downloading, move the datasets to the path `pilot/datasets/` +Configure the API_KEY in `derisk-proxy-aliyun.toml`, then run: + +> Note: By default, we use the Telecom dataset from OpenRCA. Download via: +> `gdown https://drive.google.com/uc?id=1cyOKpqyAP4fy-QiJ6a_cKuwR7D46zyVe` + +After downloading, move datasets to `pilot/datasets/` Run the startup command: -``` +```bash uv run python packages/derisk-app/src/derisk_app/derisk_server.py --config configs/derisk-proxy-aliyun.toml ``` -#### Visit Website +#### Access Web UI Open your browser and visit [`http://localhost:7777`](http://localhost:7777)

-### How to Use? -* AI-SRE(OpenRCA) - - !Notice, We Use the OpenRCA Datasets[Bank Dataset](https://drive.usercontent.google.com/download?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R&export=download&confirm=t&uuid=42621058-41af-45bf-88a6-64c00bfd2f2e), - - You can download the dataset using next link: - ``` - gdown https://drive.google.com/uc?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R - ``` - - Put the datasets to the path ${derisk}/pilot/datasets。 -* Flame Graph Assistant - - Upload the flame graph (Java/Python) of your local application service process to the assistant for analysis and inquiries. -* DataExpert - - Upload your metrics, logs, traces, or various Excel data sheets for conversational analysis. - - -### Rapid Development -* Agent Development - Refer to the implementation logic under `derisk-ext.agent.agents`. -* Tool Development - * Local tool - * MCP -* Other Development - Documentation is under preparation... - -* DeRisk-Skills Development +### Usage Modes +* **AI-SRE (OpenRCA)** + - Notice: We use the OpenRCA Dataset [Bank Dataset](https://drive.usercontent.google.com/download?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R&export=download&confirm=t&uuid=42621058-41af-45bf-88a6-64c00bfd2f2e) + - Download: `gdown https://drive.google.com/uc?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R` + - Place datasets in `${derisk}/pilot/datasets` +* **Flame Graph Assistant** + - Upload flame graphs (Java/Python) from your local application for analysis +* **DataExpert** + - Upload metrics, logs, traces, or Excel data for conversational analysis + +### Development +* **Agent Development** + - Refer to implementations under `derisk-ext.agent.agents` +* **Tool Development** + - Skills + - MCP (Model Context Protocol) +* **DeRisk-Skills** - [derisk-skills](https://github.com/derisk-ai/derisk_skills) #### Execution Results -As shown in the figure below, this demonstrates a scenario where multiple agents collaborate to handle a complex operational diagnostic task. -

-### RoadMap -- [x] 0530 V0.1 Version: Based on domain knowledge and MCP services, achieving anomaly awareness -> autonomous decision-making -> adaptive execution and issue resolution. - - [x] Domain knowledge engine for technical risks - - [x] Reasoning engine driven by large models for anomaly awareness -> decision-making -> execution - - [x] Automated troubleshooting and fixes - -- [x] 0830 V0.2 Version - - [x] MCP services and management for technical risks - - [x] Support for custom binding of knowledge and MCP tools - - [x] Support for 3+ DevOps domain MCP services - -- [ ] 0930 V0.3 Version - - [ ] Support for integration with production environments - - [ ] Provide a complete production environment deployment solution, supporting production issue diagnosis. - -- [ ] 1230 V0.4 Version - - [ ] End-to-end AIOps online Agentic RL - - [ ] End-to-end evaluation capabilities - ### Citation -The code (training, serving, and evaluation) in this repository is mostly developed for or derived from the paper below. Please cite it if you find the repository helpful. +If you find this repository helpful, please cite: ``` @misc{di2025openderiskindustrialframeworkaidriven, title={OpenDerisk: An Industrial Framework for AI-Driven SRE, with Design, Implementation, and Case Studies}, @@ -190,14 +157,13 @@ The code (training, serving, and evaluation) in this repository is mostly develo The OpenDeRisk-AI community is dedicated to building AI-native risk intelligence systems. 🛡️ We hope our community can provide you with better services, and we also hope that you can join us to create a better future together. 🤝 - [![Star History Chart](https://api.star-history.com/svg?repos=derisk-ai/OpenDerisk&type=Date)](https://star-history.com/#derisk-ai/OpenDerisk) ### Community Group -Join our networking group on Dingding and share your experience with other developers! +Join our DingTalk group and share your experience with other developers!
- OpenDeRisk-AI 交流群 -
+ OpenDeRisk-AI Community +
\ No newline at end of file diff --git a/README.zh.md b/README.zh.md index 071a7ed6..ec78af23 100644 --- a/README.zh.md +++ b/README.zh.md @@ -1,6 +1,6 @@ ### OpenDeRisk -OpenDeRisk AI 原生风险智能系统 —— 7\*24H 应用系统AI数字运维助手(AI-SRE), 我们的愿景是, 为每个应用系统提供一个7\*24H的AI系统数字管家,并能与真人进行协同,7\*24H处理业务问题,形成7\*24H得深度护航与防护网。 +OpenDeRisk AI 原生风险智能系统 —— 7×24 小时应用系统 AI 数字运维助手 (AI-SRE)。我们的愿景是为每个应用系统提供一个 7×24 小时的 AI 系统数字管家,能够与真人协同工作,7×24 小时处理业务问题,构建深度护航与防护网。

@@ -28,128 +28,113 @@ OpenDeRisk AI 原生风险智能系统 —— 7\*24H 应用系统AI数字运维

-### News -- [2025/10] 🔥 我们发布了OpenDerisk V0.2版本. [OpenDerisk V0.2 ReleaseNote](./docs/docs/OpenDerisk_v0.2.md) +### 最新动态 +- [2025/10] 🔥 我们发布了 OpenDerisk V0.2 版本。[OpenDerisk V0.2 ReleaseNote](./docs/docs/OpenDerisk_v0.2.md) -### 特性 +### 核心特性

-1. DeepResearch RCA: 通过深度分析日志、Trace、代码进行问题根因的快速定位。 -2. 可视化证据链:定位诊断过程与证据链全部可视化展示,诊断过程一目了然,可以快速判断定位的准确性。 -3. 多智能体协同: SRE-Agent、Code-Agent、ReportAgent、Vis-Agent、Data-Agent协同工作。 -4. 架构开源开放: OpenDerisk采用完全开源、开放的方式构建,相关框架、代码在开源项目也能实现开箱即用。 +1. **DeepResearch RCA**: 通过深度分析日志、Trace、代码进行问题根因的快速定位。 +2. **可视化证据链**: 诊断过程与证据链全部可视化展示,诊断过程一目了然,可快速判断定位准确性。 +3. **多智能体协同**: SRE-Agent、Code-Agent、ReportAgent、Vis-Agent、Data-Agent 协同工作。 +4. **开源开放架构**: OpenDerisk 采用完全开源、开放的方式构建,相关框架、代码在开源项目中可开箱即用。 ### 架构方案

-#### 项目文档 -- [OpenDerisk Documents](https://deepwiki.com/derisk-ai/OpenDerisk) - -#### 介绍文档 -- [OpenDerisk DeepWiki文档](https://deepwiki.com/derisk-ai/OpenDerisk) - - -采用多Agent架构,目前代码中主要实现了绿色部分部分,告警感知采用的是微软开源的[OpenRCA数据集](https://github.com/microsoft/OpenRCA), 数据集的大小解压后在26G左右,我们实现在26G的数据集合上,通过多Agent协同,Code-Agent动态写代码来进行最终根因的分析诊断。 +#### 项目介绍 +系统采用多 Agent 架构,目前代码主要实现了高亮部分。告警感知基于微软开源的 [OpenRCA 数据集](https://github.com/microsoft/OpenRCA),数据集解压后约 26GB。在该数据集上,我们通过多 Agent 协同实现根因分析诊断,Code-Agent 动态编写代码进行最终分析。 #### 技术实现 -1. 数据层: 拉取Github OpenRCA的大规模数据集(20G), 解压本地处理分析。 -2. 逻辑层:Multi-Agent架构, 通过SRE-Agent、Code-Agent、ReportAgent、VisAgent、Data-Agent协同合作,进行深度的DeepResearch RCA(Root Cause Analyze) -3. 可视化层: 采用Vis协议、动态渲染整个处理流程与证据链, 以及多角色协同切换的过程。 +1. **数据层**: 从 GitHub 拉取大规模 OpenRCA 数据集 (20GB),本地解压处理分析。 +2. **逻辑层**: Multi-Agent 架构,通过 SRE-Agent、Code-Agent、ReportAgent、VisAgent、Data-Agent 协同合作,进行深度的 DeepResearch RCA (Root Cause Analysis) 根因分析。 +3. **可视化层**: 采用 Vis 协议动态渲染整个处理流程与证据链,以及多角色协同切换的过程。 -4. OpenDeRisk中的数字员工(Agent) +OpenDeRisk 中的数字员工 (Agent)

+### 安装(推荐) + +#### 使用 curl 安装 -#### 快速启动 -##### 1.open-derisk服务启动 - - Install uv - ```sh - curl -LsSf https://astral.sh/uv/install.sh | sh - ``` - - 依赖安装 - ``` - uv sync --all-packages --frozen \ - --extra "base" \ - --extra "proxy_openai" \ - --extra "rag" \ - --extra "storage_chromadb" \ - --extra "derisks" \ - --extra "storage_oss2" \ - --extra "client" \ - --extra "ext_base" - ``` - - 配置启动参数 - ``` - > 配置`derisk-proxy-aliyun.toml`文件中相关的API_KEY, 然后运行下面的命令启动。 - > 也可参考 `derisk-proxy-aliyun.toml` 文件中的配置使用全阿里云模型和oss方案 - > ** 注意 ** 最好在全新的环境下启动0.2版本,不然可能被0.1旧数据影响导致启动失败. - ``` - - 启动服务 - ``` - uv run python packages/derisk-app/src/derisk_app/derisk_server.py --config configs/derisk-proxy-aliyun.toml - ``` - - 服务访问 - > 打开浏览器访问 [`http://localhost:7777`](http://localhost:7777) - - -##### 2.内置场景快速使用 -* AI-SRE(OpenRca根因定位) - - !注意, 我们默认使用OpenRCA数据集中的[Bank数据集](https://drive.usercontent.google.com/download?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R&export=download&confirm=t&uuid=42621058-41af-45bf-88a6-64c00bfd2f2e), - - 你可以通过链接, 或者下述命令进行下载: - ``` - gdown https://drive.google.com/uc?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R - ``` - - 下载完成后, 将数据解压到 ${derisk项目}/pilot/datasets。 -* 火焰图助手 - - 使用你本地应用服务进程的火焰图(java/python)上传给助手提问分析 -* DataExpert - - 上传你的指标、日志、trace等各种Excel表格数据进行对话分析 -##### 3.快速开发 -* Agent开发 - 参考derisk-ext.agent.agents下的实现逻辑 -* 工具开发 - * local tool - * mcp -* 其他开发 - 文档准备中.... 
- -* DeRisk-Skills 开发 +```shell +# 下载并安装最新版本 +curl -fsSL https://raw.githubusercontent.com/derisk-ai/OpenDerisk/main/install.sh | bash +``` + +#### 配置文件 +安装完成后,需要配置系统。创建配置文件: +编辑 `~/.openderisk/derisk-proxy-aliyun.toml` 并设置您的 API 密钥。 + +#### 启动 +``` +openderisk-server +``` + +### 从源码安装(开发环境) + +安装 uv(必需): +```shell + +git clone https://github.com/derisk-ai/OpenDerisk.git + +cd OpenDerisk + +# 安装依赖 +sh scripts/prepare_release.sh +``` + +#### 启动服务 + +在 `derisk-proxy-aliyun.toml` 中配置 API_KEY,然后运行: + +> 注意:默认使用 OpenRCA 的 Telecom 数据集。通过以下链接或命令下载: +> `gdown https://drive.google.com/uc?id=1cyOKpqyAP4fy-QiJ6a_cKuwR7D46zyVe` + +下载后,将数据集移动到 `pilot/datasets/` 目录。 + +运行启动命令: +```bash +uv run python packages/derisk-app/src/derisk_app/derisk_server.py --config configs/derisk-proxy-aliyun.toml +``` + +#### 访问 Web 界面 + +打开浏览器访问 [`http://localhost:7777`](http://localhost:7777) + +##### 2. 内置场景快速使用 +* **AI-SRE (OpenRCA 根因定位)** + - 注意: 默认使用 OpenRCA 数据集中的 [Bank 数据集](https://drive.usercontent.google.com/download?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R&export=download&confirm=t&uuid=42621058-41af-45bf-88a6-64c00bfd2f2e) + - 下载命令: `gdown https://drive.google.com/uc?id=1enBrdPT3wLG94ITGbSOwUFg9fkLR-16R` + - 下载后解压到 `${derisk项目}/pilot/datasets` +* **火焰图助手** + - 上传本地应用服务进程的火焰图 (Java/Python) 进行分析 +* **DataExpert** + - 上传指标、日志、Trace 等各种 Excel 表格数据进行对话分析 + +##### 3. 快速开发 +* **Agent 开发** + - 参考 `derisk-ext.agent.agents` 下的实现逻辑 +* **工具开发** + - Skills + - MCP (Model Context Protocol) +* **DeRisk-Skills 开发** - [derisk-skills](https://github.com/derisk-ai/derisk_skills) #### 运行效果 -如下图所示, 为多智能体协同运行处理一个复杂的运维诊断任务的场景。 +多智能体协同处理复杂运维诊断任务场景:

-### RoadMap -- [x] 0530 V0.1版本,基于领域知识与MCP服务,实现从异动感知 -> 自主决策 -> 自适应执行与问题处理。 - - [x] 技术风险领域知识引擎 - - [x] 基于大模型推理驱动的异动感知 -> 决策 -> 执行推理引擎 - - [x] 自动TroubleShooting与Fix - -- [ ] 0830 V0.2版本 - - [x] 技术风险领域MCP服务与管理 - - [x] 支持自定义绑定知识与MCP工具 - - [x] 支持3+ DevOps领域MCP服务 - -- [ ] 0930 V0.3 - - [ ] 支持对接生产环境 - - [ ] 提供完整的生产环境部署解决方案,支持生产问题诊断。 - -- [ ] 1230 V0.4 - - [ ] 端到端AIOps在线Agentic RL - - [ ] 端到端评测能力 - ### 引用 - -针对此仓库中的代码, 我们通过如下的Paper进行了详细的介绍, 如果发现对你的工作有帮助, 请引用它。 +如对您的工作有帮助,请引用以下论文: ``` @misc{di2025openderiskindustrialframeworkaidriven, title={OpenDerisk: An Industrial Framework for AI-Driven SRE, with Design, Implementation, and Case Studies}, @@ -168,16 +153,15 @@ OpenDeRisk AI 原生风险智能系统 —— 7\*24H 应用系统AI数字运维 - [MetaGPT](https://github.com/FoundationAgents/MetaGPT) - [OpenRCA](https://github.com/microsoft/OpenRCA) -OpenDeRisk-AI 社区致力于构建 AI 原生的风险智能系统。🛡️ 我们希望我们的社区能够为您提供更好的服务,同时也希望您能加入我们,共同创造更美好的未来。🤝 +OpenDeRisk-AI 社区致力于构建 AI 原生的风险智能系统。🛡️ 我们希望社区能够为您提供更好的服务,同时也期待您的加入,共同创造更美好的未来。🤝 [![Star History Chart](https://api.star-history.com/svg?repos=derisk-ai/OpenDerisk&type=Date)](https://star-history.com/#derisk-ai/OpenDerisk) - ### 社区 -加入钉钉群, 与我们一起交流讨论。 +加入钉钉群,与我们一起交流讨论:
OpenDeRisk-AI 交流群 -
+ \ No newline at end of file diff --git a/assets/schema/derisk.sql b/assets/schema/derisk.sql index 0707ccd9..1c7fb0fb 100644 --- a/assets/schema/derisk.sql +++ b/assets/schema/derisk.sql @@ -9,7 +9,7 @@ use derisk; -- MySQL DDL Script for Derisk -- Version: 0.3.0 -- Generated from SQLAlchemy ORM Models --- Generated: 2026-02-27 17:05:23 +-- Generated: 2026-02-28 10:17:17 -- ============================================================ SET NAMES utf8mb4; diff --git a/docs/README.md b/docs/README.md index 0e0fd0cb..b6b0c809 100755 --- a/docs/README.md +++ b/docs/README.md @@ -1,18 +1,55 @@ -# DeRisk documentation +# OpenDerisk Documentation + +Documentation site for OpenDeRisk built with Docusaurus. ## Quick Start -### Install dependencies -- Clone current project firstly! -- Install docusaurus dependencies, generate node_modules folder. +### Prerequisites +- Clone the project first! -``` +### Install Dependencies +```bash yarn install ``` -### launch -``` +### Launch Development Server +```bash yarn start ``` -The default service starts on port `3000`, visit `localhost:3000` +The default service starts on port `3000`. Visit `http://localhost:3000` + +## Building for Production + +```bash +yarn build +``` + +The built static files will be in the `build` directory. + +## Documentation Structure + +``` +docs/ +├── docs/ # Actual documentation content +│ ├── overview.md +│ ├── getting-started.md +│ └── ... +├── sidebars.js # Sidebar configuration +├── docusaurus.config.js # Docusaurus configuration +└── package.json +``` + +## Contributing + +When contributing to the documentation: + +1. Follow the existing markdown style +2. Add appropriate sidebar entries +3. 
Test the changes locally before submitting + +## More Information + +- [OpenDerisk Main Documentation](../README.md) +- [Docusaurus Documentation](https://docusaurus.io/) +- [DeepWiki](https://deepwiki.com/derisk-ai/OpenDerisk) \ No newline at end of file diff --git a/npm/README.md b/npm/README.md index 2fd52737..b8f4d8ae 100644 --- a/npm/README.md +++ b/npm/README.md @@ -8,14 +8,14 @@ NPM wrapper for OpenDeRisk AI-Native Risk Intelligence Systems. npm install -g openderisk ``` -Or use with npx (no installation): +Or use with npx (no installation required): ```bash npx openderisk ``` ## Usage -### CLI +### CLI Commands ```bash # Show help @@ -31,7 +31,7 @@ openderisk-server openderisk --update ``` -### Requirements +## Requirements - Node.js >= 16.0.0 - Python >= 3.10 (will be installed automatically) @@ -53,4 +53,4 @@ OpenDeRisk is an AI-Native Risk Intelligence System that provides: ## License -MIT +MIT \ No newline at end of file diff --git a/packages/derisk-app/README.md b/packages/derisk-app/README.md index 79d40738..b6ad5076 100644 --- a/packages/derisk-app/README.md +++ b/packages/derisk-app/README.md @@ -1,3 +1,48 @@ # derisk-app +Main application package for OpenDeRisk. +## Overview + +`derisk-app` is the main application package that provides the OpenDeRisk server and web interface. It integrates all the core components and extensions to provide a complete AI-SRE solution. + +## Features + +- **Web Server**: FastAPI-based REST API server +- **Web UI**: Next.js based chat interface +- **Static Assets**: Pre-built web assets for deployment + +## Installation + +```bash +uv sync --all-packages --frozen +``` + +## Quick Start + +1. Configure the API_KEY in your config file (e.g., `derisk-proxy-aliyun.toml`) +2. Run the server: +```bash +uv run python packages/derisk-app/src/derisk_app/derisk_server.py --config configs/derisk-proxy-aliyun.toml +``` +3. 
Access the web UI at http://localhost:7777 + +## Project Structure + +``` +packages/derisk-app/ +├── src/derisk_app/ +│ ├── static/web/ # Pre-built web assets +│ ├── derisk_server.py # Main server entry +│ └── ... +└── pyproject.toml +``` + +## Documentation + +- [OpenDerisk Main Documentation](../README.md) +- [DeepWiki](https://deepwiki.com/derisk-ai/OpenDerisk) + +## License + +MIT \ No newline at end of file diff --git a/packages/derisk-app/src/derisk_app/app.py b/packages/derisk-app/src/derisk_app/app.py index 56ff0076..effc1e78 100644 --- a/packages/derisk-app/src/derisk_app/app.py +++ b/packages/derisk-app/src/derisk_app/app.py @@ -49,7 +49,7 @@ def load_config(config_file: str = None) -> ApplicationConfig: if config_file is None: config_file = os.path.join( - DERISK_ROOT_PATH, "configs", "derisk-siliconflow.toml" + DERISK_ROOT_PATH, "configs", "derisk-proxy-aliyun.toml" ) elif not os.path.isabs(config_file): # If config_file is a relative path, make it relative to DERISK_ROOT_PATH diff --git a/packages/derisk-client/README.md b/packages/derisk-client/README.md index af41a6f1..3c28acc6 100644 --- a/packages/derisk-client/README.md +++ b/packages/derisk-client/README.md @@ -1,3 +1,40 @@ # derisk-client -Describe your project here. +Python client library for OpenDeRisk. + +## Overview + +`derisk-client` provides a Python SDK for interacting with OpenDeRisk services. It allows developers to integrate OpenDeRisk's AI-SRE capabilities into their own applications. + +## Features + +- **REST API Client**: Easy-to-use Python client for OpenDeRisk services +- **Async Support**: Full async/await support +- **Type Hints**: Comprehensive type annotations for IDE support + +## Installation + +```bash +pip install derisk-client +``` + +## Usage + +```python +from derisk_client import OpenDeriskClient + +# Create a client +client = OpenDeriskClient(base_url="http://localhost:7777") + +# Use the client +result = await client.analyze_issue(...) 
+``` + +## Documentation + +- [OpenDerisk Main Documentation](../README.md) +- [GitHub Repository](https://github.com/derisk-ai/OpenDerisk) + +## License + +MIT \ No newline at end of file diff --git a/packages/derisk-core/README.md b/packages/derisk-core/README.md index ccb6fd43..8b4546cb 100644 --- a/packages/derisk-core/README.md +++ b/packages/derisk-core/README.md @@ -1,3 +1,54 @@ # derisk-core -Package that contains modules and utilities that can be used across packages and services. \ No newline at end of file +Core package for OpenDeRisk AI-Native Risk Intelligence System. + +## Overview + +`derisk-core` is the foundational package that contains core modules and utilities used across all OpenDeRisk packages and services. It provides the fundamental infrastructure for building AI-driven SRE (Site Reliability Engineering) applications. + +## Features + +- **Multi-Agent Architecture**: Framework for building collaborative AI agents (SRE-Agent, Code-Agent, ReportAgent, Vis-Agent, Data-Agent) +- **ReAct Master Agent**: Advanced reasoning agent with doom loop detection, session compaction, and output truncation +- **Model Proxy**: Support for multiple LLM providers (OpenAI, Anthropic, Azure, etc.) +- **AWEL Operators**: Rich set of operators for building AI workflows +- **Data Processing**: Tools for processing logs, traces, and metrics + +## Installation + +```bash +# From source +uv sync --all-packages --frozen +``` + +## Dependencies + +Key dependencies include: +- `aiohttp` - Async HTTP client +- `pydantic` - Data validation +- `SQLAlchemy` - Database ORM +- `duckdb` - Embedded analytical database +- `uvicorn` - ASGI server + +Optional dependencies: +- `agent` - Agent-related functionality +- `framework` - Full framework features +- `hf` - HuggingFace integration +- `code` - Code execution support + +## Usage + +```python +from derisk import ... 
+ +# Build your AI-SRE application +``` + +## Documentation + +- [OpenDerisk Documents](https://deepwiki.com/derisk-ai/OpenDerisk) +- [GitHub Repository](https://github.com/derisk-ai/OpenDerisk) + +## License + +MIT \ No newline at end of file diff --git a/packages/derisk-ext/README.md b/packages/derisk-ext/README.md index 08144eac..b7016822 100644 --- a/packages/derisk-ext/README.md +++ b/packages/derisk-ext/README.md @@ -1,3 +1,65 @@ -# derisk-integration +# derisk-ext -Package that contains modules and utilities that can be used across packages and services. +Extension package for OpenDeRisk with additional tools and integrations. + +## Overview + +`derisk-ext` provides extended functionality for OpenDeRisk, including: + +- **Sandbox**: Secure code execution environment with isolation +- **RAG**: Retrieval-Augmented Generation support +- **MCP Gateway**: Model Context Protocol gateway +- **Data Sources**: Connectors for various databases +- **Storage**: Support for vector stores (ChromaDB, Milvus, etc.) 
+ +## Features + +### Local Sandbox +- Security isolation using platform-specific mechanisms (sandbox-exec on macOS, prlimit on Linux) +- Real browser automation with Playwright +- Resource limits (memory, CPU, network) + +### RAG Pipeline +- Document processing (PDF, Word, Excel, PPT) +- Text chunking and embedding +- Vector storage and retrieval + +### MCP Services +- MCP Gateway for managing MCP tools +- Pre-built DevOps domain MCP services +- Custom MCP tool binding + +## Installation + +```bash +uv sync --all-packages --frozen --extra "ext_base" +``` + +Additional extras: +- `rag` - RAG functionality +- `storage_chromadb` - ChromaDB storage +- `storage_milvus` - Milvus storage +- `datasource_mysql` - MySQL connector +- `mcp_gateway` - MCP gateway + +## Usage + +```python +from derisk_ext.sandbox.local import LocalSandbox + +# Create a sandbox +sandbox = await LocalSandbox.create(user_id="user", agent="agent") + +# Run code +result = await sandbox.run_code("print('Hello, World!')") +``` + +## Documentation + +- [Local Sandbox Documentation](./src/derisk_ext/sandbox/local/README.md) +- [OpenDerisk Main Documentation](../README.md) +- [DeepWiki](https://deepwiki.com/derisk-ai/OpenDerisk) + +## License + +MIT \ No newline at end of file diff --git a/packages/derisk-serve/README.md b/packages/derisk-serve/README.md index 0578d45b..598fa0e3 100644 --- a/packages/derisk-serve/README.md +++ b/packages/derisk-serve/README.md @@ -1,3 +1,52 @@ # derisk-serve -Describe your project here. +Backend service package for OpenDeRisk. 
+ +## Overview + +`derisk-serve` provides the backend API services for OpenDeRisk, including: + +- **Agent Services**: Multi-agent collaboration and management +- **Conversation Management**: Chat session handling +- **Knowledge Management**: RAG-based knowledge retrieval +- **Data Source Management**: Database and data source connections +- **Skill Management**: Agent skill registration and execution +- **Flow Management**: Workflow orchestration + +## Features + +- **RESTful APIs**: Comprehensive API endpoints for all services +- **Async Support**: Full async/await architecture +- **Database Integration**: Support for MySQL, PostgreSQL, DuckDB +- **Vector Storage**: ChromaDB, Milvus, Weaviate integration + +## Installation + +```bash +uv sync --all-packages --frozen +``` + +## Project Structure + +``` +packages/derisk-serve/ +├── src/derisk_serve/ +│ ├── agent/ # Agent services +│ ├── conversation/ # Conversation management +│ ├── datasource/ # Data source connections +│ ├── derisks/ # Risk management +│ ├── memory/ # Memory services +│ ├── skill/ # Skill management +│ ├── flow/ # Flow orchestration +│ └── ... +└── pyproject.toml +``` + +## Documentation + +- [OpenDerisk Main Documentation](../README.md) +- [DeepWiki](https://deepwiki.com/derisk-ai/OpenDerisk) + +## License + +MIT \ No newline at end of file diff --git a/web/README.md b/web/README.md index 3153525a..67e38260 100644 --- a/web/README.md +++ b/web/README.md @@ -1,33 +1,39 @@ -## 👋 Introduction +# OpenDerisk Web -[DERISK Web] is an Open source Tailwind and Next.js based chat UI for AI and GPT projects. It beautify a lot of markdown labels, such as `table`, `thead`, `th`, `td`, `code`, `h1`, `h2`, `ul`, `li`, `a`, `img`. Also it define some custom labels to adapted to AI-specific scenarios. Such as `plugin running`, `knowledge name`, `Chart view`, and so on. +Web UI for OpenDeRisk AI-Native Risk Intelligence System. 
-## 💪🏻 Getting Started +## Introduction -### Prerequisites +OpenDerisk Web is an open-source Next.js and Tailwind CSS based chat UI for AI and GPT projects. It provides a beautiful markdown rendering for various elements including tables, code blocks, headers, lists, images, and more. It also includes custom components for AI-specific scenarios such as plugin running, knowledge visualization, and chart views. + +## Prerequisites - [Node.js](https://nodejs.org/) >= 18.18 - [npm](https://npmjs.com/) >= 10 - [yarn](https://yarnpkg.com/) >= 1.22 -- Supported OSes: Linux, macOS and Windows +- Supported OSes: Linux, macOS, and Windows -### Installation +## Installation -```sh +```bash # Install dependencies npm install +# or yarn install ``` -### Usage -```sh +## Configuration + +```bash cp .env.template .env ``` -edit the `NEXT_PUBLIC_API_BASE_URL` to the real address +Edit the `NEXT_PUBLIC_API_BASE_URL` to point to your OpenDerisk server address. -```sh -# development model +## Development + +```bash +# Start development server npm run dev # or yarn dev @@ -37,22 +43,36 @@ pnpm dev bun dev ``` -Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. +Open [http://localhost:3000](http://localhost:3000) in your browser. + +## Building for Production -### Packaging static files +```bash +cd web +yarn build +cp -r out/* ../packages/derisk-app/src/derisk_app/static/web/ ``` -1. cd web -2. yarn build +## Adding New Vis Components -3. cp -r out/* ../packages/derisk-app/src/derisk_app/static/web/ -``` +To add a new visual component: + +1. Create a new file in `components/chat-content-components/VisComponents` +2. Implement the component using React and necessary libraries +3. 
Update the `visComponentsRender` object in `config.tsx` to include your new component + +## Tech Stack + +- Next.js +- Tailwind CSS +- React +- TypeScript +## Documentation -### Add New Vis Components +- [OpenDerisk Main Documentation](../README.md) +- [DeepWiki](https://deepwiki.com/derisk-ai/OpenDerisk) -To add a new visual component, follow these steps: +## License -1. Create a new file for your component in the `components/chat-content-components/VisComponents` directory. -2. Implement the component using React and any necessary libraries. -3. Update the `visComponentsRender` object in `config.tsx` to include your new component. +MIT \ No newline at end of file