
Commit ddc8b04

remove support for LLM base prompt

1 parent 75cc269

2 files changed: +0 -17 lines changed

README.md (-7)

@@ -553,13 +553,6 @@ If the user's GPU is not supported, the LLM will fall back to the CPU
 
 </details>
 
-#### 🗨️ Chat Settings
-- <details><summary>Advanced options</summary>
-
-- `Base Prompt` a common base prompt to use across all LLMCharacter objects using the LLM
-
-</details>
-
 ### LLMCharacter Settings
 
 - `Show/Hide Advanced Options` Toggle to show/hide advanced options from below

Runtime/LLM.cs (-10)

@@ -40,8 +40,6 @@ public class LLM : MonoBehaviour
 [DynamicRange("minContextLength", "maxContextLength", false), Model] public int contextSize = 8192;
 /// <summary> Batch size for prompt processing. </summary>
 [ModelAdvanced] public int batchSize = 512;
-/// <summary> a base prompt to use as a base for all LLMCaller objects </summary>
-[TextArea(5, 10), ChatAdvanced] public string basePrompt = "";
 /// <summary> Boolean set to true if the server has started and is ready to receive requests, false otherwise. </summary>
 public bool started { get; protected set; } = false;
 /// <summary> Boolean set to true if the server has failed to start. </summary>
@@ -132,7 +130,6 @@ public async void Awake()
 await Task.Run(() => StartLLMServer(arguments));
 if (!started) return;
 if (dontDestroyOnLoad) DontDestroyOnLoad(transform.root.gameObject);
-if (basePrompt != "") await SetBasePrompt(basePrompt);
 }
 
 /// <summary>
@@ -778,13 +775,6 @@ public async Task<string> Completion(string json, Callback<string> streamCallbac
 return result;
 }
 
-public async Task SetBasePrompt(string base_prompt)
-{
-    AssertStarted();
-    SystemPromptRequest request = new SystemPromptRequest() { system_prompt = base_prompt, prompt = " ", n_predict = 0 };
-    await Completion(JsonUtility.ToJson(request));
-}
-
 /// <summary>
 /// Allows to cancel the requests in a specific slot of the LLM
 /// </summary>
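Callers that depended on the removed `SetBasePrompt` can reproduce its behavior through `Completion(string json)`, which the last hunk shows remains public. A minimal sketch, assuming `SystemPromptRequest` stays accessible after this commit (the diff does not show it being removed):

```csharp
using System.Threading.Tasks;
using UnityEngine;
using LLMUnity;  // assumed package namespace

public static class BasePromptCompat
{
    // Mirrors the deleted SetBasePrompt body: submit the prompt as a system
    // prompt with no tokens generated (n_predict = 0). The original also
    // called AssertStarted(); callers should await LLM startup themselves.
    public static async Task ApplyBasePrompt(LLM llm, string basePrompt)
    {
        // Assumes SystemPromptRequest survives this commit (an assumption).
        SystemPromptRequest request = new SystemPromptRequest()
        {
            system_prompt = basePrompt,
            prompt = " ",
            n_predict = 0
        };
        await llm.Completion(JsonUtility.ToJson(request));
    }
}
```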
