diff --git a/Runtime/LLM.cs b/Runtime/LLM.cs
index 544d9727..82d9c905 100644
--- a/Runtime/LLM.cs
+++ b/Runtime/LLM.cs
@@ -45,27 +45,6 @@ public class LLM : MonoBehaviour
     [LLM] public bool debug = false;
     /// number of prompts that can happen in parallel (-1 = number of LLMCharacter objects)
     [LLMAdvanced] public int parallelPrompts = -1;
-    /// allows to start the server asynchronously.
-    /// This is useful to not block Unity while the server is initialised.
-    /// For example it can be used as follows:
-    /// \code
-    /// void Start(){
-    ///     StartCoroutine(Loading());
-    ///     ...
-    /// }
-    ///
-    /// IEnumerator Loading()
-    /// {
-    ///     // show loading screen
-    ///     while (!llm.started)
-    ///     {
-    ///         yield return null;
-    ///     }
-    ///     Debug.Log("Server is ready");
-    /// }
-    /// \endcode
-    ///
-    [LLMAdvanced] public bool asynchronousStartup = true;
     /// select to not destroy the LLM GameObject when loading a new Scene.
     [LLMAdvanced] public bool dontDestroyOnLoad = true;
     /// Size of the prompt context (0 = context size of the model).
@@ -119,8 +98,7 @@ public async void Awake()
         await AndroidSetup();
         string arguments = GetLlamaccpArguments();
         if (arguments == null) return;
-        if (asynchronousStartup) await Task.Run(() => StartLLMServer(arguments));
-        else StartLLMServer(arguments);
+        await Task.Run(() => StartLLMServer(arguments));
         if (dontDestroyOnLoad) DontDestroyOnLoad(transform.root.gameObject);
         if (basePrompt != "") await SetBasePrompt(basePrompt);
     }
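With asynchronousStartup removed, StartLLMServer always runs on a background task in Awake, so callers that need to gate on readiness can keep using the coroutine pattern from the removed doc comment. Below is a minimal sketch of that usage: the LoadingScreen class name and the serialized llm field are placeholders introduced for illustration; only the public `started` flag and the polling loop come from the original documentation.

using System.Collections;
using UnityEngine;

public class LoadingScreen : MonoBehaviour
{
    // Hypothetical reference assigned in the Inspector; points at the LLM component.
    public LLM llm;

    void Start()
    {
        // Startup is now always asynchronous, so poll the `started` flag
        // from a coroutine instead of toggling asynchronousStartup.
        StartCoroutine(WaitForServer());
    }

    IEnumerator WaitForServer()
    {
        // show loading screen here
        while (!llm.started)
        {
            yield return null;
        }
        Debug.Log("Server is ready");
    }
}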