@@ -68,17 +68,20 @@ public class LLM : MonoBehaviour
     [LLMAdvanced] public bool asynchronousStartup = true;
     /// <summary> select to not destroy the LLM GameObject when loading a new Scene. </summary>
     [LLMAdvanced] public bool dontDestroyOnLoad = true;
+    /// <summary> toggle to enable model download on build </summary>
+    [Model] public bool downloadOnBuild = false;
     /// <summary> the path of the model being used (relative to the Assets/StreamingAssets folder).
     /// Models with .gguf format are allowed.</summary>
     [Model] public string model = "";
-    /// <summary> toggle to enable model download on build </summary>
-    [Model] public bool downloadOnBuild = false;
     /// <summary> the URL of the model to use.
     /// Models with .gguf format are allowed.</summary>
     [ModelDownload] public string modelURL = "";
     /// <summary> the path of the LORA model being used (relative to the Assets/StreamingAssets folder).
     /// Models with .bin format are allowed.</summary>
     [ModelAdvanced] public string lora = "";
+    /// <summary> the URL of the LORA to use.
+    /// Models with .bin format are allowed.</summary>
+    [ModelDownloadAdvanced] public string loraURL = "";
     /// <summary> Size of the prompt context (0 = context size of the model).
     /// This is the number of tokens the model can take as input when generating responses. </summary>
     [ModelAdvanced] public int contextSize = 0;
@@ -87,7 +90,7 @@ public class LLM : MonoBehaviour
     /// <summary> a base prompt to use as a base for all LLMCharacter objects </summary>
     [TextArea(5, 10), ChatAdvanced] public string basePrompt = "";
     /// <summary> Boolean set to true if the models have been downloaded, false otherwise. </summary>
-    public bool modelDownloaded { get; protected set; } = false;
+    public bool modelsDownloaded { get; protected set; } = false;
     /// <summary> Boolean set to true if the server has started and is ready to receive requests, false otherwise. </summary>
     public bool started { get; protected set; } = false;
     /// <summary> Boolean set to true if the server has failed to start. </summary>
@@ -96,6 +99,7 @@ public class LLM : MonoBehaviour
     /// \cond HIDE
     public int SelectedModel = 0;
     [HideInInspector] public float modelProgress = 1;
+    [HideInInspector] public float loraProgress = 1;
     [HideInInspector] public float modelCopyProgress = 1;
     [HideInInspector] public bool modelHide = true;
 
@@ -107,12 +111,19 @@ public class LLM : MonoBehaviour
     StreamWrapper logStreamWrapper = null;
     Thread llmThread = null;
     List<StreamWrapper> streamWrappers = new List<StreamWrapper>();
-    List<Callback<float>> progressCallbacks = new List<Callback<float>>();
+    List<Callback<float>> modelProgressCallbacks = new List<Callback<float>>();
+    List<Callback<float>> loraProgressCallbacks = new List<Callback<float>>();
 
     public void SetModelProgress(float progress)
     {
         modelProgress = progress;
-        foreach (Callback<float> progressCallback in progressCallbacks) progressCallback?.Invoke(progress);
+        foreach (Callback<float> modelProgressCallback in modelProgressCallbacks) modelProgressCallback?.Invoke(progress);
+    }
+
+    public void SetLoraProgress(float progress)
+    {
+        loraProgress = progress;
+        foreach (Callback<float> loraProgressCallback in loraProgressCallbacks) loraProgressCallback?.Invoke(progress);
     }
 
     /// \endcond
@@ -152,28 +163,39 @@ public async Task DownloadDefaultModel(int optionIndex)
         await DownloadModel(modelUrl, modelName);
     }
 
-    public async Task DownloadModel(string modelUrl, string modelName, Callback<float> progressCallback = null, bool overwrite = false)
+    public async Task DownloadModel(string modelUrl, string modelName, bool overwrite = false)
     {
         modelProgress = 0;
         string modelPath = LLMUnitySetup.GetAssetPath(modelName);
-        Callback<float> callback = (floatArg) =>
-        {
-            progressCallback?.Invoke(floatArg);
-            SetModelProgress(floatArg);
-        };
-        await LLMUnitySetup.DownloadFile(modelUrl, modelPath, overwrite, SetModel, callback);
+        await LLMUnitySetup.DownloadFile(modelUrl, modelPath, overwrite, SetModel, SetModelProgress);
+    }
+
+    public async Task DownloadLora(string loraUrl, string loraName, bool overwrite = false)
+    {
+        loraProgress = 0;
+        string loraPath = LLMUnitySetup.GetAssetPath(loraName);
+        await LLMUnitySetup.DownloadFile(loraUrl, loraPath, overwrite, SetLora, SetLoraProgress);
+    }
+
+    public async Task DownloadModels()
+    {
+        if (modelURL != "") await DownloadModel(modelURL, model);
+        if (loraURL != "") await DownloadLora(loraURL, lora);
     }
 
-    public async Task DownloadModel()
+    public async Task AndroidExtractModels()
     {
-        await DownloadModel(modelURL, model);
+        if (!downloadOnBuild || modelURL == "") await LLMUnitySetup.AndroidExtractFile(model);
+        if (!downloadOnBuild || loraURL == "") await LLMUnitySetup.AndroidExtractFile(lora);
     }
 
-    public async Task WaitUntilModelDownloaded(Callback<float> progressCallback = null)
+    public async Task WaitUntilModelsDownloaded(Callback<float> modelProgressCallback = null, Callback<float> loraProgressCallback = null)
     {
-        if (progressCallback != null) progressCallbacks.Add(progressCallback);
-        while (!modelDownloaded) await Task.Yield();
-        if (progressCallback != null) progressCallbacks.Remove(progressCallback);
+        if (modelProgressCallback != null) modelProgressCallbacks.Add(modelProgressCallback);
+        if (loraProgressCallback != null) loraProgressCallbacks.Add(loraProgressCallback);
+        while (!modelsDownloaded) await Task.Yield();
+        if (modelProgressCallback != null) modelProgressCallbacks.Remove(modelProgressCallback);
+        if (loraProgressCallback != null) loraProgressCallbacks.Remove(loraProgressCallback);
     }
 
     public async Task WaitUntilReady()
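Callers that previously passed a single progressCallback to WaitUntilModelDownloaded now pass one callback per download. A minimal consumer sketch, assuming an LLM component in the scene and two UnityEngine.UI sliders; ModelDownloadUI, modelBar and loraBar are illustrative names, not part of this change:

using LLMUnity;
using UnityEngine;
using UnityEngine.UI;

public class ModelDownloadUI : MonoBehaviour
{
    public LLM llm;          // the LLM component in the scene
    public Slider modelBar;  // hypothetical progress bar for the model download
    public Slider loraBar;   // hypothetical progress bar for the LORA download

    async void Start()
    {
        // One callback per download; each receives a 0..1 progress value.
        await llm.WaitUntilModelsDownloaded(
            progress => modelBar.value = progress,
            progress => loraBar.value = progress
        );
        // Downloads are done here; the server may still be starting.
        await llm.WaitUntilReady();
        Debug.Log("LLM ready");
    }
}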
@@ -274,8 +296,9 @@ protected virtual string GetLlamaccpArguments()
     public async void Awake()
     {
         if (!enabled) return;
-        if (downloadOnBuild) await DownloadModel();
-        modelDownloaded = true;
+        if (downloadOnBuild) await DownloadModels();
+        modelsDownloaded = true;
+        if (Application.platform == RuntimePlatform.Android) await AndroidExtractModels();
         string arguments = GetLlamaccpArguments();
         if (arguments == null) return;
         if (asynchronousStartup) await Task.Run(() => StartLLMServer(arguments));
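Besides the callback route, the public modelProgress and loraProgress fields can be polled directly, for example from Update(). Per this change both fields default to 1 and are reset to 0 when the corresponding download starts, so values below 1 mean a download is in flight. A minimal sketch under the same assumptions; DownloadProgressLabel and label are illustrative names:

using LLMUnity;
using UnityEngine;
using UnityEngine.UI;

public class DownloadProgressLabel : MonoBehaviour
{
    public LLM llm;     // the LLM component in the scene
    public Text label;  // hypothetical label for combined progress

    void Update()
    {
        // Report only while a download is actually running.
        if (llm.modelProgress < 1 || llm.loraProgress < 1)
            label.text = $"model {llm.modelProgress:P0}  lora {llm.loraProgress:P0}";
    }
}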