This repository has been archived by the owner on Jan 31, 2024. It is now read-only.

Add some more code comments
saddam213 committed Sep 8, 2023
1 parent ba39bdd commit fbce228
Showing 13 changed files with 139 additions and 9 deletions.
20 changes: 19 additions & 1 deletion LLamaStack.Core/Common/ExecutorType.cs
@@ -1,9 +1,27 @@
namespace LLamaStack.Core.Common

namespace LLamaStack.Core.Common
{
/// <summary>
/// LLamaSharp executor type
/// </summary>
public enum ExecutorType
{

/// <summary>
/// The interactive executor, for more personal, chatbot-style interaction
/// </summary>
Interactive = 0,


/// <summary>
/// The instruct executor, good for instruction/response style interaction
/// </summary>
Instruct = 1,


/// <summary>
/// The stateless executor, holds no state or context during interaction
/// </summary>
Stateless = 2
}
}
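
A minimal usage sketch of the enum above: the helper class and description strings are illustrative assumptions; only the ExecutorType values come from the diff.

using System;
using LLamaStack.Core.Common;

public static class ExecutorTypeExample
{
    // Illustrative only: maps each documented ExecutorType value to a short summary.
    public static string Describe(ExecutorType type) => type switch
    {
        ExecutorType.Interactive => "chatbot-style interaction, conversational state is kept",
        ExecutorType.Instruct => "single instruction/response exchanges",
        ExecutorType.Stateless => "no state or context carried between interactions",
        _ => throw new ArgumentOutOfRangeException(nameof(type))
    };
}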
20 changes: 20 additions & 0 deletions LLamaStack.Core/Common/ModelLoadType.cs
@@ -1,10 +1,30 @@
namespace LLamaStack.Core.Common
{
/// <summary>
/// The type of model load caching to use
/// </summary>
public enum ModelLoadType
{

/// <summary>
/// Only one model will be loaded into memory at a time; any other models will be unloaded before the new one is loaded
/// </summary>
Single = 0,

/// <summary>
/// Multiple models will be loaded into memory; ensure you use the ModelConfigs to split the hardware resources
/// </summary>
Multiple = 1,

/// <summary>
/// The first model in the appsettings.json list will be preloaded into memory at app startup
/// </summary>
PreloadSingle = 2,


/// <summary>
/// All models in the appsettings.json list will be preloaded into memory at app startup; ensure you use the ModelConfigs to split the hardware resources
/// </summary>
PreloadMultiple = 3,
}
}
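
A sketch of the behaviour documented above, based only on the enum's own comments; the LoadBehaviour record and helper method are hypothetical, not part of LLamaStack.

using System;
using LLamaStack.Core.Common;

public static class ModelLoadTypeExample
{
    // Illustrative record capturing the documented semantics of each value.
    public record LoadBehaviour(bool PreloadAtStartup, bool UnloadOthersFirst);

    public static LoadBehaviour GetBehaviour(ModelLoadType loadType) => loadType switch
    {
        ModelLoadType.Single => new LoadBehaviour(false, true),
        ModelLoadType.Multiple => new LoadBehaviour(false, false),
        ModelLoadType.PreloadSingle => new LoadBehaviour(true, true),
        ModelLoadType.PreloadMultiple => new LoadBehaviour(true, false),
        _ => throw new ArgumentOutOfRangeException(nameof(loadType))
    };
}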
6 changes: 5 additions & 1 deletion LLamaStack.Core/Common/SamplerType.cs
@@ -1,5 +1,9 @@
namespace LLamaStack.Core.Common

namespace LLamaStack.Core.Common
{
/// <summary>
/// The type of token sampling algorithm to use
/// </summary>
public enum SamplerType
{
/// <summary>
6 changes: 5 additions & 1 deletion LLamaStack.Core/Extensions/Extensions.cs
@@ -9,6 +9,11 @@ namespace LLamaStack.Core.Extensions
{
public static class Extensions
{

/// <summary>
/// Converts an IModelConfig to IModelParams.
/// </summary>
/// <param name="modelConfig">The model configuration.</param>
public static IModelParams ToModelParams(this IModelConfig modelConfig)
{
return new ModelParams(modelConfig.ModelPath)
@@ -72,7 +77,6 @@ public static InferenceParams ToInferenceParams(this IInferenceConfig inferenceC
/// Converts SamplerType to MirostatType.
/// </summary>
/// <param name="samplerType">Type of the sampler.</param>
/// <returns></returns>
public static MirostatType ToMirostatType(this SamplerType samplerType)
{
return samplerType switch
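
A hedged usage sketch of the extension methods named in this file. The namespaces for IModelConfig and IInferenceConfig are assumed, and the config instances would normally be bound from appsettings.json.

using LLamaStack.Core.Common;
using LLamaStack.Core.Config;      // assumed namespace for IModelConfig / IInferenceConfig
using LLamaStack.Core.Extensions;

public static class ExtensionsUsageExample
{
    public static void ConvertConfigs(IModelConfig modelConfig, IInferenceConfig inferenceConfig, SamplerType samplerType)
    {
        var modelParams = modelConfig.ToModelParams();             // LLamaSharp IModelParams
        var inferenceParams = inferenceConfig.ToInferenceParams(); // LLamaSharp InferenceParams
        var mirostat = samplerType.ToMirostatType();               // SamplerType -> MirostatType
    }
}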
40 changes: 40 additions & 0 deletions LLamaStack.Core/LLamaStackContext.cs
@@ -2,39 +2,79 @@

namespace LLamaStack.Core
{
/// <summary>
/// Wrapper class for LLamaSharp LLamaContext
/// </summary>
/// <seealso cref="System.IDisposable" />
public class LLamaStackContext : IDisposable
{
private readonly LLamaContext _context;

/// <summary>
/// Initializes a new instance of the <see cref="LLamaStackContext"/> class.
/// </summary>
/// <param name="context">The context.</param>
public LLamaStackContext(LLamaContext context)
{
_context = context;
}


/// <summary>
/// Gets the LLamaSharp context.
/// </summary>
public LLamaContext LLamaContext => _context;


/// <summary>
/// Gets the size of the context.
/// </summary>
public int ContextSize => _context.ContextSize;


/// <summary>
/// Loads the state.
/// </summary>
/// <param name="filename">The filename.</param>
public void LoadState(string filename)
{
_context.LoadState(filename);
}


/// <summary>
/// Loads the state asynchronously.
/// </summary>
/// <param name="filename">The filename.</param>
public async Task LoadStateAsync(string filename)
{
await Task.Run(() => LoadState(filename));
}


/// <summary>
/// Saves the state.
/// </summary>
/// <param name="filename">The filename.</param>
public void SaveState(string filename)
{
_context.SaveState(filename);
}


/// <summary>
/// Saves the state asynchronously.
/// </summary>
/// <param name="filename">The filename.</param>
public async Task SaveStateAsync(string filename)
{
await Task.Run(() => SaveState(filename));
}


/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
_context?.Dispose();
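
A rough sketch of how the wrapper might be used, assuming an already-created LLamaSharp LLamaContext instance; the file name is illustrative.

using System;
using System.Threading.Tasks;
using LLama;
using LLamaStack.Core;

public static class ContextStateExample
{
    public static async Task RoundTripStateAsync(LLamaContext llamaContext)
    {
        // Disposing the wrapper also disposes the wrapped LLamaContext.
        using var context = new LLamaStackContext(llamaContext);

        Console.WriteLine($"Context size: {context.ContextSize}");

        await context.SaveStateAsync("session.state");   // persist the current state
        await context.LoadStateAsync("session.state");   // restore it later
    }
}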
4 changes: 4 additions & 0 deletions LLamaStack.Core/LLamaStackModel.cs
@@ -17,6 +17,10 @@ public class LLamaStackModel<T> : IDisposable
private readonly LLamaWeights _weights;
private readonly ConcurrentDictionary<T, LLamaStackContext> _contexts;

/// <summary>
/// Initializes a new instance of the <see cref="LLamaStackModel{T}"/> class.
/// </summary>
/// <param name="modelParams">The model parameters.</param>
public LLamaStackModel(ModelConfig modelParams)
{
_config = modelParams;
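
A minimal construction sketch, assuming ModelConfig lives in LLamaStack.Core.Config and that Guid is used as the context key; neither is confirmed by this diff.

using System;
using LLamaStack.Core;
using LLamaStack.Core.Config;   // assumed namespace for ModelConfig

public static class ModelExample
{
    public static void Create(ModelConfig modelConfig)
    {
        // The wrapper is IDisposable, so release it when the model is no longer needed.
        using var model = new LLamaStackModel<Guid>(modelConfig);
        // Per-context LLamaStackContext instances are tracked internally, keyed by Guid.
    }
}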
3 changes: 3 additions & 0 deletions LLamaStack.Core/Registration.cs
@@ -4,6 +4,9 @@

namespace LLamaStack.Core
{
/// <summary>
/// .NET Core service and dependency injection registration helpers
/// </summary>
public static class Registration
{

4 changes: 4 additions & 0 deletions LLamaStack.Core/Services/IModelSessionService.cs
@@ -3,6 +3,10 @@

namespace LLamaStack.Core.Services
{
/// <summary>
/// Service for interacting with ModelSessions
/// </summary>
/// <typeparam name="T">Type used to identify contexts</typeparam>
public interface IModelSessionService<T> where T : IEquatable<T>, IComparable<T>
{

36 changes: 36 additions & 0 deletions LLamaStack.Core/Services/IModelSessionStateService.cs
@@ -2,12 +2,48 @@

namespace LLamaStack.Core.Services
{
/// <summary>
/// Service for handling loading and saving of a ModelSession's state
/// </summary>
/// <typeparam name="T">Type used to identify contexts</typeparam>
public interface IModelSessionStateService<T> where T : IEquatable<T>, IComparable<T>
{

/// <summary>
/// Gets the ModelSessionState with the specified identifier.
/// </summary>
/// <param name="sessionId">The session identifier.</param>
Task<ModelSessionState<T>> GetAsync(T sessionId);


/// <summary>
/// Gets all ModelSessionStates.
/// </summary>
/// <returns>All stored ModelSessionState instances.</returns>
Task<IEnumerable<ModelSessionState<T>>> GetAllAsync();


/// <summary>
/// Removes the ModelSessionState with the specified identifier.
/// </summary>
/// <param name="sessionId">The session identifier.</param>
Task<bool> RemoveAsync(T sessionId);


/// <summary>
/// Loads the ModelSessionState with the specified identifier.
/// </summary>
/// <param name="sessionId">The session identifier.</param>
/// <param name="cancellationToken">The cancellation token.</param>
Task<ModelSessionState<T>> LoadAsync(T sessionId, CancellationToken cancellationToken = default);


/// <summary>
/// Saves the specified ModelSessionState.
/// </summary>
/// <param name="sessionId">The session identifier.</param>
/// <param name="session">The session.</param>
/// <param name="cancellationToken">The cancellation token.</param>
Task<ModelSessionState<T>> SaveAsync(T sessionId, ModelSession<T> session, CancellationToken cancellationToken = default);
}
}
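
A usage sketch of the interface above: the service would normally be resolved through dependency injection, and the LLamaStack.Core.Models namespace for ModelSession<T>/ModelSessionState<T> is an assumption; only the interface members come from the diff.

using System;
using System.Threading;
using System.Threading.Tasks;
using LLamaStack.Core.Models;     // assumed namespace for ModelSession<T> / ModelSessionState<T>
using LLamaStack.Core.Services;

public class SessionStateExample
{
    private readonly IModelSessionStateService<Guid> _stateService;

    // The service instance would normally be supplied by dependency injection.
    public SessionStateExample(IModelSessionStateService<Guid> stateService) => _stateService = stateService;

    public async Task PersistAsync(Guid sessionId, ModelSession<Guid> session, CancellationToken token = default)
    {
        // Save the live session, then read the stored state back.
        ModelSessionState<Guid> saved = await _stateService.SaveAsync(sessionId, session, token);
        ModelSessionState<Guid> loaded = await _stateService.LoadAsync(sessionId, token);

        // Enumerate everything persisted so far, then remove this entry.
        var all = await _stateService.GetAllAsync();
        bool removed = await _stateService.RemoveAsync(sessionId);
    }
}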
3 changes: 2 additions & 1 deletion LLamaStack.Core/Services/ModelSessionStateService.cs
@@ -47,8 +47,9 @@ public ModelSessionStateService(ILogger<ModelSessionStateService<T>> logger, LLa
}



/// <summary>
/// Gets ModleSessionState by id.
/// Gets the ModelSessionState with the specified identifier.
/// </summary>
/// <param name="sessionId">The session identifier.</param>
/// <returns></returns>
1 change: 0 additions & 1 deletion LLamaStack.WPF/App.xaml.cs
@@ -8,7 +8,6 @@
using System;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Threading;

namespace LLamaStack.WPF
{
2 changes: 0 additions & 2 deletions LLamaStack.WPF/WindowLogger.cs
@@ -2,8 +2,6 @@
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Logging;
using System;
using System.Windows.Threading;
using static LLama.Common.ChatHistory;

namespace LLamaStack.WPF
{
3 changes: 1 addition & 2 deletions LLamaStack.WebApi/Models/InferRequestBase.cs
@@ -1,5 +1,4 @@
using LLama.Common;
using LLamaStack.Core.Common;
using LLamaStack.Core.Common;
using LLamaStack.Core.Config;
using LLamaStack.Core.Models;
using System.ComponentModel;
