using System;
using System.Runtime.InteropServices;
using System.Text;

class Model
{
    private IntPtr _modelPtr;
    private IntPtr _libHandle;

    // Callback invoked by the native library once per generated token piece.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    private delegate void HandlerDelegate(string piece);

    // Native entry points exported by the llama2 shared library.
    [DllImport("build/libllama2.so", CallingConvention = CallingConvention.Cdecl)]
    private static extern IntPtr init_model(
        [MarshalAs(UnmanagedType.LPStr)] string checkpointPath,
        [MarshalAs(UnmanagedType.LPStr)] string tokenizerPath,
        int vocabSize,
        float temperature,
        float topp,
        ulong rngSeed
    );

    [DllImport("build/libllama2.so", CallingConvention = CallingConvention.Cdecl)]
    private static extern void free_model(IntPtr model);

    [DllImport("build/libllama2.so", CallingConvention = CallingConvention.Cdecl)]
    private static extern void generate(
        IntPtr model,
        [MarshalAs(UnmanagedType.LPStr)] string prompt,
        int steps,
        HandlerDelegate handler
    );

    public Model(string checkpointPath, string tokenizerPath, int vocabSize, float temperature, float topp, ulong rngSeed)
    {
        // NativeLibrary.Load throws (e.g. DllNotFoundException) if the library cannot be loaded,
        // so there is no need to check the handle against IntPtr.Zero here.
        _libHandle = NativeLibrary.Load("build/libllama2.so");

        _modelPtr = init_model(checkpointPath, tokenizerPath, vocabSize, temperature, topp, rngSeed);
        if (_modelPtr == IntPtr.Zero)
            throw new Exception("Failed to initialize the model");
    }

    public string Generate(string prompt, int steps)
    {
        var pieces = new StringBuilder();

        // Accumulates each token piece streamed back from the native generate() call.
        void Handler(string piece)
        {
            pieces.Append(piece);
        }

        var handlerDelegate = new HandlerDelegate(Handler);

        // The delegate stays alive for the duration of this synchronous call
        // because it is passed as an argument to the P/Invoke.
        generate(_modelPtr, prompt, steps, handlerDelegate);

        return pieces.ToString();
    }

    // Finalizer releases the native model and the library handle if they have not been freed yet.
    ~Model()
    {
        if (_modelPtr != IntPtr.Zero)
        {
            free_model(_modelPtr);
            _modelPtr = IntPtr.Zero;
        }

        if (_libHandle != IntPtr.Zero)
        {
            NativeLibrary.Free(_libHandle);
            _libHandle = IntPtr.Zero;
        }
    }
}

class Program
{
    static void Main()
    {
        Model model = new Model("/home/giles/models/llama2_cu_awg/llama2-7b-awq-q4.bin", "tokenizer.bin", 32000, 0.5f, 0.6f, 1337);
        // Llama 2 chat format: [INST] <<SYS>> system prompt <</SYS>> user message [/INST]
        string result = model.Generate("[INST] <<SYS>>\nYou are a helpful assistant\n<</SYS>>\n\n Hello how are you today [/INST]", 500);
        Console.WriteLine(result);
    }
}
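For reference, the DllImport declarations above imply native exports roughly like the ones sketched below. This is a hypothetical reconstruction from the C# signatures only; the actual header shipped with libllama2.so, its parameter names, and the layout of the model struct may differ, so treat it as a sketch of the interop contract rather than the real interface.

    /* Hypothetical C declarations matching the DllImport signatures above. */
    #include <stdint.h>

    typedef struct Model Model;  /* opaque handle; surfaces on the C# side as IntPtr */

    /* Called once per generated token piece (marshalled as string on the C# side). */
    typedef void (*piece_handler)(const char *piece);

    Model *init_model(const char *checkpoint_path, const char *tokenizer_path,
                      int vocab_size, float temperature, float topp, uint64_t rng_seed);

    void generate(Model *model, const char *prompt, int steps, piece_handler handler);

    void free_model(Model *model);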