Skip to content

Commit 6fb0c32

Browse files
committed
feat: make vcpkg a submodule
Signed-off-by: Gordon Smith <GordonJSmith@gmail.com>
1 parent e1d11f5 commit 6fb0c32

23 files changed

+230
-140
lines changed

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ node_modules/
3030
types/
3131
/wasi-sdk*
3232
/wit-bindgen
33-
/vcpkg
33+
3434
__screenshots__/
3535
.coveralls.yml
3636
*.tsbuildinfo

.gitmodules

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
[submodule "vcpkg"]
2+
path = vcpkg
3+
url = https://github.com/microsoft/vcpkg.git

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,9 @@
77
This repository contains a collection of useful C++ libraries compiled to WASM for (re)use in Node.js, Web Browsers and JavaScript Libraries:
88
- [base91](https://base91.sourceforge.net/) - v0.6.0
99
- [duckdb](https://github.com/duckdb/duckdb) - v1.4.3
10-
- [expat](https://libexpat.github.io/) - v2.7.1
10+
- [expat](https://libexpat.github.io/) - v2.7.3
1111
- [graphviz](https://www.graphviz.org/) - 14.1.0
12-
- [llama.cpp](https://github.com/ggerganov/llama.cpp) - b3718
12+
- [llama.cpp](https://github.com/ggml-org/llama.cpp) - 7146
1313
- [zstd](https://github.com/facebook/zstd) - v1.5.7
1414
- ...more to follow...
1515

package-lock.json

Lines changed: 2 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,10 +36,11 @@
3636
"uninstall-build-deps": "rimraf ./emsdk ./vcpkg ./src-expat ./src-vcpkg ./wasi-sdk ./wit-bindgen",
3737
"uninstall": "rimraf --glob ./node_modules ./packages/**/node_modules ./package-lock.json",
3838
"clean-build": "rimraf --glob .nyc_output bin build coverage output-* tmp vcpkg/vcpkg .nx",
39+
"clean-vcpkg": "rimraf --glob vcpkg/buildtrees vcpkg/packages vcpkg/installed vcpkg/downloads",
3940
"clean-docs": "rimraf .vitepress/dist .vitepress/cache docs/base91 docs/duckdb docs/expat docs/graphviz docs/llama docs/zstd",
4041
"clean-root": "run-p clean-build clean-docs",
4142
"clean": "lerna run clean",
42-
"clean-all": "run-p clean clean-root",
43+
"clean-all": "run-p clean clean-root clean-vcpkg",
4344
"copy-res": "cp ./docs/*.png ./.vitepress/dist",
4445
"build-asm": "asc ./src-asm/index.ts --target release",
4546
"gen-docs": "typedoc",

packages/expat/tests/expat.spec.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ describe("expat", function () {
3636
let expat = await Expat.load();
3737
let v = await expat.version();
3838
expect(v).to.be.a.string;
39-
expect(v).to.equal("expat_2.7.1");
39+
expect(v).to.equal("expat_2.7.3"); // Update README if this changes
4040
console.log("expat version: " + v);
4141
Expat.unload();
4242

packages/llama/CMakeLists.txt

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,16 +29,21 @@ string(REPLACE ";" " " LINK_FLAGS "${EM_LINK_FLAGS}")
2929
include_directories(
3030
${VCPKG_INCLUDE_DIR}
3131
${CMAKE_CURRENT_BINARY_DIR}
32-
${Llama_DIR}/common
32+
${VCPKG_INCLUDE_DIR}/llama-cpp/common
33+
)
34+
35+
# Find the llama-common library (renamed to avoid conflict with graphviz libcommon)
36+
find_library(LLAMA_COMMON_LIBRARY llama-common
37+
REQUIRED
38+
HINTS ${VCPKG_INCLUDE_DIR}/../lib
39+
NO_CMAKE_FIND_ROOT_PATH
3340
)
3441

3542
add_executable(llamalib
3643
src-cpp/embedding.cpp
3744
src-cpp/log_wasm.cpp
3845
src-cpp/util.cpp
3946
src-cpp/stubs.c
40-
${Llama_DIR}/common/common.cpp
41-
${Llama_DIR}/common/sampling.cpp
4247
)
4348

4449
set_target_properties(llamalib PROPERTIES COMPILE_FLAGS "${CPP_FLAGS}")
@@ -47,4 +52,5 @@ set_target_properties(llamalib PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
4752
target_link_libraries(llamalib
4853
PRIVATE llama
4954
PRIVATE ggml::ggml-cpu
55+
PRIVATE ${LLAMA_COMMON_LIBRARY}
5056
)

packages/llama/src-cpp/embedding.cpp

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -243,8 +243,8 @@ namespace embedding
243243
// load the model
244244
auto init = common_init_from_params(params);
245245

246-
llama_model *model = init ? init->model() : nullptr;
247-
llama_context *ctx = init ? init->context() : nullptr;
246+
llama_model *model = init.model.get();
247+
llama_context *ctx = init.context.get();
248248
if (model == NULL)
249249
{
250250
fprintf(stderr, "%s: error: unable to load model\n", __func__);
@@ -501,8 +501,7 @@ namespace embedding
501501
// clean up
502502
llama_batch_free(batch);
503503

504-
// common_init_result handles model/context lifetime
505-
init.reset();
504+
// common_init_result's unique_ptrs handle cleanup automatically
506505
llama_backend_free();
507506

508507
return 0;

packages/llama/src/llama.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
import load, { reset } from "../../../build/packages/llama/llamalib.wasm";
33
import type { MainModule } from "../../../build/packages/llama/llamalib.js";
44
import { MainModuleEx } from "@hpcc-js/wasm-util";
5-
import llamaMeta from "../../../vcpkg-overlays/llama/vcpkg.json" with { type: "json" };
5+
import llamaMeta from "../../../vcpkg-overlays/llama-cpp/vcpkg.json" with { type: "json" };
66

77
// Ref: https://github.com/ggerganov/llama.cpp
88
// Ref: http://facebook.github.io/llama/llama_manual.html
@@ -69,7 +69,7 @@ export class Llama extends MainModuleEx<MainModule> {
6969
* @returns The Llama c++ version
7070
*/
7171
version(): string {
72-
return llamaMeta["version-string"];
72+
return llamaMeta.version;
7373
}
7474

7575
/**

packages/llama/tests/llama.spec.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ describe("llama", () => {
88
const v1 = v;
99
expect(v).to.be.a.string;
1010
expect(v).to.be.not.empty;
11-
expect(v).to.equal("b7445"); // Update README.md with the new version!!!
11+
expect(v).to.equal("7146"); // Update README.md with the new version!!!
1212

1313
llama = await Llama.load();
1414
v = llama.version();

0 commit comments

Comments (0)