Skip to content

Commit d493015

Browse files
chore: replace cloud package with llama-cloud-services (#2145)
Co-authored-by: Thuc Pham <thuc@lingble.com>
1 parent f648bb7 commit d493015

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

69 files changed

+115
-40908
lines changed

.changeset/shiny-olives-arrive.md

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,5 @@
1+
---
2+
"llamaindex": minor
3+
---
4+
5+
remove export cloud package from llamaindex

.github/workflows/test.yml

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -122,7 +122,6 @@ jobs:
122122
- nextjs-edge-runtime
123123
- nextjs-node-runtime
124124
- waku-query-engine
125-
- llama-parse-browser
126125
- vite-import-llamaindex
127126
runs-on: ubuntu-latest
128127
name: Build LlamaIndex Example (${{ matrix.packages }})

apps/next/package.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -16,7 +16,7 @@
1616
"@huggingface/transformers": "^3.5.0",
1717
"@icons-pack/react-simple-icons": "^10.1.0",
1818
"@llamaindex/chat-ui-docs": "^0.1.0",
19-
"@llamaindex/cloud": "workspace:*",
19+
"llama-cloud-services": "^0.3.5",
2020
"@llamaindex/core": "workspace:*",
2121
"@llamaindex/node-parser": "workspace:*",
2222
"@llamaindex/openai": "workspace:*",

apps/next/scripts/update-llamacloud.mts

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
1-
import { upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut } from "@llamaindex/cloud/api";
21
import fg from "fast-glob";
32
import { fileGenerator, remarkDocGen, remarkInstall } from "fumadocs-docgen";
43
import { remarkAutoTypeTable } from "fumadocs-typescript";
54
import matter from "gray-matter";
5+
import { upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut } from "llama-cloud-services/api";
66
import * as fs from "node:fs/promises";
77
import path, { relative } from "node:path";
88
import { fileURLToPath } from "node:url";

apps/next/src/actions/index.tsx

Lines changed: 0 additions & 109 deletions
This file was deleted.

apps/next/src/app/layout.tsx

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,3 @@
1-
import { AIProvider } from "@/actions";
21
import { TooltipProvider } from "@/components/ui/tooltip";
32
import { GoogleAnalytics, GoogleTagManager } from "@next/third-parties/google";
43
import { RootProvider } from "fumadocs-ui/provider";
@@ -39,9 +38,7 @@ export default function Layout({ children }: { children: ReactNode }) {
3938
<GoogleTagManager gtmId="GTM-WWRFB36R" />
4039
<body className="flex min-h-screen flex-col">
4140
<TooltipProvider>
42-
<AIProvider>
43-
<RootProvider>{children}</RootProvider>
44-
</AIProvider>
41+
<RootProvider>{children}</RootProvider>
4542
</TooltipProvider>
4643
</body>
4744
<GoogleAnalytics gaId="G-NB9B8LW9W5" />

apps/next/src/components/ai-chat.tsx

Lines changed: 0 additions & 143 deletions
This file was deleted.

apps/next/src/content/docs/llamaindex/integration/vercel.mdx

Lines changed: 3 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -38,7 +38,8 @@ Here's how to create a simple vector store index and query it using Vercel's AI
3838
import { openai } from "@ai-sdk/openai";
3939
import { llamaindex } from "@llamaindex/vercel";
4040
import { streamText } from "ai";
41-
import { Document, VectorStoreIndex } from "llamaindex";
41+
import { Document } from "llamaindex";
42+
import { LlamaCloudIndex } from "llama-cloud-services";
4243

4344
// Create an index from your documents
4445
const document = new Document({ text: yourText, id_: "unique-id" });
@@ -69,7 +70,7 @@ streamText({
6970
For production deployments, you can use LlamaCloud to store and manage your documents:
7071

7172
```typescript
72-
import { LlamaCloudIndex } from "@llamaindex/cloud";
73+
import { LlamaCloudIndex } from "llama-cloud-services";
7374

7475
// Create a LlamaCloud index
7576
const index = await LlamaCloudIndex.fromDocuments({

apps/next/src/content/docs/llamaindex/modules/data/data_index/managed.mdx

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -28,5 +28,4 @@ Here's an example of how to use a managed index together with a chat engine:
2828

2929
## API Reference
3030

31-
- [LlamaCloudIndex](/docs/api/classes/LlamaCloudIndex)
32-
- [LlamaCloudRetriever](/docs/api/classes/LlamaCloudRetriever)
31+
- [LlamaCloud Documentation](https://docs.cloud.llamaindex.ai/)

apps/next/src/content/docs/llamaindex/modules/data/readers/index.mdx

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -78,7 +78,7 @@ As the `PDFReader` is not working with the Edge runtime, here's how to use the `
7878

7979
```typescript
8080
import { SimpleDirectoryReader } from "@llamaindex/readers/directory";
81-
import { LlamaParseReader } from "@llamaindex/cloud";
81+
import { LlamaParseReader } from "llama-cloud-services";
8282

8383
export const DATA_DIR = "./data";
8484

0 commit comments

Comments (0)