diff --git a/MCP-TOOLBOX-EXTENSION.md b/MCP-TOOLBOX-EXTENSION.md index 1df0a87bce82..f78cdcf95234 100644 --- a/MCP-TOOLBOX-EXTENSION.md +++ b/MCP-TOOLBOX-EXTENSION.md @@ -166,16 +166,16 @@ instances, often by analyzing metrics and logs. ### Other Google Cloud Data Services -#### Dataplex +#### Knowledge Catalog (formerly known as Dataplex) -* For interacting with Dataplex data lakes and assets: +* For interacting with Knowledge Catalog data lakes and assets: ```bash - gemini extensions install https://github.com/gemini-cli-extensions/dataplex + gemini extensions install https://github.com/gemini-cli-extensions/knowledge-catalog ``` Configuration: - https://github.com/gemini-cli-extensions/dataplex/tree/main?tab=readme-ov-file#configuration + https://github.com/gemini-cli-extensions/knowledge-catalog/tree/main?tab=readme-ov-file#configuration #### Looker diff --git a/README.md b/README.md index 718babe15abd..74451991c00e 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,7 @@ You can use the Toolbox in any MCP-compatible IDE or client (e.g., Gemini CLI, G When you run Toolbox with a `--prebuilt=` flag, you instantly get access to standard tools to interact with that database. Supported databases currently include: -- **Google Cloud:** AlloyDB, BigQuery, Cloud SQL (PostgreSQL, MySQL, SQL Server), Spanner, Firestore, Dataplex +- **Google Cloud:** AlloyDB, BigQuery, Cloud SQL (PostgreSQL, MySQL, SQL Server), Spanner, Firestore, Knowledge Catalog (formerly known as Dataplex). - **Other Databases:** PostgreSQL, MySQL, SQL Server, Oracle, MongoDB, Redis, Elasticsearch, CockroachDB, ClickHouse, Couchbase, Neo4j, Snowflake, Trino, and more. For a full list of available tools and their capabilities across all supported databases, see the [Prebuilt Tools Reference](https://mcp-toolbox.dev/documentation/configuration/prebuilt-configs/). 
diff --git a/cmd/internal/config_test.go b/cmd/internal/config_test.go index d9bbeaa53e67..83c3161c9858 100644 --- a/cmd/internal/config_test.go +++ b/cmd/internal/config_test.go @@ -1747,6 +1747,10 @@ func TestPrebuiltTools(t *testing.T) { Name: "replication", ToolNames: []string{"replication_stats", "list_replication_slots", "list_publication_tables", "list_roles", "list_pg_settings", "database_overview"}, }, + "vectorassist": { + Name: "vectorassist", + ToolNames: []string{"execute_sql", "define_spec", "modify_spec", "apply_spec", "generate_query"}, + }, }, }, { diff --git a/cmd/internal/imports.go b/cmd/internal/imports.go index edbf8d0cde29..6b34dfa5938c 100644 --- a/cmd/internal/imports.go +++ b/cmd/internal/imports.go @@ -79,6 +79,10 @@ import ( _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance" _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances" _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck" + _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistapplyspec" + _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistdefinespec" + _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistgeneratequery" + _ "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistmodifyspec" _ "github.com/googleapis/mcp-toolbox/internal/tools/cockroachdb/cockroachdbexecutesql" _ "github.com/googleapis/mcp-toolbox/internal/tools/cockroachdb/cockroachdblistschemas" _ "github.com/googleapis/mcp-toolbox/internal/tools/cockroachdb/cockroachdblisttables" diff --git a/docs/CLOUDSQLPG_README.md b/docs/CLOUDSQLPG_README.md index eaeb5f8e5f23..d734c3ee4747 100644 --- a/docs/CLOUDSQLPG_README.md +++ b/docs/CLOUDSQLPG_README.md @@ -72,6 +72,10 @@ The Cloud SQL for PostgreSQL MCP server provides the following tools: | `list_triggers` | Lists all non-internal triggers in a database. 
| | `list_indexes` | Lists available user indexes in the database. | | `list_sequences` | Lists sequences in the database. | +| `define_spec` | Defines a new vector specification for search workloads. | +| `modify_spec` | Modifies an existing vector specification. | +| `apply_spec` | Executes SQL recommendations for a vector specification. | +| `generate_query` | Generates optimized SQL queries for vector searches. | ## Custom MCP Server Configuration diff --git a/docs/DATAPLEX_README.md b/docs/KNOWLEDGE_CATALOG_README.md similarity index 75% rename from docs/DATAPLEX_README.md rename to docs/KNOWLEDGE_CATALOG_README.md index e79c4f06eb6e..b74e1bf9205b 100644 --- a/docs/DATAPLEX_README.md +++ b/docs/KNOWLEDGE_CATALOG_README.md @@ -1,12 +1,12 @@ -# Dataplex MCP Server +# Knowledge Catalog MCP Server -The Dataplex Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud Dataplex Catalog. It supports searching and looking up entries and aspect types. +The Knowledge Catalog (formerly known as Dataplex) Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud Knowledge Catalog. It supports searching and looking up entries and aspect types. ## Features -An editor configured to use the Dataplex MCP server can use its AI capabilities to help you: +An editor configured to use the Knowledge Catalog MCP server can use its AI capabilities to help you: -- **Search Catalog** - Search for entries in Dataplex Catalog +- **Search Catalog** - Search for entries in Knowledge Catalog - **Explore Metadata** - Lookup specific entries and search aspect types ## Prerequisites @@ -37,18 +37,18 @@ You'll now be able to see all enabled tools in the "Tools" tab. ## Usage -Once configured, the MCP server will automatically provide Dataplex capabilities to your AI assistant. 
You can: +Once configured, the MCP server will automatically provide Knowledge Catalog capabilities to your AI assistant. You can: -* "Search for entries related to 'sales' in Dataplex." +* "Search for entries related to 'sales' in Knowledge Catalog." * "Look up details for the entry 'projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry'." ## Server Capabilities -The Dataplex MCP server provides the following tools: +The Knowledge Catalog MCP server provides the following tools: | Tool Name | Description | |:----------------------|:-----------------------------------------------------------------------------------------------------------------------------| -| `search_entries` | Search for entries in Dataplex Catalog. | +| `search_entries` | Search for entries in Knowledge Catalog. | | `lookup_entry` | Retrieve specific subset of metadata (for example, schema, usage, business overview, and contacts) of a specific data asset. | | `search_aspect_types` | Find aspect types relevant to the query. | | `lookup_context` | Retrieve rich metadata regarding one or more data assets along with their relationships. | @@ -79,4 +79,4 @@ Add the following configuration to your MCP client (e.g., `settings.json` for Ge ## Documentation -For more information, visit the [Dataplex documentation](https://cloud.google.com/dataplex/docs). +For more information, visit the [Knowledge Catalog documentation](https://cloud.google.com/dataplex/docs). 
diff --git a/docs/en/documentation/connect-to/gemini-cli/_index.md b/docs/en/documentation/connect-to/gemini-cli/_index.md index b6556144700d..012ffb9e0359 100644 --- a/docs/en/documentation/connect-to/gemini-cli/_index.md +++ b/docs/en/documentation/connect-to/gemini-cli/_index.md @@ -36,7 +36,7 @@ Below are a list of Gemini CLI Extensions powered by MCP Toolbox: * [cloud-sql-postgresql-observability](https://github.com/gemini-cli-extensions/cloud-sql-postgresql-observability) * [cloud-sql-sqlserver](https://github.com/gemini-cli-extensions/cloud-sql-sqlserver) * [cloud-sql-sqlserver-observability](https://github.com/gemini-cli-extensions/cloud-sql-sqlserver-observability) -* [dataplex](https://github.com/gemini-cli-extensions/dataplex) +* [knowledge-catalog](https://github.com/gemini-cli-extensions/knowledge-catalog) * [firestore-native](https://github.com/gemini-cli-extensions/firestore-native) * [looker](https://github.com/gemini-cli-extensions/looker) * [mcp-toolbox](https://github.com/gemini-cli-extensions/mcp-toolbox) diff --git a/docs/en/documentation/introduction/_index.md b/docs/en/documentation/introduction/_index.md index 102f8c9b6973..d84d927aa8fa 100644 --- a/docs/en/documentation/introduction/_index.md +++ b/docs/en/documentation/introduction/_index.md @@ -77,7 +77,7 @@ Set the appropriate environment variables to connect, see the [Prebuilt Tools Re When you run Toolbox with a `--prebuilt=` flag, you instantly get access to standard tools to interact with that database. Supported databases currently include: -- **Google Cloud:** AlloyDB, BigQuery, Cloud SQL (PostgreSQL, MySQL, SQL Server), Spanner, Firestore, Dataplex +- **Google Cloud:** AlloyDB, BigQuery, Cloud SQL (PostgreSQL, MySQL, SQL Server), Spanner, Firestore, Knowledge Catalog (formerly known as Dataplex). - **Other Databases:** PostgreSQL, MySQL, SQL Server, Oracle, MongoDB, Redis, Elasticsearch, CockroachDB, ClickHouse, Couchbase, Neo4j, Snowflake, Trino, and more. 
For a full list of available tools and their capabilities across all supported databases, see the [Prebuilt Tools Reference](https://mcp-toolbox.dev/documentation/configuration/prebuilt-configs/). diff --git a/docs/en/integrations/bigquery/tools/bigquery-search-catalog.md b/docs/en/integrations/bigquery/tools/bigquery-search-catalog.md index d2b0ffa6380e..ff23e376016a 100644 --- a/docs/en/integrations/bigquery/tools/bigquery-search-catalog.md +++ b/docs/en/integrations/bigquery/tools/bigquery-search-catalog.md @@ -8,7 +8,7 @@ description: > ## About -A `bigquery-search-catalog` tool returns all entries in Dataplex Catalog (e.g. +A `bigquery-search-catalog` tool returns all entries in Knowledge Catalog (e.g. tables, views, models) with system=bigquery that matches given user query. `bigquery-search-catalog` takes a required `query` parameter based on which @@ -30,14 +30,14 @@ following parameters: ### IAM Permissions Bigquery uses [Identity and Access Management (IAM)][iam-overview] to control -user and group access to Dataplex resources. Toolbox will use your +user and group access to Knowledge Catalog (formerly known as Dataplex) resources. Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when -interacting with [Dataplex][dataplex-docs]. +interacting with [Knowledge Catalog][dataplex-docs]. In addition to [setting the ADC for your server][set-adc], you need to ensure the IAM identity has been given the correct IAM permissions for the tasks you -intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions] -and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on +intend to perform. See [Knowledge Catalog IAM permissions][iam-permissions] +and [Knowledge Catalog IAM roles][iam-roles] for more information on applying IAM permissions and roles to an identity. 
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control diff --git a/docs/en/integrations/cloud-sql-pg/tools/vector-assist-apply-spec.md b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-apply-spec.md new file mode 100644 index 000000000000..21890e76ed1d --- /dev/null +++ b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-apply-spec.md @@ -0,0 +1,59 @@ +--- +title: "vector-assist-apply-spec" +type: docs +weight: 1 +description: > + The "vector-assist-apply-spec" tool automatically executes all SQL recommendations + associated with a specific vector specification or table to finalize the + vector search setup. +--- + +## About + +The `vector-assist-apply-spec` tool automatically executes all the SQL recommendations associated with a specific vector specification (spec_id) or table. It runs the necessary commands in the correct sequence to provision the workload, marking each step as applied once successful. + +Use this tool when the user has reviewed the generated recommendations from a defined (or modified) spec and is ready to apply the changes directly to their database instance to finalize the vector search setup. Under the hood, this tool connects to the target database and executes the `vector_assist.apply_spec` function. + +## Compatible Sources + +{{< compatible-sources >}} + +## Requirements + +{{< notice tip >}} +Ensure that your target PostgreSQL database has the required `vector_assist` extension installed, in order for this tool to execute successfully. +{{< /notice >}} + +## Parameters + +The tool takes the following input parameters: + +| Parameter | Type | Description | Required | +| :------------ | :----- | :-------------------------------------------------------------------- | :------- | +| `spec_id` | string | Unique ID of the vector specification to apply. | No | +| `table_name` | string | Target table name for applying the vector specification. 
| No | +| `column_name` | string | Text or vector column name to uniquely identify the specification. | No | +| `schema_name` | string | Schema name for the target table. | No | + +> Note +> Parameters are marked as required or optional based on the vector assist function definitions. +> The function may perform further validation on optional parameters to ensure all necessary +> data is available before returning a response. + +## Example + +```yaml +kind: tool +name: apply_spec +type: vector-assist-apply-spec +source: my-database-source +description: "This tool automatically executes all the SQL recommendations associated with a specific vector specification (spec_id) or table. It runs the necessary commands in the correct sequence to provision the workload, marking each step as applied once successful. Use this tool when the user has reviewed the generated recommendations from a defined (or modified) spec and is ready to apply the changes directly to their database instance to finalize the vector search setup." +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|------------------------------------------------------| +| type | string | true | Must be "vector-assist-apply-spec". | +| source | string | true | Name of the source the SQL should execute on. | +| description | string | false | Description of the tool that is passed to the agent. 
| \ No newline at end of file diff --git a/docs/en/integrations/cloud-sql-pg/tools/vector-assist-define-spec.md b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-define-spec.md new file mode 100644 index 000000000000..8d7a7c278154 --- /dev/null +++ b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-define-spec.md @@ -0,0 +1,73 @@ +--- +title: "vector-assist-define-spec" +type: docs +weight: 1 +description: > + The "vector-assist-define-spec" tool defines a new vector specification by + capturing the user's intent and requirements for a vector search workload, + generating SQL recommendations for setting up database, embeddings, and + vector indexes. +--- + +## About + +The `vector-assist-define-spec` tool defines a new vector specification by capturing the user's intent and requirements for a vector search workload. It generates a complete, ordered set of SQL recommendations required to set up the database, embeddings, and vector indexes. + +Use this tool at the very beginning of the vector setup process when an agent or user first wants to configure a table for vector search, generate embeddings, or create a new vector index. Under the hood, this tool connects to the target database and executes the `vector_assist.define_spec` function to generate the necessary specifications. + +## Compatible Sources + +{{< compatible-sources >}} + +## Requirements + +{{< notice tip >}} +Ensure that your target PostgreSQL database has the required `vector_assist` extension installed, in order for this tool to execute successfully. +{{< /notice >}} + +## Parameters + +The tool takes the following input parameters: + +| Parameter | Type | Description | Required | +| :----------------------- | :------ | :--------------------------------------------------------------------- | :------- | +| `table_name` | string | Target table name for setting up the vector workload. | Yes | +| `schema_name` | string | Name of the schema containing the target table. 
| No | +| `spec_id` | string | Unique ID for the vector specification; auto-generated if omitted. | No | +| `vector_column_name` | string | Name of the column containing the vector embeddings. | No | +| `text_column_name` | string | Name of the text column for setting up vector search. | No | +| `vector_index_type` | string | Type of vector index ('hnsw', 'ivfflat', or 'scann'). | No | +| `embeddings_available` | boolean | Indicates if vector embeddings already exist in the table. | No | +| `num_vectors` | integer | Expected total number of vectors in the dataset. | No | +| `dimensionality` | integer | Dimension of existing vectors or the chosen embedding model. | No | +| `embedding_model` | string | Model to be used for generating vector embeddings. | No | +| `prefilter_column_names` | array | List of columns to use for prefiltering vector queries. | No | +| `distance_func` | string | Distance function for comparing vectors ('cosine', 'ip', 'l2', 'l1'). | No | +| `quantization` | string | Quantization method for vector indexes ('none', 'halfvec', 'bit'). | No | +| `memory_budget_kb` | integer | Maximum memory (in KB) the index can use during build. | No | +| `target_recall` | float | Target recall rate for standard vector queries using this index. | No | +| `target_top_k` | integer | Number of top results (top-K) to retrieve per query. | No | +| `tune_vector_index` | boolean | Indicates whether automatic tuning is required for the index. | No | + +> Note +> Parameters are marked as required or optional based on the vector assist function definitions. +> The function may perform further validation on optional parameters to ensure all necessary +> data is available before returning a response. + +## Example + +```yaml +kind: tool +name: define_spec +type: vector-assist-define-spec +source: my-database-source +description: "This tool defines a new vector specification by capturing the user's intent and requirements for a vector search workload. 
This generates a complete, ordered set of SQL recommendations required to set up the database, embeddings, and vector indexes. Use this tool at the very beginning of the vector setup process when a user first wants to configure a table for vector search, generate embeddings, or create a new vector index." +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|------------------------------------------------------| +| type | string | true | Must be "vector-assist-define-spec". | +| source | string | true | Name of the source the SQL should execute on. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/integrations/cloud-sql-pg/tools/vector-assist-generate-query.md b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-generate-query.md new file mode 100644 index 000000000000..7f147a18303a --- /dev/null +++ b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-generate-query.md @@ -0,0 +1,65 @@ +--- +title: "vector-assist-generate-query" +type: docs +weight: 1 +description: > + The "vector-assist-generate-query" tool produces optimized SQL queries for + vector search, leveraging metadata and specifications to enable semantic + and similarity searches. +--- + +## About + +The `vector-assist-generate-query` tool generates optimized SQL queries for vector search by leveraging the metadata and vector specifications defined in a specific spec_id. It serves as the primary actionable tool for generating the executable SQL required to retrieve relevant results based on vector similarity. + +The tool contextually understands requirements such as distance functions, quantization, and filtering to ensure the resulting query is compatible with the corresponding vector index. 
Additionally, it can automatically handle iterative index scans for filtered queries and calculate the necessary search parameters (like ef_search) to meet a target recall. +## Compatible Sources + +{{< compatible-sources >}} + +## Requirements + +{{< notice tip >}} +Ensure that your target PostgreSQL database has the required `vector_assist` extension installed, in order for this tool to execute successfully. +{{< /notice >}} + +## Parameters + +The tool takes the following input parameters: + +| Parameter | Type | Description | Required | +| :----------------------- | :------ | :------------------------------------------------------------------ | :------- | +| `spec_id` | string | Unique ID of the vector spec for query generation. | No | +| `table_name` | string | Target table name for generating the vector query. | No | +| `schema_name` | string | Schema name for the query's target table. | No | +| `column_name` | string | Text or vector column name identifying the specific spec. | No | +| `search_text` | string | Text string to search for; embeddings are auto-generated. | No | +| `search_vector` | string | Vector to search for; use instead of search_text. | No | +| `output_column_names` | array | List of columns to retrieve in the search results. | No | +| `top_k` | integer | Number of nearest neighbors to return (defaults to 10). | No | +| `filter_expressions` | array | List of filter expressions applied to the vector query. | No | +| `target_recall` | float | Target recall rate, overriding the spec-level default. | No | +| `iterative_index_search` | boolean | Enables iterative search for filtered queries to guarantee results. | No | + +> Note +> Parameters are marked as required or optional based on the vector assist function definitions. +> The function may perform further validation on optional parameters to ensure all necessary +> data is available before returning a response. 
+ +## Example + +```yaml +kind: tool +name: generate_query +type: vector-assist-generate-query +source: my-database-source +description: "This tool generates optimized SQL queries for vector search by leveraging the metadata and vector specifications defined in a specific spec_id. It may return a single query or a sequence of multiple SQL queries that can be executed sequentially. Use this tool when a user wants to perform semantic or similarity searches on their data. It serves as the primary actionable tool to invoke for generating the executable SQL required to retrieve relevant results based on vector similarity." +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|------------------------------------------------------| +| type | string | true | Must be "vector-assist-generate-query". | +| source | string | true | Name of the source the SQL should execute on. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/integrations/cloud-sql-pg/tools/vector-assist-modify-spec.md b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-modify-spec.md new file mode 100644 index 000000000000..6018a85a34b0 --- /dev/null +++ b/docs/en/integrations/cloud-sql-pg/tools/vector-assist-modify-spec.md @@ -0,0 +1,72 @@ +--- +title: "vector-assist-modify-spec" +type: docs +weight: 1 +description: > + The "vector-assist-modify-spec" tool modifies an existing vector specification + with new parameters or overrides, recalculating the generated SQL + recommendations to match the updated requirements. +--- + +## About + +The `vector-assist-modify-spec` tool modifies an existing vector specification (identified by a required `spec_id`) with new parameters or overrides. 
Upon modification, it automatically recalculates and refreshes the list of recommendations generated by `vector_assist.define_spec` to match the updated spec requirements. + +Use this tool when a user or agent wants to adjust or fine-tune the configuration of an already defined vector spec (such as changing the target recall, embedding model, or quantization) before actually executing the setup commands. Under the hood, this tool connects to the target database and executes the `vector_assist.modify_spec` function to generate the updated specifications. + +## Compatible Sources + +{{< compatible-sources >}} + +## Requirements + +{{< notice tip >}} +Ensure that your target PostgreSQL database has the required `vector_assist` extension installed, and that the `vector_assist.modify_spec` function is available in order for this tool to execute successfully. +{{< /notice >}} + +## Parameters + +The tool takes the following input parameters: + +| Parameter | Type | Description | Required | +| :----------------------- | :------ | :--------------------------------------------------------------------- | :------- | +| `spec_id` | string | Unique ID of the vector specification to modify. | Yes | +| `table_name` | string | New table name for the vector workload setup. | No | +| `schema_name` | string | New schema name containing the target table. | No | +| `vector_column_name` | string | New name for the column containing vector embeddings. | No | +| `text_column_name` | string | New name for the text column for vector search. | No | +| `vector_index_type` | string | New vector index type ('hnsw', 'ivfflat', or 'scann'). | No | +| `embeddings_available` | boolean | Update if vector embeddings already exist in the table. | No | +| `num_vectors` | integer | Update the expected total number of vectors. | No | +| `dimensionality` | integer | Update the dimension of vectors or the embedding model. 
| No | +| `embedding_model` | string | Update the model used for generating vector embeddings. | No | +| `prefilter_column_names` | array | Update the columns used for prefiltering vector queries. | No | +| `distance_func` | string | Update the distance function ('cosine', 'ip', 'l2', 'l1'). | No | +| `quantization` | string | Update the quantization method ('none', 'halfvec', 'bit'). | No | +| `memory_budget_kb` | integer | Update maximum memory (in KB) for index building. | No | +| `target_recall` | float | Update the target recall rate for the index. | No | +| `target_top_k` | integer | Update the number of top results (top-K) to retrieve. | No | +| `tune_vector_index` | boolean | Update whether automatic tuning is required for the index. | No | + +> Note +> Parameters are marked as required or optional based on the vector assist function definitions. +> The function may perform further validation on optional parameters to ensure all necessary +> data is available before returning a response. + +## Example + +```yaml +kind: tool +name: modify_spec +type: vector-assist-modify-spec +source: my-database-source +description: "This tool modifies an existing vector specification (identified by a required spec_id) with new parameters or overrides. Upon modification, it automatically recalculates and refreshes the list of generated SQL recommendations to match the updated requirements. Use this tool when a user wants to adjust or fine-tune the configuration of an already defined vector spec (such as changing the target recall, embedding model, or quantization) before actually executing the setup commands." +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|------------------------------------------------------| +| type | string | true | Must be "vector-assist-modify-spec". | +| source | string | true | Name of the source the SQL should execute on. 
| +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/integrations/dataplex/_index.md b/docs/en/integrations/dataplex/_index.md deleted file mode 100644 index a239db7f7d46..000000000000 --- a/docs/en/integrations/dataplex/_index.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: "Dataplex" -weight: 1 ---- \ No newline at end of file diff --git a/docs/en/integrations/dataplex/prebuilt-configs/_index.md b/docs/en/integrations/dataplex/prebuilt-configs/_index.md deleted file mode 100644 index a48730323e58..000000000000 --- a/docs/en/integrations/dataplex/prebuilt-configs/_index.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: "Prebuilt Configs" -type: docs -description: "Prebuilt configurations for Dataplex." ---- diff --git a/docs/en/integrations/knowledge-catalog/_index.md b/docs/en/integrations/knowledge-catalog/_index.md new file mode 100644 index 000000000000..1be7e993b4f7 --- /dev/null +++ b/docs/en/integrations/knowledge-catalog/_index.md @@ -0,0 +1,4 @@ +--- +title: "Knowledge Catalog" +weight: 1 +--- \ No newline at end of file diff --git a/docs/en/integrations/knowledge-catalog/prebuilt-configs/_index.md b/docs/en/integrations/knowledge-catalog/prebuilt-configs/_index.md new file mode 100644 index 000000000000..ee061f1eac08 --- /dev/null +++ b/docs/en/integrations/knowledge-catalog/prebuilt-configs/_index.md @@ -0,0 +1,5 @@ +--- +title: "Prebuilt Configs" +type: docs +description: "Prebuilt configurations for Knowledge Catalog (formerly known as Dataplex)." 
+--- diff --git a/docs/en/integrations/dataplex/prebuilt-configs/dataplex.md b/docs/en/integrations/knowledge-catalog/prebuilt-configs/dataplex.md similarity index 64% rename from docs/en/integrations/dataplex/prebuilt-configs/dataplex.md rename to docs/en/integrations/knowledge-catalog/prebuilt-configs/dataplex.md index 6781bfd95251..aa5204803060 100644 --- a/docs/en/integrations/dataplex/prebuilt-configs/dataplex.md +++ b/docs/en/integrations/knowledge-catalog/prebuilt-configs/dataplex.md @@ -1,10 +1,10 @@ --- -title: "Dataplex" +title: "Knowledge Catalog (formerly known as Dataplex)" type: docs -description: "Details of the Dataplex prebuilt configuration." +description: "Details of the Knowledge Catalog prebuilt configuration." --- -## Dataplex +## Knowledge Catalog * `--prebuilt` value: `dataplex` * **Environment Variables:** @@ -14,9 +14,8 @@ description: "Details of the Dataplex prebuilt configuration." entries. * **Dataplex Editor** (`roles/dataplex.editor`) to modify entries. * **Tools:** - * `search_entries`: Searches for entries in Dataplex Catalog. - * `lookup_entry`: Retrieves a specific entry from Dataplex - Catalog. + * `search_entries`: Searches for entries in Knowledge Catalog. + * `lookup_entry`: Retrieves a specific entry from Knowledge Catalog. * `search_aspect_types`: Finds aspect types relevant to the query. * `lookup_context`: Retrieves rich metadata regarding one or more data assets along with their relationships. 
diff --git a/docs/en/integrations/dataplex/source.md b/docs/en/integrations/knowledge-catalog/source.md similarity index 95% rename from docs/en/integrations/dataplex/source.md rename to docs/en/integrations/knowledge-catalog/source.md index 29af3506e818..021318caa817 100644 --- a/docs/en/integrations/dataplex/source.md +++ b/docs/en/integrations/knowledge-catalog/source.md @@ -1,22 +1,22 @@ --- -title: "Dataplex Source" +title: "Knowledge Catalog (formerly known as Dataplex) Source" type: docs linkTitle: "Source" weight: 1 description: > - Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale. + Knowledge Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Knowledge Catalog powers AI, analytics, and business intelligence at scale. no_list: true --- ## About -[Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance -solution for data and AI assets in Google Cloud. Dataplex Universal Catalog +[Knowledge Catalog][dataplex-docs] is a unified, intelligent governance +solution for data and AI assets in Google Cloud. Knowledge Catalog powers AI, analytics, and business intelligence at scale. At the heart of these governance capabilities is a catalog that contains a -centralized inventory of the data assets in your organization. Dataplex -Universal Catalog holds business, technical, and runtime metadata for all of +centralized inventory of the data assets in your organization. Knowledge Catalog +holds business, technical, and runtime metadata for all of your data. It helps you discover relationships and semantics in the metadata by applying artificial intelligence and machine learning. 
@@ -182,7 +182,7 @@ Entries are organized within Entry Groups, which are logical groupings of Entrie Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys). # Tool instructions -## Tool: dataplex_search_entries +## Tool: search_entries ## General - Do not try to search within search results on your own. - Do not fetch multiple pages of results unless explicitly asked. @@ -359,10 +359,17 @@ This abbreviated syntax works for the qualified predicates except for `label` in 1. Explain that no search result was found 2. Suggest to provide a more specific search query. -## Tool: dataplex_lookup_entry +## Tool: lookup_entry ### Request -1. Always try to limit the size of the response by specifying `aspect_types` parameter. Make sure to include to select view=CUSTOM when using aspect_types parameter. If you do not know the name of the aspect type, use the `dataplex_search_aspect_types` tool. -2. If you do not know the name of the entry, use `dataplex_search_entries` tool +1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the aspect_types parameter. If you do not know the name of the aspect type, use the `search_aspect_types` tool. +2. If you do not know the name of the entry, use the `search_entries` tool. ### Response 1. Unless asked for a specific aspect, respond with all aspects attached to the entry. + +## Tool: lookup_context +### Request +1. Use this tool to retrieve rich metadata regarding one or more data assets along with their relationships. +2. You must provide the `resources` list with full resource names. +### Response +1. Present the requested metadata and relationship information. 
``` \ No newline at end of file diff --git a/docs/en/integrations/dataplex/tools/_index.md b/docs/en/integrations/knowledge-catalog/tools/_index.md similarity index 100% rename from docs/en/integrations/dataplex/tools/_index.md rename to docs/en/integrations/knowledge-catalog/tools/_index.md diff --git a/docs/en/integrations/dataplex/tools/dataplex-lookup-context.md b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-lookup-context.md similarity index 87% rename from docs/en/integrations/dataplex/tools/dataplex-lookup-context.md rename to docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-lookup-context.md index 60138d94a769..69874f452bf6 100644 --- a/docs/en/integrations/dataplex/tools/dataplex-lookup-context.md +++ b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-lookup-context.md @@ -24,15 +24,15 @@ following form: projects/{project}/locations/{location}/entryGroups/{group}/entr ### IAM Permissions -Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control -user and group access to Dataplex resources. Toolbox will use your +Knowledge Catalog (formerly known as Dataplex) uses [Identity and Access Management (IAM)][iam-overview] to control +user and group access to Knowledge Catalog resources. Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when -interacting with [Dataplex][dataplex-docs]. +interacting with [Knowledge Catalog][dataplex-docs]. In addition to [setting the ADC for your server][set-adc], you need to ensure the IAM identity has been given the correct IAM permissions for the tasks you -intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions] -and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on +intend to perform. See [Knowledge Catalog IAM permissions][iam-permissions] +and [Knowledge Catalog IAM roles][iam-roles] for more information on applying IAM permissions and roles to an identity. 
**Note on Lookup Context Tool Behavior:** This specific tool utilizes a post-filtering diff --git a/docs/en/integrations/dataplex/tools/dataplex-lookup-entry.md b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-lookup-entry.md similarity index 83% rename from docs/en/integrations/dataplex/tools/dataplex-lookup-entry.md rename to docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-lookup-entry.md index 57c9dfc0d690..d7ea5294aab5 100644 --- a/docs/en/integrations/dataplex/tools/dataplex-lookup-entry.md +++ b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-lookup-entry.md @@ -3,13 +3,12 @@ title: "dataplex-lookup-entry" type: docs weight: 1 description: > - A "dataplex-lookup-entry" tool returns details of a particular entry in Dataplex Catalog. + A "dataplex-lookup-entry" tool returns details of a particular entry in Knowledge Catalog. --- ## About -A `dataplex-lookup-entry` tool returns details of a particular entry in Dataplex -Catalog. +A `dataplex-lookup-entry` tool returns details of a particular entry in Knowledge Catalog. `dataplex-lookup-entry` takes a required `name` parameter which contains the project and location to which the request should be attributed in the following @@ -37,15 +36,15 @@ It also optionally accepts following parameters: ### IAM Permissions -Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control -user and group access to Dataplex resources. Toolbox will use your +Knowledge Catalog uses [Identity and Access Management (IAM)][iam-overview] to control +user and group access to Knowledge Catalog resources. Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when -interacting with [Dataplex][dataplex-docs]. +interacting with [Knowledge Catalog][dataplex-docs]. In addition to [setting the ADC for your server][set-adc], you need to ensure the IAM identity has been given the correct IAM permissions for the tasks you -intend to perform. 
See [Dataplex Universal Catalog IAM permissions][iam-permissions] -and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on +intend to perform. See [Knowledge Catalog IAM permissions][iam-permissions] +and [Knowledge Catalog IAM roles][iam-roles] for more information on applying IAM permissions and roles to an identity. [iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control @@ -61,7 +60,7 @@ kind: tool name: lookup_entry type: dataplex-lookup-entry source: my-dataplex-source -description: Use this tool to retrieve a specific entry in Dataplex Catalog. +description: Use this tool to retrieve a specific entry in Knowledge Catalog. ``` ## Reference diff --git a/docs/en/integrations/dataplex/tools/dataplex-search-aspect-types.md b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-search-aspect-types.md similarity index 85% rename from docs/en/integrations/dataplex/tools/dataplex-search-aspect-types.md rename to docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-search-aspect-types.md index 1fdcf4d24a57..156c659ed57b 100644 --- a/docs/en/integrations/dataplex/tools/dataplex-search-aspect-types.md +++ b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-search-aspect-types.md @@ -28,15 +28,15 @@ aspect types based on search query. ### IAM Permissions -Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control -user and group access to Dataplex resources. Toolbox will use your +Knowledge Catalog uses [Identity and Access Management (IAM)][iam-overview] to control +user and group access to Knowledge Catalog resources. Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when -interacting with [Dataplex][dataplex-docs]. +interacting with [Knowledge Catalog][dataplex-docs]. 
In addition to [setting the ADC for your server][set-adc], you need to ensure the IAM identity has been given the correct IAM permissions for the tasks you -intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions] -and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on +intend to perform. See [Knowledge Catalog IAM permissions][iam-permissions] +and [Knowledge Catalog IAM roles][iam-roles] for more information on applying IAM permissions and roles to an identity. [iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control @@ -50,7 +50,7 @@ applying IAM permissions and roles to an identity. ```yaml kind: tool -name: dataplex-search-aspect-types +name: search_aspect_types type: dataplex-search-aspect-types source: my-dataplex-source description: Use this tool to find aspect types relevant to the query. diff --git a/docs/en/integrations/dataplex/tools/dataplex-search-entries.md b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-search-entries.md similarity index 82% rename from docs/en/integrations/dataplex/tools/dataplex-search-entries.md rename to docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-search-entries.md index 7f4fcabefcaa..f5ecdcfbe04a 100644 --- a/docs/en/integrations/dataplex/tools/dataplex-search-entries.md +++ b/docs/en/integrations/knowledge-catalog/tools/knowledge-catalog-search-entries.md @@ -8,7 +8,7 @@ description: > ## About -A `dataplex-search-entries` tool returns all entries in Dataplex Catalog (e.g. +A `dataplex-search-entries` tool returns all entries in Knowledge Catalog (formerly known as Dataplex) (e.g. tables, views, models) that matches given user query. `dataplex-search-entries` takes a required `query` parameter based on which @@ -31,15 +31,15 @@ following parameters: ### IAM Permissions -Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control -user and group access to Dataplex resources. 
Toolbox will use your +Knowledge Catalog uses [Identity and Access Management (IAM)][iam-overview] to control +user and group access to Knowledge Catalog resources. Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when -interacting with [Dataplex][dataplex-docs]. +interacting with [Knowledge Catalog][dataplex-docs]. In addition to [setting the ADC for your server][set-adc], you need to ensure the IAM identity has been given the correct IAM permissions for the tasks you -intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions] -and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on +intend to perform. See [Knowledge Catalog IAM permissions][iam-permissions] +and [Knowledge Catalog IAM roles][iam-roles] for more information on applying IAM permissions and roles to an identity. [iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control @@ -53,7 +53,7 @@ applying IAM permissions and roles to an identity. ```yaml kind: tool -name: dataplex-search-entries +name: search_entries type: dataplex-search-entries source: my-dataplex-source description: Use this tool to get all the entries based on the provided query. 
diff --git a/docs/en/integrations/looker/source.md b/docs/en/integrations/looker/source.md index b30c4336205e..445c1d07ab94 100644 --- a/docs/en/integrations/looker/source.md +++ b/docs/en/integrations/looker/source.md @@ -63,17 +63,37 @@ To initialize the application default credential run `gcloud auth login ## Example +Initialize a Looker source for standard and development tools: + ```yaml kind: source name: my-looker-source type: looker -base_url: http://looker.example.com -client_id: ${LOOKER_CLIENT_ID} -client_secret: ${LOOKER_CLIENT_SECRET} -project: ${LOOKER_PROJECT} -location: ${LOOKER_LOCATION} -verify_ssl: true +base_url: ${LOOKER_BASE_URL} +client_id: ${LOOKER_CLIENT_ID:} +client_secret: ${LOOKER_CLIENT_SECRET:} +verify_ssl: ${LOOKER_VERIFY_SSL:true} +timeout: 600s +use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} +show_hidden_models: ${LOOKER_SHOW_HIDDEN_MODELS:true} +show_hidden_explores: ${LOOKER_SHOW_HIDDEN_EXPLORES:true} +show_hidden_fields: ${LOOKER_SHOW_HIDDEN_FIELDS:true} +``` + +Initialize a Looker source for conversational analytics: + +```yaml +kind: source +name: my-looker-conversational-source +type: looker +base_url: ${LOOKER_BASE_URL} +client_id: ${LOOKER_CLIENT_ID:} +client_secret: ${LOOKER_CLIENT_SECRET:} +verify_ssl: ${LOOKER_VERIFY_SSL:true} timeout: 600s +use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} +project: ${LOOKER_PROJECT:} +location: ${LOOKER_LOCATION:} ``` The Looker base url will look like "https://looker.example.com", don't include diff --git a/docs/en/integrations/looker/tools/looker-validate-project.md b/docs/en/integrations/looker/tools/looker-validate-project.md index dd8bd49549e5..4683b52f3f3b 100644 --- a/docs/en/integrations/looker/tools/looker-validate-project.md +++ b/docs/en/integrations/looker/tools/looker-validate-project.md @@ -19,27 +19,27 @@ A "looker-validate-project" tool checks the syntax of a LookML project and repor ## Example ```yaml -tools: - validate_project: - kind: looker-validate-project - 
source: looker-source - description: | - This tool checks a LookML project for syntax errors. +kind: tool +name: validate_project +type: looker-validate-project +source: looker-source +description: | + This tool checks a LookML project for syntax errors. - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - Parameters: - - project_id (required): The unique ID of the LookML project. + Parameters: + - project_id (required): The unique ID of the LookML project. - Output: - A list of error details including the file path and line number, and also a list of models - that are not currently valid due to LookML errors. + Output: + A list of error details including the file path and line number, and also a list of models + that are not currently valid due to LookML errors. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-validate-project". | +| type | string | true | Must be "looker-validate-project". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/internal/prebuiltconfigs/tools/cloud-sql-postgres.yaml b/internal/prebuiltconfigs/tools/cloud-sql-postgres.yaml index 7030a06e000e..9dd1f378a70a 100644 --- a/internal/prebuiltconfigs/tools/cloud-sql-postgres.yaml +++ b/internal/prebuiltconfigs/tools/cloud-sql-postgres.yaml @@ -284,6 +284,18 @@ tools: get_instance: kind: cloud-sql-get-instance source: cloud-sql-admin-source + define_spec: + kind: vector-assist-define-spec + source: cloudsql-pg-source + modify_spec: + kind: vector-assist-modify-spec + source: cloudsql-pg-source + apply_spec: + kind: vector-assist-apply-spec + source: cloudsql-pg-source + generate_query: + kind: vector-assist-generate-query + source: cloudsql-pg-source toolsets: admin: - create_instance @@ -343,3 +355,9 @@ toolsets: - list_roles - list_pg_settings - database_overview + vectorassist: + - execute_sql + - define_spec + - modify_spec + - apply_spec + - generate_query diff --git a/internal/prebuiltconfigs/tools/looker-conversational-analytics.yaml b/internal/prebuiltconfigs/tools/looker-conversational-analytics.yaml index 4a51cf38fd43..30441910733e 100644 --- a/internal/prebuiltconfigs/tools/looker-conversational-analytics.yaml +++ b/internal/prebuiltconfigs/tools/looker-conversational-analytics.yaml @@ -1,4 +1,4 @@ -# Copyright 2025 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,57 +12,60 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-sources: - looker-source: - kind: looker - base_url: ${LOOKER_BASE_URL} - client_id: ${LOOKER_CLIENT_ID:} - client_secret: ${LOOKER_CLIENT_SECRET:} - verify_ssl: ${LOOKER_VERIFY_SSL:true} - timeout: 600s - use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} - project: ${LOOKER_PROJECT:} - location: ${LOOKER_LOCATION:} +kind: source +name: looker-source +type: looker +base_url: ${LOOKER_BASE_URL} +client_id: ${LOOKER_CLIENT_ID:} +client_secret: ${LOOKER_CLIENT_SECRET:} +verify_ssl: ${LOOKER_VERIFY_SSL:true} +timeout: 600s +use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} +project: ${LOOKER_PROJECT:} +location: ${LOOKER_LOCATION:} +--- +kind: tool +name: ask_data_insights +type: looker-conversational-analytics +source: looker-source +description: | + Use this tool to ask questions about your data using the Looker Conversational + Analytics API. You must provide a natural language query and a list of + 1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]). + Use the 'get_models' and 'get_explores' tools to discover available models and explores. +--- +kind: tool +name: get_models +type: looker-get-models +source: looker-source +description: | + get_models Tool + + This tool retrieves a list of available LookML models in the Looker instance. + LookML models define the data structure and relationships that users can query. + The output includes details like the model's `name` and `label`, which are + essential for subsequent calls to tools like `get_explores` or `query`. + + This tool takes no parameters. +--- +kind: tool +name: get_explores +type: looker-get-explores +source: looker-source +description: | + get_explores Tool -tools: - ask_data_insights: - kind: looker-conversational-analytics - source: looker-source - description: | - Use this tool to ask questions about your data using the Looker Conversational - Analytics API. 
You must provide a natural language query and a list of - 1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]). - Use the 'get_models' and 'get_explores' tools to discover available models and explores. - - get_models: - kind: looker-get-models - source: looker-source - description: | - get_models Tool - - This tool retrieves a list of available LookML models in the Looker instance. - LookML models define the data structure and relationships that users can query. - The output includes details like the model's `name` and `label`, which are - essential for subsequent calls to tools like `get_explores` or `query`. - - This tool takes no parameters. + This tool retrieves a list of explores defined within a specific LookML model. + Explores represent a curated view of your data, typically joining several + tables together to allow for focused analysis on a particular subject area. + The output provides details like the explore's `name` and `label`. - get_explores: - kind: looker-get-explores - source: looker-source - description: | - get_explores Tool - - This tool retrieves a list of explores defined within a specific LookML model. - Explores represent a curated view of your data, typically joining several - tables together to allow for focused analysis on a particular subject area. - The output provides details like the explore's `name` and `label`. - - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - -toolsets: - looker_conversational_analytics_tools: - - ask_data_insights - - get_models - - get_explores \ No newline at end of file + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. 
+--- +kind: toolset +name: looker_conversational_analytics_tools +tools: +- ask_data_insights +- get_models +- get_explores \ No newline at end of file diff --git a/internal/prebuiltconfigs/tools/looker-dev.yaml b/internal/prebuiltconfigs/tools/looker-dev.yaml index 75eafda2440e..2b97161f8c63 100644 --- a/internal/prebuiltconfigs/tools/looker-dev.yaml +++ b/internal/prebuiltconfigs/tools/looker-dev.yaml @@ -12,457 +12,485 @@ # See the License for the specific language governing permissions and # limitations under the License. -sources: - looker-source: - kind: looker - base_url: ${LOOKER_BASE_URL} - client_id: ${LOOKER_CLIENT_ID:} - client_secret: ${LOOKER_CLIENT_SECRET:} - verify_ssl: ${LOOKER_VERIFY_SSL:true} - timeout: 600s - use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} - show_hidden_models: ${LOOKER_SHOW_HIDDEN_MODELS:true} - show_hidden_explores: ${LOOKER_SHOW_HIDDEN_EXPLORES:true} - show_hidden_fields: ${LOOKER_SHOW_HIDDEN_FIELDS:true} - +kind: source +name: looker-source +type: looker +base_url: ${LOOKER_BASE_URL} +client_id: ${LOOKER_CLIENT_ID:} +client_secret: ${LOOKER_CLIENT_SECRET:} +verify_ssl: ${LOOKER_VERIFY_SSL:true} +timeout: 600s +use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} +show_hidden_models: ${LOOKER_SHOW_HIDDEN_MODELS:true} +show_hidden_explores: ${LOOKER_SHOW_HIDDEN_EXPLORES:true} +show_hidden_fields: ${LOOKER_SHOW_HIDDEN_FIELDS:true} +--- +kind: tool +name: health_pulse +type: looker-health-pulse +source: looker-source +description: | + This tool performs various health checks on a Looker instance. + + Parameters: + - action (required): Specifies the type of health check to perform. + Choose one of the following: + - `check_db_connections`: Verifies database connectivity. + - `check_dashboard_performance`: Assesses dashboard loading performance. + - `check_dashboard_errors`: Identifies errors within dashboards. + - `check_explore_performance`: Evaluates explore query performance. 
+ - `check_schedule_failures`: Reports on failed scheduled deliveries. + - `check_legacy_features`: Checks for the usage of legacy features. + + Note on `check_legacy_features`: + This action is exclusively available in Looker Core instances. If invoked + on a non-Looker Core instance, it will return a notice rather than an error. + This notice should be considered normal behavior and not an indication of an issue. +--- +kind: tool +name: health_analyze +type: looker-health-analyze +source: looker-source +description: | + This tool calculates the usage statistics for Looker projects, models, and explores. + + Parameters: + - action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`. + - project (optional): The specific project ID to analyze. + - model (optional): The specific model name to analyze. Requires `project` if used without `explore`. + - explore (optional): The specific explore name to analyze. Requires `model` if used. + - timeframe (optional): The lookback period in days for usage data. Defaults to `90` days. + - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. + + Output: + The result is a JSON object containing usage metrics for the specified resources. +--- +kind: tool +name: health_vacuum +type: looker-health-vacuum +source: looker-source +description: | + This tool identifies and suggests LookML models or explores that can be + safely removed due to inactivity or low usage. + + Parameters: + - action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`. + - project (optional): The specific project ID to consider. + - model (optional): The specific model name to consider. Requires `project` if used without `explore`. + - explore (optional): The specific explore name to consider. Requires `model` if used. + - timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days. 
+ - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. + + Output: + A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage. +--- +kind: tool +name: dev_mode +type: looker-dev-mode +source: looker-source +description: | + This tool allows toggling the Looker IDE session between Development Mode and Production Mode. + Development Mode enables making and testing changes to LookML projects. + + Parameters: + - enable (required): A boolean value. + - `true`: Switches the current session to Development Mode. + - `false`: Switches the current session to Production Mode. +--- +kind: tool +name: get_projects +type: looker-get-projects +source: looker-source +description: | + This tool retrieves a list of all LookML projects available on the Looker instance. + It is useful for identifying projects before performing actions like retrieving + project files or making modifications. + + Parameters: + This tool takes no parameters. + + Output: + A JSON array of objects, each containing the `project_id` and `project_name` + for a LookML project. +--- +kind: tool +name: get_project_files +type: looker-get-project-files +source: looker-source +description: | + This tool retrieves a list of all LookML files within a specified project, + providing details about each file. + + Parameters: + - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. + + Output: + A JSON array of objects, each representing a LookML file and containing + details such as `path`, `id`, `type`, and `git_status`. +--- +kind: tool +name: get_project_file +type: looker-get-project-file +source: looker-source +description: | + This tool retrieves the raw content of a specific LookML file from within a project. + + Parameters: + - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. 
+ - file_path (required): The path to the LookML file within the project, + typically obtained from `get_project_files`. + + Output: + The raw text content of the specified LookML file. +--- +kind: tool +name: create_project_file +type: looker-create-project-file +source: looker-source +description: | + This tool creates a new LookML file within a specified project, populating + it with the provided content. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_path (required): The desired path and filename for the new file within the project. + - content (required): The full LookML content to write into the new file. + + Output: + A confirmation message upon successful file creation. +--- +kind: tool +name: update_project_file +type: looker-update-project-file +source: looker-source +description: | + This tool modifies the content of an existing LookML file within a specified project. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_path (required): The exact path to the LookML file to modify within the project. + - content (required): The new, complete LookML content to overwrite the existing file. + + Output: + A confirmation message upon successful file modification. +--- +kind: tool +name: delete_project_file +type: looker-delete-project-file +source: looker-source +description: | + This tool permanently deletes a specified LookML file from within a project. + Use with caution, as this action cannot be undone through the API. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_path (required): The exact path to the LookML file to delete within the project. 
+ + Output: + A confirmation message upon successful file deletion. +--- +kind: tool +name: get_project_directories +type: looker-get-project-directories +source: looker-source +description: | + This tool retrieves the list of directories within a specified LookML project. + + Parameters: + - project_id (required): The unique ID of the LookML project. + + Output: + A JSON array of strings, where each string is the name of a directory within the project. +--- +kind: tool +name: create_project_directory +type: looker-create-project-directory +source: looker-source +description: | + This tool creates a new directory within a specified LookML project. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - directory_path (required): The path to the new directory within the project. + + Output: + A confirmation message upon successful directory creation. +--- +kind: tool +name: delete_project_directory +type: looker-delete-project-directory +source: looker-source +description: | + This tool permanently deletes a specified directory within a LookML project. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - directory_path (required): The path to the directory within the project. + + Output: + A confirmation message upon successful directory deletion. +--- +kind: tool +name: validate_project +type: looker-validate-project +source: looker-source +description: | + This tool checks a LookML project for syntax errors. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. 
+ + Output: + A list of error details including the file path and line number, and also a list of models + that are not currently valid due to LookML errors. +--- +kind: tool +name: get_connections +type: looker-get-connections +source: looker-source +description: | + This tool retrieves a list of all database connections configured in the Looker system. + + Parameters: + This tool takes no parameters. + + Output: + A JSON array of objects, each representing a database connection and including details such as: + - `name`: The connection's unique identifier. + - `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery"). + - `default_schema`: The default schema for the connection. + - `database`: The associated database name (if applicable). + - `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases. +--- +kind: tool +name: get_connection_schemas +type: looker-get-connection-schemas +source: looker-source +description: | + This tool retrieves a list of database schemas available through a specified + Looker connection. + + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. + - database (optional): An optional database name to filter the schemas. + Only applicable for connections that support multiple databases. + + Output: + A JSON array of strings, where each string is the name of an available schema. +--- +kind: tool +name: get_connection_databases +type: looker-get-connection-databases +source: looker-source +description: | + This tool retrieves a list of databases available through a specified Looker connection. + This is only applicable for connections that support multiple databases. + Use `get_connections` to check if a connection supports multiple databases. + + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. 
+ + Output: + A JSON array of strings, where each string is the name of an available database. + If the connection does not support multiple databases, an empty list or an error will be returned. +--- +kind: tool +name: get_connection_tables +type: looker-get-connection-tables +source: looker-source +description: | + This tool retrieves a list of tables available within a specified database schema + through a Looker connection. + + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. + - schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`. + - database (optional): The name of the database to filter by. Only applicable for connections + that support multiple databases (check with `get_connections`). + + Output: + A JSON array of strings, where each string is the name of an available table. +--- +kind: tool +name: get_connection_table_columns +type: looker-get-connection-table-columns +source: looker-source +description: | + This tool retrieves a list of columns for one or more specified tables within a + given database schema and connection. + + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. + - schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`. + - tables (required): A comma-separated string of table names for which to retrieve columns + (e.g., "users,orders,products"), obtained from `get_connection_tables`. + - database (optional): The name of the database to filter by. Only applicable for connections + that support multiple databases (check with `get_connections`). + + Output: + A JSON array of objects, where each object represents a column and contains details + such as `table_name`, `column_name`, `data_type`, and `is_nullable`. 
+--- +kind: tool +name: get_lookml_tests +type: looker-get-lookml-tests +source: looker-source +description: | + Returns a list of tests which can be run to validate a project's LookML code and/or the underlying data, optionally filtered by the file id. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_id (optional): The ID of the file to filter tests by. This must be the complete file path from the project root (e.g., `models/my_model.model.lkml` or `views/my_view.view.lkml`). + + Output: + A JSON array of LookML test objects, each containing: + - model_name: The name of the model. + - name: The name of the test. + - explore_name: The name of the explore being tested. + - query_url_params: The query parameters used for the test. + - file: The file path where the test is defined. + - line: The line number where the test is defined. +--- +kind: tool +name: run_lookml_tests +type: looker-run-lookml-tests +source: looker-source +description: | + This tool runs LookML tests in the project, filtered by file, test, and/or model. These filters work in conjunction (logical AND). + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the project to run LookML tests for. + - file_id (optional): The ID of the file to run tests for. This must be the complete file path from the project root (e.g., `models/my_model.model.lkml` or `views/my_view.view.lkml`). + - test (optional): The name of the test to run. + - model (optional): The name of the model to run tests for. + + Output: + A JSON array containing the results of the executed tests, where each object includes: + - model_name: Name of the model tested. + - test_name: Name of the test. + - assertions_count: Total number of assertions in the test. 
+ - assertions_failed: Number of assertions that failed. + - success: Boolean indicating if the test passed. + - errors: Array of error objects (if any), containing details like `message`, `file_path`, `line_number`, and `severity`. + - warnings: Array of warning messages (if any). +--- +kind: tool +name: create_view_from_table +type: looker-create-view-from-table +source: looker-source +description: | + This tool generates boilerplate LookML views directly from the database schema. + It does not create model or explore files, only view files in the specified folder. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - connection (required): The database connection name. + - tables (required): A list of objects to generate views for. Each object must contain `schema` and `table_name` (note: table names are case-sensitive). Optional fields include `primary_key`, `base_view`, and `columns` (array of objects with `column_name`). + - folder_name (optional): The folder to place the view files in (defaults to 'views/'). + + Output: + A confirmation message upon successful view generation, or an error message if the operation fails. +--- +kind: tool +name: list_git_branches +type: looker-list-git-branches +source: looker-source +description: | + This tool is used to retrieve the list of available git branches of a LookML project. + + Parameters: + - project_id (required): The unique ID of the LookML project. +--- +kind: tool +name: get_git_branch +type: looker-get-git-branch +source: looker-source +description: | + This tool is used to retrieve the current git branch of a LookML project. + + Parameters: + - project_id (required): The unique ID of the LookML project. +--- +kind: tool +name: create_git_branch +type: looker-create-git-branch +source: looker-source +description: | + This tool is used to create a new git branch of a LookML project. 
This only works in dev mode. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - branch (required): The branch to create. + - ref (optional): The ref to start a newly created branch. +--- +kind: tool +name: switch_git_branch +type: looker-switch-git-branch +source: looker-source +description: | + This tool is used to switch the git branch of a LookML project. This only works in dev mode. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - branch (required): The branch to switch to. + - ref (optional): The ref to change a branch with `reset --hard` on a switch operation. +--- +kind: tool +name: delete_git_branch +type: looker-delete-git-branch +source: looker-source +description: | + This tool is used to delete a git branch of a LookML project. This only works in dev mode. + + Parameters: + - project_id (required): The unique ID of the LookML project. + - branch (required): The branch to delete. +--- +kind: toolset +name: looker_dev_tools tools: - health_pulse: - kind: looker-health-pulse - source: looker-source - description: | - This tool performs various health checks on a Looker instance. - - Parameters: - - action (required): Specifies the type of health check to perform. - Choose one of the following: - - `check_db_connections`: Verifies database connectivity. - - `check_dashboard_performance`: Assesses dashboard loading performance. - - `check_dashboard_errors`: Identifies errors within dashboards. - - `check_explore_performance`: Evaluates explore query performance. - - `check_schedule_failures`: Reports on failed scheduled deliveries. - - `check_legacy_features`: Checks for the usage of legacy features. - - Note on `check_legacy_features`: - This action is exclusively available in Looker Core instances. If invoked - on a non-Looker Core instance, it will return a notice rather than an error. - This notice should be considered normal behavior and not an indication of an issue. 
- - health_analyze: - kind: looker-health-analyze - source: looker-source - description: | - This tool calculates the usage statistics for Looker projects, models, and explores. - - Parameters: - - action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`. - - project (optional): The specific project ID to analyze. - - model (optional): The specific model name to analyze. Requires `project` if used without `explore`. - - explore (optional): The specific explore name to analyze. Requires `model` if used. - - timeframe (optional): The lookback period in days for usage data. Defaults to `90` days. - - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. - - Output: - The result is a JSON object containing usage metrics for the specified resources. - - health_vacuum: - kind: looker-health-vacuum - source: looker-source - description: | - This tool identifies and suggests LookML models or explores that can be - safely removed due to inactivity or low usage. - - Parameters: - - action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`. - - project (optional): The specific project ID to consider. - - model (optional): The specific model name to consider. Requires `project` if used without `explore`. - - explore (optional): The specific explore name to consider. Requires `model` if used. - - timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days. - - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. - - Output: - A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage. - - dev_mode: - kind: looker-dev-mode - source: looker-source - description: | - This tool allows toggling the Looker IDE session between Development Mode and Production Mode. 
- Development Mode enables making and testing changes to LookML projects. - - Parameters: - - enable (required): A boolean value. - - `true`: Switches the current session to Development Mode. - - `false`: Switches the current session to Production Mode. - - get_projects: - kind: looker-get-projects - source: looker-source - description: | - This tool retrieves a list of all LookML projects available on the Looker instance. - It is useful for identifying projects before performing actions like retrieving - project files or making modifications. - - Parameters: - This tool takes no parameters. - - Output: - A JSON array of objects, each containing the `project_id` and `project_name` - for a LookML project. - - get_project_files: - kind: looker-get-project-files - source: looker-source - description: | - This tool retrieves a list of all LookML files within a specified project, - providing details about each file. - - Parameters: - - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. - - Output: - A JSON array of objects, each representing a LookML file and containing - details such as `path`, `id`, `type`, and `git_status`. - - get_project_file: - kind: looker-get-project-file - source: looker-source - description: | - This tool retrieves the raw content of a specific LookML file from within a project. - - Parameters: - - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. - - file_path (required): The path to the LookML file within the project, - typically obtained from `get_project_files`. - - Output: - The raw text content of the specified LookML file. - - create_project_file: - kind: looker-create-project-file - source: looker-source - description: | - This tool creates a new LookML file within a specified project, populating - it with the provided content. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. 
- - Parameters: - - project_id (required): The unique ID of the LookML project. - - file_path (required): The desired path and filename for the new file within the project. - - content (required): The full LookML content to write into the new file. - - Output: - A confirmation message upon successful file creation. - - update_project_file: - kind: looker-update-project-file - source: looker-source - description: | - This tool modifies the content of an existing LookML file within a specified project. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - file_path (required): The exact path to the LookML file to modify within the project. - - content (required): The new, complete LookML content to overwrite the existing file. - - Output: - A confirmation message upon successful file modification. - - delete_project_file: - kind: looker-delete-project-file - source: looker-source - description: | - This tool permanently deletes a specified LookML file from within a project. - Use with caution, as this action cannot be undone through the API. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - file_path (required): The exact path to the LookML file to delete within the project. - - Output: - A confirmation message upon successful file deletion. - - get_project_directories: - kind: looker-get-project-directories - source: looker-source - description: | - This tool retrieves the list of directories within a specified LookML project. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - Output: - A JSON array of strings, where each string is the name of a directory within the project. 
- - create_project_directory: - kind: looker-create-project-directory - source: looker-source - description: | - This tool creates a new directory within a specified LookML project. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - directory_path (required): The path to the new directory within the project. - - Output: - A confirmation message upon successful directory creation. - - delete_project_directory: - kind: looker-delete-project-directory - source: looker-source - description: | - This tool permanently deletes a specified directory within a LookML project. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - directory_path (required): The path to the directory within the project. - - Output: - A confirmation message upon successful directory deletion. - - validate_project: - kind: looker-validate-project - source: looker-source - description: | - This tool checks a LookML project for syntax errors. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - Output: - A list of error details including the file path and line number, and also a list of models - that are not currently valid due to LookML errors. - - get_connections: - kind: looker-get-connections - source: looker-source - description: | - This tool retrieves a list of all database connections configured in the Looker system. - - Parameters: - This tool takes no parameters. - - Output: - A JSON array of objects, each representing a database connection and including details such as: - - `name`: The connection's unique identifier. - - `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery"). 
- - `default_schema`: The default schema for the connection. - - `database`: The associated database name (if applicable). - - `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases. - - get_connection_schemas: - kind: looker-get-connection-schemas - source: looker-source - description: | - This tool retrieves a list of database schemas available through a specified - Looker connection. - - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - database (optional): An optional database name to filter the schemas. - Only applicable for connections that support multiple databases. - - Output: - A JSON array of strings, where each string is the name of an available schema. - - get_connection_databases: - kind: looker-get-connection-databases - source: looker-source - description: | - This tool retrieves a list of databases available through a specified Looker connection. - This is only applicable for connections that support multiple databases. - Use `get_connections` to check if a connection supports multiple databases. - - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - Output: - A JSON array of strings, where each string is the name of an available database. - If the connection does not support multiple databases, an empty list or an error will be returned. - - get_connection_tables: - kind: looker-get-connection-tables - source: looker-source - description: | - This tool retrieves a list of tables available within a specified database schema - through a Looker connection. - - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`. - - database (optional): The name of the database to filter by. 
Only applicable for connections - that support multiple databases (check with `get_connections`). - - Output: - A JSON array of strings, where each string is the name of an available table. - - get_connection_table_columns: - kind: looker-get-connection-table-columns - source: looker-source - description: | - This tool retrieves a list of columns for one or more specified tables within a - given database schema and connection. - - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`. - - tables (required): A comma-separated string of table names for which to retrieve columns - (e.g., "users,orders,products"), obtained from `get_connection_tables`. - - database (optional): The name of the database to filter by. Only applicable for connections - that support multiple databases (check with `get_connections`). - - Output: - A JSON array of objects, where each object represents a column and contains details - such as `table_name`, `column_name`, `data_type`, and `is_nullable`. - - get_lookml_tests: - kind: looker-get-lookml-tests - source: looker-source - description: | - Returns a list of tests which can be run to validate a project's LookML code and/or the underlying data, optionally filtered by the file id. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - file_id (optional): The ID of the file to filter tests by. This must be the complete file path from the project root (e.g., `models/my_model.model.lkml` or `views/my_view.view.lkml`). - - Output: - A JSON array of LookML test objects, each containing: - - model_name: The name of the model. - - name: The name of the test. - - explore_name: The name of the explore being tested. 
- - query_url_params: The query parameters used for the test. - - file: The file path where the test is defined. - - line: The line number where the test is defined. - - run_lookml_tests: - kind: looker-run-lookml-tests - source: looker-source - description: | - This tool runs LookML tests in the project, filtered by file, test, and/or model. These filters work in conjunction (logical AND). - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the project to run LookML tests for. - - file_id (optional): The ID of the file to run tests for. This must be the complete file path from the project root (e.g., `models/my_model.model.lkml` or `views/my_view.view.lkml`). - - test (optional): The name of the test to run. - - model (optional): The name of the model to run tests for. - - Output: - A JSON array containing the results of the executed tests, where each object includes: - - model_name: Name of the model tested. - - test_name: Name of the test. - - assertions_count: Total number of assertions in the test. - - assertions_failed: Number of assertions that failed. - - success: Boolean indicating if the test passed. - - errors: Array of error objects (if any), containing details like `message`, `file_path`, `line_number`, and `severity`. - - warnings: Array of warning messages (if any). - - create_view_from_table: - kind: looker-create-view-from-table - source: looker-source - description: | - This tool generates boilerplate LookML views directly from the database schema. - It does not create model or explore files, only view files in the specified folder. - - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - connection (required): The database connection name. - - tables (required): A list of objects to generate views for. 
Each object must contain `schema` and `table_name` (note: table names are case-sensitive). Optional fields include `primary_key`, `base_view`, and `columns` (array of objects with `column_name`). - - folder_name (optional): The folder to place the view files in (defaults to 'views/'). - - Output: - A confirmation message upon successful view generation, or an error message if the operation fails. - - list_git_branches: - kind: looker-list-git-branches - source: looker-source - description: | - This tool is used to retrieve the list of available git branches of a LookML project. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - get_git_branch: - kind: looker-get-git-branch - source: looker-source - description: | - This tool is used to retrieve the current git branch of a LookML project. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - create_git_branch: - kind: looker-create-git-branch - source: looker-source - description: | - This tool is used to create a new git branch of a LookML project. This only works in dev mode. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - branch (required): The branch to create. - - ref (optional): The ref to start a newly created branch. - - switch_git_branch: - kind: looker-switch-git-branch - source: looker-source - description: | - This tool is used to switch the git branch of a LookML project. This only works in dev mode. - - Parameters: - - project_id (required): The unique ID of the LookML project. - - branch (required): The branch to switch to. - - ref (optional): The ref to change a branch with `reset --hard` on a switch operation. - - delete_git_branch: - kind: looker-delete-git-branch - source: looker-source - description: | - This tool is used to delete a git branch of a LookML project. This only works in dev mode. - - Parameters: - - project_id (required): The unique ID of the LookML project. 
- - branch (required): The branch to delete. -toolsets: - looker_dev_tools: - - health_pulse - - health_analyze - - health_vacuum - - dev_mode - - get_projects - - get_project_files - - get_project_file - - create_project_file - - update_project_file - - delete_project_file - - get_project_directories - - create_project_directory - - delete_project_directory - - validate_project - - get_connections - - get_connection_schemas - - get_connection_databases - - get_connection_tables - - get_connection_table_columns - - get_lookml_tests - - run_lookml_tests - - create_view_from_table - - list_git_branches - - get_git_branch - - create_git_branch - - switch_git_branch - - delete_git_branch +- health_pulse +- health_analyze +- health_vacuum +- dev_mode +- get_projects +- get_project_files +- get_project_file +- create_project_file +- update_project_file +- delete_project_file +- get_project_directories +- create_project_directory +- delete_project_directory +- validate_project +- get_connections +- get_connection_schemas +- get_connection_databases +- get_connection_tables +- get_connection_table_columns +- get_lookml_tests +- run_lookml_tests +- create_view_from_table +- list_git_branches +- get_git_branch +- create_git_branch +- switch_git_branch +- delete_git_branch \ No newline at end of file diff --git a/internal/prebuiltconfigs/tools/looker.yaml b/internal/prebuiltconfigs/tools/looker.yaml index d3954ffd95c9..b1fb2d3cafd5 100644 --- a/internal/prebuiltconfigs/tools/looker.yaml +++ b/internal/prebuiltconfigs/tools/looker.yaml @@ -1,4 +1,4 @@ -# Copyright 2025 Google LLC +# Copyright 2026 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,810 +12,828 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-sources: - looker-source: - kind: looker - base_url: ${LOOKER_BASE_URL} - client_id: ${LOOKER_CLIENT_ID:} - client_secret: ${LOOKER_CLIENT_SECRET:} - verify_ssl: ${LOOKER_VERIFY_SSL:true} - timeout: 600s - use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} - show_hidden_models: ${LOOKER_SHOW_HIDDEN_MODELS:true} - show_hidden_explores: ${LOOKER_SHOW_HIDDEN_EXPLORES:true} - show_hidden_fields: ${LOOKER_SHOW_HIDDEN_FIELDS:true} - +kind: source +name: looker-source +type: looker +base_url: ${LOOKER_BASE_URL} +client_id: ${LOOKER_CLIENT_ID:} +client_secret: ${LOOKER_CLIENT_SECRET:} +verify_ssl: ${LOOKER_VERIFY_SSL:true} +timeout: 600s +use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false} +show_hidden_models: ${LOOKER_SHOW_HIDDEN_MODELS:true} +show_hidden_explores: ${LOOKER_SHOW_HIDDEN_EXPLORES:true} +show_hidden_fields: ${LOOKER_SHOW_HIDDEN_FIELDS:true} +--- +kind: tool +name: get_models +type: looker-get-models +source: looker-source +description: | + This tool retrieves a list of available LookML models in the Looker instance. + LookML models define the data structure and relationships that users can query. + The output includes details like the model's `name` and `label`, which are + essential for subsequent calls to tools like `get_explores` or `query`. + + This tool takes no parameters. +--- +kind: tool +name: get_explores +type: looker-get-explores +source: looker-source +description: | + This tool retrieves a list of explores defined within a specific LookML model. + Explores represent a curated view of your data, typically joining several + tables together to allow for focused analysis on a particular subject area. + The output provides details like the explore's `name` and `label`. + + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. 
+--- +kind: tool +name: get_dimensions +type: looker-get-dimensions +source: looker-source +description: | + This tool retrieves a list of dimensions defined within a specific Looker explore. + Dimensions are non-aggregatable attributes or characteristics of your data + (e.g., product name, order date, customer city) that can be used for grouping, + filtering, or segmenting query results. + + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. + + Output Details: + - If a dimension includes a `suggestions` field, its contents are valid values + that can be used directly as filters for that dimension. + - If a `suggest_explore` and `suggest_dimension` are provided, you can query + that specified explore and dimension to retrieve a list of valid filter values. +--- +kind: tool +name: get_measures +type: looker-get-measures +source: looker-source +description: | + This tool retrieves a list of measures defined within a specific Looker explore. + Measures are aggregatable metrics (e.g., total sales, average price, count of users) + that are used for calculations and quantitative analysis in your queries. + + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. + + Output Details: + - If a measure includes a `suggestions` field, its contents are valid values + that can be used directly as filters for that measure. + - If a `suggest_explore` and `suggest_dimension` are provided, you can query + that specified explore and dimension to retrieve a list of valid filter values. +--- +kind: tool +name: get_filters +type: looker-get-filters +source: looker-source +description: | + This tool retrieves a list of "filter-only fields" defined within a specific + Looker explore. 
These are special fields defined in LookML specifically to + create user-facing filter controls that do not directly affect the `GROUP BY` + clause of the SQL query. They are often used in conjunction with liquid templating + to create dynamic queries. + + Note: Regular dimensions and measures can also be used as filters in a query. + This tool *only* returns fields explicitly defined as `filter:` in LookML. + + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. +--- +kind: tool +name: get_parameters +type: looker-get-parameters +source: looker-source +description: | + This tool retrieves a list of parameters defined within a specific Looker explore. + LookML parameters are dynamic input fields that allow users to influence query + behavior without directly modifying the underlying LookML. They are often used + with `liquid` templating to create flexible dashboards and reports, enabling + users to choose dimensions, measures, or other query components at runtime. + + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. +--- +kind: tool +name: query +type: looker-query +source: looker-source +description: | + This tool runs a query against a LookML model and returns the results in JSON format. + + Required Parameters: + - model_name: The name of the LookML model (from `get_models`). + - explore_name: The name of the explore (from `get_explores`). + - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. + + Optional Parameters: + - pivots: A list of fields to pivot the results by. These fields must also be included in the `fields` list. + - filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`. 
+ - Do not quote field names. + - Use `not null` instead of `-NULL`. + - If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'"). + - sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`). + - limit: Row limit (default 500). Use "-1" for unlimited. + - query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`). + + Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields. +--- +kind: tool +name: query_sql +type: looker-query-sql +source: looker-source +description: | + This tool generates the underlying SQL query that Looker would execute + against the database for a given set of parameters. It is useful for + understanding how Looker translates a request into SQL. + + Parameters: + All parameters for this tool are identical to those of the `query` tool. + This includes `model_name`, `explore_name`, `fields` (required), + and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`. + + Output: + The result of this tool is the raw SQL text. +--- +kind: tool +name: query_url +type: looker-query-url +source: looker-source +description: | + This tool generates a shareable URL for a Looker query, allowing users to + explore the query further within the Looker UI. It returns the generated URL, + along with the `query_id` and `slug`. + + Parameters: + All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`, + `filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool. + + Additionally, it accepts an optional `vis_config` parameter: + - vis_config (optional): A JSON object that controls the default visualization + settings for the generated query. + + vis_config Details: + The `vis_config` object supports a wide range of properties for various chart types. + Here are some notes on making visualizations. 
+ + ### Cartesian Charts (Area, Bar, Column, Line, Scatter) + + These chart types share a large number of configuration options. + + **General** + * `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`). + * `series_types`: Override the chart type for individual series. + * `show_view_names`: Display view names in labels and tooltips (`true`/`false`). + * `series_labels`: Provide custom names for series. + + **Styling & Colors** + * `colors`: An array of color values to be used for the chart series. + * `series_colors`: A mapping of series names to specific color values. + * `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.). + * `font_size`: Font size for labels (e.g., '12px'). + + **Legend** + * `hide_legend`: Show or hide the chart legend (`true`/`false`). + * `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`). + + **Axes** + * `swap_axes`: Swap the X and Y axes (`true`/`false`). + * `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`). + * `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`). + * `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`). + * `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`). + * `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`). + * `x_axis_label`, `y_axis_label`: Set a custom title for an axis. + * `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`). + * `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis. + * `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels. + * `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`). + * `y_axes`: An array of configuration objects for multiple y-axes. 
+ + **Data & Series** + * `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`). + * `ordering`: Order of series in a stack (`'none'`, etc.). + * `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`). + * `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`). + * `discontinuous_nulls`: How to render null values in line charts (`true`/`false`). + * `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`). + * `series_point_styles`: Override point styles for individual series. + * `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.). + * `show_value_labels`: Display values on data points (`true`/`false`). + * `label_value_format`: A format string for value labels. + * `show_totals_labels`: Display total labels on stacked charts (`true`/`false`). + * `totals_color`: Color for total labels. + * `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`). + * `hidden_series`: An array of series names to hide from the visualization. + + **Scatter/Bubble Specific** + * `size_by_field`: The field used to determine the size of bubbles. + * `color_by_field`: The field used to determine the color of bubbles. + * `plot_size_by_field`: Whether to display the size-by field in the legend. + * `cluster_points`: Group nearby points into clusters (`true`/`false`). + * `quadrants_enabled`: Display quadrants on the chart (`true`/`false`). + * `quadrant_properties`: Configuration for quadrant labels and colors. + * `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage. + * `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value. + + **Miscellaneous** + * `reference_lines`: Configuration for displaying reference lines. 
+ * `trend_lines`: Configuration for displaying trend lines. + * `trellis`: Configuration for creating trellis (small multiple) charts. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions. + + ### Boxplot + + * Inherits most of the Cartesian chart options. + * `type`: Must be `looker_boxplot`. + + ### Funnel + + * `type`: Must be `looker_funnel`. + * `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`). + * `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`). + * `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`). + * `labelColor`, `labelColorEnabled`: Set a custom color for labels. + * `labelOverlap`: Allow labels to overlap (`true`/`false`). + * `barColors`: An array of colors for the funnel steps. + * `color_application`: Advanced color palette controls. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. + + ### Pie / Donut + + * Pie charts must have exactly one dimension and one numerical measure. + * `type`: Must be `looker_pie`. + * `value_labels`: Where to display values (`'legend'`, `'labels'`). + * `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`). + * `start_angle`, `end_angle`: The start and end angles of the pie chart. + * `inner_radius`: The inner radius, used to create a donut chart. + * `series_colors`, `series_labels`: Override colors and labels for specific slices. + * `color_application`: Advanced color palette controls. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. + * `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration. + + ### Waterfall + + * Inherits most of the Cartesian chart options. + * `type`: Must be `looker_waterfall`. + * `up_color`: Color for positive (increasing) values. + * `down_color`: Color for negative (decreasing) values. 
+ * `total_color`: Color for the total bar. + + ### Word Cloud + + * `type`: Must be `looker_wordcloud`. + * `rotation`: Enable random word rotation (`true`/`false`). + * `colors`: An array of colors for the words. + * `color_application`: Advanced color palette controls. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. + + These are some sample vis_config settings. + + A bar chart - + {{ + "defaults_version": 1, + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "ordering": "none", + "plot_size_by_field": false, + "point_style": "none", + "show_null_labels": false, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": false, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "normal", + "totals_color": "#808080", + "trellis": "", + "type": "looker_bar", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} + + A column chart with an optional advanced_vis_config - + {{ + "advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\\u003cb\\u003e{key}\\u003c/b\\u003e\\u003cspan style=\"font-weight: normal\"\\u003e - {percentage:.2f}%\\u003c/span\\u003e', }, showInLegend: false, }, }, series: [], }", + "colors": [ + "grey" + ], + "defaults_version": 1, + "hidden_fields": [], + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "note_display": "below", + "note_state": "collapsed", + "note_text": "Unsold inventory only", + 
"ordering": "none", + "plot_size_by_field": false, + "point_style": "none", + "series_colors": {}, + "show_null_labels": false, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": true, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "normal", + "totals_color": "#808080", + "trellis": "", + "type": "looker_column", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axes": [], + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} + + A line chart - + {{ + "defaults_version": 1, + "hidden_pivots": {}, + "hidden_series": [], + "interpolation": "linear", + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "plot_size_by_field": false, + "point_style": "none", + "series_types": {}, + "show_null_points": true, + "show_value_labels": false, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "", + "trellis": "", + "type": "looker_line", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5 + }} + + An area chart - + {{ + "defaults_version": 1, + "interpolation": "linear", + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "plot_size_by_field": false, + "point_style": "none", + "series_types": {}, + "show_null_points": true, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": false, + 
"show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "normal", + "totals_color": "#808080", + "trellis": "", + "type": "looker_area", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} + + A scatter plot - + {{ + "cluster_points": false, + "custom_quadrant_point_x": 5, + "custom_quadrant_point_y": 5, + "custom_value_label_column": "", + "custom_x_column": "", + "custom_y_column": "", + "defaults_version": 1, + "hidden_fields": [], + "hidden_pivots": {}, + "hidden_points_if_no": [], + "hidden_series": [], + "interpolation": "linear", + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "limit_displayed_rows_values": { + "first_last": "first", + "num_rows": 0, + "show_hide": "hide" + }, + "plot_size_by_field": false, + "point_style": "circle", + "quadrant_properties": { + "0": { + "color": "", + "label": "Quadrant 1" + }, + "1": { + "color": "", + "label": "Quadrant 2" + }, + "2": { + "color": "", + "label": "Quadrant 3" + }, + "3": { + "color": "", + "label": "Quadrant 4" + } + }, + "quadrants_enabled": false, + "series_labels": {}, + "series_types": {}, + "show_null_points": false, + "show_value_labels": false, + "show_view_names": true, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "size_by_field": "roi", + "stacking": "normal", + "swap_axes": true, + "trellis": "", + "type": "looker_scatter", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axes": [ + { + "label": "", + "orientation": "bottom", + "series": [ + { + "axisId": 
"Channel_0 - average_of_roi_first", + "id": "Channel_0 - average_of_roi_first", + "name": "Channel_0" + }, + { + "axisId": "Channel_1 - average_of_roi_first", + "id": "Channel_1 - average_of_roi_first", + "name": "Channel_1" + }, + { + "axisId": "Channel_2 - average_of_roi_first", + "id": "Channel_2 - average_of_roi_first", + "name": "Channel_2" + }, + { + "axisId": "Channel_3 - average_of_roi_first", + "id": "Channel_3 - average_of_roi_first", + "name": "Channel_3" + }, + { + "axisId": "Channel_4 - average_of_roi_first", + "id": "Channel_4 - average_of_roi_first", + "name": "Channel_4" + } + ], + "showLabels": true, + "showValues": true, + "tickDensity": "custom", + "tickDensityCustom": 100, + "type": "linear", + "unpinAxis": false + } + ], + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} + + A single record visualization - + {{ + "defaults_version": 1, + "show_view_names": false, + "type": "looker_single_record" + }} + + A single value visualization - + {{ + "comparison_reverse_colors": false, + "comparison_type": "value", "conditional_formatting_include_nulls": false, "conditional_formatting_include_totals": false, + "custom_color": "#1A73E8", + "custom_color_enabled": true, + "defaults_version": 1, + "enable_conditional_formatting": false, + "series_types": {}, + "show_comparison": false, + "show_comparison_label": true, + "show_single_value_title": true, + "single_value_title": "Total Clicks", + "type": "single_value" + }} + + A Pie chart - + {{ + "defaults_version": 1, + "label_density": 25, + "label_type": "labPer", + "legend_position": "center", + "limit_displayed_rows": false, + "ordering": "none", + "plot_size_by_field": false, + "point_style": "none", + "series_types": {}, + "show_null_labels": false, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": false, + 
"show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "", + "totals_color": "#808080", + "trellis": "", + "type": "looker_pie", + "value_labels": "legend", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5 + }} + + The result is a JSON object with the id, slug, the url, and + the long_url. +--- +kind: tool +name: get_looks +type: looker-get-looks +source: looker-source +description: | + This tool searches for saved Looks (pre-defined queries and visualizations) + in a Looker instance. It returns a list of JSON objects, each representing a Look. + + Search Parameters: + - title (optional): Filter by Look title (supports wildcards). + - folder_id (optional): Filter by the ID of the folder where the Look is saved. + - user_id (optional): Filter by the ID of the user who created the Look. + - description (optional): Filter by description content (supports wildcards). + - id (optional): Filter by specific Look ID. + - limit (optional): Maximum number of results to return. Defaults to a system limit. + - offset (optional): Starting point for pagination. + + String Search Behavior: + - Case-insensitive matching. + - Supports SQL LIKE pattern match wildcards: + - `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig") + - `_`: Matches any single character. (e.g., `"D_m%"` matches "Damage", "dump") + - Special expressions for null checks: + - `"IS NULL"`: Matches Looks where the field is null. + - `"NOT NULL"`: Excludes Looks where the field is null. 
+--- +kind: tool +name: run_look +type: looker-run-look +source: looker-source +description: | + This tool executes the query associated with a saved Look and + returns the resulting data in a JSON structure. + + Parameters: + - look_id (required): The unique identifier of the Look to run, + typically obtained from the `get_looks` tool. + + Output: + The query results are returned as a JSON object. +--- +kind: tool +name: make_look +type: looker-make-look +source: looker-source +description: | + This tool creates a new Look (saved query with visualization) in Looker. + The Look will be saved in the user's personal folder, and its name must be unique. + + Required Parameters: + - title: A unique title for the new Look. + - description: A brief description of the Look's purpose. + - model_name: The name of the LookML model (from `get_models`). + - explore_name: The name of the explore (from `get_explores`). + - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. + + Optional Parameters: + - pivots, filters, sorts, limit, query_timezone: These parameters are identical + to those described for the `query` tool. + - vis_config: A JSON object defining the visualization settings for the Look. + The structure and options are the same as for the `query_url` tool's `vis_config`. + + Output: + A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`. +--- +kind: tool +name: get_dashboards +type: looker-get-dashboards +source: looker-source +description: | + This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard. + + Search Parameters: + - title (optional): Filter by dashboard title (supports wildcards). + - folder_id (optional): Filter by the ID of the folder where the dashboard is saved. + - user_id (optional): Filter by the ID of the user who created the dashboard. 
+ - description (optional): Filter by description content (supports wildcards). + - id (optional): Filter by specific dashboard ID. + - limit (optional): Maximum number of results to return. Defaults to a system limit. + - offset (optional): Starting point for pagination. + + String Search Behavior: + - Case-insensitive matching. + - Supports SQL LIKE pattern match wildcards: + - `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance") + - `_`: Matches any single character. (e.g., `"s_les"` matches "sales") + - Special expressions for null checks: + - `"IS NULL"`: Matches dashboards where the field is null. + - `"NOT NULL"`: Excludes dashboards where the field is null. +--- +kind: tool +name: run_dashboard +type: looker-run-dashboard +source: looker-source +description: | + This tool executes the queries associated with each tile in a specified dashboard + and returns the aggregated data in a JSON structure. + + Parameters: + - dashboard_id (required): The unique identifier of the dashboard to run, + typically obtained from the `get_dashboards` tool. + + Output: + The data from all dashboard tiles is returned as a JSON object. +--- +kind: tool +name: make_dashboard +type: looker-make-dashboard +source: looker-source +description: | + This tool creates a new, empty dashboard in Looker. Dashboards are stored + in the user's personal folder, and the dashboard name must be unique. + After creation, use `add_dashboard_filter` to add filters and + `add_dashboard_element` to add content tiles. + + Required Parameters: + - title (required): A unique title for the new dashboard. + - description (required): A brief description of the dashboard's purpose. + + Output: + A JSON object containing a link (`url`) to the newly created dashboard and + its unique `id`. This `dashboard_id` is crucial for subsequent calls to + `add_dashboard_filter` and `add_dashboard_element`. 
+--- +kind: tool +name: add_dashboard_element +type: looker-add-dashboard-element +source: looker-source +description: | + This tool creates a new tile (element) within an existing Looker dashboard. + Tiles are added in the order this tool is called for a given `dashboard_id`. + + CRITICAL ORDER OF OPERATIONS: + 1. Create the dashboard using `make_dashboard`. + 2. Add any dashboard-level filters using `add_dashboard_filter`. + 3. Then, add elements (tiles) using this tool. + + Required Parameters: + - dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`. + - model_name, explore_name, fields: These query parameters are inherited + from the `query` tool and are required to define the data for the tile. + + Optional Parameters: + - title: An optional title for the dashboard tile. + - pivots, filters, sorts, limit, query_timezone: These query parameters are + inherited from the `query` tool and can be used to customize the tile's query. + - vis_config: A JSON object defining the visualization settings for this tile. + The structure and options are the same as for the `query_url` tool's `vis_config`. + + Connecting to Dashboard Filters: + A dashboard element can be connected to one or more dashboard filters (created with + `add_dashboard_filter`). To do this, specify the `name` of the dashboard filter + and the `field` from the element's query that the filter should apply to. + The format for specifying the field is `view_name.field_name`. +--- +kind: tool +name: add_dashboard_filter +type: looker-add-dashboard-filter +source: looker-source +description: | + This tool adds a filter to a Looker dashboard. + + CRITICAL ORDER OF OPERATIONS: + 1. Create a dashboard using `make_dashboard`. + 2. Add all desired filters using this tool (`add_dashboard_filter`). + 3. Finally, add dashboard elements (tiles) using `add_dashboard_element`. + + Parameters: + - dashboard_id (required): The ID from `make_dashboard`. 
+ - name (required): A unique internal identifier for the filter. You will use this `name` later in `add_dashboard_element` to bind tiles to this filter. + - title (required): The label displayed to users in the UI. + - filter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`. + - default_value (optional): The initial value for the filter. + + Field Filters (`filter_type: field_filter`): + If creating a field filter, you must also provide: + - model + - explore + - dimension + The filter will inherit suggestions and type information from this LookML field. +--- +kind: tool +name: generate_embed_url +type: looker-generate-embed-url +source: looker-source +description: | + This tool generates a signed, private embed URL for specific Looker content, + allowing users to access it directly. + + Parameters: + - type (required): The type of content to embed. Common values include: + - `dashboards` + - `looks` + - `explore` + - id (required): The unique identifier for the content. + - For dashboards and looks, use the numeric ID (e.g., "123"). + - For explores, use the format "model_name/explore_name". +--- +kind: toolset +name: looker_tools tools: - get_models: - kind: looker-get-models - source: looker-source - description: | - This tool retrieves a list of available LookML models in the Looker instance. - LookML models define the data structure and relationships that users can query. - The output includes details like the model's `name` and `label`, which are - essential for subsequent calls to tools like `get_explores` or `query`. - - This tool takes no parameters. - - get_explores: - kind: looker-get-explores - source: looker-source - description: | - This tool retrieves a list of explores defined within a specific LookML model. - Explores represent a curated view of your data, typically joining several - tables together to allow for focused analysis on a particular subject area. 
- The output provides details like the explore's `name` and `label`. - - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - get_dimensions: - kind: looker-get-dimensions - source: looker-source - description: | - This tool retrieves a list of dimensions defined within a specific Looker explore. - Dimensions are non-aggregatable attributes or characteristics of your data - (e.g., product name, order date, customer city) that can be used for grouping, - filtering, or segmenting query results. - - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. - - Output Details: - - If a dimension includes a `suggestions` field, its contents are valid values - that can be used directly as filters for that dimension. - - If a `suggest_explore` and `suggest_dimension` are provided, you can query - that specified explore and dimension to retrieve a list of valid filter values. - - get_measures: - kind: looker-get-measures - source: looker-source - description: | - This tool retrieves a list of measures defined within a specific Looker explore. - Measures are aggregatable metrics (e.g., total sales, average price, count of users) - that are used for calculations and quantitative analysis in your queries. - - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. - - Output Details: - - If a measure includes a `suggestions` field, its contents are valid values - that can be used directly as filters for that measure. - - If a `suggest_explore` and `suggest_dimension` are provided, you can query - that specified explore and dimension to retrieve a list of valid filter values. 
- - get_filters: - kind: looker-get-filters - source: looker-source - description: | - This tool retrieves a list of "filter-only fields" defined within a specific - Looker explore. These are special fields defined in LookML specifically to - create user-facing filter controls that do not directly affect the `GROUP BY` - clause of the SQL query. They are often used in conjunction with liquid templating - to create dynamic queries. - - Note: Regular dimensions and measures can also be used as filters in a query. - This tool *only* returns fields explicitly defined as `filter:` in LookML. - - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. - - get_parameters: - kind: looker-get-parameters - source: looker-source - description: | - This tool retrieves a list of parameters defined within a specific Looker explore. - LookML parameters are dynamic input fields that allow users to influence query - behavior without directly modifying the underlying LookML. They are often used - with `liquid` templating to create flexible dashboards and reports, enabling - users to choose dimensions, measures, or other query components at runtime. - - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. - - query: - kind: looker-query - source: looker-source - description: | - This tool runs a query against a LookML model and returns the results in JSON format. - - Required Parameters: - - model_name: The name of the LookML model (from `get_models`). - - explore_name: The name of the explore (from `get_explores`). - - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. - - Optional Parameters: - - pivots: A list of fields to pivot the results by. 
These fields must also be included in the `fields` list. - - filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`. - - Do not quote field names. - - Use `not null` instead of `-NULL`. - - If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'"). - - sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`). - - limit: Row limit (default 500). Use "-1" for unlimited. - - query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`). - - Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields. - - query_sql: - kind: looker-query-sql - source: looker-source - description: | - This tool generates the underlying SQL query that Looker would execute - against the database for a given set of parameters. It is useful for - understanding how Looker translates a request into SQL. - - Parameters: - All parameters for this tool are identical to those of the `query` tool. - This includes `model_name`, `explore_name`, `fields` (required), - and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`. - - Output: - The result of this tool is the raw SQL text. - - query_url: - kind: looker-query-url - source: looker-source - description: | - This tool generates a shareable URL for a Looker query, allowing users to - explore the query further within the Looker UI. It returns the generated URL, - along with the `query_id` and `slug`. - - Parameters: - All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`, - `filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool. - - Additionally, it accepts an optional `vis_config` parameter: - - vis_config (optional): A JSON object that controls the default visualization - settings for the generated query. 
- - vis_config Details: - The `vis_config` object supports a wide range of properties for various chart types. - Here are some notes on making visualizations. - - ### Cartesian Charts (Area, Bar, Column, Line, Scatter) - - These chart types share a large number of configuration options. - - **General** - * `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`). - * `series_types`: Override the chart type for individual series. - * `show_view_names`: Display view names in labels and tooltips (`true`/`false`). - * `series_labels`: Provide custom names for series. - - **Styling & Colors** - * `colors`: An array of color values to be used for the chart series. - * `series_colors`: A mapping of series names to specific color values. - * `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.). - * `font_size`: Font size for labels (e.g., '12px'). - - **Legend** - * `hide_legend`: Show or hide the chart legend (`true`/`false`). - * `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`). - - **Axes** - * `swap_axes`: Swap the X and Y axes (`true`/`false`). - * `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`). - * `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`). - * `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`). - * `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`). - * `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`). - * `x_axis_label`, `y_axis_label`: Set a custom title for an axis. - * `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`). - * `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis. - * `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels. 
- * `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`). - * `y_axes`: An array of configuration objects for multiple y-axes. - - **Data & Series** - * `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`). - * `ordering`: Order of series in a stack (`'none'`, etc.). - * `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`). - * `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`). - * `discontinuous_nulls`: How to render null values in line charts (`true`/`false`). - * `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`). - * `series_point_styles`: Override point styles for individual series. - * `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.). - * `show_value_labels`: Display values on data points (`true`/`false`). - * `label_value_format`: A format string for value labels. - * `show_totals_labels`: Display total labels on stacked charts (`true`/`false`). - * `totals_color`: Color for total labels. - * `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`). - * `hidden_series`: An array of series names to hide from the visualization. - - **Scatter/Bubble Specific** - * `size_by_field`: The field used to determine the size of bubbles. - * `color_by_field`: The field used to determine the color of bubbles. - * `plot_size_by_field`: Whether to display the size-by field in the legend. - * `cluster_points`: Group nearby points into clusters (`true`/`false`). - * `quadrants_enabled`: Display quadrants on the chart (`true`/`false`). - * `quadrant_properties`: Configuration for quadrant labels and colors. - * `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage. 
- * `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value. - - **Miscellaneous** - * `reference_lines`: Configuration for displaying reference lines. - * `trend_lines`: Configuration for displaying trend lines. - * `trellis`: Configuration for creating trellis (small multiple) charts. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions. - - ### Boxplot - - * Inherits most of the Cartesian chart options. - * `type`: Must be `looker_boxplot`. - - ### Funnel - - * `type`: Must be `looker_funnel`. - * `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`). - * `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`). - * `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`). - * `labelColor`, `labelColorEnabled`: Set a custom color for labels. - * `labelOverlap`: Allow labels to overlap (`true`/`false`). - * `barColors`: An array of colors for the funnel steps. - * `color_application`: Advanced color palette controls. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. - - ### Pie / Donut - - * Pie charts must have exactly one dimension and one numerical measure. - * `type`: Must be `looker_pie`. - * `value_labels`: Where to display values (`'legend'`, `'labels'`). - * `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`). - * `start_angle`, `end_angle`: The start and end angles of the pie chart. - * `inner_radius`: The inner radius, used to create a donut chart. - * `series_colors`, `series_labels`: Override colors and labels for specific slices. - * `color_application`: Advanced color palette controls. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. - * `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration. 
- - ### Waterfall - - * Inherits most of the Cartesian chart options. - * `type`: Must be `looker_waterfall`. - * `up_color`: Color for positive (increasing) values. - * `down_color`: Color for negative (decreasing) values. - * `total_color`: Color for the total bar. - - ### Word Cloud - - * `type`: Must be `looker_wordcloud`. - * `rotation`: Enable random word rotation (`true`/`false`). - * `colors`: An array of colors for the words. - * `color_application`: Advanced color palette controls. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. - - These are some sample vis_config settings. - - A bar chart - - {{ - "defaults_version": 1, - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "ordering": "none", - "plot_size_by_field": false, - "point_style": "none", - "show_null_labels": false, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "normal", - "totals_color": "#808080", - "trellis": "", - "type": "looker_bar", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} - - A column chart with an option advanced_vis_config - - {{ - "advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }", - "colors": [ - "grey" - ], - "defaults_version": 
1, - "hidden_fields": [], - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "note_display": "below", - "note_state": "collapsed", - "note_text": "Unsold inventory only", - "ordering": "none", - "plot_size_by_field": false, - "point_style": "none", - "series_colors": {}, - "show_null_labels": false, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": true, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "normal", - "totals_color": "#808080", - "trellis": "", - "type": "looker_column", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axes": [], - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} - - A line chart - - {{ - "defaults_version": 1, - "hidden_pivots": {}, - "hidden_series": [], - "interpolation": "linear", - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "plot_size_by_field": false, - "point_style": "none", - "series_types": {}, - "show_null_points": true, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "", - "trellis": "", - "type": "looker_line", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5 - }} - - An area chart - - {{ - "defaults_version": 1, - "interpolation": "linear", - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - 
"plot_size_by_field": false, - "point_style": "none", - "series_types": {}, - "show_null_points": true, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "normal", - "totals_color": "#808080", - "trellis": "", - "type": "looker_area", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} - - A scatter plot - - {{ - "cluster_points": false, - "custom_quadrant_point_x": 5, - "custom_quadrant_point_y": 5, - "custom_value_label_column": "", - "custom_x_column": "", - "custom_y_column": "", - "defaults_version": 1, - "hidden_fields": [], - "hidden_pivots": {}, - "hidden_points_if_no": [], - "hidden_series": [], - "interpolation": "linear", - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "limit_displayed_rows_values": { - "first_last": "first", - "num_rows": 0, - "show_hide": "hide" - }, - "plot_size_by_field": false, - "point_style": "circle", - "quadrant_properties": { - "0": { - "color": "", - "label": "Quadrant 1" - }, - "1": { - "color": "", - "label": "Quadrant 2" - }, - "2": { - "color": "", - "label": "Quadrant 3" - }, - "3": { - "color": "", - "label": "Quadrant 4" - } - }, - "quadrants_enabled": false, - "series_labels": {}, - "series_types": {}, - "show_null_points": false, - "show_value_labels": false, - "show_view_names": true, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "size_by_field": "roi", - "stacking": "normal", - "swap_axes": true, - "trellis": "", - "type": "looker_scatter", - 
"x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axes": [ - { - "label": "", - "orientation": "bottom", - "series": [ - { - "axisId": "Channel_0 - average_of_roi_first", - "id": "Channel_0 - average_of_roi_first", - "name": "Channel_0" - }, - { - "axisId": "Channel_1 - average_of_roi_first", - "id": "Channel_1 - average_of_roi_first", - "name": "Channel_1" - }, - { - "axisId": "Channel_2 - average_of_roi_first", - "id": "Channel_2 - average_of_roi_first", - "name": "Channel_2" - }, - { - "axisId": "Channel_3 - average_of_roi_first", - "id": "Channel_3 - average_of_roi_first", - "name": "Channel_3" - }, - { - "axisId": "Channel_4 - average_of_roi_first", - "id": "Channel_4 - average_of_roi_first", - "name": "Channel_4" - } - ], - "showLabels": true, - "showValues": true, - "tickDensity": "custom", - "tickDensityCustom": 100, - "type": "linear", - "unpinAxis": false - } - ], - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} - - A single record visualization - - {{ - "defaults_version": 1, - "show_view_names": false, - "type": "looker_single_record" - }} - - A single value visualization - - {{ - "comparison_reverse_colors": false, - "comparison_type": "value", "conditional_formatting_include_nulls": false, "conditional_formatting_include_totals": false, - "custom_color": "#1A73E8", - "custom_color_enabled": true, - "defaults_version": 1, - "enable_conditional_formatting": false, - "series_types": {}, - "show_comparison": false, - "show_comparison_label": true, - "show_single_value_title": true, - "single_value_title": "Total Clicks", - "type": "single_value" - }} - - A Pie chart - - {{ - "defaults_version": 1, - "label_density": 25, - "label_type": "labPer", - "legend_position": "center", - "limit_displayed_rows": false, - "ordering": "none", - 
"plot_size_by_field": false, - "point_style": "none", - "series_types": {}, - "show_null_labels": false, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "", - "totals_color": "#808080", - "trellis": "", - "type": "looker_pie", - "value_labels": "legend", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5 - }} - - The result is a JSON object with the id, slug, the url, and - the long_url. - - get_looks: - kind: looker-get-looks - source: looker-source - description: | - This tool searches for saved Looks (pre-defined queries and visualizations) - in a Looker instance. It returns a list of JSON objects, each representing a Look. - - Search Parameters: - - title (optional): Filter by Look title (supports wildcards). - - folder_id (optional): Filter by the ID of the folder where the Look is saved. - - user_id (optional): Filter by the ID of the user who created the Look. - - description (optional): Filter by description content (supports wildcards). - - id (optional): Filter by specific Look ID. - - limit (optional): Maximum number of results to return. Defaults to a system limit. - - offset (optional): Starting point for pagination. - - String Search Behavior: - - Case-insensitive matching. - - Supports SQL LIKE pattern match wildcards: - - `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig") - - `_`: Matches any single character. (e.g., `"D_m%"` matches "Damage", "dump") - - Special expressions for null checks: - - `"IS NULL"`: Matches Looks where the field is null. 
- - `"NOT NULL"`: Excludes Looks where the field is null. - - run_look: - kind: looker-run-look - source: looker-source - description: | - This tool executes the query associated with a saved Look and - returns the resulting data in a JSON structure. - - Parameters: - - look_id (required): The unique identifier of the Look to run, - typically obtained from the `get_looks` tool. - - Output: - The query results are returned as a JSON object. - - make_look: - kind: looker-make-look - source: looker-source - description: | - This tool creates a new Look (saved query with visualization) in Looker. - The Look will be saved in the user's personal folder, and its name must be unique. - - Required Parameters: - - title: A unique title for the new Look. - - description: A brief description of the Look's purpose. - - model_name: The name of the LookML model (from `get_models`). - - explore_name: The name of the explore (from `get_explores`). - - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. - - Optional Parameters: - - pivots, filters, sorts, limit, query_timezone: These parameters are identical - to those described for the `query` tool. - - vis_config: A JSON object defining the visualization settings for the Look. - The structure and options are the same as for the `query_url` tool's `vis_config`. - - Output: - A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`. - - get_dashboards: - kind: looker-get-dashboards - source: looker-source - description: | - This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard. - - Search Parameters: - - title (optional): Filter by dashboard title (supports wildcards). - - folder_id (optional): Filter by the ID of the folder where the dashboard is saved. - - user_id (optional): Filter by the ID of the user who created the dashboard. 
- - description (optional): Filter by description content (supports wildcards). - - id (optional): Filter by specific dashboard ID. - - limit (optional): Maximum number of results to return. Defaults to a system limit. - - offset (optional): Starting point for pagination. - - String Search Behavior: - - Case-insensitive matching. - - Supports SQL LIKE pattern match wildcards: - - `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance") - - `_`: Matches any single character. (e.g., `"s_les"` matches "sales") - - Special expressions for null checks: - - `"IS NULL"`: Matches dashboards where the field is null. - - `"NOT NULL"`: Excludes dashboards where the field is null. - - run_dashboard: - kind: looker-run-dashboard - source: looker-source - description: | - This tool executes the queries associated with each tile in a specified dashboard - and returns the aggregated data in a JSON structure. - - Parameters: - - dashboard_id (required): The unique identifier of the dashboard to run, - typically obtained from the `get_dashboards` tool. - - Output: - The data from all dashboard tiles is returned as a JSON object. - - make_dashboard: - kind: looker-make-dashboard - source: looker-source - description: | - This tool creates a new, empty dashboard in Looker. Dashboards are stored - in the user's personal folder, and the dashboard name must be unique. - After creation, use `add_dashboard_filter` to add filters and - `add_dashboard_element` to add content tiles. - - Required Parameters: - - title (required): A unique title for the new dashboard. - - description (required): A brief description of the dashboard's purpose. - - Output: - A JSON object containing a link (`url`) to the newly created dashboard and - its unique `id`. This `dashboard_id` is crucial for subsequent calls to - `add_dashboard_filter` and `add_dashboard_element`. 
- - add_dashboard_element: - kind: looker-add-dashboard-element - source: looker-source - description: | - This tool creates a new tile (element) within an existing Looker dashboard. - Tiles are added in the order this tool is called for a given `dashboard_id`. - - CRITICAL ORDER OF OPERATIONS: - 1. Create the dashboard using `make_dashboard`. - 2. Add any dashboard-level filters using `add_dashboard_filter`. - 3. Then, add elements (tiles) using this tool. - - Required Parameters: - - dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`. - - model_name, explore_name, fields: These query parameters are inherited - from the `query` tool and are required to define the data for the tile. - - Optional Parameters: - - title: An optional title for the dashboard tile. - - pivots, filters, sorts, limit, query_timezone: These query parameters are - inherited from the `query` tool and can be used to customize the tile's query. - - vis_config: A JSON object defining the visualization settings for this tile. - The structure and options are the same as for the `query_url` tool's `vis_config`. - - Connecting to Dashboard Filters: - A dashboard element can be connected to one or more dashboard filters (created with - `add_dashboard_filter`). To do this, specify the `name` of the dashboard filter - and the `field` from the element's query that the filter should apply to. - The format for specifying the field is `view_name.field_name`. - - add_dashboard_filter: - kind: looker-add-dashboard-filter - source: looker-source - description: | - This tool adds a filter to a Looker dashboard. - - CRITICAL ORDER OF OPERATIONS: - 1. Create a dashboard using `make_dashboard`. - 2. Add all desired filters using this tool (`add_dashboard_filter`). - 3. Finally, add dashboard elements (tiles) using `add_dashboard_element`. - - Parameters: - - dashboard_id (required): The ID from `make_dashboard`. - - name (required): A unique internal identifier for the filter. 
You will use this `name` later in `add_dashboard_element` to bind tiles to this filter. - - title (required): The label displayed to users in the UI. - - flter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`. - - default_value (optional): The initial value for the filter. - - Field Filters (`flter_type: field_filter`): - If creating a field filter, you must also provide: - - model - - explore - - dimension - The filter will inherit suggestions and type information from this LookML field. - - generate_embed_url: - kind: looker-generate-embed-url - source: looker-source - description: | - This tool generates a signed, private embed URL for specific Looker content, - allowing users to access it directly. - - Parameters: - - type (required): The type of content to embed. Common values include: - - `dashboards` - - `looks` - - `explore` - - id (required): The unique identifier for the content. - - For dashboards and looks, use the numeric ID (e.g., "123"). - - For explores, use the format "model_name/explore_name". 
- -toolsets: - looker_tools: - - get_models - - get_explores - - get_dimensions - - get_measures - - get_filters - - get_parameters - - query - - query_sql - - query_url - - get_looks - - run_look - - make_look - - get_dashboards - - run_dashboard - - make_dashboard - - add_dashboard_element - - add_dashboard_filter - - generate_embed_url \ No newline at end of file +- get_models +- get_explores +- get_dimensions +- get_measures +- get_filters +- get_parameters +- query +- query_sql +- query_url +- get_looks +- run_look +- make_look +- get_dashboards +- run_dashboard +- make_dashboard +- add_dashboard_element +- add_dashboard_filter +- generate_embed_url \ No newline at end of file diff --git a/internal/tools/cloudsqlpg/vectorassistapplyspec/vectorassistapplyspec.go b/internal/tools/cloudsqlpg/vectorassistapplyspec/vectorassistapplyspec.go new file mode 100644 index 000000000000..f1e34b8e6acd --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistapplyspec/vectorassistapplyspec.go @@ -0,0 +1,160 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vectorassistapplyspec + +import ( + "context" + "fmt" + "net/http" + + yaml "github.com/goccy/go-yaml" + "github.com/googleapis/mcp-toolbox/internal/embeddingmodels" + "github.com/googleapis/mcp-toolbox/internal/sources" + "github.com/googleapis/mcp-toolbox/internal/tools" + "github.com/googleapis/mcp-toolbox/internal/util" + "github.com/googleapis/mcp-toolbox/internal/util/parameters" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +const resourceType string = "vector-assist-apply-spec" + +const applySpecQuery = ` + SELECT * FROM vector_assist.apply_spec(spec_id => @spec_id::TEXT, table_name => @table_name::TEXT, + column_name => @column_name::TEXT, schema_name => @schema_name::TEXT); +` + +func init() { + if !tools.Register(resourceType, newConfig) { + panic(fmt.Sprintf("tool type %q already registered", resourceType)) + } +} + +func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) { + actual := Config{Name: name} + if err := decoder.DecodeContext(ctx, &actual); err != nil { + return nil, err + } + return actual, nil +} + +type compatibleSource interface { + PostgresPool() *pgxpool.Pool + RunSQL(context.Context, string, []any) (any, error) +} + +type Config struct { + Name string `yaml:"name" validate:"required"` + Type string `yaml:"type" validate:"required"` + Source string `yaml:"source" validate:"required"` + Description string `yaml:"description"` + AuthRequired []string `yaml:"authRequired"` +} + +var _ tools.ToolConfig = Config{} + +func (cfg Config) ToolConfigType() string { + return resourceType +} + +func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) { + // parameters are marked required/optional based on the vector assist function definitions + allParameters := parameters.Parameters{ + parameters.NewStringParameterWithRequired("spec_id", "The unique ID of the vector specification to apply.", false), + 
parameters.NewStringParameterWithRequired("table_name", "The name of the table to apply the vector specification to (in case of a single spec defined on the table).", false), + parameters.NewStringParameterWithRequired("column_name", "The text_column_name or vector_column_name of the spec to identify the exact spec in case there are multiple specs defined on a table.", false), + parameters.NewStringParameterWithRequired("schema_name", "The schema name for the table.", false), + } + paramManifest := allParameters.Manifest() + + if cfg.Description == "" { + cfg.Description = "This tool automatically executes all the SQL recommendations associated with a specific vector specification (spec_id) or table. It runs the necessary commands in the correct sequence to provision the workload, marking each step as applied once successful. Use this tool when the user has reviewed the generated recommendations from a defined (or modified) spec and is ready to apply the changes directly to their database instance to finalize the vector search setup. This tool can be used as a follow-up action after invoking the 'define_spec' or 'modify_spec' tool." 
+ } + + mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters, nil) + + return Tool{ + Config: cfg, + allParams: allParameters, + manifest: tools.Manifest{ + Description: cfg.Description, + Parameters: paramManifest, + AuthRequired: cfg.AuthRequired, + }, + mcpManifest: mcpManifest, + }, nil +} + +var _ tools.Tool = Tool{} + +type Tool struct { + Config + allParams parameters.Parameters `yaml:"allParams"` + manifest tools.Manifest + mcpManifest tools.McpManifest +} + +func (t Tool) ToConfig() tools.ToolConfig { + return t.Config +} + +func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, util.ToolboxError) { + source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Type) + if err != nil { + return nil, util.NewClientServerError("source used is not compatible with the tool", http.StatusInternalServerError, err) + } + paramsMap := params.AsMap() + + // Convert our parsed parameters directly into pgx.NamedArgs + namedArgs := pgx.NamedArgs{} + for key, value := range paramsMap { + namedArgs[key] = value + } + + // As long as source.RunSQL unwraps args into pgx.Query(ctx, sql, args...), pgx handles the mapping of @param to the named parameter. 
+ resp, err := source.RunSQL(ctx, applySpecQuery, []any{namedArgs}) + if err != nil { + return nil, util.ProcessGeneralError(err) + } + return resp, nil +} + +func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) { + return parameters.EmbedParams(ctx, t.allParams, paramValues, embeddingModelsMap, nil) +} + +func (t Tool) Manifest() tools.Manifest { + return t.manifest +} + +func (t Tool) McpManifest() tools.McpManifest { + return t.mcpManifest +} + +func (t Tool) Authorized(verifiedAuthServices []string) bool { + return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices) +} + +func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) { + return false, nil +} + +func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) { + return "Authorization", nil +} + +func (t Tool) GetParameters() parameters.Parameters { + return t.allParams +} diff --git a/internal/tools/cloudsqlpg/vectorassistapplyspec/vectorassistapplyspec_test.go b/internal/tools/cloudsqlpg/vectorassistapplyspec/vectorassistapplyspec_test.go new file mode 100644 index 000000000000..a9d932b8f83e --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistapplyspec/vectorassistapplyspec_test.go @@ -0,0 +1,67 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vectorassistapplyspec_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/googleapis/mcp-toolbox/internal/server" + "github.com/googleapis/mcp-toolbox/internal/testutils" + "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistapplyspec" +) + +func TestParseFromYaml(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + desc string + in string + want server.ToolConfigs + }{ + { + desc: "basic example", + in: ` + kind: tool + name: apply-spec-tool + type: vector-assist-apply-spec + description: a test description + source: a-source + `, + want: server.ToolConfigs{ + "apply-spec-tool": vectorassistapplyspec.Config{ + Name: "apply-spec-tool", + Type: "vector-assist-apply-spec", + Source: "a-source", + Description: "a test description", + AuthRequired: []string{}, + }, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + _, _, _, got, _, _, err := server.UnmarshalResourceConfig(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("unable to unmarshal: %s", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Fatalf("incorrect parse: diff %v", diff) + } + }) + } +} diff --git a/internal/tools/cloudsqlpg/vectorassistdefinespec/vectorassistdefinespec.go b/internal/tools/cloudsqlpg/vectorassistdefinespec/vectorassistdefinespec.go new file mode 100644 index 000000000000..5c56090a9a71 --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistdefinespec/vectorassistdefinespec.go @@ -0,0 +1,180 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vectorassistdefinespec + +import ( + "context" + "fmt" + "net/http" + + yaml "github.com/goccy/go-yaml" + "github.com/googleapis/mcp-toolbox/internal/embeddingmodels" + "github.com/googleapis/mcp-toolbox/internal/sources" + "github.com/googleapis/mcp-toolbox/internal/tools" + "github.com/googleapis/mcp-toolbox/internal/util" + "github.com/googleapis/mcp-toolbox/internal/util/parameters" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +const resourceType string = "vector-assist-define-spec" + +const defineSpecQuery = ` + SELECT recommendation_id, vector_spec_id, table_name, schema_name, query, recommendation, applied, modified, created_at + FROM vector_assist.define_spec(table_name => @table_name::TEXT, schema_name => @schema_name::TEXT, spec_id => @spec_id::TEXT, + vector_column_name => @vector_column_name::TEXT, text_column_name => @text_column_name::TEXT, + vector_index_type => @vector_index_type::TEXT, embeddings_available => @embeddings_available::BOOLEAN, + num_vectors => @num_vectors::INTEGER, dimensionality => @dimensionality::INTEGER, + embedding_model => @embedding_model::TEXT, prefilter_column_names => @prefilter_column_names, + distance_func => @distance_func::TEXT, quantization => @quantization::TEXT, + memory_budget_kb => @memory_budget_kb::INTEGER, target_recall => @target_recall::FLOAT, + target_top_k => @target_top_k::INTEGER, tune_vector_index => @tune_vector_index::BOOLEAN); +` + +func init() { + if !tools.Register(resourceType, newConfig) { + panic(fmt.Sprintf("tool type %q already registered", 
resourceType)) + } +} + +func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) { + actual := Config{Name: name} + if err := decoder.DecodeContext(ctx, &actual); err != nil { + return nil, err + } + return actual, nil +} + +type compatibleSource interface { + PostgresPool() *pgxpool.Pool + RunSQL(context.Context, string, []any) (any, error) +} + +type Config struct { + Name string `yaml:"name" validate:"required"` + Type string `yaml:"type" validate:"required"` + Source string `yaml:"source" validate:"required"` + Description string `yaml:"description"` + AuthRequired []string `yaml:"authRequired"` +} + +var _ tools.ToolConfig = Config{} + +func (cfg Config) ToolConfigType() string { + return resourceType +} + +func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) { + // parameters are marked required/optional based on the vector assist function definitions + allParameters := parameters.Parameters{ + parameters.NewStringParameterWithRequired("table_name", "Table name on which vector workload needs to be set up.", true), + parameters.NewStringParameterWithRequired("schema_name", "Schema containing the given table.", false), + parameters.NewStringParameterWithRequired("spec_id", "Unique ID for the vector spec. 
Auto-generated, if not specified.", false), + parameters.NewStringParameterWithRequired("vector_column_name", "Column name for the column with vector embeddings.", false), + parameters.NewStringParameterWithRequired("text_column_name", "Column name for the column with text on which vector search needs to be set up.", false), + parameters.NewStringParameterWithRequired("vector_index_type", "Type of the vector index to be created (Allowed inputs: 'hnsw', 'ivfflat', 'scann').", false), + parameters.NewBooleanParameterWithRequired("embeddings_available", "Boolean parameter to know if vector embeddings are already available in the table.", false), + parameters.NewIntParameterWithRequired("num_vectors", "Number of vectors expected in the dataset.", false), + parameters.NewIntParameterWithRequired("dimensionality", "If vectors are already generated, set to dimension of vectors. If not, set to dimensionality of the embedding_model.", false), + parameters.NewStringParameterWithRequired("embedding_model", "Optional parameter: Model to be used for generating embeddings. 
If not provided, it has an internally selected default value.", false), + parameters.NewArrayParameterWithRequired("prefilter_column_names", "Columns based on which prefiltering will happen in vector search queries.", false, parameters.NewStringParameter("prefilter_column_name", "Pre filter column name")), + parameters.NewStringParameterWithRequired("distance_func", "Distance function to be used for comparing vectors (Allowed inputs: 'cosine', 'ip', 'l2', 'l1').", false), + parameters.NewStringParameterWithRequired("quantization", "Quantization to be used for creating the vector indexes (Allowed inputs: 'none', 'halfvec', 'bit').", false), + parameters.NewIntParameterWithRequired("memory_budget_kb", "Maximum size in KB that the index can consume in memory while building.", false), + parameters.NewFloatParameterWithRequired("target_recall", "The recall that the user would like to target with the given index for standard vector queries.", false), + parameters.NewIntParameterWithRequired("target_top_k", "The top-K values that need to be retrieved for the given query.", false), + parameters.NewBooleanParameterWithRequired("tune_vector_index", "Boolean parameter to specify if the auto tuning is required for the index.", false), + } + paramManifest := allParameters.Manifest() + + if cfg.Description == "" { + cfg.Description = "This tool defines a new vector specification by capturing the user's intent and requirements for a vector search workload. This generates a complete, ordered set of SQL recommendations required to set up the database, embeddings, and vector indexes. While highly customizable, any optional parameters left unspecified will use internally determined defaults optimized for the specific workload. Use this tool at the very beginning of the vector setup process when a user first wants to configure a table for vector search, generate embeddings, or create a new vector index." 
+ } + + mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters, nil) + + return Tool{ + Config: cfg, + allParams: allParameters, + manifest: tools.Manifest{ + Description: cfg.Description, + Parameters: paramManifest, + AuthRequired: cfg.AuthRequired, + }, + mcpManifest: mcpManifest, + }, nil +} + +var _ tools.Tool = Tool{} + +type Tool struct { + Config + allParams parameters.Parameters `yaml:"allParams"` + manifest tools.Manifest + mcpManifest tools.McpManifest +} + +func (t Tool) ToConfig() tools.ToolConfig { + return t.Config +} + +func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, util.ToolboxError) { + source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Type) + if err != nil { + return nil, util.NewClientServerError("source used is not compatible with the tool", http.StatusInternalServerError, err) + } + paramsMap := params.AsMap() + + // Convert our parsed parameters directly into pgx.NamedArgs + namedArgs := pgx.NamedArgs{} + for key, value := range paramsMap { + namedArgs[key] = value + } + + // As long as source.RunSQL unwraps args into pgx.Query(ctx, sql, args...), pgx handles the mapping of @param to the named parameter. 
+ resp, err := source.RunSQL(ctx, defineSpecQuery, []any{namedArgs}) + if err != nil { + return nil, util.ProcessGeneralError(err) + } + return resp, nil +} + +func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) { + return parameters.EmbedParams(ctx, t.allParams, paramValues, embeddingModelsMap, nil) +} + +func (t Tool) Manifest() tools.Manifest { + return t.manifest +} + +func (t Tool) McpManifest() tools.McpManifest { + return t.mcpManifest +} + +func (t Tool) Authorized(verifiedAuthServices []string) bool { + return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices) +} + +func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) { + return false, nil +} + +func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) { + return "Authorization", nil +} + +func (t Tool) GetParameters() parameters.Parameters { + return t.allParams +} diff --git a/internal/tools/cloudsqlpg/vectorassistdefinespec/vectorassistdefinespec_test.go b/internal/tools/cloudsqlpg/vectorassistdefinespec/vectorassistdefinespec_test.go new file mode 100644 index 000000000000..e03938d012c1 --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistdefinespec/vectorassistdefinespec_test.go @@ -0,0 +1,67 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vectorassistdefinespec_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/googleapis/mcp-toolbox/internal/server" + "github.com/googleapis/mcp-toolbox/internal/testutils" + "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistdefinespec" +) + +func TestParseFromYaml(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + desc string + in string + want server.ToolConfigs + }{ + { + desc: "basic example", + in: ` + kind: tool + name: define-spec-tool + type: vector-assist-define-spec + description: a test description + source: a-source + `, + want: server.ToolConfigs{ + "define-spec-tool": vectorassistdefinespec.Config{ + Name: "define-spec-tool", + Type: "vector-assist-define-spec", + Source: "a-source", + Description: "a test description", + AuthRequired: []string{}, + }, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + _, _, _, got, _, _, err := server.UnmarshalResourceConfig(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("unable to unmarshal: %s", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Fatalf("incorrect parse: diff %v", diff) + } + }) + } +} diff --git a/internal/tools/cloudsqlpg/vectorassistgeneratequery/vectorassistgeneratequery.go b/internal/tools/cloudsqlpg/vectorassistgeneratequery/vectorassistgeneratequery.go new file mode 100644 index 000000000000..13842b8453dd --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistgeneratequery/vectorassistgeneratequery.go @@ -0,0 +1,175 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vectorassistgeneratequery + +import ( + "context" + "fmt" + "net/http" + + yaml "github.com/goccy/go-yaml" + "github.com/googleapis/mcp-toolbox/internal/embeddingmodels" + "github.com/googleapis/mcp-toolbox/internal/sources" + "github.com/googleapis/mcp-toolbox/internal/tools" + "github.com/googleapis/mcp-toolbox/internal/util" + "github.com/googleapis/mcp-toolbox/internal/util/parameters" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +const resourceType string = "vector-assist-generate-query" + +const generateQueryStatement = ` + SELECT vector_assist.generate_query( + spec_id => @spec_id::TEXT, table_name => @table_name::TEXT, + schema_name => @schema_name::TEXT, column_name => @column_name::TEXT, + search_text => @search_text::TEXT, search_vector => @search_vector::vector, + output_column_names => @output_column_names, + top_k => @top_k::INTEGER, + filter_expressions => @filter_expressions, + target_recall => @target_recall::FLOAT, + iterative_index_search => @iterative_index_search::BOOLEAN + ); +` + +func init() { + if !tools.Register(resourceType, newConfig) { + panic(fmt.Sprintf("tool type %q already registered", resourceType)) + } +} + +func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) { + actual := Config{Name: name} + if err := decoder.DecodeContext(ctx, &actual); err != nil { + return nil, err + } + return actual, nil +} + +type compatibleSource interface { + PostgresPool() *pgxpool.Pool + RunSQL(context.Context, string, []any) (any, error) +} + +type Config 
struct { + Name string `yaml:"name" validate:"required"` + Type string `yaml:"type" validate:"required"` + Source string `yaml:"source" validate:"required"` + Description string `yaml:"description"` + AuthRequired []string `yaml:"authRequired"` +} + +var _ tools.ToolConfig = Config{} + +func (cfg Config) ToolConfigType() string { + return resourceType +} + +func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) { + // parameters are marked required/ optional based on the vector assist function defintions + allParameters := parameters.Parameters{ + parameters.NewStringParameterWithRequired("spec_id", "Generate the vector query corresponding to this vector spec.", false), + parameters.NewStringParameterWithRequired("table_name", "Generate the vector query corresponding to this table (in case of a single spec defined on the table).", false), + parameters.NewStringParameterWithRequired("schema_name", "Schema name for the table related to the vector query generation.", false), + parameters.NewStringParameterWithRequired("column_name", "text_column_name or vector_column_name of the spec to identify the exact spec in case there are multiple specs defined on a table.", false), + parameters.NewStringParameterWithRequired("search_text", "Text search for which query needs to be generated. Embeddings are generated using the model defined in the vector spec.", false), + parameters.NewStringParameterWithRequired("search_vector", "Vector for which query needs to be generated. Only one of search_text or search_vector must be populated.", false), + parameters.NewArrayParameterWithRequired("output_column_names", "Column names to retrieve in the output search query. Defaults to retrieving all columns.", false, parameters.NewStringParameter("output_column_name", "Output column name")), + parameters.NewIntParameterWithRequired("top_k", "Number of nearest neighbors to be returned in the vector search query. 
Defaults to 10.", false), + parameters.NewArrayParameterWithRequired("filter_expressions", "Any filter expressions to be applied on the vector search query.", false, parameters.NewStringParameter("filter_expression", "Filter expression")), + parameters.NewFloatParameterWithRequired("target_recall", "The recall that the user would like to target with the given query. Overrides the spec-level target_recall.", false), + parameters.NewBooleanParameterWithRequired("iterative_index_search", "Perform iterative index search for filtered queries to ensure enough results are returned.", false), + } + paramManifest := allParameters.Manifest() + + if cfg.Description == "" { + cfg.Description = "This tool generates optimized SQL queries for vector search by leveraging the metadata and vector specifications defined in a specific spec_id. It may return a single query or a sequence of multiple SQL queries that can be executed sequentially. Use this tool when a user wants to perform semantic or similarity searches on their data. It serves as the primary actionable tool to invoke for generating the executable SQL required to retrieve relevant results based on vector similarity. The 'execute_sql' tool can be used as a follow-up action after invoking this tool." 
+ } + + mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters, nil) + + return Tool{ + Config: cfg, + allParams: allParameters, + manifest: tools.Manifest{ + Description: cfg.Description, + Parameters: paramManifest, + AuthRequired: cfg.AuthRequired, + }, + mcpManifest: mcpManifest, + }, nil +} + +var _ tools.Tool = Tool{} + +type Tool struct { + Config + allParams parameters.Parameters `yaml:"allParams"` + manifest tools.Manifest + mcpManifest tools.McpManifest +} + +func (t Tool) ToConfig() tools.ToolConfig { + return t.Config +} + +func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, util.ToolboxError) { + source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Type) + if err != nil { + return nil, util.NewClientServerError("source used is not compatible with the tool", http.StatusInternalServerError, err) + } + paramsMap := params.AsMap() + + // Convert our parsed parameters directly into pgx.NamedArgs + namedArgs := pgx.NamedArgs{} + for key, value := range paramsMap { + namedArgs[key] = value + } + + // As long as source.RunSQL unwraps args into pgx.Query(ctx, sql, args...), pgx handles the mapping of @param to the named parameter. 
+ resp, err := source.RunSQL(ctx, generateQueryStatement, []any{namedArgs}) + if err != nil { + return nil, util.ProcessGeneralError(err) + } + return resp, nil +} + +func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) { + return parameters.EmbedParams(ctx, t.allParams, paramValues, embeddingModelsMap, nil) +} + +func (t Tool) Manifest() tools.Manifest { + return t.manifest +} + +func (t Tool) McpManifest() tools.McpManifest { + return t.mcpManifest +} + +func (t Tool) Authorized(verifiedAuthServices []string) bool { + return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices) +} + +func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) { + return false, nil +} + +func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) { + return "Authorization", nil +} + +func (t Tool) GetParameters() parameters.Parameters { + return t.allParams +} diff --git a/internal/tools/cloudsqlpg/vectorassistgeneratequery/vectorassistgeneratequery_test.go b/internal/tools/cloudsqlpg/vectorassistgeneratequery/vectorassistgeneratequery_test.go new file mode 100644 index 000000000000..06c6cc781cb9 --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistgeneratequery/vectorassistgeneratequery_test.go @@ -0,0 +1,67 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package vectorassistgeneratequery_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/googleapis/mcp-toolbox/internal/server" + "github.com/googleapis/mcp-toolbox/internal/testutils" + "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistgeneratequery" +) + +func TestParseFromYaml(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + desc string + in string + want server.ToolConfigs + }{ + { + desc: "basic example", + in: ` + kind: tool + name: generate-query-tool + type: vector-assist-generate-query + description: a test description + source: a-source + `, + want: server.ToolConfigs{ + "generate-query-tool": vectorassistgeneratequery.Config{ + Name: "generate-query-tool", + Type: "vector-assist-generate-query", + Source: "a-source", + Description: "a test description", + AuthRequired: []string{}, + }, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + _, _, _, got, _, _, err := server.UnmarshalResourceConfig(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("unable to unmarshal: %s", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Fatalf("incorrect parse: diff %v", diff) + } + }) + } +} diff --git a/internal/tools/cloudsqlpg/vectorassistmodifyspec/vectorassistmodifyspec.go b/internal/tools/cloudsqlpg/vectorassistmodifyspec/vectorassistmodifyspec.go new file mode 100644 index 000000000000..3ddd49238b98 --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistmodifyspec/vectorassistmodifyspec.go @@ -0,0 +1,180 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vectorassistmodifyspec + +import ( + "context" + "fmt" + "net/http" + + yaml "github.com/goccy/go-yaml" + "github.com/googleapis/mcp-toolbox/internal/embeddingmodels" + "github.com/googleapis/mcp-toolbox/internal/sources" + "github.com/googleapis/mcp-toolbox/internal/tools" + "github.com/googleapis/mcp-toolbox/internal/util" + "github.com/googleapis/mcp-toolbox/internal/util/parameters" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +const resourceType string = "vector-assist-modify-spec" + +const modifySpecQuery = ` + SELECT recommendation_id, vector_spec_id, table_name, schema_name, query, recommendation, applied, modified, created_at + FROM vector_assist.modify_spec(spec_id => @spec_id::TEXT, table_name => @table_name::TEXT, schema_name => @schema_name::TEXT, + vector_column_name => @vector_column_name::TEXT, text_column_name => @text_column_name::TEXT, + vector_index_type => @vector_index_type::TEXT, embeddings_available => @embeddings_available::BOOLEAN, + num_vectors => @num_vectors::INTEGER, dimensionality => @dimensionality::INTEGER, + embedding_model => @embedding_model::TEXT, prefilter_column_names => @prefilter_column_names, + distance_func => @distance_func::TEXT, quantization => @quantization::TEXT, + memory_budget_kb => @memory_budget_kb::INTEGER, target_recall => @target_recall::FLOAT, + target_top_k => @target_top_k::INTEGER, tune_vector_index => @tune_vector_index::BOOLEAN); +` + +func init() { + if !tools.Register(resourceType, newConfig) { + panic(fmt.Sprintf("tool type %q already registered", 
resourceType)) + } +} + +func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) { + actual := Config{Name: name} + if err := decoder.DecodeContext(ctx, &actual); err != nil { + return nil, err + } + return actual, nil +} + +type compatibleSource interface { + PostgresPool() *pgxpool.Pool + RunSQL(context.Context, string, []any) (any, error) +} + +type Config struct { + Name string `yaml:"name" validate:"required"` + Type string `yaml:"type" validate:"required"` + Source string `yaml:"source" validate:"required"` + Description string `yaml:"description"` + AuthRequired []string `yaml:"authRequired"` +} + +var _ tools.ToolConfig = Config{} + +func (cfg Config) ToolConfigType() string { + return resourceType +} + +func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) { + // parameters are marked required/ optional based on the vector assist function defintions + allParameters := parameters.Parameters{ + parameters.NewStringParameterWithRequired("spec_id", "Unique ID for the vector spec you want to modify.", true), + parameters.NewStringParameterWithRequired("table_name", "Modify the table name on which vector workload needs to be set up.", false), + parameters.NewStringParameterWithRequired("schema_name", "Modify the schema containing the given table.", false), + parameters.NewStringParameterWithRequired("vector_column_name", "Modify the column name for the column with vector embeddings.", false), + parameters.NewStringParameterWithRequired("text_column_name", "Modify the column name for the column with text on which vector search needs to be set up.", false), + parameters.NewStringParameterWithRequired("vector_index_type", "Modify the type of the vector index to be created (Allowed inputs: 'hnsw', 'ivfflat', 'scann').", false), + parameters.NewBooleanParameterWithRequired("embeddings_available", "Modify whether vector embeddings are already available in the table.", false), + 
parameters.NewIntParameterWithRequired("num_vectors", "Modify the number of vectors expected in the dataset.", false), + parameters.NewIntParameterWithRequired("dimensionality", "Modify the dimensionality of the vectors or embedding model.", false), + parameters.NewStringParameterWithRequired("embedding_model", "Modify the model used for generating embeddings.", false), + parameters.NewArrayParameterWithRequired("prefilter_column_names", "Modify the column(s) based on which prefiltering will happen in vector search queries.", false, parameters.NewStringParameter("prefilter_column_name", "Pre filter column name")), + parameters.NewStringParameterWithRequired("distance_func", "Modify the distance function to be used for comparing vectors (Allowed inputs: 'cosine', 'ip', 'l2', 'l1').", false), + parameters.NewStringParameterWithRequired("quantization", "Modify the quantization to be used for creating the vector indexes (Allowed inputs: 'none', 'halfvec', 'bit').", false), + parameters.NewIntParameterWithRequired("memory_budget_kb", "Modify the maximum size that the index can consume in memory while building.", false), + parameters.NewFloatParameterWithRequired("target_recall", "Modify the recall that the user would like to target with the given index.", false), + parameters.NewIntParameterWithRequired("target_top_k", "Modify the Top-K matching values that need to be retrieved for the given query.", false), + parameters.NewBooleanParameterWithRequired("tune_vector_index", "Modify whether to tune vector index build and search parameters.", false), + } + paramManifest := allParameters.Manifest() + + if cfg.Description == "" { + cfg.Description = "This tool modifies an existing vector specification (identified by a required spec_id) with new parameters or overrides. Upon modification, it automatically recalculates and refreshes the list of generated SQL recommendations to match the updated requirements. 
This tool provides a way to modify column(s) in the vector spec before applying and taking action on the recommendations. While highly customizable, any optional parameters left unspecified will use internally determined defaults optimized for the specific workload. Use this tool to modify configurations established via 'define_spec' tool such as adjusting target recall, embedding models, or quantization settings, etc." + } + + mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters, nil) + + return Tool{ + Config: cfg, + allParams: allParameters, + manifest: tools.Manifest{ + Description: cfg.Description, + Parameters: paramManifest, + AuthRequired: cfg.AuthRequired, + }, + mcpManifest: mcpManifest, + }, nil +} + +var _ tools.Tool = Tool{} + +type Tool struct { + Config + allParams parameters.Parameters `yaml:"allParams"` + manifest tools.Manifest + mcpManifest tools.McpManifest +} + +func (t Tool) ToConfig() tools.ToolConfig { + return t.Config +} + +func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, util.ToolboxError) { + source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Type) + if err != nil { + return nil, util.NewClientServerError("source used is not compatible with the tool", http.StatusInternalServerError, err) + } + paramsMap := params.AsMap() + + // Convert our parsed parameters directly into pgx.NamedArgs + namedArgs := pgx.NamedArgs{} + for key, value := range paramsMap { + namedArgs[key] = value + } + + // As long as source.RunSQL unwraps args into pgx.Query(ctx, sql, args...), pgx handles the mapping of @param to the named parameter. 
+ resp, err := source.RunSQL(ctx, modifySpecQuery, []any{namedArgs}) + if err != nil { + return nil, util.ProcessGeneralError(err) + } + return resp, nil +} + +func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) { + return parameters.EmbedParams(ctx, t.allParams, paramValues, embeddingModelsMap, nil) +} + +func (t Tool) Manifest() tools.Manifest { + return t.manifest +} + +func (t Tool) McpManifest() tools.McpManifest { + return t.mcpManifest +} + +func (t Tool) Authorized(verifiedAuthServices []string) bool { + return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices) +} + +func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) { + return false, nil +} + +func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) { + return "Authorization", nil +} + +func (t Tool) GetParameters() parameters.Parameters { + return t.allParams +} diff --git a/internal/tools/cloudsqlpg/vectorassistmodifyspec/vectorassistmodifyspec_test.go b/internal/tools/cloudsqlpg/vectorassistmodifyspec/vectorassistmodifyspec_test.go new file mode 100644 index 000000000000..38a79c8ec1eb --- /dev/null +++ b/internal/tools/cloudsqlpg/vectorassistmodifyspec/vectorassistmodifyspec_test.go @@ -0,0 +1,67 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vectorassistmodifyspec_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/googleapis/mcp-toolbox/internal/server" + "github.com/googleapis/mcp-toolbox/internal/testutils" + "github.com/googleapis/mcp-toolbox/internal/tools/cloudsqlpg/vectorassistmodifyspec" +) + +func TestParseFromYaml(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + desc string + in string + want server.ToolConfigs + }{ + { + desc: "basic example", + in: ` + kind: tool + name: modify-spec-tool + type: vector-assist-modify-spec + description: a test description + source: a-source + `, + want: server.ToolConfigs{ + "modify-spec-tool": vectorassistmodifyspec.Config{ + Name: "modify-spec-tool", + Type: "vector-assist-modify-spec", + Source: "a-source", + Description: "a test description", + AuthRequired: []string{}, + }, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + _, _, _, got, _, _, err := server.UnmarshalResourceConfig(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("unable to unmarshal: %s", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Fatalf("incorrect parse: diff %v", diff) + } + }) + } +} diff --git a/tests/cloudsqlpg/cloud_sql_pg_vectorassist_test.go b/tests/cloudsqlpg/cloud_sql_pg_vectorassist_test.go new file mode 100644 index 000000000000..0f2c2159314b --- /dev/null +++ b/tests/cloudsqlpg/cloud_sql_pg_vectorassist_test.go @@ -0,0 +1,398 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cloudsqlpg + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "regexp" + "strings" + "testing" + "time" + + "github.com/google/uuid" + "github.com/googleapis/mcp-toolbox/internal/testutils" + "github.com/googleapis/mcp-toolbox/tests" + "github.com/jackc/pgx/v5/pgxpool" +) + +func createPostgresExtension(t *testing.T, ctx context.Context, pool *pgxpool.Pool, extensionName string) func() { + createExtensionCmd := fmt.Sprintf("CREATE EXTENSION IF NOT EXISTS %s", extensionName) + _, err := pool.Exec(ctx, createExtensionCmd) + if err != nil { + t.Fatalf("failed to create extension: %v", err) + } + return func() { + dropExtensionCmd := fmt.Sprintf("DROP EXTENSION IF EXISTS %s", extensionName) + _, err := pool.Exec(ctx, dropExtensionCmd) + if err != nil { + t.Fatalf("failed to drop extension: %v", err) + } + } +} + +// setupVectorAssistTable prepares the database extensions and test data needed +// to test the definespec, modifyspec, applyspec, and generatequery tools. 
+func setupVectorAssistTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool) (string, func(t *testing.T), func()) { + // Install necessary extensions for VectorAssist + dropExtensionFunc := createPostgresExtension(t, ctx, pool, "vector_assist") + + uniqueID := strings.ReplaceAll(uuid.New().String(), "-", "") + tableName := "vector_assist_test_" + uniqueID + + // Create a table with vector data for defining/modifying/applying specs + createStmt := fmt.Sprintf(` + CREATE TABLE %s ( + name TEXT, + category TEXT, + content TEXT, + embedding vector(3) + ); + `, tableName) + + _, err := pool.Exec(ctx, createStmt) + if err != nil { + t.Fatalf("failed to create vector assist test table: %v", err) + } + + // Insert sample data to generate queries against + insertDataStmt := fmt.Sprintf(` + INSERT INTO %s (name, category, content, embedding) + VALUES + ('Item 1', 'Document', 'Sample text document about AI', array_fill(0.1, ARRAY[3])::vector), + ('Item 2', 'Document', 'Sample text document about databases', array_fill(0.2, ARRAY[3])::vector); + `, tableName) + + _, err = pool.Exec(ctx, insertDataStmt) + if err != nil { + t.Fatalf("failed to insert data into vector assist table: %v", err) + } + + // Return teardown function + teardown := func(t *testing.T) { + _, err := pool.Exec(context.Background(), fmt.Sprintf("DROP TABLE IF EXISTS %s;", tableName)) + if err != nil { + t.Errorf("failed to drop vector assist table %s: %v", tableName, err) + } + } + + return tableName, teardown, dropExtensionFunc +} + +// TODO: Remove the test from this file and follow the existing test pattern +// by calling the tests from cloudsqlpg_integration_test.go +func TestVectorAssistIntegration(t *testing.T) { + sourceConfig := getCloudSQLPgVars(t) + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + args := []string{"--enable-api"} + + pool, err := initCloudSQLPgConnectionPool(CloudSQLPostgresProject, CloudSQLPostgresRegion, CloudSQLPostgresInstance, 
"public", CloudSQLPostgresUser, CloudSQLPostgresPass, CloudSQLPostgresDatabase) + if err != nil { + t.Fatalf("unable to create Cloud SQL connection pool: %s", err) + } + + // Generate a unique ID + uniqueID := strings.ReplaceAll(uuid.New().String(), "-", "") + + // This will execute after all tool tests complete (success, fail, or t.Fatal) + t.Cleanup(func() { + tests.CleanupPostgresTables(t, context.Background(), pool, uniqueID) + }) + + //Create table names using the UUID + tableNameParam := "param_table_" + uniqueID + tableNameAuth := "auth_table_" + uniqueID + + // set up data for param tool + createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam) + teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams) + defer teardownTable1(t) + + // set up data for auth tool + createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth) + teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams) + defer teardownTable2(t) + + // // Set up data for vector assist tools + vectorAssistTableName, teardownVectorAssistTable, dropExtension := setupVectorAssistTable(t, ctx, pool) + defer teardownVectorAssistTable(t) + defer dropExtension() + + // Write config into a file and pass it to command + toolsFile := tests.GetToolsConfig(sourceConfig, CloudSQLPostgresToolType, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt) + toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql") + tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement() + toolsFile = tests.AddTemplateParamConfig(t, toolsFile, CloudSQLPostgresToolType, tmplSelectCombined, tmplSelectFilterCombined, "") + + // 
Add vector assist tools to the configuration + toolsFile = AddVectorAssistConfig(t, toolsFile, "my-instance") + + toolsFile = tests.AddPostgresPrebuiltConfig(t, toolsFile) + cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...) + if err != nil { + t.Fatalf("command initialization returned an error: %s", err) + } + defer cleanup() + + waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out) + if err != nil { + t.Logf("toolbox command logs: \n%s", out) + t.Fatalf("toolbox didn't start successfully: %s", err) + } + + // Run vectorassist tool tests + specID := "va_spec_001" + RunVectorAssistDefineSpecToolInvokeTest(t, ctx, pool, vectorAssistTableName, specID) + RunVectorAssistModifySpecToolInvokeTest(t, ctx, pool, specID) + RunVectorAssistApplySpecToolInvokeTest(t, ctx, pool, specID) + RunVectorAssistGenerateQueryToolInvokeTest(t, ctx, pool, specID) +} + +// AddVectorAssistConfig appends the vector assist tool configurations to the given tools file. 
+func AddVectorAssistConfig(t *testing.T, config map[string]any, sourceName string) map[string]any { + tools, ok := config["tools"].(map[string]any) + if !ok { + t.Fatalf("unable to get tools from config") + } + + tools["define_spec"] = map[string]any{ + "type": "vector-assist-define-spec", + "source": sourceName, + } + tools["modify_spec"] = map[string]any{ + "type": "vector-assist-modify-spec", + "source": sourceName, + } + tools["apply_spec"] = map[string]any{ + "type": "vector-assist-apply-spec", + "source": sourceName, + } + tools["generate_query"] = map[string]any{ + "type": "vector-assist-generate-query", + "source": sourceName, + } + config["tools"] = tools + return config +} + +func RunVectorAssistDefineSpecToolInvokeTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool, tableName string, specID string) { + validPayload := fmt.Sprintf(`{ + "table_name": "%s", + "schema_name": "public", + "spec_id": "%s", + "vector_column_name": "embedding", + "text_column_name": "content", + "vector_index_type": "hnsw", + "embeddings_available": true, + "num_vectors": 2, + "dimensionality": 3, + "embedding_model": "textembedding-gecko", + "prefilter_column_names": ["category"], + "distance_func": "cosine", + "quantization": "halfvec", + "memory_budget_kb": 1024, + "target_recall": 0.95, + "target_top_k": 10, + "tune_vector_index": true + }`, tableName, specID) + + tcs := []struct { + name string + requestBody io.Reader + api string + wantStatusCode int + wantContains []string + }{ + { + name: "invoke define_spec with all valid parameters", + requestBody: bytes.NewBuffer([]byte(validPayload)), + api: "http://127.0.0.1:5000/api/tool/define_spec/invoke", + wantStatusCode: http.StatusOK, + // Check for key identifiers instead of the entire JSON string + wantContains: []string{ + `"vector_spec_id":"va_spec_001"`, + fmt.Sprintf(`"table_name":"%s"`, tableName), + `"recommendation_id"`, // Ensure a recommendation was generated + }, + }, + { + name: "invoke define_spec with 
missing required table_name", + requestBody: bytes.NewBuffer([]byte(`{"schema_name": "public", "spec_id": "va_spec_002"}`)), + api: "http://127.0.0.1:5000/api/tool/define_spec/invoke", + wantStatusCode: http.StatusOK, + wantContains: []string{ + `"error"`, + }, + }, + } + for _, tc := range tcs { + runVectorAssistToolInvokeTest(t, tc) + } +} + +func RunVectorAssistModifySpecToolInvokeTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool, specID string) { + validPayload := fmt.Sprintf(`{ + "spec_id": "%s", + "memory_budget_kb": 2048, + "target_recall": 0.99 + }`, specID) + + tcs := []struct { + name string + requestBody io.Reader + api string + wantStatusCode int + wantContains []string + }{ + { + name: "modify existing spec with new constraints", + requestBody: bytes.NewBuffer([]byte(validPayload)), + api: "http://127.0.0.1:5000/api/tool/modify_spec/invoke", + wantStatusCode: http.StatusOK, + wantContains: []string{ + `"recommendation_id"`, + }, + }, + { + name: "modify existing spec without required spec id", + requestBody: bytes.NewBuffer([]byte(`{"target_recall": 0.99}`)), + api: "http://127.0.0.1:5000/api/tool/modify_spec/invoke", + wantStatusCode: http.StatusOK, + wantContains: []string{ + `"error"`, + }, + }, + } + for _, tc := range tcs { + runVectorAssistToolInvokeTest(t, tc) + } +} + +func RunVectorAssistApplySpecToolInvokeTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool, recommendationID string) { + validPayload := fmt.Sprintf(`{ + "spec_id": "%s" + }`, recommendationID) + + tcs := []struct { + name string + requestBody io.Reader + api string + wantStatusCode int + wantContains []string + }{ + { + name: "apply recommendation to database", + requestBody: bytes.NewBuffer([]byte(validPayload)), + api: "http://127.0.0.1:5000/api/tool/apply_spec/invoke", + wantStatusCode: http.StatusOK, + wantContains: []string{ + `{"apply_spec":true}`, + }, + }, + { + name: "apply recommendation to database without spec id", + requestBody: 
bytes.NewBuffer([]byte(`{"schema_name": "public"}`)), + api: "http://127.0.0.1:5000/api/tool/apply_spec/invoke", + wantStatusCode: http.StatusOK, + wantContains: []string{ + `"error"`, + }, + }, + } + + for _, tc := range tcs { + runVectorAssistToolInvokeTest(t, tc) + } +} + +func RunVectorAssistGenerateQueryToolInvokeTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool, specID string) { + validPayload := fmt.Sprintf(`{ + "spec_id": "%s", + "search_text": "What is the capital of France?", + "top_k": 5 + }`, specID) + + tcs := []struct { + name string + requestBody io.Reader + api string + wantStatusCode int + wantContains []string + }{ + { + name: "generate SQL for vector search", + requestBody: bytes.NewBuffer([]byte(validPayload)), + api: "http://127.0.0.1:5000/api/tool/generate_query/invoke", + wantStatusCode: http.StatusOK, + wantContains: []string{ + `"generate_query"`, + `LIMIT 5`, + }, + }, + } + + for _, tc := range tcs { + runVectorAssistToolInvokeTest(t, tc) + } +} + +func runVectorAssistToolInvokeTest(t *testing.T, tc struct { + name string + requestBody io.Reader + api string + wantStatusCode int + wantContains []string +}) { + t.Run(tc.name, func(t *testing.T) { + resp, body := tests.RunRequest(t, http.MethodPost, tc.api, tc.requestBody, nil) + + if resp.StatusCode != tc.wantStatusCode { + t.Fatalf("tool %s: wrong status code: got %d, want %d, body: %s", tc.api, resp.StatusCode, tc.wantStatusCode, string(body)) + } + + if tc.wantStatusCode != http.StatusOK { + return + } + + // Unmarshal the standard response wrapper + var bodyWrapper struct { + Result json.RawMessage `json:"result"` + } + if err := json.Unmarshal(body, &bodyWrapper); err != nil { + t.Fatalf("error decoding response wrapper: %v", err) + } + + // Handle the double-unmarshal logic for stringified results + var resultString string + if err := json.Unmarshal(bodyWrapper.Result, &resultString); err != nil { + resultString = string(bodyWrapper.Result) + } + + // Verification loop + for 
_, expectedSubstr := range tc.wantContains { + if !strings.Contains(resultString, expectedSubstr) { + t.Errorf("Expected result to contain %q, but it did not.\nFull result: %s", expectedSubstr, resultString) + } + } + }) +}