From fd2730e14dff36f1fb26610a0fa18aba2b07bee6 Mon Sep 17 00:00:00 2001 From: stephenheg Date: Tue, 27 May 2025 16:28:34 +0100 Subject: [PATCH 1/2] adding tableflow topic and catalog integrations tools + fix for error logging --- README.md | 45 ++++++ openapi.json | 55 ++++++- src/confluent/openapi-schema.d.ts | 23 ++- ...e-tableflow-catalog-integration-handler.ts | 115 ++++++++++++++ ...e-tableflow-catalog-integration-handler.ts | 100 +++++++++++++ ...-tableflow-catalog-integrations-handler.ts | 110 ++++++++++++++ ...d-tableflow-catalog-integration-handler.ts | 100 +++++++++++++ ...e-tableflow-catalog-integration-handler.ts | 112 ++++++++++++++ .../list-tableflow-regions-handler.ts | 90 +++++++++++ .../topic/create-tableflow-topic-handler.ts | 135 +++++++++++++++++ .../topic/delete-tableflow-topic-handler.ts | 102 +++++++++++++ .../topic/list-tableflow-topics-handler.ts | 108 ++++++++++++++ .../topic/read-tableflow-topic-handler.ts | 100 +++++++++++++ .../topic/update-tableflow-topic-handler.ts | 140 ++++++++++++++++++ src/confluent/tools/tool-factory.ts | 37 +++++ src/confluent/tools/tool-name.ts | 11 ++ src/index.ts | 2 +- 17 files changed, 1382 insertions(+), 3 deletions(-) create mode 100644 src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/catalog/delete-tableflow-catalog-integration-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/catalog/list-tableflow-catalog-integrations-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/catalog/read-tableflow-catalog-integration-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/catalog/update-tableflow-catalog-integration-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/list-tableflow-regions-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts create mode 100644 
src/confluent/tools/handlers/tableflow/topic/delete-tableflow-topic-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/topic/list-tableflow-topics-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/topic/read-tableflow-topic-handler.ts create mode 100644 src/confluent/tools/handlers/tableflow/topic/update-tableflow-topic-handler.ts diff --git a/README.md b/README.md index 106b778..014f7e2 100644 --- a/README.md +++ b/README.md @@ -339,6 +339,17 @@ npx @confluentinc/mcp-confluent -e .env --allow-tools produce-message,consume-me {"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":46818,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-environment disabled due to allow/block list rules"} {"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":46818,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-schemas disabled due to allow/block list rules"} {"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":46818,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool get-topic-config disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool create-tableflow-topic disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool list-tableflow-regions disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool list-tableflow-topics disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool read-tableflow-topic disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool update-tableflow-topic disabled 
due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool delete-tableflow-topic disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool create-tableflow-catalog-integration disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool list-tableflow-catalog-integrations disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool read-tableflow-catalog-integration disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool update-tableflow-catalog-integration disabled due to allow/block list rules"} +{"level":"warn","time":"2025-05-14T16:52:34.923Z","pid":53394,"hostname":"YXR2D4NCM9","name":"mcp-confluent","msg":"Tool delete-tableflow-catalog-integration disabled due to allow/block list rules"} {"level":"info","time":"2025-05-14T16:52:34.924Z","pid":46818,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Starting transports: stdio on localhost:3000"} {"level":"info","time":"2025-05-14T16:52:34.924Z","pid":46818,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"STDIO transport connected"} {"level":"info","time":"2025-05-14T16:52:34.924Z","pid":46818,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"All transports started successfully"} @@ -382,6 +393,17 @@ npx @confluentinc/mcp-confluent -e .env --block-tools produce-message,consume-me {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-environment enabled"} 
{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-schemas enabled"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool get-topic-config enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool create-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-tableflow-regions enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-tableflow-topics enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool update-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool delete-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool create-tableflow-catalog-integration enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-tableflow-catalog-integrations enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-tableflow-catalog-integration enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool update-tableflow-catalog-integration enabled"} 
+{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool delete-tableflow-catalog-integration enabled"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Starting transports: stdio"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"STDIO transport connected"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"All transports started successfully"} @@ -425,6 +447,17 @@ npx -y @confluentinc/mcp-confluent -e .env --allow-tools-file allow.txt --block- {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-environment enabled"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-schemas enabled"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool get-topic-config enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool create-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-tableflow-regions enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-tableflow-topics enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool update-tableflow-topic enabled"} 
+{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool delete-tableflow-topic enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool create-tableflow-catalog-integration enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool list-tableflow-catalog-integrations enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool read-tableflow-catalog-integration enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool update-tableflow-catalog-integration enabled"} +{"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Tool delete-tableflow-catalog-integration enabled"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"Starting transports: stdio"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"STDIO transport connected"} {"level":"info","time":"2025-05-14T16:55:45.911Z","pid":47344,"hostname":"G9PW1FJH64","name":"mcp-confluent","msg":"All transports started successfully"} @@ -468,6 +501,18 @@ read-flink-statement: Make a request to read a statement and its results remove-tag-from-entity: Remove tag from an entity in Confluent Cloud. search-topics-by-name: List all topics in the Kafka cluster matching the specified name. search-topics-by-tag: List all topics in the Kafka cluster with the specified tag. +create-tableflow-topic: Make a request to create a tableflow topic. +create-tableflow-topic: Make a request to create a tableflow topic. 
+list-tableflow-regions: Retrieve a sorted, filtered, paginated list of all tableflow regions. +list-tableflow-topics: Retrieve a sorted, filtered, paginated list of all tableflow topics. +read-tableflow-topic: Make a request to read a tableflow topic. +update-tableflow-topic: Make a request to update a tableflow topic. +delete-tableflow-topic: Make a request to delete a tableflow topic. +create-tableflow-catalog-integration: Make a request to create a catalog integration. +list-tableflow-catalog-integrations: Retrieve a sorted, filtered, paginated list of all catalog integrations. +read-tableflow-catalog-integration: Make a request to read a catalog integration. +update-tableflow-catalog-integration: Make a request to update a catalog integration. +delete-tableflow-catalog-integration: Make a request to delete a tableflow catalog integration. ``` diff --git a/openapi.json b/openapi.json index 2933617..023cb9c 100644 --- a/openapi.json +++ b/openapi.json @@ -59137,6 +59137,8 @@ "is_simple", "partition_assignor", "state", + "type", + "is_mixed_consumer_group", "coordinator", "consumers", "lag_summary" @@ -59157,10 +59159,16 @@ "state": { "$ref": "#/components/schemas/ConsumerGroupState" }, + "type": { + "$ref": "#/components/schemas/ConsumerGroupType" + }, + "is_mixed_consumer_group": { + "type": "boolean" + }, "coordinator": { "$ref": "#/components/schemas/Relationship" }, - "consumer": { + "consumers": { "$ref": "#/components/schemas/Relationship" }, "lag_summary": { @@ -59195,11 +59203,17 @@ "UNKNOWN", "PREPARING_REBALANCE", "COMPLETING_REBALANCE", + "ASSIGNING", + "RECONCILING", "STABLE", "DEAD", "EMPTY" ] }, + "ConsumerGroupType": { + "type": "string", + "x-extensible-enum": ["UNKNOWN", "CLASSIC", "CONSUMER", "SHARE"] + }, "ConsumerLagData": { "allOf": [ { @@ -60409,6 +60423,30 @@ } } }, + "LinkCategory": { + "type": "object", + "required": ["state_category", "count"], + "properties": { + "state_category": { + "$ref": "#/components/schemas/StateCategory" + }, + 
"count": { + "type": "integer", + "format": "int32", + "example": 123 + } + } + }, + "StateCategory": { + "type": "string", + "x-extensible-enum": [ + "PAUSED", + "IN_ERROR", + "PENDING", + "ACTIVE", + "NOT_APPLICABLE" + ] + }, "LinkTaskError": { "type": "object", "required": ["error_code", "error_message"], @@ -60514,6 +60552,13 @@ "items": { "$ref": "#/components/schemas/LinkTask" } + }, + "category_counts": { + "nullable": true, + "type": "array", + "items": { + "$ref": "#/components/schemas/LinkCategory" + } } } } @@ -77078,6 +77123,8 @@ "is_simple": false, "partition_assignor": "org.apache.kafka.clients.consumer.RoundRobinAssignor", "state": "STABLE", + "type": "CLASSIC", + "is_mixed_consumer_group": false, "coordinator": { "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/1" }, @@ -77749,6 +77796,8 @@ "is_simple": false, "partition_assignor": "org.apache.kafka.clients.consumer.RoundRobinAssignor", "state": "STABLE", + "type": "CLASSIC", + "is_mixed_consumer_group": false, "coordinator": { "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/1" }, @@ -77770,6 +77819,8 @@ "is_simple": false, "partition_assignor": "org.apache.kafka.clients.consumer.StickyAssignor", "state": "PREPARING_REBALANCE", + "type": "CLASSIC", + "is_mixed_consumer_group": false, "coordinator": { "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/2" }, @@ -77791,6 +77842,8 @@ "is_simple": false, "partition_assignor": "org.apache.kafka.clients.consumer.RangeAssignor", "state": "DEAD", + "type": "CLASSIC", + "is_mixed_consumer_group": false, "coordinator": { "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/3" }, diff --git a/src/confluent/openapi-schema.d.ts b/src/confluent/openapi-schema.d.ts index d53503c..9fe0030 100644 --- a/src/confluent/openapi-schema.d.ts +++ b/src/confluent/openapi-schema.d.ts @@ 
-9330,14 +9330,17 @@ export interface components { is_simple: boolean; partition_assignor: string; state: components["schemas"]["ConsumerGroupState"]; + type: components["schemas"]["ConsumerGroupType"]; + is_mixed_consumer_group: boolean; coordinator: components["schemas"]["Relationship"]; - consumer?: components["schemas"]["Relationship"]; + consumers: components["schemas"]["Relationship"]; lag_summary: components["schemas"]["Relationship"]; }; ConsumerGroupDataList: components["schemas"]["ResourceCollection"] & { data: components["schemas"]["ConsumerGroupData"][]; }; ConsumerGroupState: string; + ConsumerGroupType: string; ConsumerLagData: components["schemas"]["Resource"] & { cluster_id: string; consumer_group_id: string; @@ -9647,6 +9650,15 @@ export interface components { state: components["schemas"]["LinkTaskState"]; errors: components["schemas"]["LinkTaskError"][]; }; + LinkCategory: { + state_category: components["schemas"]["StateCategory"]; + /** + * Format: int32 + * @example 123 + */ + count: number; + }; + StateCategory: string; LinkTaskError: { error_code: string; error_message: string; @@ -9666,6 +9678,7 @@ export interface components { link_error_message?: string | null; link_state?: components["schemas"]["LinkState"]; tasks?: components["schemas"]["LinkTask"][] | null; + category_counts?: components["schemas"]["LinkCategory"][] | null; }; ListLinksResponseDataList: components["schemas"]["ResourceCollection"] & { data: components["schemas"]["ListLinksResponseData"][]; @@ -20351,6 +20364,8 @@ export interface components { * "is_simple": false, * "partition_assignor": "org.apache.kafka.clients.consumer.RoundRobinAssignor", * "state": "STABLE", + * "type": "CLASSIC", + * "is_mixed_consumer_group": false, * "coordinator": { * "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/1" * }, @@ -21027,6 +21042,8 @@ export interface components { * "is_simple": false, * "partition_assignor": 
"org.apache.kafka.clients.consumer.RoundRobinAssignor", * "state": "STABLE", + * "type": "CLASSIC", + * "is_mixed_consumer_group": false, * "coordinator": { * "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/1" * }, @@ -21048,6 +21065,8 @@ export interface components { * "is_simple": false, * "partition_assignor": "org.apache.kafka.clients.consumer.StickyAssignor", * "state": "PREPARING_REBALANCE", + * "type": "CLASSIC", + * "is_mixed_consumer_group": false, * "coordinator": { * "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/2" * }, @@ -21069,6 +21088,8 @@ export interface components { * "is_simple": false, * "partition_assignor": "org.apache.kafka.clients.consumer.RangeAssignor", * "state": "DEAD", + * "type": "CLASSIC", + * "is_mixed_consumer_group": false, * "coordinator": { * "related": "https://pkc-00000.region.provider.confluent.cloud/kafka/v3/clusters/cluster-1/brokers/3" * }, diff --git a/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts b/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts new file mode 100644 index 0000000..e34d794 --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts @@ -0,0 +1,115 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const createTableflowCatalogIntegrationArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => 
env.CONFLUENT_CLOUD_REST_ENDPOINT ?? "") + .optional(), + tableflowCatalogIntegrationConfig: z.object({ + // Required fields + display_name: z + .string() + .describe("The name of the Kafka topic for which Tableflow is enabled."), + environment: z.object({ + id: z + .string() + .describe( + "The unique identifier for the environment this resource belongs to.", + ), + }), + kafka_cluster: z.object({ + id: z.string().describe("ID of the referred resource"), + environment: z + .string() + .describe("Environment of the referred resource, if env-scoped"), + }), + config: z.object({ + kind: z + .string() + .default("AwsGlue") + .describe("The type of the catalog integration."), + provider_integration_id: z + .string() + .describe("The provider integration id."), + }), + // Optional fields + suspended: z + .boolean() + .optional() + .default(false) + .describe( + "Indicates whether Tableflow Catalog Integration should be suspended. The API allows setting it only to false i.e resume the Catalog Integration.", + ), + }), +}); + +export class CreateTableFlowCatalogIntegrationHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { baseUrl, tableflowCatalogIntegrationConfig } = + createTableflowCatalogIntegrationArguments.parse(toolArguments); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { environment, ...restOfTableflowCatalogIntegrationConfig } = + tableflowCatalogIntegrationConfig; + + const { data: response, error } = await pathBasedClient[ + "/tableflow/v1/catalog-integrations" + ].POST({ + body: { + spec: { + ...restOfTableflowCatalogIntegrationConfig, + environment: { id: environment.id }, // Only include id, as the general environment as requires readonly and resource_name + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + } as any, // Due to how OpenAPI specification is structured and how generators interpret it, we have to treat it as any, as + // The most likely culprit for mismatch is the reuse of a single base schema for both input (requestBody) and output (responses) evironment. + }, + }); + if (error) { + return this.createResponse( + `Failed to create Tableflow Catalog Integration for ${tableflowCatalogIntegrationConfig.display_name}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow Catalog Integration ${tableflowCatalogIntegrationConfig.display_name} created: ${JSON.stringify(response)}`, + ); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.CREATE_TABLEFLOW_CATALOG_INTEGRATION, + description: `Make a request to create a catalog integration.`, + inputSchema: createTableflowCatalogIntegrationArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/catalog/delete-tableflow-catalog-integration-handler.ts b/src/confluent/tools/handlers/tableflow/catalog/delete-tableflow-catalog-integration-handler.ts new file mode 100644 index 0000000..01dfa91 --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/catalog/delete-tableflow-catalog-integration-handler.ts @@ -0,0 +1,100 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const 
deleteTableflowCatalogIntegrationArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? "") + .optional(), + id: z.string().describe("The unique identifier for the catalog integration."), + environmentId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the given environment."), + clusterId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the give Kafka cluster."), +}); + +export class DeleteTableFlowCatalogIntegrationHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { id, environmentId, clusterId, baseUrl } = + deleteTableflowCatalogIntegrationArguments.parse(toolArguments); + + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + environmentId, + ); + + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + clusterId, + ); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/catalog-integrations/${id}?environment=${environment_id}&spec.kafka_cluster=${kafka_cluster_id}` + ].DELETE({ + params: { + path: { + id: id, + environment_id: environment_id, + kafka_cluster_id: kafka_cluster_id, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to delete Tableflow catalog integration ${id}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow catalog integration ${id} deleted: ${JSON.stringify(response)}`, + ); + } + + getToolConfig(): ToolConfig { + return { + name: ToolName.DELETE_TABLEFLOW_CATALOG_INTEGRATION, + description: `Make a request 
to delete a tableflow catalog integration.`, + inputSchema: deleteTableflowCatalogIntegrationArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/catalog/list-tableflow-catalog-integrations-handler.ts b/src/confluent/tools/handlers/tableflow/catalog/list-tableflow-catalog-integrations-handler.ts new file mode 100644 index 0000000..ab84e4e --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/catalog/list-tableflow-catalog-integrations-handler.ts @@ -0,0 +1,110 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const listTableFlowCatalogIntegrationsArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? 
"") + .optional(), + environmentId: z + .string() + .trim() + .optional() + .describe( + "The unique identifier for the enviornment this resource belongs to.", + ), + clusterId: z + .string() + .trim() + .optional() + .describe("The unique identifier for the Kafka Cluster."), + pageSize: z + .string() + .trim() + .optional() + .default("10") + .describe("The pagination size of collection requests."), + pageToken: z + .string() + .trim() + .optional() + .default("0") + .describe("An opaque pagination token for collection requests."), +}); + +export class ListTableFlowCatalogIntegrationsHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { clusterId, environmentId, baseUrl } = + listTableFlowCatalogIntegrationsArguments.parse(toolArguments); + + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + environmentId, + ); + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + clusterId, + ); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/catalog-integrations?environment=${environment_id}&spec.kafka_cluster=${kafka_cluster_id}` + ].GET({ + params: { + path: { + environment_id: environment_id, + kafka_cluster_id: kafka_cluster_id, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to list Tableflow catalog integrations for ${clusterId}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow catalog integrations: ${JSON.stringify(response)}`, + ); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.LIST_TABLEFLOW_CATALOG_INTEGRATIONS, + description: `Retrieve a sorted, filtered, paginated list of 
all catalog integrations.`, + inputSchema: listTableFlowCatalogIntegrationsArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/catalog/read-tableflow-catalog-integration-handler.ts b/src/confluent/tools/handlers/tableflow/catalog/read-tableflow-catalog-integration-handler.ts new file mode 100644 index 0000000..08abe6a --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/catalog/read-tableflow-catalog-integration-handler.ts @@ -0,0 +1,100 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const readTableflowCatalogIntegrationArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? 
"") + .optional(), + id: z.string().describe("The unique identifier for the catalog integration."), + environmentId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the given environment."), + clusterId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the give Kafka cluster."), +}); + +export class ReadTableFlowCatalogIntegrationHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { id, environmentId, clusterId, baseUrl } = + readTableflowCatalogIntegrationArguments.parse(toolArguments); + + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + environmentId, + ); + + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + clusterId, + ); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/catalog-integrations/${id}?environment=${environment_id}&spec.kafka_cluster=${kafka_cluster_id}` + ].GET({ + params: { + path: { + id: id, + environment_id: environment_id, + kafka_cluster_id: kafka_cluster_id, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to read Tableflow catalog integration ${id}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow catalog integration: ${JSON.stringify(response)}`, + ); + } + + getToolConfig(): ToolConfig { + return { + name: ToolName.READ_TABLEFLOW_CATALOG_INTEGRATION, + description: `Make a request to read a catalog integration.`, + inputSchema: readTableflowCatalogIntegrationArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + 
isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/catalog/update-tableflow-catalog-integration-handler.ts b/src/confluent/tools/handlers/tableflow/catalog/update-tableflow-catalog-integration-handler.ts new file mode 100644 index 0000000..99f9d17 --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/catalog/update-tableflow-catalog-integration-handler.ts @@ -0,0 +1,112 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const updateTableflowCatalogIntegrationArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? "") + .optional(), + tableflowCatalogIntegrationConfig: z.object({ + // Required fields + display_name: z + .string() + .describe("The name of the Kafka topic for which Tableflow is enabled."), + environment: z.object({ + id: z + .string() + .describe( + "The unique identifier for the environment this resource belongs to.", + ), + }), + kafka_cluster: z.object({ + id: z.string().describe("ID of the referred resource"), + environment: z + .string() + .describe("Environment of the referred resource, if env-scoped"), + }), + config: z.object({ + kind: z + .string() + .default("AwsGlue") + .describe("The type of the catalog integration. AwsGlue, Snowflake"), + }), + // Optional fields + suspended: z + .boolean() + .optional() + .default(false) + .describe( + "Indicates whether Tableflow Catalog Integration should be suspended. 
The API allows setting it only to false i.e resume the Catalog Integration.", + ), + }), +}); + +export class UpdateTableFlowCatalogIntegrationHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { baseUrl, tableflowCatalogIntegrationConfig } = + updateTableflowCatalogIntegrationArguments.parse(toolArguments); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { environment, ...restOfTableflowCatalogIntegrationConfig } = + tableflowCatalogIntegrationConfig; + + const { data: response, error } = await pathBasedClient[ + "/tableflow/v1/catalog-integrations" + ].POST({ + body: { + spec: { + ...restOfTableflowCatalogIntegrationConfig, + environment: { id: environment.id }, // Only include id, as the general environment as requires readonly and resource_name + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } as any, // Due to how OpenAPI specification is structured and how generators interpret it, we have to treat it as any, as + // The most likely culprit for mismatch is the reuse of a single base schema for both input (requestBody) and output (responses) evironment. 
+ }, + }); + if (error) { + return this.createResponse( + `Failed to update Tableflow Catalog Integration for ${tableflowCatalogIntegrationConfig.display_name}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow Catalog Integration ${tableflowCatalogIntegrationConfig.display_name} updated: ${JSON.stringify(response)}`, + ); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.UPDATE_TABLEFLOW_CATALOG_INTEGRATION, + description: `Make a request to update a catalog integration.`, + inputSchema: updateTableflowCatalogIntegrationArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/list-tableflow-regions-handler.ts b/src/confluent/tools/handlers/tableflow/list-tableflow-regions-handler.ts new file mode 100644 index 0000000..fecf3db --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/list-tableflow-regions-handler.ts @@ -0,0 +1,90 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const listTableFlowRegionsArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? 
"") + .optional(), + cloud: z + .string() + .trim() + .optional() + .describe("Filter the results by exact match for cloud."), + pageSize: z + .string() + .trim() + .optional() + .default("10") + .describe("The pagination size of collection requests."), + pageToken: z + .string() + .trim() + .optional() + .default("0") + .describe("An opaque pagination token for collection requests."), +}); + +export class ListTableFlowRegionsHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { cloud, baseUrl } = + listTableFlowRegionsArguments.parse(toolArguments); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/regions?cloud=${cloud}` + ].GET({ + params: { + path: { + cloud: cloud, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to list Tableflow regions for ${cloud}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow Regions: ${JSON.stringify(response)}`, + ); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.LIST_TABLEFLOW_REGIONS, + description: `Retrieve a sorted, filtered, paginated list of all tableflow regions.`, + inputSchema: listTableFlowRegionsArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts b/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts new file mode 100644 index 0000000..0ce2cf5 --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts @@ -0,0 +1,135 @@ 
+import { ClientManager } from "@src/confluent/client-manager.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const createTableflowTopicArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? "") + .optional(), + tableflowTopicConfig: z.object({ + // Required fields + display_name: z + .string() + .describe("The name of the Kafka topic for which Tableflow is enabled."), + storage: z.object({ + kind: z + .enum(["ByobAws", "Managed"]) + .default("ByobAws") + .describe("The storage type either 'Managed' or 'ByobAws'."), + bucket_name: z.string().describe("The bucket name."), + provider_integration_id: z + .string() + .describe("The provider integration id."), + }), + environment: z.object({ + id: z + .string() + .describe( + "The unique identifier for the environment this resource belongs to.", + ), + }), + kafka_cluster: z.object({ + id: z.string().describe("ID of the referred resource"), + environment: z + .string() + .describe("Environment of the referred resource, if env-scoped"), + }), + // Optional fields + suspended: z + .boolean() + .optional() + .default(false) + .describe( + "Indicates whether Tableflow should be suspended. 
The API allows setting it only to false i.e resume the Tableflow.", + ), + config: z.object({ + retention_ms: z + .string() + .default("6048000000") // equivalent to 7 days + .describe( + "The maximum age, in milliseconds, of snapshots (for Iceberg) or versions(for Delta) to retain in the table for the Tableflow-enabled topic.", + ), + record_failure_strategy: z + .string() + .default("SUSPENDED") + .describe( + "The strategy to handle record failures in the Tableflow enabled topic during materialization.", + ), + }), + table_formats: z + .array(z.string()) + .default(["ICEBERG"]) + .describe( + "The supported table formats for the Tableflow-enabled topic e.g ICEBERG, DELTA", + ), + }), +}); + +export class CreateTableFlowTopicHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { baseUrl, tableflowTopicConfig } = + createTableflowTopicArguments.parse(toolArguments); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { environment, ...restOfTableflowConfig } = tableflowTopicConfig; + + const { data: response, error } = await pathBasedClient[ + "/tableflow/v1/tableflow-topics" + ].POST({ + body: { + spec: { + ...restOfTableflowConfig, + environment: { id: environment.id }, // Only include id, as the general environment as requires readonly and resource_name + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } as any, // Due to how OpenAPI specification is structured and how generators interpret it, we have to treat it as any, as + // The most likely culprit for mismatch is the reuse of a single base schema for both input (requestBody) and output (responses) evironment. 
+ }, + }); + if (error) { + return this.createResponse( + `Failed to create Tableflow topic for ${tableflowTopicConfig.display_name}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow Topic ${tableflowTopicConfig.display_name} created: ${JSON.stringify(response)}`, + ); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.CREATE_TABLEFLOW_TOPIC, + description: `Make a request to create a tableflow topic.`, + inputSchema: createTableflowTopicArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/topic/delete-tableflow-topic-handler.ts b/src/confluent/tools/handlers/tableflow/topic/delete-tableflow-topic-handler.ts new file mode 100644 index 0000000..c196ad4 --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/topic/delete-tableflow-topic-handler.ts @@ -0,0 +1,102 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const deleteTableflowTopicArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? 
"") + .optional(), + display_name: z + .string() + .describe("The name of the Kafka topic for which Tableflow is enabled."), + environmentId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the given environment."), + clusterId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the give Kafka cluster."), +}); + +export class DeleteTableFlowTopicHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { display_name, environmentId, clusterId, baseUrl } = + deleteTableflowTopicArguments.parse(toolArguments); + + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + environmentId, + ); + + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + clusterId, + ); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/tableflow-topics/${display_name}?environment=${environment_id}&spec.kafka_cluster=${kafka_cluster_id}` + ].DELETE({ + params: { + path: { + display_name: display_name, + environment_id: environment_id, + kafka_cluster_id: kafka_cluster_id, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to delete Tableflow topic ${display_name}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow Topic ${display_name} deleted: ${JSON.stringify(response)}`, + ); + } + + getToolConfig(): ToolConfig { + return { + name: ToolName.DELETE_TABLEFLOW_TOPIC, + description: `Make a request to delete a tableflow topic.`, + inputSchema: deleteTableflowTopicArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; 
+ } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/topic/list-tableflow-topics-handler.ts b/src/confluent/tools/handlers/tableflow/topic/list-tableflow-topics-handler.ts new file mode 100644 index 0000000..0e5fb6c --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/topic/list-tableflow-topics-handler.ts @@ -0,0 +1,108 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const listTableFlowTopicArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? 
"") + .optional(), + environmentId: z + .string() + .trim() + .optional() + .describe( + "The unique identifier for the enviornment this resource belongs to.", + ), + clusterId: z + .string() + .trim() + .optional() + .describe("The unique identifier for the Kafka Cluster."), + pageSize: z + .string() + .trim() + .optional() + .default("10") + .describe("The pagination size of collection requests."), + pageToken: z + .string() + .trim() + .optional() + .default("0") + .describe("An opaque pagination token for collection requests."), +}); + +export class ListTableFlowTopicsHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { clusterId, environmentId, baseUrl } = + listTableFlowTopicArguments.parse(toolArguments); + + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + environmentId, + ); + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + clusterId, + ); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/tableflow-topics?environment=${environment_id}&spec.kafka_cluster=${kafka_cluster_id}` + ].GET({ + params: { + path: { + environment_id: environment_id, + kafka_cluster_id: kafka_cluster_id, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to list Tableflow topics for ${clusterId}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse(`Tableflow Topics: ${JSON.stringify(response)}`); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.LIST_TABLEFLOW_TOPICS, + description: `Retrieve a sorted, filtered, paginated list of all tableflow topics.`, + inputSchema: listTableFlowTopicArguments.shape, + }; + } 
+ + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/topic/read-tableflow-topic-handler.ts b/src/confluent/tools/handlers/tableflow/topic/read-tableflow-topic-handler.ts new file mode 100644 index 0000000..b86c06f --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/topic/read-tableflow-topic-handler.ts @@ -0,0 +1,100 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const readTableflowTopicArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? 
"") + .optional(), + display_name: z + .string() + .describe("The name of the Kafka topic for which Tableflow is enabled."), + environmentId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the given environment."), + clusterId: z + .string() + .trim() + .optional() + .describe("Scope the operation to the give Kafka cluster."), +}); + +export class ReadTableFlowTopicHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { display_name, environmentId, clusterId, baseUrl } = + readTableflowTopicArguments.parse(toolArguments); + + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + environmentId, + ); + + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + clusterId, + ); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/tableflow-topics/${display_name}?environment=${environment_id}&spec.kafka_cluster=${kafka_cluster_id}` + ].GET({ + params: { + path: { + display_name: display_name, + environment_id: environment_id, + kafka_cluster_id: kafka_cluster_id, + }, + }, + }); + if (error) { + return this.createResponse( + `Failed to read Tableflow topic ${display_name}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse(`Tableflow Topic: ${JSON.stringify(response)}`); + } + + getToolConfig(): ToolConfig { + return { + name: ToolName.READ_TABLEFLOW_TOPIC, + description: `Make a request to read a tableflow topic.`, + inputSchema: readTableflowTopicArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + 
return true; + } +} diff --git a/src/confluent/tools/handlers/tableflow/topic/update-tableflow-topic-handler.ts b/src/confluent/tools/handlers/tableflow/topic/update-tableflow-topic-handler.ts new file mode 100644 index 0000000..a3f8c55 --- /dev/null +++ b/src/confluent/tools/handlers/tableflow/topic/update-tableflow-topic-handler.ts @@ -0,0 +1,140 @@ +import { ClientManager } from "@src/confluent/client-manager.js"; +import { CallToolResult } from "@src/confluent/schema.js"; +import { + BaseToolHandler, + ToolConfig, +} from "@src/confluent/tools/base-tools.js"; +import { ToolName } from "@src/confluent/tools/tool-name.js"; +import { EnvVar } from "@src/env-schema.js"; +import env from "@src/env.js"; +import { wrapAsPathBasedClient } from "openapi-fetch"; +import { z } from "zod"; + +const updateTableflowTopicArguments = z.object({ + baseUrl: z + .string() + .trim() + .describe("The base url of the Tableflow REST API.") + .url() + .default(() => env.CONFLUENT_CLOUD_REST_ENDPOINT ?? "") + .optional(), + display_name: z + .string() + .describe("The name of the Kafka topic for which Tableflow is enabled."), + tableflowTopicConfig: z.object({ + // Required fields + storage: z.object({ + kind: z + .enum(["ByobAws", "Managed"]) + .default("ByobAws") + .describe("The storage type either 'Managed' or 'ByobAws'."), + bucket_name: z.string().describe("The bucket name."), + provider_integration_id: z + .string() + .describe("The provider integration id."), + }), + environment: z.object({ + id: z + .string() + .describe( + "The unique identifier for the environment this resource belongs to.", + ), + }), + kafka_cluster: z.object({ + id: z.string().describe("ID of the referred resource"), + environment: z + .string() + .describe("Environment of the referred resource, if env-scoped"), + }), + // Optional fields + suspended: z + .boolean() + .optional() + .default(false) + .describe( + "Indicates whether Tableflow should be suspended. 
The API allows setting it only to false i.e resume the Tableflow.", + ), + config: z.object({ + retention_ms: z + .string() + .default("6048000000") // equivalent to 7 days + .describe( + "The maximum age, in milliseconds, of snapshots (for Iceberg) or versions(for Delta) to retain in the table for the Tableflow-enabled topic.", + ), + record_failure_strategy: z + .string() + .default("SUSPENDED") + .describe( + "The strategy to handle record failures in the Tableflow enabled topic during materialization.", + ), + }), + table_formats: z + .array(z.string()) + .default(["ICEBERG"]) + .describe( + "The supported table formats for the Tableflow-enabled topic e.g ICEBERG, DELTA", + ), + }), +}); + +export class UpdateTableFlowTopicHandler extends BaseToolHandler { + async handle( + clientManager: ClientManager, + toolArguments: Record | undefined, + ): Promise { + const { baseUrl, display_name, tableflowTopicConfig } = + updateTableflowTopicArguments.parse(toolArguments); + + if (baseUrl !== undefined && baseUrl !== "") { + clientManager.setConfluentCloudRestEndpoint(baseUrl); + } + + const pathBasedClient = wrapAsPathBasedClient( + clientManager.getConfluentCloudRestClient(), + ); + + const { environment, ...restOfTableflowConfig } = tableflowTopicConfig; + + const { data: response, error } = await pathBasedClient[ + `/tableflow/v1/tableflow-topics/${display_name}` + ].PATCH({ + params: { + path: { + display_name: display_name, + }, + }, + body: { + spec: { + ...restOfTableflowConfig, + environment: { id: environment.id }, // Only include id, as the general environment as requires readonly and resource_name + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } as any, // Due to how OpenAPI specification is structured and how generators interpret it, we have to treat it as any, as + // The most likely culprit for mismatch is the reuse of a single base schema for both input (requestBody) and output (responses) evironment. 
+ }, + }); + if (error) { + return this.createResponse( + `Failed to update Tableflow topic for ${display_name}: ${JSON.stringify(error)}`, + true, + ); + } + return this.createResponse( + `Tableflow Topic ${display_name} updated: ${JSON.stringify(response)}`, + ); + } + getToolConfig(): ToolConfig { + return { + name: ToolName.UPDATE_TABLEFLOW_TOPIC, + description: `Make a request to update a tableflow topic.`, + inputSchema: updateTableflowTopicArguments.shape, + }; + } + + getRequiredEnvVars(): EnvVar[] { + return ["CONFLUENT_CLOUD_API_KEY", "CONFLUENT_CLOUD_API_SECRET"]; + } + + isConfluentCloudOnly(): boolean { + return true; + } +} diff --git a/src/confluent/tools/tool-factory.ts b/src/confluent/tools/tool-factory.ts index 80e1796..eed0009 100644 --- a/src/confluent/tools/tool-factory.ts +++ b/src/confluent/tools/tool-factory.ts @@ -24,6 +24,17 @@ import { ProduceKafkaMessageHandler } from "@src/confluent/tools/handlers/kafka/ import { ListSchemasHandler } from "@src/confluent/tools/handlers/schema/list-schemas-handler.js"; import { SearchTopicsByTagHandler } from "@src/confluent/tools/handlers/search/search-topic-by-tag-handler.js"; import { SearchTopicsByNameHandler } from "@src/confluent/tools/handlers/search/search-topics-by-name-handler.js"; +import { UpdateTableFlowTopicHandler } from "./handlers/tableflow/topic/update-tableflow-topic-handler.js"; +import { CreateTableFlowTopicHandler } from "./handlers/tableflow/topic/create-tableflow-topic-handler.js"; +import { DeleteTableFlowTopicHandler } from "./handlers/tableflow/topic/delete-tableflow-topic-handler.js"; +import { ListTableFlowRegionsHandler } from "./handlers/tableflow/list-tableflow-regions-handler.js"; +import { ListTableFlowTopicsHandler } from "./handlers/tableflow/topic/list-tableflow-topics-handler.js"; +import { ReadTableFlowTopicHandler } from "./handlers/tableflow/topic/read-tableflow-topic-handler.js"; +import { CreateTableFlowCatalogIntegrationHandler } from 
"./handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.js"; +import { ReadTableFlowCatalogIntegrationHandler } from "./handlers/tableflow/catalog/read-tableflow-catalog-integration-handler.js"; +import { ListTableFlowCatalogIntegrationsHandler } from "./handlers/tableflow/catalog/list-tableflow-catalog-integrations-handler.js"; +import { UpdateTableFlowCatalogIntegrationHandler } from "./handlers/tableflow/catalog/update-tableflow-catalog-integration-handler.js"; +import { DeleteTableFlowCatalogIntegrationHandler } from "./handlers/tableflow/catalog/delete-tableflow-catalog-integration-handler.js"; import { ToolName } from "@src/confluent/tools/tool-name.js"; import { GetTopicConfigHandler } from "@src/confluent/tools/handlers/kafka/get-topic-config.js"; @@ -55,6 +66,32 @@ export class ToolFactory { [ToolName.LIST_SCHEMAS, new ListSchemasHandler()], [ToolName.CONSUME_MESSAGES, new ConsumeKafkaMessagesHandler()], [ToolName.GET_TOPIC_CONFIG, new GetTopicConfigHandler()], + [ToolName.CREATE_TABLEFLOW_TOPIC, new CreateTableFlowTopicHandler()], + [ToolName.LIST_TABLEFLOW_REGIONS, new ListTableFlowRegionsHandler()], + [ToolName.LIST_TABLEFLOW_TOPICS, new ListTableFlowTopicsHandler()], + [ToolName.READ_TABLEFLOW_TOPIC, new ReadTableFlowTopicHandler()], + [ToolName.UPDATE_TABLEFLOW_TOPIC, new UpdateTableFlowTopicHandler()], + [ToolName.DELETE_TABLEFLOW_TOPIC, new DeleteTableFlowTopicHandler()], + [ + ToolName.CREATE_TABLEFLOW_CATALOG_INTEGRATION, + new CreateTableFlowCatalogIntegrationHandler(), + ], + [ + ToolName.READ_TABLEFLOW_CATALOG_INTEGRATION, + new ReadTableFlowCatalogIntegrationHandler(), + ], + [ + ToolName.LIST_TABLEFLOW_CATALOG_INTEGRATIONS, + new ListTableFlowCatalogIntegrationsHandler(), + ], + [ + ToolName.UPDATE_TABLEFLOW_CATALOG_INTEGRATION, + new UpdateTableFlowCatalogIntegrationHandler(), + ], + [ + ToolName.DELETE_TABLEFLOW_CATALOG_INTEGRATION, + new DeleteTableFlowCatalogIntegrationHandler(), + ], ]); static 
createToolHandler(toolName: ToolName): ToolHandler { diff --git a/src/confluent/tools/tool-name.ts b/src/confluent/tools/tool-name.ts index 97e0a7c..69f6626 100644 --- a/src/confluent/tools/tool-name.ts +++ b/src/confluent/tools/tool-name.ts @@ -25,4 +25,15 @@ export enum ToolName { READ_ENVIRONMENT = "read-environment", LIST_SCHEMAS = "list-schemas", GET_TOPIC_CONFIG = "get-topic-config", + CREATE_TABLEFLOW_TOPIC = "create-tableflow-topic", + LIST_TABLEFLOW_REGIONS = "list-tableflow-regions", + LIST_TABLEFLOW_TOPICS = "list-tableflow-topics", + READ_TABLEFLOW_TOPIC = "read-tableflow-topic", + UPDATE_TABLEFLOW_TOPIC = "update-tableflow-topic", + DELETE_TABLEFLOW_TOPIC = "delete-tableflow-topic", + CREATE_TABLEFLOW_CATALOG_INTEGRATION = "create-tableflow-catalog-integration", + LIST_TABLEFLOW_CATALOG_INTEGRATIONS = "list-tableflow-catalog-integrations", + READ_TABLEFLOW_CATALOG_INTEGRATION = "read-tableflow-catalog-integration", + UPDATE_TABLEFLOW_CATALOG_INTEGRATION = "update-tableflow-catalog-integration", + DELETE_TABLEFLOW_CATALOG_INTEGRATION = "delete-tableflow-catalog-integration", } diff --git a/src/index.ts b/src/index.ts index b4f6311..51d5ea9 100644 --- a/src/index.ts +++ b/src/index.ts @@ -158,7 +158,7 @@ async function main() { process.on("SIGQUIT", performCleanup); process.on("SIGUSR2", performCleanup); } catch (error) { - logger.error({ error }, "Error starting server"); + logger.error({ err: error }, "Error starting server"); process.exit(1); } } From 5336436ec412aa2941fca1b93a411b5318f20034 Mon Sep 17 00:00:00 2001 From: stephenheg Date: Wed, 28 May 2025 11:04:26 +0100 Subject: [PATCH 2/2] refactor to use env id and cluster id for create tableflow apis --- ...e-tableflow-catalog-integration-handler.ts | 37 +++++++++--------- .../topic/create-tableflow-topic-handler.ts | 38 ++++++++++--------- 2 files changed, 39 insertions(+), 36 deletions(-) diff --git a/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts 
b/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts index e34d794..2b3c895 100644 --- a/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts +++ b/src/confluent/tools/handlers/tableflow/catalog/create-tableflow-catalog-integration-handler.ts @@ -1,4 +1,5 @@ import { ClientManager } from "@src/confluent/client-manager.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; import { CallToolResult } from "@src/confluent/schema.js"; import { BaseToolHandler, @@ -23,19 +24,6 @@ const createTableflowCatalogIntegrationArguments = z.object({ display_name: z .string() .describe("The name of the Kafka topic for which Tableflow is enabled."), - environment: z.object({ - id: z - .string() - .describe( - "The unique identifier for the environment this resource belongs to.", - ), - }), - kafka_cluster: z.object({ - id: z.string().describe("ID of the referred resource"), - environment: z - .string() - .describe("Environment of the referred resource, if env-scoped"), - }), config: z.object({ kind: z .string() @@ -64,6 +52,16 @@ export class CreateTableFlowCatalogIntegrationHandler extends BaseToolHandler { const { baseUrl, tableflowCatalogIntegrationConfig } = createTableflowCatalogIntegrationArguments.parse(toolArguments); + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + ); + + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + ); + if (baseUrl !== undefined && baseUrl !== "") { clientManager.setConfluentCloudRestEndpoint(baseUrl); } @@ -72,16 +70,19 @@ export class CreateTableFlowCatalogIntegrationHandler extends BaseToolHandler { clientManager.getConfluentCloudRestClient(), ); - const { environment, ...restOfTableflowCatalogIntegrationConfig } = - tableflowCatalogIntegrationConfig; - const { data: response, error } = await pathBasedClient[ "/tableflow/v1/catalog-integrations" ].POST({ 
body: { spec: { - ...restOfTableflowCatalogIntegrationConfig, - environment: { id: environment.id }, // Only include id, as the general environment as requires readonly and resource_name + environment: { + id: environment_id, // Only include id, as the general environment object also requires readonly and resource_name + }, + kafka_cluster: { + id: kafka_cluster_id, + environment: environment_id, + }, + ...tableflowCatalogIntegrationConfig, // eslint-disable-next-line @typescript-eslint/no-explicit-any } as any, // Due to how OpenAPI specification is structured and how generators interpret it, we have to treat it as any, as // The most likely culprit for mismatch is the reuse of a single base schema for both input (requestBody) and output (responses) evironment. diff --git a/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts b/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts index 0ce2cf5..76fb7ee 100644 --- a/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts +++ b/src/confluent/tools/handlers/tableflow/topic/create-tableflow-topic-handler.ts @@ -4,6 +4,7 @@ import { BaseToolHandler, ToolConfig, } from "@src/confluent/tools/base-tools.js"; +import { getEnsuredParam } from "@src/confluent/helpers.js"; import { ToolName } from "@src/confluent/tools/tool-name.js"; import { EnvVar } from "@src/env-schema.js"; import env from "@src/env.js"; @@ -33,19 +34,6 @@ const createTableflowTopicArguments = z.object({ .string() .describe("The provider integration id."), }), - environment: z.object({ - id: z - .string() - .describe( - "The unique identifier for the environment this resource belongs to.", - ), - }), - kafka_cluster: z.object({ - id: z.string().describe("ID of the referred resource"), - environment: z - .string() - .describe("Environment of the referred resource, if env-scoped"), - }), // Optional fields suspended: z .boolean() @@ -63,7 +51,7 @@ const createTableflowTopicArguments 
= z.object({ ), record_failure_strategy: z .string() - .default("SUSPENDED") + .default("SUSPEND") .describe( "The strategy to handle record failures in the Tableflow enabled topic during materialization.", ), @@ -85,6 +73,16 @@ export class CreateTableFlowTopicHandler extends BaseToolHandler { const { baseUrl, tableflowTopicConfig } = createTableflowTopicArguments.parse(toolArguments); + const environment_id = getEnsuredParam( + "KAFKA_ENV_ID", + "Environment ID is required", + ); + + const kafka_cluster_id = getEnsuredParam( + "KAFKA_CLUSTER_ID", + "Kafka Cluster ID is required", + ); + if (baseUrl !== undefined && baseUrl !== "") { clientManager.setConfluentCloudRestEndpoint(baseUrl); } @@ -93,15 +91,19 @@ export class CreateTableFlowTopicHandler extends BaseToolHandler { clientManager.getConfluentCloudRestClient(), ); - const { environment, ...restOfTableflowConfig } = tableflowTopicConfig; - const { data: response, error } = await pathBasedClient[ "/tableflow/v1/tableflow-topics" ].POST({ body: { spec: { - ...restOfTableflowConfig, - environment: { id: environment.id }, // Only include id, as the general environment as requires readonly and resource_name + environment: { + id: environment_id, // Only include id, as the general environment object also requires readonly and resource_name + }, + kafka_cluster: { + id: kafka_cluster_id, + environment: environment_id, + }, + ...tableflowTopicConfig, // eslint-disable-next-line @typescript-eslint/no-explicit-any } as any, // Due to how OpenAPI specification is structured and how generators interpret it, we have to treat it as any, as // The most likely culprit for mismatch is the reuse of a single base schema for both input (requestBody) and output (responses) evironment. pFad - Phonifier reborn
