## Chat Completions `client.AISearch.Instances.ChatCompletions(ctx, id, params) (*InstanceChatCompletionsResponse, error)` **post** `/accounts/{account_id}/ai-search/instances/{id}/chat/completions` Performs a chat completion request against an AI Search instance, using indexed content as context for generating responses. ### Parameters - `id string` AI Search instance ID. Lowercase alphanumeric, hyphens, and underscores. - `params InstanceChatCompletionsParams` - `AccountID param.Field[string]` Path param - `Messages param.Field[[]InstanceChatCompletionsParamsMessage]` Body param - `Content string` - `Role InstanceChatCompletionsParamsMessagesRole` - `const InstanceChatCompletionsParamsMessagesRoleSystem InstanceChatCompletionsParamsMessagesRole = "system"` - `const InstanceChatCompletionsParamsMessagesRoleDeveloper InstanceChatCompletionsParamsMessagesRole = "developer"` - `const InstanceChatCompletionsParamsMessagesRoleUser InstanceChatCompletionsParamsMessagesRole = "user"` - `const InstanceChatCompletionsParamsMessagesRoleAssistant InstanceChatCompletionsParamsMessagesRole = "assistant"` - `const InstanceChatCompletionsParamsMessagesRoleTool InstanceChatCompletionsParamsMessagesRole = "tool"` - `AISearchOptions param.Field[InstanceChatCompletionsParamsAISearchOptions]` Body param - `Cache InstanceChatCompletionsParamsAISearchOptionsCache` - `CacheThreshold InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold` - `const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdSuperStrictMatch InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "super_strict_match"` - `const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdCloseEnough InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "close_enough"` - `const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdFlexibleFriend InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "flexible_friend"` - `const 
InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdAnythingGoes InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "anything_goes"` - `Enabled bool` - `QueryRewrite InstanceChatCompletionsParamsAISearchOptionsQueryRewrite` - `Enabled bool` - `Model InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama3_3_70bInstructFp8Fast InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-3.3-70b-instruct-fp8-fast"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfZaiOrgGlm4_7Flash InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/zai-org/glm-4.7-flash"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama3_1_8bInstructFast InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-3.1-8b-instruct-fast"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama3_1_8bInstructFp8 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-3.1-8b-instruct-fp8"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama4Scout17b16eInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-4-scout-17b-16e-instruct"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfQwenQwen3_30bA3bFp8 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/qwen/qwen3-30b-a3b-fp8"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfDeepseekAIDeepseekR1DistillQwen32b InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMoonshotaiKimiK2Instruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/moonshotai/kimi-k2-instruct"` - `const 
InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfGoogleGemma3_12bIt InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/google/gemma-3-12b-it"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaude3_7Sonnet InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-3-7-sonnet"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaudeSonnet4 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-sonnet-4"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaudeOpus4 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-opus-4"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaude3_5Haiku InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-3-5-haiku"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasQwen3_235bA22bInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/qwen-3-235b-a22b-instruct"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasQwen3_235bA22bThinking InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/qwen-3-235b-a22b-thinking"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasLlama3_3_70b InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/llama-3.3-70b"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasLlama4Maverick17b128eInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/llama-4-maverick-17b-128e-instruct"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasLlama4Scout17b16eInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/llama-4-scout-17b-16e-instruct"` - `const 
InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasGptOSs120b InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/gpt-oss-120b"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGoogleAIStudioGemini2_5Flash InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "google-ai-studio/gemini-2.5-flash"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGoogleAIStudioGemini2_5Pro InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "google-ai-studio/gemini-2.5-pro"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGrokGrok4 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "grok/grok-4"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGroqLlama3_3_70bVersatile InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "groq/llama-3.3-70b-versatile"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGroqLlama3_1_8bInstant InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "groq/llama-3.1-8b-instant"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelOpenAIGpt5 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "openai/gpt-5"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelOpenAIGpt5Mini InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "openai/gpt-5-mini"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelOpenAIGpt5Nano InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "openai/gpt-5-nano"` - `const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelEmpty InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = ""` - `RewritePrompt string` - `Reranking InstanceChatCompletionsParamsAISearchOptionsReranking` - `Enabled bool` - `MatchThreshold float64` - `Model InstanceChatCompletionsParamsAISearchOptionsRerankingModel` - `const 
InstanceChatCompletionsParamsAISearchOptionsRerankingModelCfBaaiBgeRerankerBase InstanceChatCompletionsParamsAISearchOptionsRerankingModel = "@cf/baai/bge-reranker-base"` - `const InstanceChatCompletionsParamsAISearchOptionsRerankingModelEmpty InstanceChatCompletionsParamsAISearchOptionsRerankingModel = ""` - `Retrieval InstanceChatCompletionsParamsAISearchOptionsRetrieval` - `BoostBy []InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostBy` Metadata fields to boost search results by. Overrides the instance-level boost_by config. Direction defaults to 'asc' for numeric/datetime fields, 'exists' for text/boolean fields. Fields must match 'timestamp' or a defined custom_metadata field. - `Field string` Metadata field name to boost by. Use 'timestamp' for document freshness, or any custom_metadata field. Numeric and datetime fields support asc/desc directions; text/boolean fields support exists/not_exists. - `Direction InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection` Boost direction. 'desc' = higher values rank higher (e.g. newer timestamps). 'asc' = lower values rank higher. 'exists' = boost chunks that have the field. 'not_exists' = boost chunks that lack the field. Optional — defaults to 'asc' for numeric/datetime fields, 'exists' for text/boolean fields. 
- `const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionAsc InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "asc"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionDesc InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "desc"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionExists InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "exists"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionNotExists InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "not_exists"` - `ContextExpansion int64` - `Filters map[string, unknown]` - `FusionMethod InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethod` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethodMax InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethod = "max"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethodRrf InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethod = "rrf"` - `KeywordMatchMode InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchMode` Controls which documents are candidates for BM25 scoring. 'and' restricts candidates to documents containing all query terms; 'or' includes any document containing at least one term, ranked by BM25 relevance. Defaults to 'and'. Legacy values 'exact_match' and 'fuzzy_match' are accepted and map to 'and' and 'or' respectively. 
- `const InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchModeAnd InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchMode = "and"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchModeOr InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchMode = "or"` - `MatchThreshold float64` - `MaxNumResults int64` - `RetrievalType InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeVector InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType = "vector"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeKeyword InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType = "keyword"` - `const InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeHybrid InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType = "hybrid"` - `ReturnOnFailure bool` - `Model param.Field[InstanceChatCompletionsParamsModel]` Body param - `const InstanceChatCompletionsParamsModelCfMetaLlama3_3_70bInstructFp8Fast InstanceChatCompletionsParamsModel = "@cf/meta/llama-3.3-70b-instruct-fp8-fast"` - `const InstanceChatCompletionsParamsModelCfZaiOrgGlm4_7Flash InstanceChatCompletionsParamsModel = "@cf/zai-org/glm-4.7-flash"` - `const InstanceChatCompletionsParamsModelCfMetaLlama3_1_8bInstructFast InstanceChatCompletionsParamsModel = "@cf/meta/llama-3.1-8b-instruct-fast"` - `const InstanceChatCompletionsParamsModelCfMetaLlama3_1_8bInstructFp8 InstanceChatCompletionsParamsModel = "@cf/meta/llama-3.1-8b-instruct-fp8"` - `const InstanceChatCompletionsParamsModelCfMetaLlama4Scout17b16eInstruct InstanceChatCompletionsParamsModel = "@cf/meta/llama-4-scout-17b-16e-instruct"` - `const InstanceChatCompletionsParamsModelCfQwenQwen3_30bA3bFp8 InstanceChatCompletionsParamsModel = "@cf/qwen/qwen3-30b-a3b-fp8"` - `const InstanceChatCompletionsParamsModelCfDeepseekAIDeepseekR1DistillQwen32b 
InstanceChatCompletionsParamsModel = "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b"` - `const InstanceChatCompletionsParamsModelCfMoonshotaiKimiK2Instruct InstanceChatCompletionsParamsModel = "@cf/moonshotai/kimi-k2-instruct"` - `const InstanceChatCompletionsParamsModelCfGoogleGemma3_12bIt InstanceChatCompletionsParamsModel = "@cf/google/gemma-3-12b-it"` - `const InstanceChatCompletionsParamsModelAnthropicClaude3_7Sonnet InstanceChatCompletionsParamsModel = "anthropic/claude-3-7-sonnet"` - `const InstanceChatCompletionsParamsModelAnthropicClaudeSonnet4 InstanceChatCompletionsParamsModel = "anthropic/claude-sonnet-4"` - `const InstanceChatCompletionsParamsModelAnthropicClaudeOpus4 InstanceChatCompletionsParamsModel = "anthropic/claude-opus-4"` - `const InstanceChatCompletionsParamsModelAnthropicClaude3_5Haiku InstanceChatCompletionsParamsModel = "anthropic/claude-3-5-haiku"` - `const InstanceChatCompletionsParamsModelCerebrasQwen3_235bA22bInstruct InstanceChatCompletionsParamsModel = "cerebras/qwen-3-235b-a22b-instruct"` - `const InstanceChatCompletionsParamsModelCerebrasQwen3_235bA22bThinking InstanceChatCompletionsParamsModel = "cerebras/qwen-3-235b-a22b-thinking"` - `const InstanceChatCompletionsParamsModelCerebrasLlama3_3_70b InstanceChatCompletionsParamsModel = "cerebras/llama-3.3-70b"` - `const InstanceChatCompletionsParamsModelCerebrasLlama4Maverick17b128eInstruct InstanceChatCompletionsParamsModel = "cerebras/llama-4-maverick-17b-128e-instruct"` - `const InstanceChatCompletionsParamsModelCerebrasLlama4Scout17b16eInstruct InstanceChatCompletionsParamsModel = "cerebras/llama-4-scout-17b-16e-instruct"` - `const InstanceChatCompletionsParamsModelCerebrasGptOSs120b InstanceChatCompletionsParamsModel = "cerebras/gpt-oss-120b"` - `const InstanceChatCompletionsParamsModelGoogleAIStudioGemini2_5Flash InstanceChatCompletionsParamsModel = "google-ai-studio/gemini-2.5-flash"` - `const InstanceChatCompletionsParamsModelGoogleAIStudioGemini2_5Pro 
InstanceChatCompletionsParamsModel = "google-ai-studio/gemini-2.5-pro"` - `const InstanceChatCompletionsParamsModelGrokGrok4 InstanceChatCompletionsParamsModel = "grok/grok-4"` - `const InstanceChatCompletionsParamsModelGroqLlama3_3_70bVersatile InstanceChatCompletionsParamsModel = "groq/llama-3.3-70b-versatile"` - `const InstanceChatCompletionsParamsModelGroqLlama3_1_8bInstant InstanceChatCompletionsParamsModel = "groq/llama-3.1-8b-instant"` - `const InstanceChatCompletionsParamsModelOpenAIGpt5 InstanceChatCompletionsParamsModel = "openai/gpt-5"` - `const InstanceChatCompletionsParamsModelOpenAIGpt5Mini InstanceChatCompletionsParamsModel = "openai/gpt-5-mini"` - `const InstanceChatCompletionsParamsModelOpenAIGpt5Nano InstanceChatCompletionsParamsModel = "openai/gpt-5-nano"` - `const InstanceChatCompletionsParamsModelEmpty InstanceChatCompletionsParamsModel = ""` - `Stream param.Field[bool]` Body param ### Returns - `type InstanceChatCompletionsResponse struct{…}` - `Choices []InstanceChatCompletionsResponseChoice` - `Message InstanceChatCompletionsResponseChoicesMessage` - `Content string` - `Role InstanceChatCompletionsResponseChoicesMessageRole` - `const InstanceChatCompletionsResponseChoicesMessageRoleSystem InstanceChatCompletionsResponseChoicesMessageRole = "system"` - `const InstanceChatCompletionsResponseChoicesMessageRoleDeveloper InstanceChatCompletionsResponseChoicesMessageRole = "developer"` - `const InstanceChatCompletionsResponseChoicesMessageRoleUser InstanceChatCompletionsResponseChoicesMessageRole = "user"` - `const InstanceChatCompletionsResponseChoicesMessageRoleAssistant InstanceChatCompletionsResponseChoicesMessageRole = "assistant"` - `const InstanceChatCompletionsResponseChoicesMessageRoleTool InstanceChatCompletionsResponseChoicesMessageRole = "tool"` - `Index int64` - `Chunks []InstanceChatCompletionsResponseChunk` - `ID string` - `Score float64` - `Text string` - `Type string` - `Item InstanceChatCompletionsResponseChunksItem` - `Key 
string` - `Metadata map[string, unknown]` - `Timestamp float64` - `ScoringDetails InstanceChatCompletionsResponseChunksScoringDetails` - `FusionMethod InstanceChatCompletionsResponseChunksScoringDetailsFusionMethod` - `const InstanceChatCompletionsResponseChunksScoringDetailsFusionMethodRrf InstanceChatCompletionsResponseChunksScoringDetailsFusionMethod = "rrf"` - `const InstanceChatCompletionsResponseChunksScoringDetailsFusionMethodMax InstanceChatCompletionsResponseChunksScoringDetailsFusionMethod = "max"` - `KeywordRank float64` - `KeywordScore float64` - `RerankingScore float64` - `VectorRank float64` - `VectorScore float64` - `ID string` - `Model string` - `Object string` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/ai_search" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) response, err := client.AISearch.Instances.ChatCompletions( context.TODO(), "my-ai-search", ai_search.InstanceChatCompletionsParams{ AccountID: cloudflare.F("c3dc5f0b34a14ff8e1b3ec04895e1b22"), Messages: cloudflare.F([]ai_search.InstanceChatCompletionsParamsMessage{ai_search.InstanceChatCompletionsParamsMessage{ Content: cloudflare.F("content"), Role: cloudflare.F(ai_search.InstanceChatCompletionsParamsMessagesRoleSystem), }}), }, ) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", response.ID) } ``` #### Response ```json { "choices": [ { "message": { "content": "content", "role": "system" }, "index": 0 } ], "chunks": [ { "id": "id", "score": 0, "text": "text", "type": "type", "item": { "key": "key", "metadata": { "foo": "bar" }, "timestamp": 0 }, "scoring_details": { "fusion_method": "rrf", "keyword_rank": 0, "keyword_score": 0, "reranking_score": 0, "vector_rank": 0, "vector_score": 0 } } ], "id": "id", "model": "model", "object": "object" } ```