Skip to content
Start here

Chat Completions

client.AISearch.Instances.ChatCompletions(ctx, id, params) (*InstanceChatCompletionsResponse, error)
POST/accounts/{account_id}/ai-search/instances/{id}/chat/completions

Performs a chat completion request against an AI Search instance, using indexed content as context for generating responses.

Security
API Token

The preferred authorization scheme for interacting with the Cloudflare API. Create a token.

Example: Authorization: Bearer Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY
API Email + API Key

The previous authorization scheme for interacting with the Cloudflare API, used in conjunction with a Global API key.

Example: X-Auth-Email: user@example.com

The previous authorization scheme for interacting with the Cloudflare API. When possible, use API tokens instead of Global API keys.

Example: X-Auth-Key: 144c9defac04969c7bfad8efaa8ea194
Parameters
id string

AI Search instance ID. Lowercase alphanumeric, hyphens, and underscores.

maxLength64
minLength1
params InstanceChatCompletionsParams
AccountID param.Field[string]

Path param

Messages param.Field[[]InstanceChatCompletionsParamsMessage]

Body param

Content string
Role InstanceChatCompletionsParamsMessagesRole
One of the following:
const InstanceChatCompletionsParamsMessagesRoleSystem InstanceChatCompletionsParamsMessagesRole = "system"
const InstanceChatCompletionsParamsMessagesRoleDeveloper InstanceChatCompletionsParamsMessagesRole = "developer"
const InstanceChatCompletionsParamsMessagesRoleUser InstanceChatCompletionsParamsMessagesRole = "user"
const InstanceChatCompletionsParamsMessagesRoleAssistant InstanceChatCompletionsParamsMessagesRole = "assistant"
const InstanceChatCompletionsParamsMessagesRoleTool InstanceChatCompletionsParamsMessagesRole = "tool"
AISearchOptions param.Field[InstanceChatCompletionsParamsAISearchOptions]optional

Body param

Cache InstanceChatCompletionsParamsAISearchOptionsCacheoptional
CacheThreshold InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdoptional
One of the following:
const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdSuperStrictMatch InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "super_strict_match"
const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdCloseEnough InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "close_enough"
const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdFlexibleFriend InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "flexible_friend"
const InstanceChatCompletionsParamsAISearchOptionsCacheCacheThresholdAnythingGoes InstanceChatCompletionsParamsAISearchOptionsCacheCacheThreshold = "anything_goes"
Enabled booloptional
QueryRewrite InstanceChatCompletionsParamsAISearchOptionsQueryRewriteoptional
Enabled booloptional
Model InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModeloptional
One of the following:
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama3_3_70bInstructFp8Fast InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-3.3-70b-instruct-fp8-fast"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfZaiOrgGlm4_7Flash InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/zai-org/glm-4.7-flash"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama3_1_8bInstructFast InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-3.1-8b-instruct-fast"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama3_1_8bInstructFp8 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-3.1-8b-instruct-fp8"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMetaLlama4Scout17b16eInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/meta/llama-4-scout-17b-16e-instruct"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfQwenQwen3_30bA3bFp8 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/qwen/qwen3-30b-a3b-fp8"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfDeepseekAIDeepseekR1DistillQwen32b InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfMoonshotaiKimiK2Instruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/moonshotai/kimi-k2-instruct"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCfGoogleGemma3_12bIt InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "@cf/google/gemma-3-12b-it"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaude3_7Sonnet InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-3-7-sonnet"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaudeSonnet4 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-sonnet-4"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaudeOpus4 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-opus-4"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelAnthropicClaude3_5Haiku InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "anthropic/claude-3-5-haiku"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasQwen3_235bA22bInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/qwen-3-235b-a22b-instruct"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasQwen3_235bA22bThinking InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/qwen-3-235b-a22b-thinking"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasLlama3_3_70b InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/llama-3.3-70b"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasLlama4Maverick17b128eInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/llama-4-maverick-17b-128e-instruct"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasLlama4Scout17b16eInstruct InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/llama-4-scout-17b-16e-instruct"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelCerebrasGptOSs120b InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "cerebras/gpt-oss-120b"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGoogleAIStudioGemini2_5Flash InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "google-ai-studio/gemini-2.5-flash"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGoogleAIStudioGemini2_5Pro InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "google-ai-studio/gemini-2.5-pro"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGrokGrok4 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "grok/grok-4"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGroqLlama3_3_70bVersatile InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "groq/llama-3.3-70b-versatile"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelGroqLlama3_1_8bInstant InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "groq/llama-3.1-8b-instant"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelOpenAIGpt5 InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "openai/gpt-5"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelOpenAIGpt5Mini InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "openai/gpt-5-mini"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelOpenAIGpt5Nano InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = "openai/gpt-5-nano"
const InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModelEmpty InstanceChatCompletionsParamsAISearchOptionsQueryRewriteModel = ""
RewritePrompt stringoptional
Reranking InstanceChatCompletionsParamsAISearchOptionsRerankingoptional
Enabled booloptional
MatchThreshold float64optional
maximum1
minimum0
Model InstanceChatCompletionsParamsAISearchOptionsRerankingModeloptional
One of the following:
const InstanceChatCompletionsParamsAISearchOptionsRerankingModelCfBaaiBgeRerankerBase InstanceChatCompletionsParamsAISearchOptionsRerankingModel = "@cf/baai/bge-reranker-base"
const InstanceChatCompletionsParamsAISearchOptionsRerankingModelEmpty InstanceChatCompletionsParamsAISearchOptionsRerankingModel = ""
Retrieval InstanceChatCompletionsParamsAISearchOptionsRetrievaloptional
BoostBy []InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByoptional

Metadata fields to boost search results by. Overrides the instance-level boost_by config. Direction defaults to 'asc' for numeric/datetime fields, 'exists' for text/boolean fields. Fields must match 'timestamp' or a defined custom_metadata field.

Field string

Metadata field name to boost by. Use 'timestamp' for document freshness, or any custom_metadata field. Numeric and datetime fields support asc/desc directions; text/boolean fields support exists/not_exists.

maxLength64
minLength1
Direction InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionoptional

Boost direction. 'desc' = higher values rank higher (e.g. newer timestamps). 'asc' = lower values rank higher. 'exists' = boost chunks that have the field. 'not_exists' = boost chunks that lack the field. Optional — defaults to 'asc' for numeric/datetime fields, 'exists' for text/boolean fields.

One of the following:
const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionAsc InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "asc"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionDesc InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "desc"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionExists InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "exists"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirectionNotExists InstanceChatCompletionsParamsAISearchOptionsRetrievalBoostByDirection = "not_exists"
ContextExpansion int64optional
maximum3
minimum0
Filters map[string, unknown]optional
FusionMethod InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethodoptional
One of the following:
const InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethodMax InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethod = "max"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethodRrf InstanceChatCompletionsParamsAISearchOptionsRetrievalFusionMethod = "rrf"
KeywordMatchMode InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchModeoptional

Controls which documents are candidates for BM25 scoring. 'and' restricts candidates to documents containing all query terms; 'or' includes any document containing at least one term, ranked by BM25 relevance. Defaults to 'and'. Legacy values 'exact_match' and 'fuzzy_match' are accepted and map to 'and' and 'or' respectively.

One of the following:
const InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchModeAnd InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchMode = "and"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchModeOr InstanceChatCompletionsParamsAISearchOptionsRetrievalKeywordMatchMode = "or"
MatchThreshold float64optional
maximum1
minimum0
MaxNumResults int64optional
maximum50
minimum1
RetrievalType InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeoptional
One of the following:
const InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeVector InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType = "vector"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeKeyword InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType = "keyword"
const InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalTypeHybrid InstanceChatCompletionsParamsAISearchOptionsRetrievalRetrievalType = "hybrid"
ReturnOnFailure booloptional
Model param.Field[InstanceChatCompletionsParamsModel]optional

Body param

const InstanceChatCompletionsParamsModelCfMetaLlama3_3_70bInstructFp8Fast InstanceChatCompletionsParamsModel = "@cf/meta/llama-3.3-70b-instruct-fp8-fast"
const InstanceChatCompletionsParamsModelCfZaiOrgGlm4_7Flash InstanceChatCompletionsParamsModel = "@cf/zai-org/glm-4.7-flash"
const InstanceChatCompletionsParamsModelCfMetaLlama3_1_8bInstructFast InstanceChatCompletionsParamsModel = "@cf/meta/llama-3.1-8b-instruct-fast"
const InstanceChatCompletionsParamsModelCfMetaLlama3_1_8bInstructFp8 InstanceChatCompletionsParamsModel = "@cf/meta/llama-3.1-8b-instruct-fp8"
const InstanceChatCompletionsParamsModelCfMetaLlama4Scout17b16eInstruct InstanceChatCompletionsParamsModel = "@cf/meta/llama-4-scout-17b-16e-instruct"
const InstanceChatCompletionsParamsModelCfQwenQwen3_30bA3bFp8 InstanceChatCompletionsParamsModel = "@cf/qwen/qwen3-30b-a3b-fp8"
const InstanceChatCompletionsParamsModelCfDeepseekAIDeepseekR1DistillQwen32b InstanceChatCompletionsParamsModel = "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b"
const InstanceChatCompletionsParamsModelCfMoonshotaiKimiK2Instruct InstanceChatCompletionsParamsModel = "@cf/moonshotai/kimi-k2-instruct"
const InstanceChatCompletionsParamsModelCfGoogleGemma3_12bIt InstanceChatCompletionsParamsModel = "@cf/google/gemma-3-12b-it"
const InstanceChatCompletionsParamsModelAnthropicClaude3_7Sonnet InstanceChatCompletionsParamsModel = "anthropic/claude-3-7-sonnet"
const InstanceChatCompletionsParamsModelAnthropicClaudeSonnet4 InstanceChatCompletionsParamsModel = "anthropic/claude-sonnet-4"
const InstanceChatCompletionsParamsModelAnthropicClaudeOpus4 InstanceChatCompletionsParamsModel = "anthropic/claude-opus-4"
const InstanceChatCompletionsParamsModelAnthropicClaude3_5Haiku InstanceChatCompletionsParamsModel = "anthropic/claude-3-5-haiku"
const InstanceChatCompletionsParamsModelCerebrasQwen3_235bA22bInstruct InstanceChatCompletionsParamsModel = "cerebras/qwen-3-235b-a22b-instruct"
const InstanceChatCompletionsParamsModelCerebrasQwen3_235bA22bThinking InstanceChatCompletionsParamsModel = "cerebras/qwen-3-235b-a22b-thinking"
const InstanceChatCompletionsParamsModelCerebrasLlama3_3_70b InstanceChatCompletionsParamsModel = "cerebras/llama-3.3-70b"
const InstanceChatCompletionsParamsModelCerebrasLlama4Maverick17b128eInstruct InstanceChatCompletionsParamsModel = "cerebras/llama-4-maverick-17b-128e-instruct"
const InstanceChatCompletionsParamsModelCerebrasLlama4Scout17b16eInstruct InstanceChatCompletionsParamsModel = "cerebras/llama-4-scout-17b-16e-instruct"
const InstanceChatCompletionsParamsModelCerebrasGptOSs120b InstanceChatCompletionsParamsModel = "cerebras/gpt-oss-120b"
const InstanceChatCompletionsParamsModelGoogleAIStudioGemini2_5Flash InstanceChatCompletionsParamsModel = "google-ai-studio/gemini-2.5-flash"
const InstanceChatCompletionsParamsModelGoogleAIStudioGemini2_5Pro InstanceChatCompletionsParamsModel = "google-ai-studio/gemini-2.5-pro"
const InstanceChatCompletionsParamsModelGrokGrok4 InstanceChatCompletionsParamsModel = "grok/grok-4"
const InstanceChatCompletionsParamsModelGroqLlama3_3_70bVersatile InstanceChatCompletionsParamsModel = "groq/llama-3.3-70b-versatile"
const InstanceChatCompletionsParamsModelGroqLlama3_1_8bInstant InstanceChatCompletionsParamsModel = "groq/llama-3.1-8b-instant"
const InstanceChatCompletionsParamsModelOpenAIGpt5 InstanceChatCompletionsParamsModel = "openai/gpt-5"
const InstanceChatCompletionsParamsModelOpenAIGpt5Mini InstanceChatCompletionsParamsModel = "openai/gpt-5-mini"
const InstanceChatCompletionsParamsModelOpenAIGpt5Nano InstanceChatCompletionsParamsModel = "openai/gpt-5-nano"
const InstanceChatCompletionsParamsModelEmpty InstanceChatCompletionsParamsModel = ""
Stream param.Field[bool]optional

Body param

Returns
type InstanceChatCompletionsResponse struct{…}
Choices []InstanceChatCompletionsResponseChoice
Message InstanceChatCompletionsResponseChoicesMessage
Content string
Role InstanceChatCompletionsResponseChoicesMessageRole
One of the following:
const InstanceChatCompletionsResponseChoicesMessageRoleSystem InstanceChatCompletionsResponseChoicesMessageRole = "system"
const InstanceChatCompletionsResponseChoicesMessageRoleDeveloper InstanceChatCompletionsResponseChoicesMessageRole = "developer"
const InstanceChatCompletionsResponseChoicesMessageRoleUser InstanceChatCompletionsResponseChoicesMessageRole = "user"
const InstanceChatCompletionsResponseChoicesMessageRoleAssistant InstanceChatCompletionsResponseChoicesMessageRole = "assistant"
const InstanceChatCompletionsResponseChoicesMessageRoleTool InstanceChatCompletionsResponseChoicesMessageRole = "tool"
Index int64optional
Chunks []InstanceChatCompletionsResponseChunk
ID string
Score float64
maximum1
minimum0
Text string
Type string
Item InstanceChatCompletionsResponseChunksItemoptional
Key string
Metadata map[string, unknown]optional
Timestamp float64optional
ScoringDetails InstanceChatCompletionsResponseChunksScoringDetailsoptional
FusionMethod InstanceChatCompletionsResponseChunksScoringDetailsFusionMethodoptional
One of the following:
const InstanceChatCompletionsResponseChunksScoringDetailsFusionMethodRrf InstanceChatCompletionsResponseChunksScoringDetailsFusionMethod = "rrf"
const InstanceChatCompletionsResponseChunksScoringDetailsFusionMethodMax InstanceChatCompletionsResponseChunksScoringDetailsFusionMethod = "max"
KeywordRank float64optional
KeywordScore float64optional
minimum0
RerankingScore float64optional
maximum1
minimum0
VectorRank float64optional
VectorScore float64optional
maximum1
minimum0
ID stringoptional
Model stringoptional
Object stringoptional

Chat Completions

package main

import (
  "context"
  "fmt"

  "github.com/cloudflare/cloudflare-go"
  "github.com/cloudflare/cloudflare-go/ai_search"
  "github.com/cloudflare/cloudflare-go/option"
)

func main() {
	// Authenticate with an API token (the preferred scheme; see the
	// Security section above).
	client := cloudflare.NewClient(
		option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"),
	)

	// Build the request body: one system message sent to the
	// "my-ai-search" instance under the given account.
	params := ai_search.InstanceChatCompletionsParams{
		AccountID: cloudflare.F("c3dc5f0b34a14ff8e1b3ec04895e1b22"),
		Messages: cloudflare.F([]ai_search.InstanceChatCompletionsParamsMessage{{
			Content: cloudflare.F("content"),
			Role:    cloudflare.F(ai_search.InstanceChatCompletionsParamsMessagesRoleSystem),
		}}),
	}

	response, err := client.AISearch.Instances.ChatCompletions(context.TODO(), "my-ai-search", params)
	if err != nil {
		panic(err.Error())
	}
	fmt.Printf("%+v\n", response.ID)
}
{
  "choices": [
    {
      "message": {
        "content": "content",
        "role": "system"
      },
      "index": 0
    }
  ],
  "chunks": [
    {
      "id": "id",
      "score": 0,
      "text": "text",
      "type": "type",
      "item": {
        "key": "key",
        "metadata": {
          "foo": "bar"
        },
        "timestamp": 0
      },
      "scoring_details": {
        "fusion_method": "rrf",
        "keyword_rank": 0,
        "keyword_score": 0,
        "reranking_score": 0,
        "vector_rank": 0,
        "vector_score": 0
      }
    }
  ],
  "id": "id",
  "model": "model",
  "object": "object"
}
Returns Examples
{
  "choices": [
    {
      "message": {
        "content": "content",
        "role": "system"
      },
      "index": 0
    }
  ],
  "chunks": [
    {
      "id": "id",
      "score": 0,
      "text": "text",
      "type": "type",
      "item": {
        "key": "key",
        "metadata": {
          "foo": "bar"
        },
        "timestamp": 0
      },
      "scoring_details": {
        "fusion_method": "rrf",
        "keyword_rank": 0,
        "keyword_score": 0,
        "reranking_score": 0,
        "vector_rank": 0,
        "vector_score": 0
      }
    }
  ],
  "id": "id",
  "model": "model",
  "object": "object"
}