Skip to main content
GET
/
assistant
List Assistants
curl --request GET \
  --url https://api.cozmox.ai/assistant \
  --header 'Authorization: Bearer <token>'
[
  {
    "id": "<string>",
    "orgId": "<string>",
    "createdAt": "2023-11-07T05:31:56Z",
    "updatedAt": "2023-11-07T05:31:56Z",
    "transcriber": {
      "provider": "assembly-ai",
      "language": "en",
      "confidenceThreshold": 0.4,
      "enableUniversalStreamingApi": false,
      "formatTurns": false,
      "endOfTurnConfidenceThreshold": 0.7,
      "minEndOfTurnSilenceWhenConfident": 160,
      "wordFinalizationMaxWaitTime": 160,
      "maxTurnSilence": 400,
      "realtimeUrl": "<string>",
      "wordBoost": [
        "<string>"
      ],
      "endUtteranceSilenceThreshold": 123,
      "disablePartialTranscripts": true,
      "fallbackPlan": {
        "transcribers": [
          {
            "provider": "assembly-ai",
            "language": "en",
            "confidenceThreshold": 0.4,
            "enableUniversalStreamingApi": false,
            "formatTurns": false,
            "endOfTurnConfidenceThreshold": 0.7,
            "minEndOfTurnSilenceWhenConfident": 160,
            "wordFinalizationMaxWaitTime": 160,
            "maxTurnSilence": 400,
            "realtimeUrl": "<string>",
            "wordBoost": [
              "<string>"
            ],
            "endUtteranceSilenceThreshold": 123,
            "disablePartialTranscripts": true
          }
        ]
      }
    },
    "model": {
      "model": "claude-3-opus-20240229",
      "provider": "anthropic",
      "messages": [
        {
          "content": "<string>",
          "role": "assistant"
        }
      ],
      "tools": [
        {
          "type": "apiRequest",
          "method": "POST",
          "url": "<string>",
          "messages": [
            {
              "type": "request-start",
              "contents": [
                "<unknown>"
              ],
              "blocking": false,
              "content": "<string>",
              "conditions": [
                {
                  "operator": "eq",
                  "param": "<string>",
                  "value": "<string>"
                }
              ]
            }
          ],
          "timeoutSeconds": 20,
          "name": "<string>",
          "description": "<string>",
          "body": {
            "type": "string",
            "items": {},
            "properties": {},
            "description": "<string>",
            "pattern": "<string>",
            "format": "date-time",
            "required": [
              "<string>"
            ],
            "enum": [
              "<string>"
            ],
            "title": "<string>"
          },
          "headers": {
            "type": "string",
            "items": {},
            "properties": {},
            "description": "<string>",
            "pattern": "<string>",
            "format": "date-time",
            "required": [
              "<string>"
            ],
            "enum": [
              "<string>"
            ],
            "title": "<string>"
          },
          "backoffPlan": {
            "type": "fixed",
            "maxRetries": 0,
            "baseDelaySeconds": 1
          },
          "variableExtractionPlan": {
            "schema": {
              "type": "string",
              "items": {},
              "properties": {},
              "description": "<string>",
              "pattern": "<string>",
              "format": "date-time",
              "required": [
                "<string>"
              ],
              "enum": [
                "<string>"
              ],
              "title": "<string>"
            },
            "aliases": [
              {
                "key": "<string>",
                "value": "<string>"
              }
            ]
          },
          "function": {
            "name": "<string>",
            "strict": false,
            "description": "<string>",
            "parameters": {
              "type": "object",
              "properties": {},
              "required": [
                "<string>"
              ]
            }
          }
        }
      ],
      "toolIds": [
        "<string>"
      ],
      "knowledgeBase": {
        "provider": "custom-knowledge-base",
        "server": {
          "timeoutSeconds": 20,
          "url": "<string>",
          "headers": {},
          "backoffPlan": {
            "type": "fixed",
            "maxRetries": 0,
            "baseDelaySeconds": 1
          }
        }
      },
      "knowledgeBaseId": "<string>",
      "thinking": {
        "type": "enabled",
        "budgetTokens": 50512
      },
      "temperature": 1,
      "maxTokens": 5025,
      "emotionRecognitionEnabled": true,
      "numFastTurns": 1
    },
    "voice": {
      "provider": "azure",
      "voiceId": "andrew",
      "cachingEnabled": true,
      "chunkPlan": {
        "enabled": true,
        "minCharacters": 30,
        "punctuationBoundaries": [
          "。",
          ",",
          ".",
          "!",
          "?",
          ";",
          "،",
          "۔",
          "।",
          "॥",
          "|",
          "||",
          ",",
          ":"
        ],
        "formatPlan": {
          "enabled": true,
          "numberToDigitsCutoff": 2025,
          "replacements": [
            {
              "type": "exact",
              "key": "<string>",
              "value": "<string>",
              "replaceAllEnabled": false
            }
          ],
          "formattersEnabled": "markdown"
        }
      },
      "speed": 1.25,
      "fallbackPlan": {
        "voices": [
          {
            "provider": "azure",
            "voiceId": "andrew",
            "cachingEnabled": true,
            "speed": 1.25,
            "chunkPlan": {
              "enabled": true,
              "minCharacters": 30,
              "punctuationBoundaries": [
                "。",
                ",",
                ".",
                "!",
                "?",
                ";",
                "،",
                "۔",
                "।",
                "॥",
                "|",
                "||",
                ",",
                ":"
              ],
              "formatPlan": {
                "enabled": true,
                "numberToDigitsCutoff": 2025,
                "replacements": [
                  {
                    "type": "exact",
                    "key": "<string>",
                    "value": "<string>",
                    "replaceAllEnabled": false
                  }
                ],
                "formattersEnabled": "markdown"
              }
            }
          }
        ]
      }
    },
    "firstMessage": "Hello! How can I help you today?",
    "firstMessageInterruptionsEnabled": false,
    "firstMessageMode": "assistant-speaks-first",
    "voicemailDetection": {
      "provider": "google",
      "beepMaxAwaitSeconds": 30,
      "backoffPlan": {
        "startAtSeconds": 5,
        "frequencySeconds": 5,
        "maxRetries": 6
      }
    },
    "clientMessages": [
      "conversation-update",
      "function-call",
      "hang",
      "model-output",
      "speech-update",
      "status-update",
      "transfer-update",
      "transcript",
      "tool-calls",
      "user-interrupted",
      "voice-input",
      "workflow.node.started"
    ],
    "serverMessages": [
      "conversation-update",
      "end-of-call-report",
      "function-call",
      "hang",
      "speech-update",
      "status-update",
      "tool-calls",
      "transfer-destination-request",
      "user-interrupted"
    ],
    "silenceTimeoutSeconds": 30,
    "maxDurationSeconds": 600,
    "backgroundSound": "office",
    "backgroundDenoisingEnabled": false,
    "modelOutputInMessagesEnabled": false,
    "transportConfigurations": [
      {
        "provider": "twilio",
        "timeout": 60,
        "record": false,
        "recordingChannels": "mono"
      }
    ],
    "observabilityPlan": {
      "provider": "langfuse",
      "tags": [
        "<string>"
      ],
      "metadata": {}
    },
    "credentials": [
      {
        "provider": "anthropic",
        "apiKey": "<string>",
        "name": "<string>"
      }
    ],
    "hooks": [
      {
        "on": "call.ending",
        "do": [
          {
            "type": "transfer",
            "destination": {
              "type": "number",
              "number": "<string>",
              "message": "<string>",
              "numberE164CheckEnabled": true,
              "extension": "<string>",
              "callerId": "<string>",
              "transferPlan": {
                "mode": "blind-transfer",
                "message": "<string>",
                "timeout": 60,
                "sipVerb": "refer",
                "holdAudioUrl": "<string>",
                "transferCompleteAudioUrl": "<string>",
                "twiml": "<string>",
                "summaryPlan": {
                  "messages": [
                    {}
                  ],
                  "enabled": true,
                  "timeoutSeconds": 30.5
                },
                "sipHeadersInReferToEnabled": true,
                "fallbackPlan": {
                  "message": "<string>",
                  "endCallEnabled": true
                }
              },
              "description": "<string>"
            }
          }
        ],
        "filters": [
          {
            "type": "oneOf",
            "key": "<string>",
            "oneOf": [
              "<string>"
            ]
          }
        ]
      }
    ],
    "name": "<string>",
    "voicemailMessage": "<string>",
    "endCallMessage": "<string>",
    "endCallPhrases": [
      "<string>"
    ],
    "compliancePlan": {
      "hipaaEnabled": {
        "hipaaEnabled": false
      },
      "pciEnabled": {
        "pciEnabled": false
      }
    },
    "metadata": {},
    "backgroundSpeechDenoisingPlan": {
      "smartDenoisingPlan": {
        "enabled": false
      },
      "fourierDenoisingPlan": {
        "enabled": false,
        "mediaDetectionEnabled": true,
        "staticThreshold": -35,
        "baselineOffsetDb": -15,
        "windowSizeMs": 3000,
        "baselinePercentile": 85
      }
    },
    "analysisPlan": {
      "minMessagesThreshold": 1,
      "summaryPlan": {
        "messages": [
          {}
        ],
        "enabled": true,
        "timeoutSeconds": 30.5
      },
      "structuredDataPlan": {
        "messages": [
          {}
        ],
        "enabled": true,
        "schema": {
          "type": "string",
          "items": {},
          "properties": {},
          "description": "<string>",
          "pattern": "<string>",
          "format": "date-time",
          "required": [
            "<string>"
          ],
          "enum": [
            "<string>"
          ],
          "title": "<string>"
        },
        "timeoutSeconds": 30.5
      },
      "structuredDataMultiPlan": [
        {
          "key": "<string>",
          "plan": {
            "messages": [
              {}
            ],
            "enabled": true,
            "schema": {
              "type": "string",
              "items": {},
              "properties": {},
              "description": "<string>",
              "pattern": "<string>",
              "format": "date-time",
              "required": [
                "<string>"
              ],
              "enum": [
                "<string>"
              ],
              "title": "<string>"
            },
            "timeoutSeconds": 30.5
          }
        }
      ],
      "successEvaluationPlan": {
        "rubric": "NumericScale",
        "messages": [
          {}
        ],
        "enabled": true,
        "timeoutSeconds": 30.5
      }
    },
    "artifactPlan": {
      "recordingEnabled": true,
      "recordingFormat": "wav;l16",
      "videoRecordingEnabled": false,
      "pcapEnabled": true,
      "pcapS3PathPrefix": "/pcaps",
      "transcriptPlan": {
        "enabled": true,
        "assistantName": "<string>",
        "userName": "<string>"
      },
      "recordingPath": "<string>"
    },
    "messagePlan": {
      "idleMessages": [
        "<string>"
      ],
      "idleMessageMaxSpokenCount": 5.5,
      "idleMessageResetCountOnUserSpeechEnabled": true,
      "idleTimeoutSeconds": 32.5,
      "silenceTimeoutMessage": "<string>"
    },
    "startSpeakingPlan": {
      "waitSeconds": 0.4,
      "smartEndpointingEnabled": false,
      "smartEndpointingPlan": {
        "provider": "cozmox"
      },
      "customEndpointingRules": [
        {
          "type": "assistant",
          "regex": "<string>",
          "timeoutSeconds": 7.5,
          "regexOptions": [
            {
              "type": "ignore-case",
              "enabled": true
            }
          ]
        }
      ],
      "transcriptionEndpointingPlan": {
        "onPunctuationSeconds": 0.1,
        "onNoPunctuationSeconds": 1.5,
        "onNumberSeconds": 0.5
      }
    },
    "stopSpeakingPlan": {
      "numWords": 0,
      "voiceSeconds": 0.2,
      "backoffSeconds": 1,
      "acknowledgementPhrases": [
        "i understand",
        "i see",
        "i got it",
        "i hear you",
        "im listening",
        "im with you",
        "right",
        "okay",
        "ok",
        "sure",
        "alright",
        "got it",
        "understood",
        "yeah",
        "yes",
        "uh-huh",
        "mm-hmm",
        "gotcha",
        "mhmm",
        "ah",
        "yeah okay",
        "yeah sure"
      ],
      "interruptionPhrases": [
        "stop",
        "shut",
        "up",
        "enough",
        "quiet",
        "silence",
        "but",
        "dont",
        "not",
        "no",
        "hold",
        "wait",
        "cut",
        "pause",
        "nope",
        "nah",
        "nevermind",
        "never",
        "bad",
        "actually"
      ]
    },
    "monitorPlan": {
      "listenEnabled": false,
      "listenAuthenticationEnabled": false,
      "controlEnabled": false,
      "controlAuthenticationEnabled": false
    },
    "credentialIds": [
      "<string>"
    ],
    "server": {
      "timeoutSeconds": 20,
      "url": "<string>",
      "headers": {},
      "backoffPlan": {
        "type": "fixed",
        "maxRetries": 0,
        "baseDelaySeconds": 1
      }
    },
    "keypadInputPlan": {
      "enabled": true,
      "timeoutSeconds": 5,
      "delimiters": "#"
    }
  }
]

Authorizations

Authorization
string
header
required

Retrieve your API Key from the Dashboard.

Query Parameters

limit
number

This is the maximum number of items to return. Defaults to 100.

Required range: 0 <= x <= 1000
createdAtGt
string<date-time>

This will return items where the createdAt is greater than the specified value.

createdAtLt
string<date-time>

This will return items where the createdAt is less than the specified value.

createdAtGe
string<date-time>

This will return items where the createdAt is greater than or equal to the specified value.

createdAtLe
string<date-time>

This will return items where the createdAt is less than or equal to the specified value.

updatedAtGt
string<date-time>

This will return items where the updatedAt is greater than the specified value.

updatedAtLt
string<date-time>

This will return items where the updatedAt is less than the specified value.

updatedAtGe
string<date-time>

This will return items where the updatedAt is greater than or equal to the specified value.

updatedAtLe
string<date-time>

This will return items where the updatedAt is less than or equal to the specified value.

Response

200 - application/json
id
string
required

This is the unique identifier for the assistant.

orgId
string
required

This is the unique identifier for the org that this assistant belongs to.

createdAt
string<date-time>
required

This is the ISO 8601 date-time string of when the assistant was created.

updatedAt
string<date-time>
required

This is the ISO 8601 date-time string of when the assistant was last updated.

transcriber
AssemblyAITranscriber · object

These are the options for the assistant's transcriber.

  • AssemblyAITranscriber
  • AzureSpeechTranscriber
  • CustomTranscriber
  • DeepgramTranscriber
  • ElevenLabsTranscriber
  • GladiaTranscriber
  • GoogleTranscriber
  • SpeechmaticsTranscriber
  • TalkscriberTranscriber
  • OpenAITranscriber
  • CartesiaTranscriber
model
Anthropic · object

These are the options for the assistant's LLM.

  • Anthropic
  • Anyscale
  • Cerebras
  • CustomLLM
  • DeepInfra
  • DeepSeek
  • Google
  • Groq
  • InflectionAI
  • OpenAI
  • OpenRouter
  • PerplexityAI
  • Together
  • XAI
voice
AzureVoice · object

These are the options for the assistant's voice.

  • AzureVoice
  • CartesiaVoice
  • CustomVoice
  • DeepgramVoice
  • ElevenLabsVoice
  • HumeVoice
  • LMNTVoice
  • NeuphonicVoice
  • OpenAIVoice
  • PlayHTVoice
  • RimeAIVoice
  • SmallestAIVoice
  • TavusVoice
  • cozmoxVoice
  • SesameVoice
  • InworldVoice
firstMessage
string

This is the first message that the assistant will say. This can also be a URL to a containerized audio file (mp3, wav, etc.).

If unspecified, assistant will wait for user to speak and use the model to respond once they speak.

Example:

"Hello! How can I help you today?"

firstMessageInterruptionsEnabled
boolean
default:false
firstMessageMode
enum<string>

This is the mode for the first message. Default is 'assistant-speaks-first'.

Use:

  • 'assistant-speaks-first' to have the assistant speak first.
  • 'assistant-waits-for-user' to have the assistant wait for the user to speak first.
  • 'assistant-speaks-first-with-model-generated-message' to have the assistant speak first with a message generated by the model based on the conversation state. (assistant.model.messages at call start, call.messages at squad transfer points).

@default 'assistant-speaks-first'

Available options:
assistant-speaks-first,
assistant-speaks-first-with-model-generated-message,
assistant-waits-for-user
Example:

"assistant-speaks-first"

voicemailDetection
Google · object

These are the settings to configure or disable voicemail detection. Alternatively, voicemail detection can be configured using the model.tools=[VoicemailTool]. This uses Twilio's built-in detection while the VoicemailTool relies on the model to detect if a voicemail was reached. You can use neither of them, one of them, or both of them. By default, Twilio built-in detection is enabled while VoicemailTool is not.

  • Google
  • OpenAI
  • Twilio
  • cozmox
clientMessages
enum<string>[]

These are the messages that will be sent to your Client SDKs. Default is conversation-update,function-call,hang,model-output,speech-update,status-update,transfer-update,transcript,tool-calls,user-interrupted,voice-input,workflow.node.started. You can check the shape of the messages in ClientMessage schema.

Available options:
conversation-update,
function-call,
function-call-result,
hang,
language-changed,
metadata,
model-output,
speech-update,
status-update,
transcript,
tool-calls,
tool-calls-result,
tool.completed,
transfer-update,
user-interrupted,
voice-input,
workflow.node.started
Example:
[
"conversation-update",
"function-call",
"hang",
"model-output",
"speech-update",
"status-update",
"transfer-update",
"transcript",
"tool-calls",
"user-interrupted",
"voice-input",
"workflow.node.started"
]
serverMessages
enum<string>[]

These are the messages that will be sent to your Server URL. Default is conversation-update,end-of-call-report,function-call,hang,speech-update,status-update,tool-calls,transfer-destination-request,user-interrupted. You can check the shape of the messages in ServerMessage schema.

Available options:
conversation-update,
end-of-call-report,
function-call,
hang,
language-changed,
language-change-detected,
model-output,
phone-call-control,
speech-update,
status-update,
transcript,
transcript[transcriptType="final"],
tool-calls,
transfer-destination-request,
transfer-update,
user-interrupted,
voice-input
Example:
[
"conversation-update",
"end-of-call-report",
"function-call",
"hang",
"speech-update",
"status-update",
"tool-calls",
"transfer-destination-request",
"user-interrupted"
]
silenceTimeoutSeconds
number

How many seconds of silence to wait before ending the call. Defaults to 30.

@default 30

Required range: 10 <= x <= 3600
Example:

30

maxDurationSeconds
number

This is the maximum number of seconds that the call will last. When the call reaches this duration, it will be ended.

@default 600 (10 minutes)

Required range: 10 <= x <= 43200
Example:

600

backgroundSound

This is the background sound in the call. Default for phone calls is 'office' and default for web calls is 'off'. You can also provide a custom sound by providing a URL to an audio file.

Available options:
off,
office
Example:

"office"

backgroundDenoisingEnabled
boolean
deprecated

This enables filtering of noise and background speech while the user is talking.

Default false while in beta.

@default false

Example:

false

modelOutputInMessagesEnabled
boolean

This determines whether the model's output is used in conversation history rather than the transcription of the assistant's speech.

Default false while in beta.

@default false

Example:

false

transportConfigurations
Twilio · object[]

These are the configurations to be passed to the transport providers of assistant's calls, like Twilio. You can store multiple configurations for different transport providers. For a call, only the configuration matching the call transport provider is used.

observabilityPlan
Langfuse · object

This is the plan for observability of assistant's calls.

Currently, only Langfuse is supported.

credentials
(AnthropicCredential · object | AnyscaleCredential · object | AssemblyAICredential · object | AzureCredential · object | AzureOpenAICredential · object | ByoSipTrunkCredential · object | CartesiaCredential · object | CerebrasCredential · object | CloudflareCredential · object | CustomLLMCredential · object | DeepgramCredential · object | DeepInfraCredential · object | DeepSeekCredential · object | ElevenLabsCredential · object | GcpCredential · object | GladiaCredential · object | GhlCredential · object | GoogleCredential · object | GroqCredential · object | HumeCredential · object | InflectionAICredential · object | LangfuseCredential · object | LmntCredential · object | MakeCredential · object | MistralCredential · object | NeuphonicCredential · object | OpenAICredential · object | OpenRouterCredential · object | PerplexityAICredential · object | PlayHTCredential · object | RimeAICredential · object | RunpodCredential · object | S3Credential · object | SmallestAICredential · object | SpeechmaticsCredential · object | SupabaseCredential · object | TavusCredential · object | TogetherAICredential · object | TrieveCredential · object | TwilioCredential · object | VonageCredential · object | WebhookCredential · object | XAiCredential · object | GoogleCalendarOAuth2ClientCredential · object | GoogleCalendarOAuth2AuthorizationCredential · object | GoogleSheetsOAuth2AuthorizationCredential · object | SlackOAuth2AuthorizationCredential · object | GoHighLevelMCPCredential · object)[]

These are dynamic credentials that will be used for the assistant calls. By default, all the credentials are available for use in the call, but you can supplement them with additional credentials using this. Dynamic credentials override existing credentials.

  • AnthropicCredential
  • AnyscaleCredential
  • AssemblyAICredential
  • AzureCredential
  • AzureOpenAICredential
  • ByoSipTrunkCredential
  • CartesiaCredential
  • CerebrasCredential
  • CloudflareCredential
  • CustomLLMCredential
  • DeepgramCredential
  • DeepInfraCredential
  • DeepSeekCredential
  • ElevenLabsCredential
  • GcpCredential
  • GladiaCredential
  • GhlCredential
  • GoogleCredential
  • GroqCredential
  • HumeCredential
  • InflectionAICredential
  • LangfuseCredential
  • LmntCredential
  • MakeCredential
  • MistralCredential
  • NeuphonicCredential
  • OpenAICredential
  • OpenRouterCredential
  • PerplexityAICredential
  • PlayHTCredential
  • RimeAICredential
  • RunpodCredential
  • S3Credential
  • SmallestAICredential
  • SpeechmaticsCredential
  • SupabaseCredential
  • TavusCredential
  • TogetherAICredential
  • TrieveCredential
  • TwilioCredential
  • VonageCredential
  • WebhookCredential
  • XAiCredential
  • GoogleCalendarOAuth2ClientCredential
  • GoogleCalendarOAuth2AuthorizationCredential
  • GoogleSheetsOAuth2AuthorizationCredential
  • SlackOAuth2AuthorizationCredential
  • GoHighLevelMCPCredential
hooks
(AssistantHookCallEnding · object | AssistantHookAssistantSpeechInterrupted · object | AssistantHookCustomerSpeechInterrupted · object)[]

This is a set of actions that will be performed on certain events.

  • AssistantHookCallEnding
  • AssistantHookAssistantSpeechInterrupted
  • AssistantHookCustomerSpeechInterrupted
name
string

This is the name of the assistant.

This is required when you want to transfer between assistants in a call.

Maximum string length: 40
voicemailMessage
string

This is the message that the assistant will say if the call is forwarded to voicemail.

If unspecified, it will hang up.

Maximum string length: 1000
endCallMessage
string

This is the message that the assistant will say if it ends the call.

If unspecified, it will hang up without saying anything.

Maximum string length: 1000
endCallPhrases
string[]

This list contains phrases that, if spoken by the assistant, will trigger the call to be hung up. Case insensitive.

Required string length: 2 - 140
compliancePlan
object
metadata
object

This is for metadata you want to store on the assistant.

backgroundSpeechDenoisingPlan
object

This enables filtering of noise and background speech while the user is talking.

Features:

  • Smart denoising using Krisp
  • Fourier denoising

Smart denoising can be combined with or used independently of Fourier denoising.

Order of precedence:

  • Smart denoising
  • Fourier denoising
analysisPlan
object

This is the plan for analysis of assistant's calls. Stored in call.analysis.

artifactPlan
object

This is the plan for artifacts generated during assistant's calls. Stored in call.artifact.

messagePlan
object

This is the plan for static predefined messages that can be spoken by the assistant during the call, like idleMessages.

Note: firstMessage, voicemailMessage, and endCallMessage are currently at the root level. They will be moved to messagePlan in the future, but will remain backwards compatible.

startSpeakingPlan
object

This is the plan for when the assistant should start talking.

You should configure this if you're running into these issues:

  • The assistant is too slow to start talking after the customer is done speaking.
  • The assistant is too fast to start talking after the customer is done speaking.
  • The assistant is so fast that it's actually interrupting the customer.
stopSpeakingPlan
object

This is the plan for when the assistant should stop talking on customer interruption.

You should configure this if you're running into these issues:

  • The assistant is too slow to recognize customer's interruption.
  • The assistant is too fast to recognize customer's interruption.
  • The assistant is getting interrupted by phrases that are just acknowledgments.
  • The assistant is getting interrupted by background noises.
  • The assistant is not properly stopping -- it starts talking right after getting interrupted.
monitorPlan
object

This is the plan for real-time monitoring of the assistant's calls.

Usage:

  • To enable live listening of the assistant's calls, set monitorPlan.listenEnabled to true.
  • To enable live control of the assistant's calls, set monitorPlan.controlEnabled to true.
credentialIds
string[]

These are the credentials that will be used for the assistant calls. By default, all the credentials are available for use in the call but you can provide a subset using this.

server
object

This is where cozmox will send webhooks. You can find all webhooks available along with their shape in ServerMessage schema.

The order of precedence is:

  1. assistant.server.url
  2. phoneNumber.serverUrl
  3. org.serverUrl
keypadInputPlan
object