# Telnyx AI Inference — Python SDK

Access Telnyx LLM inference APIs, embeddings, and AI analytics for call insights and summaries. This skill provides Python SDK examples.

Add the skill: `npx skill4agent add team-telnyx/skills telnyx-ai-inference-python`
Install the SDK: `pip install telnyx`

## Client setup

import os
from telnyx import Telnyx

client = Telnyx(
    api_key=os.environ.get("TELNYX_API_KEY"),  # This is the default and can be omitted
)

## Error handling (use `client` as configured above)

import telnyx
try:
    result = client.messages.send(to="+13125550001", from_="+13125550002", text="Hello")
except telnyx.APIConnectionError:
    print("Network error — check connectivity and retry")
except telnyx.RateLimitError:
    # 429: rate limited — wait and retry with exponential backoff
    import time
    time.sleep(1)  # Check Retry-After header for actual delay
except telnyx.APIStatusError as e:
    print(f"API error {e.status_code}: {e.message}")
    if e.status_code == 422:
        print("Validation error — check required fields and formats")

Common error status codes: 401, 403, 404, 422, 429.

Paginated list results are iterable: `for item in page_result: ...`

## Transcription — POST /ai/audio/transcriptions

response = client.ai.audio.transcribe(
    model="distil-whisper/distil-large-v2",
)
print(response.text)

Transcription response fields: duration, segments, text.

## Chat completions — POST /ai/chat/completions

Parameters: messages, api_key_ref, best_of, early_stopping, enable_thinking, frequency_penalty, guided_choice, guided_json, guided_regex, length_penalty, logprobs, max_tokens, min_p, model, n, presence_penalty, response_format, stream, temperature, tool_choice, tools, top_logprobs, top_p, use_beam_search.

response = client.ai.chat.create_completion(
    messages=[{
        "role": "system",
        "content": "You are a friendly chatbot.",
    }, {
        "role": "user",
        "content": "Hello, world!",
    }],
)
print(response)

## Conversations — GET /ai/conversations

conversations = client.ai.conversations.list()
print(conversations.data)

Conversation fields: created_at, id, last_message_at, metadata, name.

## POST /ai/conversations

Parameters: metadata, name.

conversation = client.ai.conversations.create()
print(conversation.id)

Conversation fields: created_at, id, last_message_at, metadata, name.

## Insight groups — GET /ai/conversations/insight-groups

page = client.ai.conversations.insight_groups.retrieve_insight_groups()
page = page.data[0]
print(page.id)

Insight group fields: created_at, description, id, insights, name, webhook.

## POST /ai/conversations/insight-groups

Parameters: name, description, webhook.

insight_template_group_detail = client.ai.conversations.insight_groups.insight_groups(
    name="my-resource",
)
print(insight_template_group_detail.data)

## GET /ai/conversations/insight-groups/{group_id}

insight_template_group_detail = client.ai.conversations.insight_groups.retrieve(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
print(insight_template_group_detail.data)

## PUT /ai/conversations/insight-groups/{group_id}

Parameters: description, name, webhook.

insight_template_group_detail = client.ai.conversations.insight_groups.update(
    group_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
print(insight_template_group_detail.data)

## DELETE /ai/conversations/insight-groups/{group_id}

client.ai.conversations.insight_groups.delete(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)

## POST /ai/conversations/insight-groups/{group_id}/insights/{insight_id}/assign

client.ai.conversations.insight_groups.insights.assign(
    insight_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    group_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)

## DELETE /ai/conversations/insight-groups/{group_id}/insights/{insight_id}/unassign

client.ai.conversations.insight_groups.insights.delete_unassign(
    insight_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    group_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)

## Insights — GET /ai/conversations/insights

page = client.ai.conversations.insights.list()
page = page.data[0]
print(page.id)

Insight fields: created_at, id, insight_type, instructions, json_schema, name, webhook.

## POST /ai/conversations/insights

Parameters: instructions, name, json_schema, webhook.

insight_template_detail = client.ai.conversations.insights.create(
    instructions="You are a helpful assistant.",
    name="my-resource",
)
print(insight_template_detail.data)

## GET /ai/conversations/insights/{insight_id}

insight_template_detail = client.ai.conversations.insights.retrieve(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
print(insight_template_detail.data)

## PUT /ai/conversations/insights/{insight_id}

Parameters: instructions, json_schema, name, webhook.

insight_template_detail = client.ai.conversations.insights.update(
    insight_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
print(insight_template_detail.data)

## DELETE /ai/conversations/insights/{insight_id}

client.ai.conversations.insights.delete(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)

## GET /ai/conversations/{conversation_id}

conversation = client.ai.conversations.retrieve(
    "conversation_id",
)
print(conversation.data)

Conversation fields: created_at, id, last_message_at, metadata, name.

## PUT /ai/conversations/{conversation_id}

Parameters: metadata.

conversation = client.ai.conversations.update(
    conversation_id="550e8400-e29b-41d4-a716-446655440000",
)
print(conversation.data)

## DELETE /ai/conversations/{conversation_id}

client.ai.conversations.delete(
    "conversation_id",
)

## GET /ai/conversations/{conversation_id}/conversations-insights

response = client.ai.conversations.retrieve_conversations_insights(
    "conversation_id",
)
print(response.data)

Insight run fields: conversation_insights, created_at, id, status.

## POST /ai/conversations/{conversation_id}/message

Parameters: role, content, metadata, name, sent_at, tool_call_id, tool_calls, tool_choice.

client.ai.conversations.add_message(
    conversation_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    role="user",
)

## GET /ai/conversations/{conversation_id}/messages

messages = client.ai.conversations.messages.list(
    "conversation_id",
)
print(messages.data)

Message fields: created_at, role, sent_at, text, tool_calls. Status values: queued, processing, failed, success, partial_success.

## Embeddings — GET /ai/embeddings

embeddings = client.ai.embeddings.list()
print(embeddings.data)

Embedding task fields: bucket, created_at, finished_at, status, task_id, task_name, user_id.

## POST /ai/embeddings

Parameters: bucket_name, document_chunk_overlap_size, document_chunk_size, embedding_model, loader.

embedding_response = client.ai.embeddings.create(
    bucket_name="my-bucket",
)
print(embedding_response.data)

## GET /ai/embeddings/buckets

buckets = client.ai.embeddings.buckets.list()
print(buckets.data)

Response fields: buckets.

## GET /ai/embeddings/buckets/{bucket_name}

bucket = client.ai.embeddings.buckets.retrieve(
    "bucket_name",
)
print(bucket.data)

Bucket file fields: created_at, error_reason, filename, last_embedded_at, status, updated_at.

## DELETE /ai/embeddings/buckets/{bucket_name}

client.ai.embeddings.buckets.delete(
    "bucket_name",
)

## POST /ai/embeddings/similarity-search

Parameters: bucket_name, query, num_of_docs. Results are ranked by distance.

response = client.ai.embeddings.similarity_search(
    bucket_name="my-bucket",
    query="What is Telnyx?",
)
print(response.data)

Result fields: distance, document_chunk, metadata.

## POST /ai/embeddings/url

Parameters: url, bucket_name.

embedding_response = client.ai.embeddings.url(
    bucket_name="my-bucket",
    url="https://example.com/resource",
)
print(embedding_response.data)

Task status values: queued, processing, success, failure, partial_success.

## GET /ai/embeddings/{task_id}

embedding = client.ai.embeddings.retrieve(
    "task_id",
)
print(embedding.data)

## Fine-tuning — GET /ai/fine_tuning/jobs

jobs = client.ai.fine_tuning.jobs.list()
print(jobs.data)

Fine-tuning job fields: created_at, finished_at, hyperparameters, id, model, organization_id, status, trained_tokens, training_file.

## POST /ai/fine_tuning/jobs

Parameters: model, training_file, hyperparameters, suffix.

fine_tuning_job = client.ai.fine_tuning.jobs.create(
    model="openai/gpt-4o",
    training_file="training-data.jsonl",
)
print(fine_tuning_job.id)

## GET /ai/fine_tuning/jobs/{job_id}

fine_tuning_job = client.ai.fine_tuning.jobs.retrieve(
    "job_id",
)
print(fine_tuning_job.id)

## POST /ai/fine_tuning/jobs/{job_id}/cancel

fine_tuning_job = client.ai.fine_tuning.jobs.cancel(
    "job_id",
)
print(fine_tuning_job.id)

Model identifiers use the form {source}/{model_name}, e.g. openai/gpt-4 or mistralai/Mistral-7B-Instruct-v0.1.

## Models — GET /ai/models

response = client.ai.retrieve_models()
print(response.data)

Model fields: created, id, object, owned_by.

## OpenAI-compatible endpoints (base URL: https://api.telnyx.com/v2/ai/openai)

## POST /ai/openai/embeddings

Parameters: input, model, dimensions, encoding_format, user.

response = client.ai.openai.embeddings.create_embeddings(
    input="The quick brown fox jumps over the lazy dog",
    model="thenlper/gte-large",
)
print(response.data)

Response fields: data, model, object, usage.

## GET /ai/openai/embeddings/models

response = client.ai.openai.embeddings.list_embedding_models()
print(response.data)

## Summarize — POST /ai/summarize

Parameters: bucket, filename, system_prompt.

response = client.ai.summarize(
    bucket="my-bucket",
    filename="data.csv",
)
print(response.data)

Response fields: summary.

## Legacy reporting — GET /legacy/reporting/batch_detail_records/speech_to_text

speech_to_texts = client.legacy.reporting.batch_detail_records.speech_to_text.list()
print(speech_to_texts.data)

Record fields: created_at, download_link, end_date, id, record_type, start_date, status.

## POST /legacy/reporting/batch_detail_records/speech_to_text

Parameters: start_date, end_date.

from datetime import datetime

speech_to_text = client.legacy.reporting.batch_detail_records.speech_to_text.create(
    end_date=datetime.fromisoformat("2020-07-01T00:00:00-06:00"),
    start_date=datetime.fromisoformat("2020-07-01T00:00:00-06:00"),
)
print(speech_to_text.data)

## GET /legacy/reporting/batch_detail_records/speech_to_text/{id}

speech_to_text = client.legacy.reporting.batch_detail_records.speech_to_text.retrieve(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
print(speech_to_text.data)

## DELETE /legacy/reporting/batch_detail_records/speech_to_text/{id}

speech_to_text = client.legacy.reporting.batch_detail_records.speech_to_text.delete(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
print(speech_to_text.data)

## GET /legacy/reporting/usage_reports/speech_to_text

response = client.legacy.reporting.usage_reports.retrieve_speech_to_text()
print(response.data)

Authentication uses the header: Authorization: Bearer <api_key>

## Text-to-speech — POST /text-to-speech/speech

Parameters: aws, azure, disable_cache, elevenlabs, language, minimax, output_type, provider, resemble, rime, telnyx, text, text_type, voice, voice_settings.

response = client.text_to_speech.generate()
print(response.base64_audio)

Response fields: base64_audio, provider.

## GET /text-to-speech/voices

response = client.text_to_speech.list_voices()
print(response.voices)