From a7783e307c6fb04a83612c601e21594535d8d218 Mon Sep 17 00:00:00 2001 From: speakeasybot Date: Sun, 1 Feb 2026 21:22:28 +0000 Subject: [PATCH 1/2] ## Python SDK Changes: * `mistral.beta.conversations.restart_stream()`: `request.agent_version` **Changed** **Breaking** :warning: * `mistral.beta.conversations.start()`: `request.agent_version` **Changed** **Breaking** :warning: * `mistral.beta.conversations.list()`: `response.[].[agent_conversation].agent_version` **Changed** **Breaking** :warning: * `mistral.beta.conversations.get()`: `response.[agent_conversation].agent_version` **Changed** **Breaking** :warning: * `mistral.beta.conversations.restart()`: `request.agent_version` **Changed** **Breaking** :warning: * `mistral.beta.conversations.start_stream()`: `request.agent_version` **Changed** **Breaking** :warning: * `mistral.beta.agents.get()`: `request.agent_version` **Changed** **Breaking** :warning: * `mistral.beta.agents.get_version()`: `request.version` **Changed** **Breaking** :warning: * `mistral.beta.agents.list_version_aliases()`: **Added** * `mistral.models.list()`: `response.data.[].[fine-tuned].capabilities.audio_transcription` **Added** * `mistral.models.retrieve()`: `response.[base].capabilities.audio_transcription` **Added** * `mistral.beta.agents.create_version_alias()`: **Added** * `mistral.files.list()`: `request.mimetypes` **Added** --- .speakeasy/gen.lock | 261 +++++++++--- .speakeasy/gen.yaml | 2 +- .speakeasy/workflow.lock | 12 +- README.md | 4 +- RELEASES.md | 12 +- docs/models/agentaliasresponse.md | 11 + docs/models/agentconversation.md | 22 +- docs/models/agentconversationagentversion.md | 17 + ...tsapiv1agentscreateorupdatealiasrequest.md | 10 + docs/models/agentsapiv1agentsgetrequest.md | 8 +- .../agentsapiv1agentsgetversionrequest.md | 2 +- ...ntsapiv1agentslistversionaliasesrequest.md | 8 + docs/models/agentversion.md | 17 + docs/models/conversationrequest.md | 2 +- docs/models/conversationrestartrequest.md | 2 +- .../conversationrestartrequestagentversion.md | 19 + .../conversationrestartstreamrequest.md | 2 +- ...rsationrestartstreamrequestagentversion.md | 19 + docs/models/conversationstreamrequest.md | 2 +- .../conversationstreamrequestagentversion.md | 17 + docs/models/filesapirouteslistfilesrequest.md | 3 +- docs/models/message.md | 19 + docs/models/mistralpromptmode.md | 4 + docs/models/modelcapabilities.md | 23 +- docs/models/queryparamagentversion.md | 17 + docs/models/realtimetranscriptionerror.md | 9 + .../realtimetranscriptionerrordetail.md | 9 + docs/models/realtimetranscriptionsession.md | 10 + .../realtimetranscriptionsessioncreated.md | 9 + .../realtimetranscriptionsessionupdated.md | 9 + docs/sdks/conversations/README.md | 8 +- docs/sdks/files/README.md | 1 + docs/sdks/mistralagents/README.md | 104 ++++- src/mistralai/_version.py | 4 +- src/mistralai/conversations.py | 56 ++- src/mistralai/files.py | 6 + src/mistralai/mistral_agents.py | 399 +++++++++++++++++- src/mistralai/models/__init__.py | 103 +++++ src/mistralai/models/agentaliasresponse.py | 23 + src/mistralai/models/agentconversation.py | 18 +- ..._api_v1_agents_create_or_update_aliasop.py | 26 ++ .../agents_api_v1_agents_get_versionop.py | 4 +- .../models/agents_api_v1_agents_getop.py | 15 +- ...ts_api_v1_agents_list_version_aliasesop.py | 16 + src/mistralai/models/conversationrequest.py | 10 +- .../models/conversationrestartrequest.py | 22 +- .../conversationrestartstreamrequest.py | 24 +- .../models/conversationstreamrequest.py | 14 +- .../models/files_api_routes_list_filesop.py | 
9 +- src/mistralai/models/mistralpromptmode.py | 4 + src/mistralai/models/modelcapabilities.py | 3 + .../models/realtimetranscriptionerror.py | 27 ++ .../realtimetranscriptionerrordetail.py | 29 ++ .../models/realtimetranscriptionsession.py | 20 + .../realtimetranscriptionsessioncreated.py | 30 ++ .../realtimetranscriptionsessionupdated.py | 30 ++ uv.lock | 2 +- 57 files changed, 1405 insertions(+), 163 deletions(-) create mode 100644 docs/models/agentaliasresponse.md create mode 100644 docs/models/agentconversationagentversion.md create mode 100644 docs/models/agentsapiv1agentscreateorupdatealiasrequest.md create mode 100644 docs/models/agentsapiv1agentslistversionaliasesrequest.md create mode 100644 docs/models/agentversion.md create mode 100644 docs/models/conversationrestartrequestagentversion.md create mode 100644 docs/models/conversationrestartstreamrequestagentversion.md create mode 100644 docs/models/conversationstreamrequestagentversion.md create mode 100644 docs/models/message.md create mode 100644 docs/models/queryparamagentversion.md create mode 100644 docs/models/realtimetranscriptionerror.md create mode 100644 docs/models/realtimetranscriptionerrordetail.md create mode 100644 docs/models/realtimetranscriptionsession.md create mode 100644 docs/models/realtimetranscriptionsessioncreated.md create mode 100644 docs/models/realtimetranscriptionsessionupdated.md create mode 100644 src/mistralai/models/agentaliasresponse.py create mode 100644 src/mistralai/models/agents_api_v1_agents_create_or_update_aliasop.py create mode 100644 src/mistralai/models/agents_api_v1_agents_list_version_aliasesop.py create mode 100644 src/mistralai/models/realtimetranscriptionerror.py create mode 100644 src/mistralai/models/realtimetranscriptionerrordetail.py create mode 100644 src/mistralai/models/realtimetranscriptionsession.py create mode 100644 src/mistralai/models/realtimetranscriptionsessioncreated.py create mode 100644 src/mistralai/models/realtimetranscriptionsessionupdated.py diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index b89ea6b3..9d51b30a 100644 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,19 +1,19 @@ lockVersion: 2.0.0 id: 2d045ec7-2ebb-4f4d-ad25-40953b132161 management: - docChecksum: a61cb56fd9942dd20124e0422444bac3 + docChecksum: cc385dce976ac06e6d062e992f0ee380 docVersion: 1.0.0 speakeasyVersion: 1.685.0 generationVersion: 2.794.1 - releaseVersion: 1.11.1 - configChecksum: 1a6d0af8e5d88c97b7e947763e633c3d + releaseVersion: 1.12.0 + configChecksum: 862d9a8667674972c091f9db84d42ba0 repoURL: https://github.com/mistralai/client-python.git installationURL: https://github.com/mistralai/client-python.git published: true persistentEdits: - generation_id: 031e6fcc-162d-451f-a98c-f65bf3605643 - pristine_commit_hash: 08ac7141d4e4dffd4a3327da51bd2a70d50ff68f - pristine_tree_hash: aeb852eedd1ebeb4411a5c0f286d53884362af3b + generation_id: 8b0735b6-5924-48f1-ade2-47cb374c76bc + pristine_commit_hash: a9971b936f50486e2e4ceef95d0b2c4708633219 + pristine_tree_hash: 51b8a57de0bf62da607fe0023eec1124458ebee9 features: python: additionalDependencies: 1.0.0 @@ -63,10 +63,18 @@ trackedFiles: id: ffdbb4c53c87 last_write_checksum: sha1:ec6c799658040b3c75d6ae0572bb391c6aea3fd4 pristine_git_object: ee054dd349848eff144d7064c319c3c8434bdc6c + docs/models/agentaliasresponse.md: + id: 5ac4721d8947 + last_write_checksum: sha1:15dcc6820e89d2c6bb799e331463419ce29ec167 + pristine_git_object: aa531ec5d1464f95e3938f148c1e88efc30fa6a6 docs/models/agentconversation.md: id: 3590c1a566fa - 
last_write_checksum: sha1:a88c8e10a9de2bc99cabd38ab9fc775a2d33e9ef - pristine_git_object: 92fd673c0710889ae3f1d77f82c32113f39457b7 + last_write_checksum: sha1:264d78815c3999bac377ab3f8c08a264178baf43 + pristine_git_object: a2d617316f1965acfabf7d2fe74334de16213829 + docs/models/agentconversationagentversion.md: + id: 468e0d1614bb + last_write_checksum: sha1:6e60bf4a18d791d694e90c89bdb8cc38e43c324b + pristine_git_object: 668a8dc0f0c51a231a73aed51b2db13de243a038 docs/models/agentconversationobject.md: id: cfd35d9dd4f2 last_write_checksum: sha1:112552d4a241967cf0a7dcb981428e7e0715dc34 @@ -111,22 +119,30 @@ trackedFiles: id: ed24a6d647a0 last_write_checksum: sha1:ff5dfde6cc19f09c83afb5b4f0f103096df6691d pristine_git_object: 70e143b030d3041c7538ecdacb8f5f9f8d1b5c92 + docs/models/agentsapiv1agentscreateorupdatealiasrequest.md: + id: c09ec9946094 + last_write_checksum: sha1:0883217b4bad21f5d4f8162ca72005bf9105a93f + pristine_git_object: 79406434cc6ff3d1485089f35639d6284f66d6cb docs/models/agentsapiv1agentsdeleterequest.md: id: 0faaaa59add9 last_write_checksum: sha1:2a34269e682bb910b83814b4d730ba2ce07f8cb2 pristine_git_object: 2799f41817ab0f7a22b49b4ff895c8308525953c docs/models/agentsapiv1agentsgetrequest.md: id: 01740ae62cff - last_write_checksum: sha1:0ed4bb58c94493e21826b38d33c2498de9150b98 - pristine_git_object: 825e03a02e14d03ce47022df840c118de8cd921f + last_write_checksum: sha1:9c4f6d88f29c39238757547da605ecb7106e76c2 + pristine_git_object: c71d4419afd3b51713e154b8021d4fe2b49d8af5 docs/models/agentsapiv1agentsgetversionrequest.md: id: 88ed22b85cde - last_write_checksum: sha1:c6706d79c9253829cf4285c99d49873fa56596bf - pristine_git_object: 7617d2748c86f537bf125d90e67f41df71c1e5cd + last_write_checksum: sha1:0ef23807c8efa2662144da66745045abdd2cb60a + pristine_git_object: 96a7358943a69e871a2bb7f0f30d6fe2bb8dff3d docs/models/agentsapiv1agentslistrequest.md: id: c2720c209527 last_write_checksum: sha1:cb599d1583ee9374d44695f5ee7efe79dbb8a503 pristine_git_object: 8cba13253d42a180b06eab8c10297ef362fb434d + docs/models/agentsapiv1agentslistversionaliasesrequest.md: + id: 69c8bce2c017 + last_write_checksum: sha1:4083fc80627b2cc04fd271df21393944730ef1ba + pristine_git_object: 3083bf92641404738948cd57306eac978b701551 docs/models/agentsapiv1agentslistversionsrequest.md: id: 0bc44ed8d6bb last_write_checksum: sha1:315790552fc5b2b3a6c4f7be2eb33100133abe18 @@ -223,6 +239,10 @@ trackedFiles: id: a39223b88fc9 last_write_checksum: sha1:925ef5852c2031c9bf2608577e55edbc36708730 pristine_git_object: 1752ee6861d23c6abaa6b748f4ff43e9545505ec + docs/models/agentversion.md: + id: b0aa02d6c085 + last_write_checksum: sha1:f6fcf351de43eed5345f88f5cb6a2bf928a594d9 + pristine_git_object: fd4b6a3ea4ade6c9f62594b377c8e791a50211e7 docs/models/apiendpoint.md: id: be613fd9b947 last_write_checksum: sha1:4d984c11248f7da42c949164e69b53995d5942c4 @@ -589,8 +609,8 @@ trackedFiles: pristine_git_object: db3a441bde0d086bccda4814ddfbf737539681a6 docs/models/conversationrequest.md: id: dd7f4d6807f2 - last_write_checksum: sha1:4ecca434753494ff0af66952655af92293690702 - pristine_git_object: 04378ae34c754f2ed67a34d14923c7b0d1605d4e + last_write_checksum: sha1:33dec32dbf20979ac04763e99a82e90ee474fef4 + pristine_git_object: 2b4ff8ef3398561d9b3e192a51ec22f64880389c docs/models/conversationresponse.md: id: 2eccf42d48af last_write_checksum: sha1:69059d02d5354897d23c9d9654d38a85c7e0afc6 @@ -601,24 +621,36 @@ trackedFiles: pristine_git_object: bea66e5277feca4358dd6447959ca945eff2171a docs/models/conversationrestartrequest.md: id: 558e9daa00bd - 
last_write_checksum: sha1:97c25a370411e1bce144c61272ca8f32066112be - pristine_git_object: f389a1e5c42cf0f73784d5563eaa6d0b29e0d69e + last_write_checksum: sha1:0e33f56f69313b9111b3394ecca693871d48acfa + pristine_git_object: d98653127fd522e35323b310d2342ccc08927962 + docs/models/conversationrestartrequestagentversion.md: + id: e6ea289c6b23 + last_write_checksum: sha1:a5abf95a81b7e080bd3cadf65c2db38ca458573f + pristine_git_object: 019ba301411729ec2c8078404adae998b3b9dacd docs/models/conversationrestartrequesthandoffexecution.md: id: faee86c7832c last_write_checksum: sha1:44728be55e96193e6f433e2f46f8f749f1671097 pristine_git_object: 5790624b82ce47ea99e5c25c825fbc25145bfb8e docs/models/conversationrestartstreamrequest.md: id: 01b92ab1b56d - last_write_checksum: sha1:90f0ab9aba1919cbc2b9cfc8e5ec9d80f8f3910c - pristine_git_object: d7358dc20b2b60cb287b3c4a1c174a7883871a54 + last_write_checksum: sha1:aa3d30800417e04f741324d60529f3190ea9cd16 + pristine_git_object: a5f8cbe73ed1ce28c82d76f0e9f933bda64f733c + docs/models/conversationrestartstreamrequestagentversion.md: + id: 395265f34ff6 + last_write_checksum: sha1:ebf4e89a478ab40e1f8cd3f9a000e179426bda47 + pristine_git_object: 9e0063003f1d8acce61cf4edda91ddbc23a3c69d docs/models/conversationrestartstreamrequesthandoffexecution.md: id: 3e9c4a9ab94d last_write_checksum: sha1:300e197f11ad5efc654b51198b75049890258eef pristine_git_object: 97266b43444f5ed50eeedf574abd99cb201199fd docs/models/conversationstreamrequest.md: id: 833f266c4f96 - last_write_checksum: sha1:b7196c9194bc5167d35d09774a3f26bc7d543790 - pristine_git_object: e403db68e7932f60b1343d9282e2c110414486ce + last_write_checksum: sha1:8d7400dcdb9525c2e45bdaa495df6ca7dcf7f992 + pristine_git_object: 299346f8aaa8ccddcbf7fd083389b74346ef2d4f + docs/models/conversationstreamrequestagentversion.md: + id: e99ccc842929 + last_write_checksum: sha1:0ba5fca217681cdc5e08e0d82db67884bed076a6 + pristine_git_object: 52ee96720abbb3fec822d0792dbde7020f9fb189 docs/models/conversationstreamrequesthandoffexecution.md: id: e6701e5f9f0c last_write_checksum: sha1:ef2ebe8f23f27144e7403f0a522326a7e4f25f50 @@ -745,8 +777,8 @@ trackedFiles: pristine_git_object: dbe3c801003c7bb8616f0c5be2dac2ab1e7e9fb1 docs/models/filesapirouteslistfilesrequest.md: id: 04bdf7c654bd - last_write_checksum: sha1:258317fd5c0738cff883f31e13393ac64f817a6f - pristine_git_object: 3801a96e19f149a665bde4890e26df54d7f07d77 + last_write_checksum: sha1:0a99755150c2ded8e5d59a96527021d29326b980 + pristine_git_object: 57d11722f1dba2640df97c22be2a91317c240608 docs/models/filesapiroutesretrievefilerequest.md: id: 2783bfd9c4b9 last_write_checksum: sha1:a1249ef0aedb3056e613078488832c96b91f8cab @@ -1151,6 +1183,10 @@ trackedFiles: id: b071d5a509cc last_write_checksum: sha1:09a04749333ab50ae806c3ac6adcaa90d54df0f1 pristine_git_object: d6094ac2c6e0326c039dad2f6b89158694ef6aa7 + docs/models/message.md: + id: a9614076792b + last_write_checksum: sha1:9199637b21212e630336d0d513c6b799732dee54 + pristine_git_object: 752f04a8b5ec3bedb0b5c3e4fbf3e5c3fccc07cd docs/models/messageentries.md: id: 9af3a27b862b last_write_checksum: sha1:a3eb6e37b780644313738f84e6c5ac653b4686bc @@ -1225,12 +1261,12 @@ trackedFiles: pristine_git_object: 3c552bac2fa3a5a3783db994d47d255a94643110 docs/models/mistralpromptmode.md: id: d17d5db4d3b6 - last_write_checksum: sha1:5ccd31d3804f70b6abb0e5a00bda57b9102225e3 - pristine_git_object: 7416e2037c507d19ac02aed914da1208a2fed0a1 + last_write_checksum: sha1:abcb7205c5086169c7d9449d15ac142448a7d258 + pristine_git_object: 
c3409d03b9646e21a3793372d06dcae6fef95463 docs/models/modelcapabilities.md: id: 283fbc5fa32f - last_write_checksum: sha1:69312b751771ae8ffa0d1452e3c6c545fdbf52b7 - pristine_git_object: 646c8e94fd208cbf01df19ad6c9707ad235bc59b + last_write_checksum: sha1:8a221e2334193907f84cf241ebaf6b86512bbd8b + pristine_git_object: c7dd2710011451c2db15f53ebc659770e786c4ca docs/models/modelconversation.md: id: 497521ee9bd6 last_write_checksum: sha1:bd11f51f1b6fedbf8a1e1973889d1961086c164f @@ -1319,10 +1355,34 @@ trackedFiles: id: 83c8c59c1802 last_write_checksum: sha1:046375bb3035cc033d4484099cd7f5a4f53ce88c pristine_git_object: 7b67583f4209778ac6f945631c0ee03ba1f4c663 + docs/models/queryparamagentversion.md: + id: 49d942f63049 + last_write_checksum: sha1:42557c6bf0afc1eabde48c4b6122f801608d8f05 + pristine_git_object: 3eb5ef1840299139bf969379cbfc3ed49127f176 docs/models/queryparamstatus.md: id: 15628120923d last_write_checksum: sha1:36f1c9b6a6af6f27fbf0190417abf95b4a0bc1b9 pristine_git_object: dcd2090861b16f72b0fb321714b4143bc14b7566 + docs/models/realtimetranscriptionerror.md: + id: 4bc5e819565b + last_write_checksum: sha1:c93e4b19a0aa68723ea69973a9f22a581c7b2ff6 + pristine_git_object: e01f2126b3084eade47a26ea092556f7f61142c9 + docs/models/realtimetranscriptionerrordetail.md: + id: ea137b1051f1 + last_write_checksum: sha1:43ae02b32b473d8ba1aaa3b336a40f706d6338d0 + pristine_git_object: 96420ada2ac94fca24a36ddacae9c876e14ccb7a + docs/models/realtimetranscriptionsession.md: + id: aeb0a0f87d6f + last_write_checksum: sha1:c3aa4050d9cc1b73df8496760f1c723d16183f3a + pristine_git_object: 94a0a89e8ca03866f8b09202a28c4e0f7c3af2e6 + docs/models/realtimetranscriptionsessioncreated.md: + id: aa2ae26192d6 + last_write_checksum: sha1:d13fec916d05300c86b52e951e81b1ceee230634 + pristine_git_object: 34e603fd0a1cbc8007eef06decb158213faebeed + docs/models/realtimetranscriptionsessionupdated.md: + id: 56ce3ae7e208 + last_write_checksum: sha1:833db566b2c8a6839b43cb4e760f2af53a2d7f57 + pristine_git_object: 7e2719957aae390ee18b699e61fbc7581242942f docs/models/referencechunk.md: id: 07895f9debfd last_write_checksum: sha1:97d01dd2b907e87b58bebd9c950e1bef29747c89 @@ -1737,8 +1797,8 @@ trackedFiles: pristine_git_object: e76efb79d8b1353208b42619f4cc5b688ef5d561 docs/sdks/conversations/README.md: id: e22a9d2c5424 - last_write_checksum: sha1:b4e49eadaf5a3bb50f5c3a88a759bc529db2584f - pristine_git_object: c488848cc4c18a098deae8f02c0d4a86d1d898db + last_write_checksum: sha1:06b7381c76c258e2a2dca3764456105929d98315 + pristine_git_object: ca383176a8b349cbaa757690b3f7a2cefe22cb1a docs/sdks/documents/README.md: id: 9758e88a0a9d last_write_checksum: sha1:84791e86c3b9c15f8fd16d2a3df6bd3685023a69 @@ -1749,8 +1809,8 @@ trackedFiles: pristine_git_object: 4390b7bd999a75a608f324f685b2284a8fa277ec docs/sdks/files/README.md: id: e576d7a117f0 - last_write_checksum: sha1:88cd213e513854b8beee72b8ea751f74bf32a845 - pristine_git_object: f0dfd59364c06e84d9cce517594a2912e2b724c8 + last_write_checksum: sha1:99d15a4acce49d5eca853b5a08fd81e76581dc52 + pristine_git_object: 57b53fc75208f4f6361636690b91564148448633 docs/sdks/fim/README.md: id: 499b227bf6ca last_write_checksum: sha1:824f7d1b58ff0b650367737c0e9b91a9d2d14a45 @@ -1765,8 +1825,8 @@ trackedFiles: pristine_git_object: e672c190ad6ac4623f99357d7e59d52f6722518f docs/sdks/mistralagents/README.md: id: 20b3478ad16d - last_write_checksum: sha1:73c444aaf6e547439dafb8d099142fd0059fdf4f - pristine_git_object: 8021fa07d58f71765097d1b3cea7ac4a2d6224a1 + last_write_checksum: 
sha1:c4e73cd96136392d01b0ce2a57bf0854d05688c0 + pristine_git_object: bdd8d588d88f4929c3b33bcecd72bbb5fce7402d docs/sdks/mistraljobs/README.md: id: 71aafa44d228 last_write_checksum: sha1:255a4221b3b61ef247b39c9723a78408cda486d3 @@ -1809,8 +1869,8 @@ trackedFiles: pristine_git_object: 6d0f3e1166cb0271f89f5ba83441c88199d7a432 src/mistralai/_version.py: id: 37b53ba66d7f - last_write_checksum: sha1:c4d3183c7342cd3d37f1a2fb2a707b2cb76cafec - pristine_git_object: aae7598df33f9fc79d17c1cb19baf2b61539e9db + last_write_checksum: sha1:a4d76992b028e2d138e2f7f6d3087c2a606a21c7 + pristine_git_object: 6ee91593a9fbcd6c53eae810c1c2d0120f56262e src/mistralai/accesses.py: id: 98cb4addd052 last_write_checksum: sha1:5d9d495274d67b1343ba99d755c1c01c64c2ead1 @@ -1845,8 +1905,8 @@ trackedFiles: pristine_git_object: 7c32506ec03cc0fd88b786ff49d7690fd4283d2a src/mistralai/conversations.py: id: be58e57a6198 - last_write_checksum: sha1:76169b9954e645c9d7260b4d9e08be87de7ec643 - pristine_git_object: 93ed8c281a2f44e19f833309ec67b5f35cab1b53 + last_write_checksum: sha1:b9287bbe777a042b8258494cd5162d32e6a89c20 + pristine_git_object: 194cb4c0a629654b31bbcce8391baf48601d0eb7 src/mistralai/documents.py: id: 1945602083a8 last_write_checksum: sha1:14d1e6b5a95869d70a6fc89b07d5365c98aff5d7 @@ -1857,8 +1917,8 @@ trackedFiles: pristine_git_object: 7430f8042df4fec517288d0ddb0eb174e7e43a8e src/mistralai/files.py: id: 0e29db0e2269 - last_write_checksum: sha1:e4f833d390f1b3b682f073a76ffb6e29f89c55d1 - pristine_git_object: ab2c75a2f6774a99fe67ac5d3b0fa6544d093181 + last_write_checksum: sha1:d79d5b1785f441a46673a7efa108ddb98c44376a + pristine_git_object: 90ada0ff707521d59d329bebac74005eb68488d8 src/mistralai/fim.py: id: 71a865142baf last_write_checksum: sha1:7accf79c11a17fefbacde7f2b0f966f3716233df @@ -1881,24 +1941,28 @@ trackedFiles: pristine_git_object: 32648937feb79adf6155423cbe9bac4d7fe52224 src/mistralai/mistral_agents.py: id: 671c4985aaa1 - last_write_checksum: sha1:01d02e6ea96903bf0b9893d24115a154e078096d - pristine_git_object: e4abf6e4cba4cfedbe1d6bd93292318f641d49d0 + last_write_checksum: sha1:1fe4fb4f2828b532ac3ddf3b72e748a53d5099e9 + pristine_git_object: 7fb0ce259cb1c1a3847c567bdc992c176489add6 src/mistralai/mistral_jobs.py: id: 18065a449da0 last_write_checksum: sha1:fb205d962444f6aba163ecd3169c12489b3f0cc9 pristine_git_object: d1aeec8a014b22e44f4fe5e751206c3648e875af src/mistralai/models/__init__.py: id: 3228134f03e5 - last_write_checksum: sha1:0e6ec6d05cfd56d49d761a68e4f42f550695aa81 - pristine_git_object: c35b3d24abc3863d88e40b8d9e8bd2c1a35a4541 + last_write_checksum: sha1:cb1fb02e33b85bf82db7d6fd15b2cc3b109c5060 + pristine_git_object: 23e652220f29a882748661a8c0d21aa2830471bf src/mistralai/models/agent.py: id: ca4162a131b1 last_write_checksum: sha1:fe8a7c8c9c4ba59613d7d89f0c2e7a6958e25f85 pristine_git_object: eb30905b3de2b69ece35bdd40f390b2fa6ffc5a8 + src/mistralai/models/agentaliasresponse.py: + id: d329dd68429e + last_write_checksum: sha1:a3ebf39f159f7cd63dbabd9ff2c79df97e43e41f + pristine_git_object: c0928da9c65c588c515f3f1668ccfb69d3a23861 src/mistralai/models/agentconversation.py: id: bd3035451c40 - last_write_checksum: sha1:2e4a6a5ae0da2e9ccbb588c8487b48077d561d93 - pristine_git_object: 625fb4fc6697860060dfdeb449986d89efc232d6 + last_write_checksum: sha1:724a256f4914116500fd962df4b3cfc79ea75c43 + pristine_git_object: 6007b5715fd4a463d25a244b716effafbeecace6 src/mistralai/models/agentcreationrequest.py: id: 87f33bd9ea58 last_write_checksum: sha1:a6885376d36a5a17273d8d8d8d45e3d6c3ee1b9f @@ -1915,18 +1979,26 @@ 
trackedFiles: id: ce8e306fa522 last_write_checksum: sha1:2b5bac2f628c0e7cdd6df73404f69f5d405e576c pristine_git_object: 11bfa918903f8de96f98f722eaaf9a70b4fca8c1 + src/mistralai/models/agents_api_v1_agents_create_or_update_aliasop.py: + id: dd0e03fda847 + last_write_checksum: sha1:a0dd39bb4b0af3a15b1aa8427a6f07d1826c04dc + pristine_git_object: 6cf9d0e0644ce0afd5f673f18fdda9dcccb5f04c src/mistralai/models/agents_api_v1_agents_deleteop.py: id: 588791d168a1 last_write_checksum: sha1:2dae37c3b9778d688663550b9803d52111577f3e pristine_git_object: 38e04953cc320f503a2f6e77096985da60896f2a src/mistralai/models/agents_api_v1_agents_get_versionop.py: id: bdb81ef0e35a - last_write_checksum: sha1:dab21f6fae05e2794208baf3b4e43feeeaf9b3bd - pristine_git_object: 4463d3b25aedad4f3b96a9fb7174a598c843939f + last_write_checksum: sha1:372da3794afd45d442d56edd3ec3cc4907f88223 + pristine_git_object: fddb10dde6707b6641b035e372270991d349f4f3 src/mistralai/models/agents_api_v1_agents_getop.py: id: 2358eceee519 - last_write_checksum: sha1:362d0c781b2c79d829f6e4901e558aaca937b105 - pristine_git_object: dced6dbb49c31fe2981cbd3865c0d580082a1ade + last_write_checksum: sha1:dca59474f75a6636ecac8265cab1bb51d36df56a + pristine_git_object: 2b7d89a5b34f3e768a18f9edbdf712fbcf5c20e4 + src/mistralai/models/agents_api_v1_agents_list_version_aliasesop.py: + id: 51215b825530 + last_write_checksum: sha1:d24f8eff3bd19414c0a04e474b33e1c63861a1da + pristine_git_object: 650a7187a3ac419069440fe040a166a036835b37 src/mistralai/models/agents_api_v1_agents_list_versionsop.py: id: 5f680df288a9 last_write_checksum: sha1:a236170f366d9701346b57f9ee4c788a9a2293e5 @@ -2197,24 +2269,24 @@ trackedFiles: pristine_git_object: 32ca9c20cb37ff65f7e9b126650a78a4b97e4b56 src/mistralai/models/conversationrequest.py: id: ceffcc288c2d - last_write_checksum: sha1:32e7b41c01d2d7accccb1f79248b9e1c56c816f3 - pristine_git_object: 09d934ed3db66ecbd5ab8e3406c3ffb8a1c3c606 + last_write_checksum: sha1:c4c62ef9cdf9bb08463bcb12919abd98ceb8d344 + pristine_git_object: 80581cc10a8e7555546e38c8b7068a2744eb552b src/mistralai/models/conversationresponse.py: id: 016ec02abd32 last_write_checksum: sha1:37c3f143b83939b369fe8637932974d163da3c37 pristine_git_object: ff318e35ee63e43c64e504301236327374442a16 src/mistralai/models/conversationrestartrequest.py: id: 2a8207f159f5 - last_write_checksum: sha1:8f53b5faba0b19d8fdf22388c72eb2580ee121f6 - pristine_git_object: a9c8410c7b1010780bf1d98b1580453aeef07509 + last_write_checksum: sha1:93cd4370afe6a06b375e0e54ca09225e02fc42d3 + pristine_git_object: 6f21d01267481b8b47d4d37609ac131c34c10a9b src/mistralai/models/conversationrestartstreamrequest.py: id: d98d3e0c8eed - last_write_checksum: sha1:cba039d9276869be283d83218659f4bf7537b958 - pristine_git_object: 0703bb5fe6566ff15677e5f604537ab9ae2b79bd + last_write_checksum: sha1:90f295ce27ba55d58899e06a29af223a464f5a4c + pristine_git_object: 2cec7958ab31378d480f0f93a5ed75ac8c624442 src/mistralai/models/conversationstreamrequest.py: id: f7051f125d44 - last_write_checksum: sha1:7ce5ab24500754f4c4f36fd07934fe992d7bbb2e - pristine_git_object: 6ff56e1786e7342284bac0fb4b669806cee55c0f + last_write_checksum: sha1:12bc85a14f110f5c8a3149540668bea178995fae + pristine_git_object: 1a481b77f706db7101521756c7c3476eaa1918c5 src/mistralai/models/conversationusageinfo.py: id: 922894aa994b last_write_checksum: sha1:0e0039421d7291ecbbf820ea843031c50371dd9e @@ -2309,8 +2381,8 @@ trackedFiles: pristine_git_object: 708d40ab993f93227b9795c745383ab954c1c89c src/mistralai/models/files_api_routes_list_filesop.py: id: 
865dd74c577c - last_write_checksum: sha1:df0af95515546660ec9ff343c17f0b2dfe8b0375 - pristine_git_object: 9b9422b405ba967d7f6ed84196fe8e1dc9c5d95f + last_write_checksum: sha1:d75afa1ee7e34cbcfb8da78e3b5c9384b684b89b + pristine_git_object: 84d61b9b4d7032a60e3055b683a396e53b625274 src/mistralai/models/files_api_routes_retrieve_fileop.py: id: d821f72ee198 last_write_checksum: sha1:d0d07123fd941bb99a00a36e87bc7ab4c21506a6 @@ -2613,12 +2685,12 @@ trackedFiles: pristine_git_object: 28cfd22dc3d567aa4ae55cc19ad89341fa9c96a1 src/mistralai/models/mistralpromptmode.py: id: b2580604c1fe - last_write_checksum: sha1:1ac4d9fb8fbf0b21958be5483a569da7f1f49ff0 - pristine_git_object: ee82fb6d056e2d9699628698750e68b4ab6ef851 + last_write_checksum: sha1:71cf04622681998b091f51e4157463109761333f + pristine_git_object: dfb6f2d2a76fd2749d91397752a38b333bae8b02 src/mistralai/models/modelcapabilities.py: id: a9589b97b15c - last_write_checksum: sha1:d7a7d530750418a54a5fc1698d855df7a519a45c - pristine_git_object: 4b5d5da7da9573f998e977e8a14a9b8f8cbf4f55 + last_write_checksum: sha1:56ea040fb631f0825e9ce2c7b32de2c90f6923a1 + pristine_git_object: 6edf8e5bf238b91a245db3489f09ae24506103f3 src/mistralai/models/modelconversation.py: id: 7d8b7b8d62a8 last_write_checksum: sha1:b76cc407f807c19c1ff5602f7dd1d0421db2486d @@ -2683,6 +2755,26 @@ trackedFiles: id: 54d1c125ef83 last_write_checksum: sha1:475749250ada2566c5a5d769eda1d350ddd8be8f pristine_git_object: e67bfa865dcf94656a67f8612a5420f8b43cc0ec + src/mistralai/models/realtimetranscriptionerror.py: + id: f869fd6faf74 + last_write_checksum: sha1:17f78beea9e1821eed90c8a2412aadf953e17774 + pristine_git_object: 0785f7001aeaba7904120a62d569a35b7ee88a80 + src/mistralai/models/realtimetranscriptionerrordetail.py: + id: d106a319e66b + last_write_checksum: sha1:16e0fea1a3be85dfea6f2c44a53a15a3dc322b4c + pristine_git_object: cb5d73f861ce053a17b66695d2b56bafe1eeb03e + src/mistralai/models/realtimetranscriptionsession.py: + id: 48c7076e6ede + last_write_checksum: sha1:ae722fc946adf7282fd79c3a2c80fb53acc70ef2 + pristine_git_object: bcd0cfe37600b80e59cd50bd0edac3444be34fdb + src/mistralai/models/realtimetranscriptionsessioncreated.py: + id: 24825bcd61b2 + last_write_checksum: sha1:81f840757637e678c4512765ba8fda060f5af8cb + pristine_git_object: 9a2c2860d1538f03e795c62754244131820e2d44 + src/mistralai/models/realtimetranscriptionsessionupdated.py: + id: 5575fb5d1980 + last_write_checksum: sha1:a2d8d5947ba6b46dcd9a0a1e377067dbb92bfdf1 + pristine_git_object: ad1b513364f5d8d2f92fbc012509bf7567fa4573 src/mistralai/models/referencechunk.py: id: 6cdbb4e60749 last_write_checksum: sha1:48a4dddda06aadd16f6ea34c58848430bd561432 @@ -3022,7 +3114,7 @@ examples: model_id: "ft:open-mistral-7b:587a6b29:20240514:7e773925" responses: "200": - application/json: {"id": "", "object": "model", "owned_by": "mistralai", "capabilities": {"completion_chat": true, "function_calling": true, "completion_fim": false, "fine_tuning": false, "vision": false, "ocr": false, "classification": false, "moderation": false, "audio": false}, "max_context_length": 32768, "type": "fine-tuned", "job": "Product Markets Facilitator", "root": "", "archived": false} + application/json: {"id": "", "object": "model", "owned_by": "mistralai", "capabilities": {"completion_chat": true, "function_calling": true, "completion_fim": false, "fine_tuning": false, "vision": false, "ocr": false, "classification": false, "moderation": false, "audio": false, "audio_transcription": false}, "max_context_length": 32768, "type": "fine-tuned", "job": 
"Product Markets Facilitator", "root": "", "archived": false} "422": application/json: {} userExample: @@ -3031,7 +3123,7 @@ examples: model_id: "ft:open-mistral-7b:587a6b29:20240514:7e773925" responses: "200": - application/json: {"id": "", "object": "model", "owned_by": "mistralai", "capabilities": {"completion_chat": false, "function_calling": false, "completion_fim": false, "fine_tuning": false, "vision": false, "ocr": false, "classification": false, "moderation": false, "audio": false}, "max_context_length": 32768, "type": "fine-tuned", "job": "Product Markets Facilitator", "root": "", "archived": false} + application/json: {"id": "", "object": "model", "owned_by": "mistralai", "capabilities": {"completion_chat": false, "function_calling": false, "completion_fim": false, "fine_tuning": false, "vision": false, "ocr": false, "classification": false, "moderation": false, "audio": false, "audio_transcription": false}, "max_context_length": 32768, "type": "fine-tuned", "job": "Product Markets Facilitator", "root": "", "archived": false} delete_model_v1_models__model_id__delete: speakeasy-default-delete-model-v1-models-model-id-delete: parameters: @@ -3774,19 +3866,52 @@ examples: parameters: path: agent_id: "" - version: 788393 + version: "788393" responses: "200": application/json: {"completion_args": {"response_format": {"type": "text"}}, "model": "Model 3", "name": "", "object": "agent", "id": "", "version": 377706, "versions": [658369, 642981], "created_at": "2024-10-02T23:01:15.980Z", "updated_at": "2026-12-22T00:55:26.568Z", "deployment_chat": false, "source": ""} "422": application/json: {} + agents_api_v1_agents_create_or_update_alias: + speakeasy-default-agents-api-v1-agents-create-or-update-alias: + parameters: + path: + agent_id: "" + query: + alias: "" + version: 595141 + responses: + "200": + application/json: {"alias": "", "version": 768764, "created_at": "2026-12-28T00:40:21.715Z", "updated_at": "2025-09-01T12:54:58.254Z"} + "422": + application/json: {} + agents_api_v1_agents_list_version_aliases: + speakeasy-default-agents-api-v1-agents-list-version-aliases: + parameters: + path: + agent_id: "" + responses: + "200": + application/json: [{"alias": "", "version": 318290, "created_at": "2025-10-02T20:25:32.322Z", "updated_at": "2026-11-19T02:58:37.894Z"}] + "422": + application/json: {} examplesVersion: 1.0.2 generatedTests: {} releaseNotes: | ## Python SDK Changes: - * `mistral.beta.agents.list_versions()`: **Added** - * `mistral.beta.agents.get_version()`: **Added** - * `mistral.ocr.process()`: `request.document_annotation_prompt` **Added** + * `mistral.beta.conversations.restart_stream()`: `request.agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.conversations.start()`: `request.agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.conversations.list()`: `response.[].[agent_conversation].agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.conversations.get()`: `response.[agent_conversation].agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.conversations.restart()`: `request.agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.conversations.start_stream()`: `request.agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.agents.get()`: `request.agent_version` **Changed** **Breaking** :warning: + * `mistral.beta.agents.get_version()`: `request.version` **Changed** **Breaking** :warning: + * `mistral.beta.agents.list_version_aliases()`: **Added** + * `mistral.models.list()`: 
`response.data.[].[fine-tuned].capabilities.audio_transcription` **Added** + * `mistral.models.retrieve()`: `response.[base].capabilities.audio_transcription` **Added** + * `mistral.beta.agents.create_version_alias()`: **Added** + * `mistral.files.list()`: `request.mimetypes` **Added** generatedFiles: - .gitattributes - .vscode/settings.json diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml index 4f9a9747..0cc6f059 100644 --- a/.speakeasy/gen.yaml +++ b/.speakeasy/gen.yaml @@ -26,7 +26,7 @@ generation: generateNewTests: false skipResponseBodyAssertions: false python: - version: 1.11.1 + version: 1.12.0 additionalDependencies: dev: pytest: ^8.2.2 diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 89c966c7..3bb067a0 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -14,11 +14,11 @@ sources: - latest mistral-openapi: sourceNamespace: mistral-openapi - sourceRevisionDigest: sha256:fd94dc1f574f3bb88a839543675b2c3b3aa895519ec2797efb143ead830ae982 - sourceBlobDigest: sha256:2dd0ee9d000907ffe699cdd48a18163b88297d0ce63f2cdc05efa35cee136bc0 + sourceRevisionDigest: sha256:56bcbb02148ddfabd64cb2dced1a9efe0f00d0fa106435fdd0fb2a889c1c6fed + sourceBlobDigest: sha256:c014d9220f14e04b573acf291c173954b8d34d03d852877a91756afb68ccc65b tags: - latest - - speakeasy-sdk-regen-1768506286 + - speakeasy-sdk-regen-1769979831 targets: mistralai-azure-sdk: source: mistral-azure-source @@ -37,10 +37,10 @@ targets: mistralai-sdk: source: mistral-openapi sourceNamespace: mistral-openapi - sourceRevisionDigest: sha256:fd94dc1f574f3bb88a839543675b2c3b3aa895519ec2797efb143ead830ae982 - sourceBlobDigest: sha256:2dd0ee9d000907ffe699cdd48a18163b88297d0ce63f2cdc05efa35cee136bc0 + sourceRevisionDigest: sha256:56bcbb02148ddfabd64cb2dced1a9efe0f00d0fa106435fdd0fb2a889c1c6fed + sourceBlobDigest: sha256:c014d9220f14e04b573acf291c173954b8d34d03d852877a91756afb68ccc65b codeSamplesNamespace: mistral-openapi-code-samples - codeSamplesRevisionDigest: sha256:881a74af90c5678411207a0a9b0e370496d44b18174e96ba7c6812d400788637 + codeSamplesRevisionDigest: sha256:feb7bf2f6fab8456316453c7e14eda6201fe8649fe0ffcdb1eaa4580cc66a51e workflow: workflowVersion: 1.0.0 speakeasyVersion: 1.685.0 diff --git a/README.md b/README.md index 2569d112..131ce557 100644 --- a/README.md +++ b/README.md @@ -475,6 +475,8 @@ The documentation for the GCP SDK is available [here](packages/mistralai_gcp/REA * [update_version](docs/sdks/mistralagents/README.md#update_version) - Update an agent version. * [list_versions](docs/sdks/mistralagents/README.md#list_versions) - List all versions of an agent. * [get_version](docs/sdks/mistralagents/README.md#get_version) - Retrieve a specific version of an agent. +* [create_version_alias](docs/sdks/mistralagents/README.md#create_version_alias) - Create or update an agent version alias. +* [list_version_aliases](docs/sdks/mistralagents/README.md#list_version_aliases) - List all aliases for an agent. ### [Beta.Conversations](docs/sdks/conversations/README.md) @@ -752,7 +754,7 @@ with Mistral( **Inherit from [`MistralError`](./src/mistralai/models/mistralerror.py)**: -* [`HTTPValidationError`](./src/mistralai/models/httpvalidationerror.py): Validation Error. Status code `422`. Applicable to 50 of 72 methods.* +* [`HTTPValidationError`](./src/mistralai/models/httpvalidationerror.py): Validation Error. Status code `422`. 
Applicable to 52 of 74 methods.* * [`ResponseValidationError`](./src/mistralai/models/responsevalidationerror.py): Type mismatch between the response data and the expected Pydantic model. Provides access to the Pydantic validation error via the `cause` attribute. diff --git a/RELEASES.md b/RELEASES.md index 984e9145..90f534ef 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -358,4 +358,14 @@ Based on: ### Generated - [python v1.11.1] . ### Releases -- [PyPI v1.11.1] https://pypi.org/project/mistralai/1.11.1 - . \ No newline at end of file +- [PyPI v1.11.1] https://pypi.org/project/mistralai/1.11.1 - . + +## 2026-02-01 21:20:42 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.685.0 (2.794.1) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v1.12.0] . +### Releases +- [PyPI v1.12.0] https://pypi.org/project/mistralai/1.12.0 - . \ No newline at end of file diff --git a/docs/models/agentaliasresponse.md b/docs/models/agentaliasresponse.md new file mode 100644 index 00000000..aa531ec5 --- /dev/null +++ b/docs/models/agentaliasresponse.md @@ -0,0 +1,11 @@ +# AgentAliasResponse + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `alias` | *str* | :heavy_check_mark: | N/A | +| `version` | *int* | :heavy_check_mark: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `updated_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/agentconversation.md b/docs/models/agentconversation.md index 92fd673c..a2d61731 100644 --- a/docs/models/agentconversation.md +++ b/docs/models/agentconversation.md @@ -3,14 +3,14 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | Name given to the conversation. | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | Description of the what the conversation is about. | -| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. 
| -| `object` | [Optional[models.AgentConversationObject]](../models/agentconversationobject.md) | :heavy_minus_sign: | N/A | -| `id` | *str* | :heavy_check_mark: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `updated_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `agent_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | Name given to the conversation. | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | Description of the what the conversation is about. | +| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. | +| `object` | [Optional[models.AgentConversationObject]](../models/agentconversationobject.md) | :heavy_minus_sign: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `updated_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `agent_version` | [OptionalNullable[models.AgentConversationAgentVersion]](../models/agentconversationagentversion.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/agentconversationagentversion.md b/docs/models/agentconversationagentversion.md new file mode 100644 index 00000000..668a8dc0 --- /dev/null +++ b/docs/models/agentconversationagentversion.md @@ -0,0 +1,17 @@ +# AgentConversationAgentVersion + + +## Supported Types + +### `str` + +```python +value: str = /* values here */ +``` + +### `int` + +```python +value: int = /* values here */ +``` + diff --git a/docs/models/agentsapiv1agentscreateorupdatealiasrequest.md b/docs/models/agentsapiv1agentscreateorupdatealiasrequest.md new file mode 100644 index 00000000..79406434 --- /dev/null +++ b/docs/models/agentsapiv1agentscreateorupdatealiasrequest.md @@ -0,0 +1,10 @@ +# AgentsAPIV1AgentsCreateOrUpdateAliasRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `alias` | *str* | :heavy_check_mark: | N/A | +| `version` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/agentsapiv1agentsgetrequest.md b/docs/models/agentsapiv1agentsgetrequest.md index 825e03a0..c71d4419 100644 --- a/docs/models/agentsapiv1agentsgetrequest.md +++ b/docs/models/agentsapiv1agentsgetrequest.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `agent_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | \ No 
newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `agent_version` | [OptionalNullable[models.QueryParamAgentVersion]](../models/queryparamagentversion.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/agentsapiv1agentsgetversionrequest.md b/docs/models/agentsapiv1agentsgetversionrequest.md index 7617d274..96a73589 100644 --- a/docs/models/agentsapiv1agentsgetversionrequest.md +++ b/docs/models/agentsapiv1agentsgetversionrequest.md @@ -6,4 +6,4 @@ | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | | `agent_id` | *str* | :heavy_check_mark: | N/A | -| `version` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file +| `version` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/agentsapiv1agentslistversionaliasesrequest.md b/docs/models/agentsapiv1agentslistversionaliasesrequest.md new file mode 100644 index 00000000..3083bf92 --- /dev/null +++ b/docs/models/agentsapiv1agentslistversionaliasesrequest.md @@ -0,0 +1,8 @@ +# AgentsAPIV1AgentsListVersionAliasesRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `agent_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/agentversion.md b/docs/models/agentversion.md new file mode 100644 index 00000000..fd4b6a3e --- /dev/null +++ b/docs/models/agentversion.md @@ -0,0 +1,17 @@ +# AgentVersion + + +## Supported Types + +### `str` + +```python +value: str = /* values here */ +``` + +### `int` + +```python +value: int = /* values here */ +``` + diff --git a/docs/models/conversationrequest.md b/docs/models/conversationrequest.md index 04378ae3..2b4ff8ef 100644 --- a/docs/models/conversationrequest.md +++ b/docs/models/conversationrequest.md @@ -16,5 +16,5 @@ | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `agent_version` | [OptionalNullable[models.AgentVersion]](../models/agentversion.md) | :heavy_minus_sign: | N/A | | `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/conversationrestartrequest.md b/docs/models/conversationrestartrequest.md index f389a1e5..d9865312 100644 --- a/docs/models/conversationrestartrequest.md +++ b/docs/models/conversationrestartrequest.md @@ -14,4 +14,4 @@ Request to restart a new conversation from a given entry in the conversation. | `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. 
| | `from_entry_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | \ No newline at end of file +| `agent_version` | [OptionalNullable[models.ConversationRestartRequestAgentVersion]](../models/conversationrestartrequestagentversion.md) | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | \ No newline at end of file diff --git a/docs/models/conversationrestartrequestagentversion.md b/docs/models/conversationrestartrequestagentversion.md new file mode 100644 index 00000000..019ba301 --- /dev/null +++ b/docs/models/conversationrestartrequestagentversion.md @@ -0,0 +1,19 @@ +# ConversationRestartRequestAgentVersion + +Specific version of the agent to use when restarting. If not provided, uses the current version. + + +## Supported Types + +### `str` + +```python +value: str = /* values here */ +``` + +### `int` + +```python +value: int = /* values here */ +``` + diff --git a/docs/models/conversationrestartstreamrequest.md b/docs/models/conversationrestartstreamrequest.md index d7358dc2..a5f8cbe7 100644 --- a/docs/models/conversationrestartstreamrequest.md +++ b/docs/models/conversationrestartstreamrequest.md @@ -14,4 +14,4 @@ Request to restart a new conversation from a given entry in the conversation. | `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. | | `from_entry_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | \ No newline at end of file +| `agent_version` | [OptionalNullable[models.ConversationRestartStreamRequestAgentVersion]](../models/conversationrestartstreamrequestagentversion.md) | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | \ No newline at end of file diff --git a/docs/models/conversationrestartstreamrequestagentversion.md b/docs/models/conversationrestartstreamrequestagentversion.md new file mode 100644 index 00000000..9e006300 --- /dev/null +++ b/docs/models/conversationrestartstreamrequestagentversion.md @@ -0,0 +1,19 @@ +# ConversationRestartStreamRequestAgentVersion + +Specific version of the agent to use when restarting. If not provided, uses the current version. 
+ + +## Supported Types + +### `str` + +```python +value: str = /* values here */ +``` + +### `int` + +```python +value: int = /* values here */ +``` + diff --git a/docs/models/conversationstreamrequest.md b/docs/models/conversationstreamrequest.md index e403db68..299346f8 100644 --- a/docs/models/conversationstreamrequest.md +++ b/docs/models/conversationstreamrequest.md @@ -16,5 +16,5 @@ | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `agent_version` | [OptionalNullable[models.ConversationStreamRequestAgentVersion]](../models/conversationstreamrequestagentversion.md) | :heavy_minus_sign: | N/A | | `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/conversationstreamrequestagentversion.md b/docs/models/conversationstreamrequestagentversion.md new file mode 100644 index 00000000..52ee9672 --- /dev/null +++ b/docs/models/conversationstreamrequestagentversion.md @@ -0,0 +1,17 @@ +# ConversationStreamRequestAgentVersion + + +## Supported Types + +### `str` + +```python +value: str = /* values here */ +``` + +### `int` + +```python +value: int = /* values here */ +``` + diff --git a/docs/models/filesapirouteslistfilesrequest.md b/docs/models/filesapirouteslistfilesrequest.md index 3801a96e..57d11722 100644 --- a/docs/models/filesapirouteslistfilesrequest.md +++ b/docs/models/filesapirouteslistfilesrequest.md @@ -11,4 +11,5 @@ | `sample_type` | List[[models.SampleType](../models/sampletype.md)] | :heavy_minus_sign: | N/A | | `source` | List[[models.Source](../models/source.md)] | :heavy_minus_sign: | N/A | | `search` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `purpose` | [OptionalNullable[models.FilePurpose]](../models/filepurpose.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `purpose` | [OptionalNullable[models.FilePurpose]](../models/filepurpose.md) | :heavy_minus_sign: | N/A | +| `mimetypes` | List[*str*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/message.md b/docs/models/message.md new file mode 100644 index 00000000..752f04a8 --- /dev/null +++ b/docs/models/message.md @@ -0,0 +1,19 @@ +# Message + +Human-readable error message. + + +## Supported Types + +### `str` + +```python +value: str = /* values here */ +``` + +### `Dict[str, Any]` + +```python +value: Dict[str, Any] = /* values here */ +``` + diff --git a/docs/models/mistralpromptmode.md b/docs/models/mistralpromptmode.md index 7416e203..c3409d03 100644 --- a/docs/models/mistralpromptmode.md +++ b/docs/models/mistralpromptmode.md @@ -1,5 +1,9 @@ # MistralPromptMode +Available options to the prompt_mode argument on the chat completion endpoint. +Values represent high-level intent. Assignment to actual SPs is handled internally. +System prompt may include knowledge cutoff date, model capabilities, tone to use, safety guidelines, etc. 
+ ## Values diff --git a/docs/models/modelcapabilities.md b/docs/models/modelcapabilities.md index 646c8e94..c7dd2710 100644 --- a/docs/models/modelcapabilities.md +++ b/docs/models/modelcapabilities.md @@ -3,14 +3,15 @@ ## Fields -| Field | Type | Required | Description | -| ------------------ | ------------------ | ------------------ | ------------------ | -| `completion_chat` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `function_calling` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `completion_fim` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `fine_tuning` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `vision` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `ocr` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `classification` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `moderation` | *Optional[bool]* | :heavy_minus_sign: | N/A | -| `audio` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------- | --------------------- | --------------------- | --------------------- | +| `completion_chat` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `function_calling` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `completion_fim` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `fine_tuning` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `vision` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `ocr` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `classification` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `moderation` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `audio` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `audio_transcription` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/queryparamagentversion.md b/docs/models/queryparamagentversion.md new file mode 100644 index 00000000..3eb5ef18 --- /dev/null +++ b/docs/models/queryparamagentversion.md @@ -0,0 +1,17 @@ +# QueryParamAgentVersion + + +## Supported Types + +### `int` + +```python +value: int = /* values here */ +``` + +### `str` + +```python +value: str = /* values here */ +``` + diff --git a/docs/models/realtimetranscriptionerror.md b/docs/models/realtimetranscriptionerror.md new file mode 100644 index 00000000..e01f2126 --- /dev/null +++ b/docs/models/realtimetranscriptionerror.md @@ -0,0 +1,9 @@ +# RealtimeTranscriptionError + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | +| `type` | *Optional[Literal["error"]]* | :heavy_minus_sign: | N/A | +| `error` | [models.RealtimeTranscriptionErrorDetail](../models/realtimetranscriptionerrordetail.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/realtimetranscriptionerrordetail.md b/docs/models/realtimetranscriptionerrordetail.md new file mode 100644 index 00000000..96420ada --- /dev/null +++ b/docs/models/realtimetranscriptionerrordetail.md @@ -0,0 +1,9 @@ +# RealtimeTranscriptionErrorDetail + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------- | -------------------------------------- | -------------------------------------- | 
-------------------------------------- | +| `message` | [models.Message](../models/message.md) | :heavy_check_mark: | Human-readable error message. | +| `code` | *int* | :heavy_check_mark: | Internal error code for debugging. | \ No newline at end of file diff --git a/docs/models/realtimetranscriptionsession.md b/docs/models/realtimetranscriptionsession.md new file mode 100644 index 00000000..94a0a89e --- /dev/null +++ b/docs/models/realtimetranscriptionsession.md @@ -0,0 +1,10 @@ +# RealtimeTranscriptionSession + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | +| `request_id` | *str* | :heavy_check_mark: | N/A | +| `model` | *str* | :heavy_check_mark: | N/A | +| `audio_format` | [models.AudioFormat](../models/audioformat.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/realtimetranscriptionsessioncreated.md b/docs/models/realtimetranscriptionsessioncreated.md new file mode 100644 index 00000000..34e603fd --- /dev/null +++ b/docs/models/realtimetranscriptionsessioncreated.md @@ -0,0 +1,9 @@ +# RealtimeTranscriptionSessionCreated + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `type` | *Optional[Literal["session.created"]]* | :heavy_minus_sign: | N/A | +| `session` | [models.RealtimeTranscriptionSession](../models/realtimetranscriptionsession.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/realtimetranscriptionsessionupdated.md b/docs/models/realtimetranscriptionsessionupdated.md new file mode 100644 index 00000000..7e271995 --- /dev/null +++ b/docs/models/realtimetranscriptionsessionupdated.md @@ -0,0 +1,9 @@ +# RealtimeTranscriptionSessionUpdated + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `type` | *Optional[Literal["session.updated"]]* | :heavy_minus_sign: | N/A | +| `session` | [models.RealtimeTranscriptionSession](../models/realtimetranscriptionsession.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/sdks/conversations/README.md b/docs/sdks/conversations/README.md index c488848c..ca383176 100644 --- a/docs/sdks/conversations/README.md +++ b/docs/sdks/conversations/README.md @@ -60,7 +60,7 @@ with Mistral( | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `agent_version` | [OptionalNullable[models.AgentVersion]](../../models/agentversion.md) | :heavy_minus_sign: | N/A | | `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `retries` | 
[Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | @@ -366,7 +366,7 @@ with Mistral( | `handoff_execution` | [Optional[models.ConversationRestartRequestHandoffExecution]](../../models/conversationrestartrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | | `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | +| `agent_version` | [OptionalNullable[models.ConversationRestartRequestAgentVersion]](../../models/conversationrestartrequestagentversion.md) | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -431,7 +431,7 @@ with Mistral( | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `agent_version` | [OptionalNullable[models.ConversationStreamRequestAgentVersion]](../../models/conversationstreamrequestagentversion.md) | :heavy_minus_sign: | N/A | | `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | @@ -547,7 +547,7 @@ with Mistral( | `handoff_execution` | [Optional[models.ConversationRestartStreamRequestHandoffExecution]](../../models/conversationrestartstreamrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | | `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | +| `agent_version` | [OptionalNullable[models.ConversationRestartStreamRequestAgentVersion]](../../models/conversationrestartstreamrequestagentversion.md) | :heavy_minus_sign: | Specific version of the agent to use when restarting. If not provided, uses the current version. | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| ### Response diff --git a/docs/sdks/files/README.md b/docs/sdks/files/README.md index f0dfd593..57b53fc7 100644 --- a/docs/sdks/files/README.md +++ b/docs/sdks/files/README.md @@ -95,6 +95,7 @@ with Mistral( | `source` | List[[models.Source](../../models/source.md)] | :heavy_minus_sign: | N/A | | `search` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `purpose` | [OptionalNullable[models.FilePurpose]](../../models/filepurpose.md) | :heavy_minus_sign: | N/A | +| `mimetypes` | List[*str*] | :heavy_minus_sign: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response diff --git a/docs/sdks/mistralagents/README.md b/docs/sdks/mistralagents/README.md index 8021fa07..bdd8d588 100644 --- a/docs/sdks/mistralagents/README.md +++ b/docs/sdks/mistralagents/README.md @@ -14,6 +14,8 @@ * [update_version](#update_version) - Update an agent version. * [list_versions](#list_versions) - List all versions of an agent. * [get_version](#get_version) - Retrieve a specific version of an agent. +* [create_version_alias](#create_version_alias) - Create or update an agent version alias. +* [list_version_aliases](#list_version_aliases) - List all aliases for an agent. ## create @@ -116,7 +118,7 @@ with Mistral( ## get -Given an agent retrieve an agent entity with its attributes. +Given an agent, retrieve an agent entity with its attributes. The agent_version parameter can be an integer version number or a string alias. ### Example Usage @@ -139,11 +141,11 @@ with Mistral( ### Parameters -| Parameter | Type | Required | Description | -| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `agent_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `agent_version` | [OptionalNullable[models.QueryParamAgentVersion]](../../models/queryparamagentversion.md) | :heavy_minus_sign: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -337,7 +339,7 @@ Get a specific agent version by version number. 
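With `agent_version` on `get` (and `version` below) now accepting a string alias as well as an integer version number, an agent can be fetched by alias. A minimal sketch, assuming an alias named "production" has already been created for the agent:

```python
from mistralai import Mistral
import os


with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:

    # "production" is a hypothetical alias; an integer version number works here too.
    res = mistral.beta.agents.get(agent_id="", agent_version="production")

    # Handle response
    print(res)

```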
### Example Usage - + ```python from mistralai import Mistral import os @@ -347,7 +349,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.agents.get_version(agent_id="", version=788393) + res = mistral.beta.agents.get_version(agent_id="", version="788393") # Handle response print(res) @@ -359,7 +361,7 @@ with Mistral( | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | | `agent_id` | *str* | :heavy_check_mark: | N/A | -| `version` | *int* | :heavy_check_mark: | N/A | +| `version` | *str* | :heavy_check_mark: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -368,6 +370,90 @@ with Mistral( ### Errors +| Error Type | Status Code | Content Type | +| -------------------------- | -------------------------- | -------------------------- | +| models.HTTPValidationError | 422 | application/json | +| models.SDKError | 4XX, 5XX | \*/\* | + +## create_version_alias + +Create a new alias or update an existing alias to point to a specific version. Aliases are unique per agent and can be reassigned to different versions. + +### Example Usage + + +```python +from mistralai import Mistral +import os + + +with Mistral( + api_key=os.getenv("MISTRAL_API_KEY", ""), +) as mistral: + + res = mistral.beta.agents.create_version_alias(agent_id="", alias="", version=595141) + + # Handle response + print(res) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `alias` | *str* | :heavy_check_mark: | N/A | +| `version` | *int* | :heavy_check_mark: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Response + +**[models.AgentAliasResponse](../../models/agentaliasresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| -------------------------- | -------------------------- | -------------------------- | +| models.HTTPValidationError | 422 | application/json | +| models.SDKError | 4XX, 5XX | \*/\* | + +## list_version_aliases + +Retrieve all version aliases for a specific agent. 
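Each returned item is an `AgentAliasResponse` carrying `alias`, `version`, `created_at`, and `updated_at`. A brief sketch of listing and inspecting them (the empty `agent_id` is a placeholder, as in the generated examples):

```python
from mistralai import Mistral
import os


with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:

    aliases = mistral.beta.agents.list_version_aliases(agent_id="")

    for a in aliases:
        # Each alias points at exactly one agent version and can be reassigned later.
        print(f"{a.alias} -> v{a.version} (updated {a.updated_at})")

```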
+ +### Example Usage + + +```python +from mistralai import Mistral +import os + + +with Mistral( + api_key=os.getenv("MISTRAL_API_KEY", ""), +) as mistral: + + res = mistral.beta.agents.list_version_aliases(agent_id="") + + # Handle response + print(res) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Response + +**[List[models.AgentAliasResponse]](../../models/.md)** + +### Errors + | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | | models.HTTPValidationError | 422 | application/json | diff --git a/src/mistralai/_version.py b/src/mistralai/_version.py index aae7598d..6ee91593 100644 --- a/src/mistralai/_version.py +++ b/src/mistralai/_version.py @@ -3,10 +3,10 @@ import importlib.metadata __title__: str = "mistralai" -__version__: str = "1.11.1" +__version__: str = "1.12.0" __openapi_doc_version__: str = "1.0.0" __gen_version__: str = "2.794.1" -__user_agent__: str = "speakeasy-sdk/python 1.11.1 2.794.1 1.0.0 mistralai" +__user_agent__: str = "speakeasy-sdk/python 1.12.0 2.794.1 1.0.0 mistralai" try: if __package__ is not None: diff --git a/src/mistralai/conversations.py b/src/mistralai/conversations.py index 93ed8c28..194cb4c0 100644 --- a/src/mistralai/conversations.py +++ b/src/mistralai/conversations.py @@ -259,7 +259,12 @@ def start( description: OptionalNullable[str] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_id: OptionalNullable[str] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationrequest.AgentVersion, + models_conversationrequest.AgentVersionTypedDict, + ] + ] = UNSET, model: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -405,7 +410,12 @@ async def start_async( description: OptionalNullable[str] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_id: OptionalNullable[str] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationrequest.AgentVersion, + models_conversationrequest.AgentVersionTypedDict, + ] + ] = UNSET, model: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -1711,7 +1721,12 @@ def restart( ] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationrestartrequest.ConversationRestartRequestAgentVersion, + models_conversationrestartrequest.ConversationRestartRequestAgentVersionTypedDict, + ] + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -1846,7 +1861,12 @@ async def restart_async( ] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + 
Union[ + models_conversationrestartrequest.ConversationRestartRequestAgentVersion, + models_conversationrestartrequest.ConversationRestartRequestAgentVersionTypedDict, + ] + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -1991,7 +2011,12 @@ def start_stream( description: OptionalNullable[str] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_id: OptionalNullable[str] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationstreamrequest.ConversationStreamRequestAgentVersion, + models_conversationstreamrequest.ConversationStreamRequestAgentVersionTypedDict, + ] + ] = UNSET, model: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -2148,7 +2173,12 @@ async def start_stream_async( description: OptionalNullable[str] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_id: OptionalNullable[str] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationstreamrequest.ConversationStreamRequestAgentVersion, + models_conversationstreamrequest.ConversationStreamRequestAgentVersionTypedDict, + ] + ] = UNSET, model: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -2561,7 +2591,12 @@ def restart_stream( ] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersion, + models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersionTypedDict, + ] + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -2703,7 +2738,12 @@ async def restart_stream_async( ] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersion, + models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersionTypedDict, + ] + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, diff --git a/src/mistralai/files.py b/src/mistralai/files.py index ab2c75a2..90ada0ff 100644 --- a/src/mistralai/files.py +++ b/src/mistralai/files.py @@ -241,6 +241,7 @@ def list( source: OptionalNullable[List[models_source.Source]] = UNSET, search: OptionalNullable[str] = UNSET, purpose: OptionalNullable[models_filepurpose.FilePurpose] = UNSET, + mimetypes: OptionalNullable[List[str]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -257,6 +258,7 @@ def list( :param source: :param search: :param purpose: + :param mimetypes: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -280,6 +282,7 @@ def list( source=source, search=search, purpose=purpose, + mimetypes=mimetypes, ) req = self._build_request( @@ -343,6 +346,7 @@ async def list_async( source: 
OptionalNullable[List[models_source.Source]] = UNSET, search: OptionalNullable[str] = UNSET, purpose: OptionalNullable[models_filepurpose.FilePurpose] = UNSET, + mimetypes: OptionalNullable[List[str]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -359,6 +363,7 @@ async def list_async( :param source: :param search: :param purpose: + :param mimetypes: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -382,6 +387,7 @@ async def list_async( source=source, search=search, purpose=purpose, + mimetypes=mimetypes, ) req = self._build_request_async( diff --git a/src/mistralai/mistral_agents.py b/src/mistralai/mistral_agents.py index e4abf6e4..7fb0ce25 100644 --- a/src/mistralai/mistral_agents.py +++ b/src/mistralai/mistral_agents.py @@ -5,6 +5,7 @@ from mistralai._hooks import HookContext from mistralai.models import ( agentcreationrequest as models_agentcreationrequest, + agents_api_v1_agents_getop as models_agents_api_v1_agents_getop, agentupdaterequest as models_agentupdaterequest, completionargs as models_completionargs, requestsource as models_requestsource, @@ -494,7 +495,12 @@ def get( self, *, agent_id: str, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_agents_api_v1_agents_getop.QueryParamAgentVersion, + models_agents_api_v1_agents_getop.QueryParamAgentVersionTypedDict, + ] + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -502,7 +508,7 @@ def get( ) -> models.Agent: r"""Retrieve an agent entity. - Given an agent retrieve an agent entity with its attributes. + Given an agent, retrieve an agent entity with its attributes. The agent_version parameter can be an integer version number or a string alias. :param agent_id: :param agent_version: @@ -587,7 +593,12 @@ async def get_async( self, *, agent_id: str, - agent_version: OptionalNullable[int] = UNSET, + agent_version: OptionalNullable[ + Union[ + models_agents_api_v1_agents_getop.QueryParamAgentVersion, + models_agents_api_v1_agents_getop.QueryParamAgentVersionTypedDict, + ] + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -595,7 +606,7 @@ async def get_async( ) -> models.Agent: r"""Retrieve an agent entity. - Given an agent retrieve an agent entity with its attributes. + Given an agent, retrieve an agent entity with its attributes. The agent_version parameter can be an integer version number or a string alias. 
:param agent_id: :param agent_version: @@ -1514,7 +1525,7 @@ def get_version( self, *, agent_id: str, - version: int, + version: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -1548,7 +1559,7 @@ def get_version( req = self._build_request( method="GET", - path="/v1/agents/{agent_id}/version/{version}", + path="/v1/agents/{agent_id}/versions/{version}", base_url=base_url, url_variables=url_variables, request=request, @@ -1607,7 +1618,7 @@ async def get_version_async( self, *, agent_id: str, - version: int, + version: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -1641,7 +1652,7 @@ async def get_version_async( req = self._build_request_async( method="GET", - path="/v1/agents/{agent_id}/version/{version}", + path="/v1/agents/{agent_id}/versions/{version}", base_url=base_url, url_variables=url_variables, request=request, @@ -1695,3 +1706,375 @@ async def get_version_async( raise models.SDKError("API error occurred", http_res, http_res_text) raise models.SDKError("Unexpected response received", http_res) + + def create_version_alias( + self, + *, + agent_id: str, + alias: str, + version: int, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.AgentAliasResponse: + r"""Create or update an agent version alias. + + Create a new alias or update an existing alias to point to a specific version. Aliases are unique per agent and can be reassigned to different versions. + + :param agent_id: + :param alias: + :param version: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + request = models.AgentsAPIV1AgentsCreateOrUpdateAliasRequest( + agent_id=agent_id, + alias=alias, + version=version, + ) + + req = self._build_request( + method="PUT", + path="/v1/agents/{agent_id}/aliases", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + allow_empty_value=None, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["429", "500", "502", "503", "504"]) + + http_res = self.do_request( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="agents_api_v1_agents_create_or_update_alias", + oauth2_scopes=None, + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return unmarshal_json_response(models.AgentAliasResponse, http_res) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + models.HTTPValidationErrorData, http_res + ) + raise models.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + + raise models.SDKError("Unexpected response received", http_res) + + async def create_version_alias_async( + self, + *, + agent_id: str, + alias: str, + version: int, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.AgentAliasResponse: + r"""Create or update an agent version alias. + + Create a new alias or update an existing alias to point to a specific version. Aliases are unique per agent and can be reassigned to different versions. + + :param agent_id: + :param alias: + :param version: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + request = models.AgentsAPIV1AgentsCreateOrUpdateAliasRequest( + agent_id=agent_id, + alias=alias, + version=version, + ) + + req = self._build_request_async( + method="PUT", + path="/v1/agents/{agent_id}/aliases", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + allow_empty_value=None, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["429", "500", "502", "503", "504"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="agents_api_v1_agents_create_or_update_alias", + oauth2_scopes=None, + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return unmarshal_json_response(models.AgentAliasResponse, http_res) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + models.HTTPValidationErrorData, http_res + ) + raise models.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + + raise models.SDKError("Unexpected response received", http_res) + + def list_version_aliases( + self, + *, + agent_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> List[models.AgentAliasResponse]: + r"""List all aliases for an agent. + + Retrieve all version aliases for a specific agent. + + :param agent_id: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + request = models.AgentsAPIV1AgentsListVersionAliasesRequest( + agent_id=agent_id, + ) + + req = self._build_request( + method="GET", + path="/v1/agents/{agent_id}/aliases", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + allow_empty_value=None, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["429", "500", "502", "503", "504"]) + + http_res = self.do_request( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="agents_api_v1_agents_list_version_aliases", + oauth2_scopes=None, + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return unmarshal_json_response(List[models.AgentAliasResponse], http_res) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + models.HTTPValidationErrorData, http_res + ) + raise models.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + + raise models.SDKError("Unexpected response received", http_res) + + async def list_version_aliases_async( + self, + *, + agent_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> List[models.AgentAliasResponse]: + r"""List all aliases for an agent. + + Retrieve all version aliases for a specific agent. + + :param agent_id: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + request = models.AgentsAPIV1AgentsListVersionAliasesRequest( + agent_id=agent_id, + ) + + req = self._build_request_async( + method="GET", + path="/v1/agents/{agent_id}/aliases", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + allow_empty_value=None, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["429", "500", "502", "503", "504"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="agents_api_v1_agents_list_version_aliases", + oauth2_scopes=None, + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return unmarshal_json_response(List[models.AgentAliasResponse], http_res) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + models.HTTPValidationErrorData, http_res + ) + raise models.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError("API error occurred", http_res, http_res_text) + + raise models.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/models/__init__.py b/src/mistralai/models/__init__.py index c35b3d24..23e65222 100644 --- a/src/mistralai/models/__init__.py +++ b/src/mistralai/models/__init__.py @@ -14,8 +14,11 @@ AgentToolsTypedDict, AgentTypedDict, ) + from .agentaliasresponse import AgentAliasResponse, AgentAliasResponseTypedDict from .agentconversation import ( AgentConversation, + AgentConversationAgentVersion, + AgentConversationAgentVersionTypedDict, AgentConversationObject, AgentConversationTypedDict, ) @@ -41,6 +44,10 @@ AgentHandoffStartedEventType, AgentHandoffStartedEventTypedDict, ) + from .agents_api_v1_agents_create_or_update_aliasop import ( + AgentsAPIV1AgentsCreateOrUpdateAliasRequest, + AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict, + ) from .agents_api_v1_agents_deleteop import ( AgentsAPIV1AgentsDeleteRequest, AgentsAPIV1AgentsDeleteRequestTypedDict, @@ -52,6 +59,12 @@ from .agents_api_v1_agents_getop import ( AgentsAPIV1AgentsGetRequest, AgentsAPIV1AgentsGetRequestTypedDict, + QueryParamAgentVersion, + QueryParamAgentVersionTypedDict, + ) + from .agents_api_v1_agents_list_version_aliasesop import ( + AgentsAPIV1AgentsListVersionAliasesRequest, + AgentsAPIV1AgentsListVersionAliasesRequestTypedDict, ) from 
.agents_api_v1_agents_list_versionsop import ( AgentsAPIV1AgentsListVersionsRequest, @@ -340,6 +353,8 @@ ConversationMessagesTypedDict, ) from .conversationrequest import ( + AgentVersion, + AgentVersionTypedDict, ConversationRequest, ConversationRequestTypedDict, HandoffExecution, @@ -355,16 +370,22 @@ ) from .conversationrestartrequest import ( ConversationRestartRequest, + ConversationRestartRequestAgentVersion, + ConversationRestartRequestAgentVersionTypedDict, ConversationRestartRequestHandoffExecution, ConversationRestartRequestTypedDict, ) from .conversationrestartstreamrequest import ( ConversationRestartStreamRequest, + ConversationRestartStreamRequestAgentVersion, + ConversationRestartStreamRequestAgentVersionTypedDict, ConversationRestartStreamRequestHandoffExecution, ConversationRestartStreamRequestTypedDict, ) from .conversationstreamrequest import ( ConversationStreamRequest, + ConversationStreamRequestAgentVersion, + ConversationStreamRequestAgentVersionTypedDict, ConversationStreamRequestHandoffExecution, ConversationStreamRequestTools, ConversationStreamRequestToolsTypedDict, @@ -758,6 +779,28 @@ from .paginationinfo import PaginationInfo, PaginationInfoTypedDict from .prediction import Prediction, PredictionTypedDict from .processingstatusout import ProcessingStatusOut, ProcessingStatusOutTypedDict + from .realtimetranscriptionerror import ( + RealtimeTranscriptionError, + RealtimeTranscriptionErrorTypedDict, + ) + from .realtimetranscriptionerrordetail import ( + Message, + MessageTypedDict, + RealtimeTranscriptionErrorDetail, + RealtimeTranscriptionErrorDetailTypedDict, + ) + from .realtimetranscriptionsession import ( + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionTypedDict, + ) + from .realtimetranscriptionsessioncreated import ( + RealtimeTranscriptionSessionCreated, + RealtimeTranscriptionSessionCreatedTypedDict, + ) + from .realtimetranscriptionsessionupdated import ( + RealtimeTranscriptionSessionUpdated, + RealtimeTranscriptionSessionUpdatedTypedDict, + ) from .referencechunk import ( ReferenceChunk, ReferenceChunkType, @@ -951,7 +994,11 @@ __all__ = [ "APIEndpoint", "Agent", + "AgentAliasResponse", + "AgentAliasResponseTypedDict", "AgentConversation", + "AgentConversationAgentVersion", + "AgentConversationAgentVersionTypedDict", "AgentConversationObject", "AgentConversationTypedDict", "AgentCreationRequest", @@ -976,6 +1023,10 @@ "AgentUpdateRequestTools", "AgentUpdateRequestToolsTypedDict", "AgentUpdateRequestTypedDict", + "AgentVersion", + "AgentVersionTypedDict", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequest", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict", "AgentsAPIV1AgentsDeleteRequest", "AgentsAPIV1AgentsDeleteRequestTypedDict", "AgentsAPIV1AgentsGetRequest", @@ -984,6 +1035,8 @@ "AgentsAPIV1AgentsGetVersionRequestTypedDict", "AgentsAPIV1AgentsListRequest", "AgentsAPIV1AgentsListRequestTypedDict", + "AgentsAPIV1AgentsListVersionAliasesRequest", + "AgentsAPIV1AgentsListVersionAliasesRequestTypedDict", "AgentsAPIV1AgentsListVersionsRequest", "AgentsAPIV1AgentsListVersionsRequestTypedDict", "AgentsAPIV1AgentsUpdateRequest", @@ -1184,12 +1237,18 @@ "ConversationResponseObject", "ConversationResponseTypedDict", "ConversationRestartRequest", + "ConversationRestartRequestAgentVersion", + "ConversationRestartRequestAgentVersionTypedDict", "ConversationRestartRequestHandoffExecution", "ConversationRestartRequestTypedDict", "ConversationRestartStreamRequest", + "ConversationRestartStreamRequestAgentVersion", + 
"ConversationRestartStreamRequestAgentVersionTypedDict", "ConversationRestartStreamRequestHandoffExecution", "ConversationRestartStreamRequestTypedDict", "ConversationStreamRequest", + "ConversationStreamRequestAgentVersion", + "ConversationStreamRequestAgentVersionTypedDict", "ConversationStreamRequestHandoffExecution", "ConversationStreamRequestTools", "ConversationStreamRequestToolsTypedDict", @@ -1431,6 +1490,7 @@ "ListSharingOutTypedDict", "Loc", "LocTypedDict", + "Message", "MessageEntries", "MessageEntriesTypedDict", "MessageInputContentChunks", @@ -1456,6 +1516,7 @@ "MessageOutputEventRole", "MessageOutputEventType", "MessageOutputEventTypedDict", + "MessageTypedDict", "Messages", "MessagesTypedDict", "MetricOut", @@ -1506,7 +1567,19 @@ "PredictionTypedDict", "ProcessingStatusOut", "ProcessingStatusOutTypedDict", + "QueryParamAgentVersion", + "QueryParamAgentVersionTypedDict", "QueryParamStatus", + "RealtimeTranscriptionError", + "RealtimeTranscriptionErrorDetail", + "RealtimeTranscriptionErrorDetailTypedDict", + "RealtimeTranscriptionErrorTypedDict", + "RealtimeTranscriptionSession", + "RealtimeTranscriptionSessionCreated", + "RealtimeTranscriptionSessionCreatedTypedDict", + "RealtimeTranscriptionSessionTypedDict", + "RealtimeTranscriptionSessionUpdated", + "RealtimeTranscriptionSessionUpdatedTypedDict", "ReferenceChunk", "ReferenceChunkType", "ReferenceChunkTypedDict", @@ -1675,7 +1748,11 @@ "AgentTools": ".agent", "AgentToolsTypedDict": ".agent", "AgentTypedDict": ".agent", + "AgentAliasResponse": ".agentaliasresponse", + "AgentAliasResponseTypedDict": ".agentaliasresponse", "AgentConversation": ".agentconversation", + "AgentConversationAgentVersion": ".agentconversation", + "AgentConversationAgentVersionTypedDict": ".agentconversation", "AgentConversationObject": ".agentconversation", "AgentConversationTypedDict": ".agentconversation", "AgentCreationRequest": ".agentcreationrequest", @@ -1692,12 +1769,18 @@ "AgentHandoffStartedEvent": ".agenthandoffstartedevent", "AgentHandoffStartedEventType": ".agenthandoffstartedevent", "AgentHandoffStartedEventTypedDict": ".agenthandoffstartedevent", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequest": ".agents_api_v1_agents_create_or_update_aliasop", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict": ".agents_api_v1_agents_create_or_update_aliasop", "AgentsAPIV1AgentsDeleteRequest": ".agents_api_v1_agents_deleteop", "AgentsAPIV1AgentsDeleteRequestTypedDict": ".agents_api_v1_agents_deleteop", "AgentsAPIV1AgentsGetVersionRequest": ".agents_api_v1_agents_get_versionop", "AgentsAPIV1AgentsGetVersionRequestTypedDict": ".agents_api_v1_agents_get_versionop", "AgentsAPIV1AgentsGetRequest": ".agents_api_v1_agents_getop", "AgentsAPIV1AgentsGetRequestTypedDict": ".agents_api_v1_agents_getop", + "QueryParamAgentVersion": ".agents_api_v1_agents_getop", + "QueryParamAgentVersionTypedDict": ".agents_api_v1_agents_getop", + "AgentsAPIV1AgentsListVersionAliasesRequest": ".agents_api_v1_agents_list_version_aliasesop", + "AgentsAPIV1AgentsListVersionAliasesRequestTypedDict": ".agents_api_v1_agents_list_version_aliasesop", "AgentsAPIV1AgentsListVersionsRequest": ".agents_api_v1_agents_list_versionsop", "AgentsAPIV1AgentsListVersionsRequestTypedDict": ".agents_api_v1_agents_list_versionsop", "AgentsAPIV1AgentsListRequest": ".agents_api_v1_agents_listop", @@ -1913,6 +1996,8 @@ "ConversationMessages": ".conversationmessages", "ConversationMessagesObject": ".conversationmessages", "ConversationMessagesTypedDict": ".conversationmessages", + "AgentVersion": 
".conversationrequest", + "AgentVersionTypedDict": ".conversationrequest", "ConversationRequest": ".conversationrequest", "ConversationRequestTypedDict": ".conversationrequest", "HandoffExecution": ".conversationrequest", @@ -1924,12 +2009,18 @@ "Outputs": ".conversationresponse", "OutputsTypedDict": ".conversationresponse", "ConversationRestartRequest": ".conversationrestartrequest", + "ConversationRestartRequestAgentVersion": ".conversationrestartrequest", + "ConversationRestartRequestAgentVersionTypedDict": ".conversationrestartrequest", "ConversationRestartRequestHandoffExecution": ".conversationrestartrequest", "ConversationRestartRequestTypedDict": ".conversationrestartrequest", "ConversationRestartStreamRequest": ".conversationrestartstreamrequest", + "ConversationRestartStreamRequestAgentVersion": ".conversationrestartstreamrequest", + "ConversationRestartStreamRequestAgentVersionTypedDict": ".conversationrestartstreamrequest", "ConversationRestartStreamRequestHandoffExecution": ".conversationrestartstreamrequest", "ConversationRestartStreamRequestTypedDict": ".conversationrestartstreamrequest", "ConversationStreamRequest": ".conversationstreamrequest", + "ConversationStreamRequestAgentVersion": ".conversationstreamrequest", + "ConversationStreamRequestAgentVersionTypedDict": ".conversationstreamrequest", "ConversationStreamRequestHandoffExecution": ".conversationstreamrequest", "ConversationStreamRequestTools": ".conversationstreamrequest", "ConversationStreamRequestToolsTypedDict": ".conversationstreamrequest", @@ -2237,6 +2328,18 @@ "PredictionTypedDict": ".prediction", "ProcessingStatusOut": ".processingstatusout", "ProcessingStatusOutTypedDict": ".processingstatusout", + "RealtimeTranscriptionError": ".realtimetranscriptionerror", + "RealtimeTranscriptionErrorTypedDict": ".realtimetranscriptionerror", + "Message": ".realtimetranscriptionerrordetail", + "MessageTypedDict": ".realtimetranscriptionerrordetail", + "RealtimeTranscriptionErrorDetail": ".realtimetranscriptionerrordetail", + "RealtimeTranscriptionErrorDetailTypedDict": ".realtimetranscriptionerrordetail", + "RealtimeTranscriptionSession": ".realtimetranscriptionsession", + "RealtimeTranscriptionSessionTypedDict": ".realtimetranscriptionsession", + "RealtimeTranscriptionSessionCreated": ".realtimetranscriptionsessioncreated", + "RealtimeTranscriptionSessionCreatedTypedDict": ".realtimetranscriptionsessioncreated", + "RealtimeTranscriptionSessionUpdated": ".realtimetranscriptionsessionupdated", + "RealtimeTranscriptionSessionUpdatedTypedDict": ".realtimetranscriptionsessionupdated", "ReferenceChunk": ".referencechunk", "ReferenceChunkType": ".referencechunk", "ReferenceChunkTypedDict": ".referencechunk", diff --git a/src/mistralai/models/agentaliasresponse.py b/src/mistralai/models/agentaliasresponse.py new file mode 100644 index 00000000..c0928da9 --- /dev/null +++ b/src/mistralai/models/agentaliasresponse.py @@ -0,0 +1,23 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from datetime import datetime +from mistralai.types import BaseModel +from typing_extensions import TypedDict + + +class AgentAliasResponseTypedDict(TypedDict): + alias: str + version: int + created_at: datetime + updated_at: datetime + + +class AgentAliasResponse(BaseModel): + alias: str + + version: int + + created_at: datetime + + updated_at: datetime diff --git a/src/mistralai/models/agentconversation.py b/src/mistralai/models/agentconversation.py index 625fb4fc..6007b571 100644 --- a/src/mistralai/models/agentconversation.py +++ b/src/mistralai/models/agentconversation.py @@ -4,13 +4,23 @@ from datetime import datetime from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer -from typing import Any, Dict, Literal, Optional -from typing_extensions import NotRequired, TypedDict +from typing import Any, Dict, Literal, Optional, Union +from typing_extensions import NotRequired, TypeAliasType, TypedDict AgentConversationObject = Literal["conversation",] +AgentConversationAgentVersionTypedDict = TypeAliasType( + "AgentConversationAgentVersionTypedDict", Union[str, int] +) + + +AgentConversationAgentVersion = TypeAliasType( + "AgentConversationAgentVersion", Union[str, int] +) + + class AgentConversationTypedDict(TypedDict): id: str created_at: datetime @@ -23,7 +33,7 @@ class AgentConversationTypedDict(TypedDict): metadata: NotRequired[Nullable[Dict[str, Any]]] r"""Custom metadata for the conversation.""" object: NotRequired[AgentConversationObject] - agent_version: NotRequired[Nullable[int]] + agent_version: NotRequired[Nullable[AgentConversationAgentVersionTypedDict]] class AgentConversation(BaseModel): @@ -46,7 +56,7 @@ class AgentConversation(BaseModel): object: Optional[AgentConversationObject] = "conversation" - agent_version: OptionalNullable[int] = UNSET + agent_version: OptionalNullable[AgentConversationAgentVersion] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): diff --git a/src/mistralai/models/agents_api_v1_agents_create_or_update_aliasop.py b/src/mistralai/models/agents_api_v1_agents_create_or_update_aliasop.py new file mode 100644 index 00000000..6cf9d0e0 --- /dev/null +++ b/src/mistralai/models/agents_api_v1_agents_create_or_update_aliasop.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from mistralai.types import BaseModel +from mistralai.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +from typing_extensions import Annotated, TypedDict + + +class AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict(TypedDict): + agent_id: str + alias: str + version: int + + +class AgentsAPIV1AgentsCreateOrUpdateAliasRequest(BaseModel): + agent_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + alias: Annotated[ + str, FieldMetadata(query=QueryParamMetadata(style="form", explode=True)) + ] + + version: Annotated[ + int, FieldMetadata(query=QueryParamMetadata(style="form", explode=True)) + ] diff --git a/src/mistralai/models/agents_api_v1_agents_get_versionop.py b/src/mistralai/models/agents_api_v1_agents_get_versionop.py index 4463d3b2..fddb10dd 100644 --- a/src/mistralai/models/agents_api_v1_agents_get_versionop.py +++ b/src/mistralai/models/agents_api_v1_agents_get_versionop.py @@ -8,7 +8,7 @@ class AgentsAPIV1AgentsGetVersionRequestTypedDict(TypedDict): agent_id: str - version: int + version: str class AgentsAPIV1AgentsGetVersionRequest(BaseModel): @@ -17,5 +17,5 @@ class AgentsAPIV1AgentsGetVersionRequest(BaseModel): ] version: Annotated[ - int, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/models/agents_api_v1_agents_getop.py b/src/mistralai/models/agents_api_v1_agents_getop.py index dced6dbb..2b7d89a5 100644 --- a/src/mistralai/models/agents_api_v1_agents_getop.py +++ b/src/mistralai/models/agents_api_v1_agents_getop.py @@ -4,12 +4,21 @@ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from mistralai.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata from pydantic import model_serializer -from typing_extensions import Annotated, NotRequired, TypedDict +from typing import Union +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict + + +QueryParamAgentVersionTypedDict = TypeAliasType( + "QueryParamAgentVersionTypedDict", Union[int, str] +) + + +QueryParamAgentVersion = TypeAliasType("QueryParamAgentVersion", Union[int, str]) class AgentsAPIV1AgentsGetRequestTypedDict(TypedDict): agent_id: str - agent_version: NotRequired[Nullable[int]] + agent_version: NotRequired[Nullable[QueryParamAgentVersionTypedDict]] class AgentsAPIV1AgentsGetRequest(BaseModel): @@ -18,7 +27,7 @@ class AgentsAPIV1AgentsGetRequest(BaseModel): ] agent_version: Annotated[ - OptionalNullable[int], + OptionalNullable[QueryParamAgentVersion], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), ] = UNSET diff --git a/src/mistralai/models/agents_api_v1_agents_list_version_aliasesop.py b/src/mistralai/models/agents_api_v1_agents_list_version_aliasesop.py new file mode 100644 index 00000000..650a7187 --- /dev/null +++ b/src/mistralai/models/agents_api_v1_agents_list_version_aliasesop.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from mistralai.types import BaseModel +from mistralai.utils import FieldMetadata, PathParamMetadata +from typing_extensions import Annotated, TypedDict + + +class AgentsAPIV1AgentsListVersionAliasesRequestTypedDict(TypedDict): + agent_id: str + + +class AgentsAPIV1AgentsListVersionAliasesRequest(BaseModel): + agent_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] diff --git a/src/mistralai/models/conversationrequest.py b/src/mistralai/models/conversationrequest.py index 09d934ed..80581cc1 100644 --- a/src/mistralai/models/conversationrequest.py +++ b/src/mistralai/models/conversationrequest.py @@ -48,6 +48,12 @@ ] +AgentVersionTypedDict = TypeAliasType("AgentVersionTypedDict", Union[str, int]) + + +AgentVersion = TypeAliasType("AgentVersion", Union[str, int]) + + class ConversationRequestTypedDict(TypedDict): inputs: ConversationInputsTypedDict stream: NotRequired[bool] @@ -61,7 +67,7 @@ class ConversationRequestTypedDict(TypedDict): description: NotRequired[Nullable[str]] metadata: NotRequired[Nullable[Dict[str, Any]]] agent_id: NotRequired[Nullable[str]] - agent_version: NotRequired[Nullable[int]] + agent_version: NotRequired[Nullable[AgentVersionTypedDict]] model: NotRequired[Nullable[str]] @@ -89,7 +95,7 @@ class ConversationRequest(BaseModel): agent_id: OptionalNullable[str] = UNSET - agent_version: OptionalNullable[int] = UNSET + agent_version: OptionalNullable[AgentVersion] = UNSET model: OptionalNullable[str] = UNSET diff --git a/src/mistralai/models/conversationrestartrequest.py b/src/mistralai/models/conversationrestartrequest.py index a9c8410c..6f21d012 100644 --- a/src/mistralai/models/conversationrestartrequest.py +++ b/src/mistralai/models/conversationrestartrequest.py @@ -5,8 +5,8 @@ from .conversationinputs import ConversationInputs, ConversationInputsTypedDict from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer -from typing import Any, Dict, Literal, Optional -from typing_extensions import NotRequired, TypedDict +from typing import Any, Dict, Literal, Optional, Union +from typing_extensions import NotRequired, TypeAliasType, TypedDict ConversationRestartRequestHandoffExecution = Literal[ @@ -15,6 +15,18 @@ ] +ConversationRestartRequestAgentVersionTypedDict = TypeAliasType( + "ConversationRestartRequestAgentVersionTypedDict", Union[str, int] +) +r"""Specific version of the agent to use when restarting. If not provided, uses the current version.""" + + +ConversationRestartRequestAgentVersion = TypeAliasType( + "ConversationRestartRequestAgentVersion", Union[str, int] +) +r"""Specific version of the agent to use when restarting. If not provided, uses the current version.""" + + class ConversationRestartRequestTypedDict(TypedDict): r"""Request to restart a new conversation from a given entry in the conversation.""" @@ -28,7 +40,9 @@ class ConversationRestartRequestTypedDict(TypedDict): r"""White-listed arguments from the completion API""" metadata: NotRequired[Nullable[Dict[str, Any]]] r"""Custom metadata for the conversation.""" - agent_version: NotRequired[Nullable[int]] + agent_version: NotRequired[ + Nullable[ConversationRestartRequestAgentVersionTypedDict] + ] r"""Specific version of the agent to use when restarting. 
If not provided, uses the current version.""" @@ -52,7 +66,7 @@ class ConversationRestartRequest(BaseModel): metadata: OptionalNullable[Dict[str, Any]] = UNSET r"""Custom metadata for the conversation.""" - agent_version: OptionalNullable[int] = UNSET + agent_version: OptionalNullable[ConversationRestartRequestAgentVersion] = UNSET r"""Specific version of the agent to use when restarting. If not provided, uses the current version.""" @model_serializer(mode="wrap") diff --git a/src/mistralai/models/conversationrestartstreamrequest.py b/src/mistralai/models/conversationrestartstreamrequest.py index 0703bb5f..2cec7958 100644 --- a/src/mistralai/models/conversationrestartstreamrequest.py +++ b/src/mistralai/models/conversationrestartstreamrequest.py @@ -5,8 +5,8 @@ from .conversationinputs import ConversationInputs, ConversationInputsTypedDict from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer -from typing import Any, Dict, Literal, Optional -from typing_extensions import NotRequired, TypedDict +from typing import Any, Dict, Literal, Optional, Union +from typing_extensions import NotRequired, TypeAliasType, TypedDict ConversationRestartStreamRequestHandoffExecution = Literal[ @@ -15,6 +15,18 @@ ] +ConversationRestartStreamRequestAgentVersionTypedDict = TypeAliasType( + "ConversationRestartStreamRequestAgentVersionTypedDict", Union[str, int] +) +r"""Specific version of the agent to use when restarting. If not provided, uses the current version.""" + + +ConversationRestartStreamRequestAgentVersion = TypeAliasType( + "ConversationRestartStreamRequestAgentVersion", Union[str, int] +) +r"""Specific version of the agent to use when restarting. If not provided, uses the current version.""" + + class ConversationRestartStreamRequestTypedDict(TypedDict): r"""Request to restart a new conversation from a given entry in the conversation.""" @@ -28,7 +40,9 @@ class ConversationRestartStreamRequestTypedDict(TypedDict): r"""White-listed arguments from the completion API""" metadata: NotRequired[Nullable[Dict[str, Any]]] r"""Custom metadata for the conversation.""" - agent_version: NotRequired[Nullable[int]] + agent_version: NotRequired[ + Nullable[ConversationRestartStreamRequestAgentVersionTypedDict] + ] r"""Specific version of the agent to use when restarting. If not provided, uses the current version.""" @@ -54,7 +68,9 @@ class ConversationRestartStreamRequest(BaseModel): metadata: OptionalNullable[Dict[str, Any]] = UNSET r"""Custom metadata for the conversation.""" - agent_version: OptionalNullable[int] = UNSET + agent_version: OptionalNullable[ConversationRestartStreamRequestAgentVersion] = ( + UNSET + ) r"""Specific version of the agent to use when restarting. 
If not provided, uses the current version.""" @model_serializer(mode="wrap") diff --git a/src/mistralai/models/conversationstreamrequest.py b/src/mistralai/models/conversationstreamrequest.py index 6ff56e17..1a481b77 100644 --- a/src/mistralai/models/conversationstreamrequest.py +++ b/src/mistralai/models/conversationstreamrequest.py @@ -48,6 +48,16 @@ ] +ConversationStreamRequestAgentVersionTypedDict = TypeAliasType( + "ConversationStreamRequestAgentVersionTypedDict", Union[str, int] +) + + +ConversationStreamRequestAgentVersion = TypeAliasType( + "ConversationStreamRequestAgentVersion", Union[str, int] +) + + class ConversationStreamRequestTypedDict(TypedDict): inputs: ConversationInputsTypedDict stream: NotRequired[bool] @@ -61,7 +71,7 @@ class ConversationStreamRequestTypedDict(TypedDict): description: NotRequired[Nullable[str]] metadata: NotRequired[Nullable[Dict[str, Any]]] agent_id: NotRequired[Nullable[str]] - agent_version: NotRequired[Nullable[int]] + agent_version: NotRequired[Nullable[ConversationStreamRequestAgentVersionTypedDict]] model: NotRequired[Nullable[str]] @@ -91,7 +101,7 @@ class ConversationStreamRequest(BaseModel): agent_id: OptionalNullable[str] = UNSET - agent_version: OptionalNullable[int] = UNSET + agent_version: OptionalNullable[ConversationStreamRequestAgentVersion] = UNSET model: OptionalNullable[str] = UNSET diff --git a/src/mistralai/models/files_api_routes_list_filesop.py b/src/mistralai/models/files_api_routes_list_filesop.py index 9b9422b4..84d61b9b 100644 --- a/src/mistralai/models/files_api_routes_list_filesop.py +++ b/src/mistralai/models/files_api_routes_list_filesop.py @@ -19,6 +19,7 @@ class FilesAPIRoutesListFilesRequestTypedDict(TypedDict): source: NotRequired[Nullable[List[Source]]] search: NotRequired[Nullable[str]] purpose: NotRequired[Nullable[FilePurpose]] + mimetypes: NotRequired[Nullable[List[str]]] class FilesAPIRoutesListFilesRequest(BaseModel): @@ -57,6 +58,11 @@ class FilesAPIRoutesListFilesRequest(BaseModel): FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), ] = UNSET + mimetypes: Annotated[ + OptionalNullable[List[str]], + FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), + ] = UNSET + @model_serializer(mode="wrap") def serialize_model(self, handler): optional_fields = [ @@ -67,8 +73,9 @@ def serialize_model(self, handler): "source", "search", "purpose", + "mimetypes", ] - nullable_fields = ["sample_type", "source", "search", "purpose"] + nullable_fields = ["sample_type", "source", "search", "purpose", "mimetypes"] null_default_fields = [] serialized = handler(self) diff --git a/src/mistralai/models/mistralpromptmode.py b/src/mistralai/models/mistralpromptmode.py index ee82fb6d..dfb6f2d2 100644 --- a/src/mistralai/models/mistralpromptmode.py +++ b/src/mistralai/models/mistralpromptmode.py @@ -6,3 +6,7 @@ MistralPromptMode = Union[Literal["reasoning",], UnrecognizedStr] +r"""Available options to the prompt_mode argument on the chat completion endpoint. +Values represent high-level intent. Assignment to actual SPs is handled internally. +System prompt may include knowledge cutoff date, model capabilities, tone to use, safety guidelines, etc. 
+""" diff --git a/src/mistralai/models/modelcapabilities.py b/src/mistralai/models/modelcapabilities.py index 4b5d5da7..6edf8e5b 100644 --- a/src/mistralai/models/modelcapabilities.py +++ b/src/mistralai/models/modelcapabilities.py @@ -16,6 +16,7 @@ class ModelCapabilitiesTypedDict(TypedDict): classification: NotRequired[bool] moderation: NotRequired[bool] audio: NotRequired[bool] + audio_transcription: NotRequired[bool] class ModelCapabilities(BaseModel): @@ -36,3 +37,5 @@ class ModelCapabilities(BaseModel): moderation: Optional[bool] = False audio: Optional[bool] = False + + audio_transcription: Optional[bool] = False diff --git a/src/mistralai/models/realtimetranscriptionerror.py b/src/mistralai/models/realtimetranscriptionerror.py new file mode 100644 index 00000000..0785f700 --- /dev/null +++ b/src/mistralai/models/realtimetranscriptionerror.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .realtimetranscriptionerrordetail import ( + RealtimeTranscriptionErrorDetail, + RealtimeTranscriptionErrorDetailTypedDict, +) +from mistralai.types import BaseModel +from mistralai.utils import validate_const +import pydantic +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionErrorTypedDict(TypedDict): + error: RealtimeTranscriptionErrorDetailTypedDict + type: Literal["error"] + + +class RealtimeTranscriptionError(BaseModel): + error: RealtimeTranscriptionErrorDetail + + TYPE: Annotated[ + Annotated[Optional[Literal["error"]], AfterValidator(validate_const("error"))], + pydantic.Field(alias="type"), + ] = "error" diff --git a/src/mistralai/models/realtimetranscriptionerrordetail.py b/src/mistralai/models/realtimetranscriptionerrordetail.py new file mode 100644 index 00000000..cb5d73f8 --- /dev/null +++ b/src/mistralai/models/realtimetranscriptionerrordetail.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from mistralai.types import BaseModel +from typing import Any, Dict, Union +from typing_extensions import TypeAliasType, TypedDict + + +MessageTypedDict = TypeAliasType("MessageTypedDict", Union[str, Dict[str, Any]]) +r"""Human-readable error message.""" + + +Message = TypeAliasType("Message", Union[str, Dict[str, Any]]) +r"""Human-readable error message.""" + + +class RealtimeTranscriptionErrorDetailTypedDict(TypedDict): + message: MessageTypedDict + r"""Human-readable error message.""" + code: int + r"""Internal error code for debugging.""" + + +class RealtimeTranscriptionErrorDetail(BaseModel): + message: Message + r"""Human-readable error message.""" + + code: int + r"""Internal error code for debugging.""" diff --git a/src/mistralai/models/realtimetranscriptionsession.py b/src/mistralai/models/realtimetranscriptionsession.py new file mode 100644 index 00000000..bcd0cfe3 --- /dev/null +++ b/src/mistralai/models/realtimetranscriptionsession.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from .audioformat import AudioFormat, AudioFormatTypedDict +from mistralai.types import BaseModel +from typing_extensions import TypedDict + + +class RealtimeTranscriptionSessionTypedDict(TypedDict): + request_id: str + model: str + audio_format: AudioFormatTypedDict + + +class RealtimeTranscriptionSession(BaseModel): + request_id: str + + model: str + + audio_format: AudioFormat diff --git a/src/mistralai/models/realtimetranscriptionsessioncreated.py b/src/mistralai/models/realtimetranscriptionsessioncreated.py new file mode 100644 index 00000000..9a2c2860 --- /dev/null +++ b/src/mistralai/models/realtimetranscriptionsessioncreated.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .realtimetranscriptionsession import ( + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionTypedDict, +) +from mistralai.types import BaseModel +from mistralai.utils import validate_const +import pydantic +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionSessionCreatedTypedDict(TypedDict): + session: RealtimeTranscriptionSessionTypedDict + type: Literal["session.created"] + + +class RealtimeTranscriptionSessionCreated(BaseModel): + session: RealtimeTranscriptionSession + + TYPE: Annotated[ + Annotated[ + Optional[Literal["session.created"]], + AfterValidator(validate_const("session.created")), + ], + pydantic.Field(alias="type"), + ] = "session.created" diff --git a/src/mistralai/models/realtimetranscriptionsessionupdated.py b/src/mistralai/models/realtimetranscriptionsessionupdated.py new file mode 100644 index 00000000..ad1b5133 --- /dev/null +++ b/src/mistralai/models/realtimetranscriptionsessionupdated.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .realtimetranscriptionsession import ( + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionTypedDict, +) +from mistralai.types import BaseModel +from mistralai.utils import validate_const +import pydantic +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionSessionUpdatedTypedDict(TypedDict): + session: RealtimeTranscriptionSessionTypedDict + type: Literal["session.updated"] + + +class RealtimeTranscriptionSessionUpdated(BaseModel): + session: RealtimeTranscriptionSession + + TYPE: Annotated[ + Annotated[ + Optional[Literal["session.updated"]], + AfterValidator(validate_const("session.updated")), + ], + pydantic.Field(alias="type"), + ] = "session.updated" diff --git a/uv.lock b/uv.lock index dc8f42ea..efffa7ad 100644 --- a/uv.lock +++ b/uv.lock @@ -563,7 +563,7 @@ wheels = [ [[package]] name = "mistralai" -version = "1.11.0" +version = "1.11.1" source = { editable = "." } dependencies = [ { name = "eval-type-backport" }, From caf71b23f165f202a81afa11fdbc9d51a1f34ea5 Mon Sep 17 00:00:00 2001 From: jean-malo Date: Sun, 1 Feb 2026 22:44:58 +0100 Subject: [PATCH 2/2] feat(realtime): add realtime audio transcription support This commit adds support for realtime audio transcription using WebSocket connections. The implementation includes: 1. New realtime transcription client in the extra module 2. Examples for microphone and file-based transcription 3. 
Support for audio format negotiation 4. Proper error handling and connection management The realtime transcription feature requires the websockets package (>=13.0) which is now added as an optional dependency. This implementation allows for streaming audio data to the Mistral API and receiving transcription results in realtime. The changes include new models for realtime events and connection management, as well as updated audio.py to expose the realtime functionality. --- ...async_realtime_transcription_microphone.py | 225 +++++++++++++++ .../async_realtime_transcription_stream.py | 144 ++++++++++ examples/mistral/audio/chat_base64.py | 19 +- examples/mistral/audio/chat_no_streaming.py | 19 +- examples/mistral/audio/chat_streaming.py | 24 +- .../audio/transcription_diarize_async.py | 28 ++ .../async_batch_job_chat_completion_inline.py | 1 - pyproject.toml | 3 + scripts/run_examples.sh | 3 + src/mistralai/audio.py | 20 ++ src/mistralai/extra/__init__.py | 48 ++++ src/mistralai/extra/exceptions.py | 53 +++- src/mistralai/extra/realtime/__init__.py | 25 ++ src/mistralai/extra/realtime/connection.py | 207 +++++++++++++ src/mistralai/extra/realtime/transcription.py | 271 ++++++++++++++++++ uv.lock | 74 ++++- 16 files changed, 1133 insertions(+), 31 deletions(-) create mode 100644 examples/mistral/audio/async_realtime_transcription_microphone.py create mode 100644 examples/mistral/audio/async_realtime_transcription_stream.py create mode 100644 examples/mistral/audio/transcription_diarize_async.py create mode 100644 src/mistralai/extra/realtime/__init__.py create mode 100644 src/mistralai/extra/realtime/connection.py create mode 100644 src/mistralai/extra/realtime/transcription.py diff --git a/examples/mistral/audio/async_realtime_transcription_microphone.py b/examples/mistral/audio/async_realtime_transcription_microphone.py new file mode 100644 index 00000000..748dbcaf --- /dev/null +++ b/examples/mistral/audio/async_realtime_transcription_microphone.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +# /// script +# requires-python = ">=3.9" +# dependencies = [ +# "mistralai[realtime]", +# "pyaudio", +# "rich", +# ] +# [tool.uv.sources] +# mistralai = { path = "../../..", editable = true } +# /// + +import argparse +import asyncio +import os +import sys +from typing import AsyncIterator + +from rich.align import Align +from rich.console import Console +from rich.layout import Layout +from rich.live import Live +from rich.panel import Panel +from rich.text import Text + +from mistralai import Mistral +from mistralai.extra.realtime import UnknownRealtimeEvent +from mistralai.models import ( + AudioFormat, + RealtimeTranscriptionError, + RealtimeTranscriptionSessionCreated, + TranscriptionStreamDone, + TranscriptionStreamTextDelta, +) + +console = Console() + + +class TranscriptDisplay: + """Manages the live transcript display.""" + + def __init__(self, model: str) -> None: + self.model = model + self.transcript = "" + self.status = "🔌 Connecting..." + self.error: str | None = None + + def set_listening(self) -> None: + self.status = "🎤 Listening..." 
+ + def add_text(self, text: str) -> None: + self.transcript += text + + def set_done(self) -> None: + self.status = "✅ Done" + + def set_error(self, error: str) -> None: + self.status = "❌ Error" + self.error = error + + def render(self) -> Layout: + layout = Layout() + + # Create minimal header + header_text = Text() + header_text.append("│ ", style="dim") + header_text.append(self.model, style="dim") + header_text.append(" │ ", style="dim") + + if "Listening" in self.status: + status_style = "green" + elif "Connecting" in self.status: + status_style = "yellow dim" + elif "Done" in self.status or "Stopped" in self.status: + status_style = "dim" + else: + status_style = "red" + header_text.append(self.status, style=status_style) + + header = Align.left(header_text, vertical="middle", pad=False) + + # Create main transcript area - no title, minimal border + transcript_text = Text( + self.transcript or "...", style="white" if self.transcript else "dim" + ) + transcript = Panel( + Align.left(transcript_text, vertical="top"), + border_style="dim", + padding=(1, 2), + ) + + # Minimal footer + footer_text = Text() + footer_text.append("ctrl+c", style="dim") + footer_text.append(" quit", style="dim italic") + footer = Align.left(footer_text, vertical="middle", pad=False) + + # Handle error display + if self.error: + layout.split_column( + Layout(header, name="header", size=1), + Layout(transcript, name="body"), + Layout( + Panel(Text(self.error, style="red"), border_style="red"), + name="error", + size=4, + ), + Layout(footer, name="footer", size=1), + ) + else: + layout.split_column( + Layout(header, name="header", size=1), + Layout(transcript, name="body"), + Layout(footer, name="footer", size=1), + ) + + return layout + + +async def iter_microphone( + *, + sample_rate: int, + chunk_duration_ms: int, +) -> AsyncIterator[bytes]: + """ + Yield microphone PCM chunks using PyAudio (16-bit mono). + Encoding is always pcm_s16le. 
+ """ + import pyaudio + + p = pyaudio.PyAudio() + chunk_samples = int(sample_rate * chunk_duration_ms / 1000) + + stream = p.open( + format=pyaudio.paInt16, + channels=1, + rate=sample_rate, + input=True, + frames_per_buffer=chunk_samples, + ) + + loop = asyncio.get_running_loop() + try: + while True: + # stream.read is blocking; run it off-thread + data = await loop.run_in_executor(None, stream.read, chunk_samples, False) + yield data + finally: + stream.stop_stream() + stream.close() + p.terminate() + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Real-time microphone transcription.") + parser.add_argument("--model", default="voxtral-mini-transcribe-realtime-2602", help="Model ID") + parser.add_argument( + "--sample-rate", + type=int, + default=16000, + choices=[8000, 16000, 22050, 44100, 48000], + help="Sample rate in Hz", + ) + parser.add_argument( + "--chunk-duration", type=int, default=10, help="Chunk duration in ms" + ) + parser.add_argument( + "--api-key", default=os.environ.get("MISTRAL_API_KEY"), help="Mistral API key" + ) + parser.add_argument( + "--base-url", + default=os.environ.get("MISTRAL_BASE_URL", "wss://api.mistral.ai"), + ) + return parser.parse_args() + + +async def main() -> int: + args = parse_args() + api_key = args.api_key or os.environ["MISTRAL_API_KEY"] + + client = Mistral(api_key=api_key, server_url=args.base_url) + + # microphone is always pcm_s16le here + audio_format = AudioFormat(encoding="pcm_s16le", sample_rate=args.sample_rate) + + mic_stream = iter_microphone( + sample_rate=args.sample_rate, chunk_duration_ms=args.chunk_duration + ) + + display = TranscriptDisplay(model=args.model) + + with Live( + display.render(), console=console, refresh_per_second=10, screen=True + ) as live: + try: + async for event in client.audio.realtime.transcribe_stream( + audio_stream=mic_stream, + model=args.model, + audio_format=audio_format, + ): + if isinstance(event, RealtimeTranscriptionSessionCreated): + display.set_listening() + live.update(display.render()) + elif isinstance(event, TranscriptionStreamTextDelta): + display.add_text(event.text) + live.update(display.render()) + elif isinstance(event, TranscriptionStreamDone): + display.set_done() + live.update(display.render()) + break + elif isinstance(event, RealtimeTranscriptionError): + display.set_error(str(event.error)) + live.update(display.render()) + return 1 + elif isinstance(event, UnknownRealtimeEvent): + continue + except KeyboardInterrupt: + display.status = "âšī¸ Stopped" + live.update(display.render()) + + return 0 + + +if __name__ == "__main__": + sys.exit(asyncio.run(main())) diff --git a/examples/mistral/audio/async_realtime_transcription_stream.py b/examples/mistral/audio/async_realtime_transcription_stream.py new file mode 100644 index 00000000..6dbcd103 --- /dev/null +++ b/examples/mistral/audio/async_realtime_transcription_stream.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python + +import argparse +import asyncio +import os +import subprocess +import sys +import tempfile +from pathlib import Path +from typing import AsyncIterator + +from mistralai import Mistral +from mistralai.extra.realtime.connection import UnknownRealtimeEvent +from mistralai.models import ( + AudioFormat, + RealtimeTranscriptionError, + TranscriptionStreamDone, + TranscriptionStreamTextDelta, +) + + +def convert_audio_to_pcm( + input_path: Path, +) -> Path: + temp_file = tempfile.NamedTemporaryFile(suffix=".pcm", delete=False) + temp_path = Path(temp_file.name) + temp_file.close() + + 
cmd = [ + "ffmpeg", + "-y", + "-i", + str(input_path), + "-f", + "s16le", + "-ar", + str(16000), + "-ac", + "1", + str(temp_path), + ] + + try: + subprocess.run(cmd, check=True, capture_output=True, text=True) + except subprocess.CalledProcessError as exc: + temp_path.unlink(missing_ok=True) + raise RuntimeError(f"ffmpeg conversion failed: {exc.stderr}") from exc + + return temp_path + + +async def aiter_audio_file( + path: Path, + *, + chunk_size: int = 4096, + chunk_delay: float = 0.0, +) -> AsyncIterator[bytes]: + with open(path, "rb") as f: + while True: + chunk = f.read(chunk_size) + if not chunk: + break + yield chunk + if chunk_delay > 0: + await asyncio.sleep(chunk_delay) + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Real-time audio transcription via WebSocket (iterator-based)." + ) + parser.add_argument("file", type=Path, help="Path to the audio file") + parser.add_argument("--model", default="voxtral-mini-2601", help="Model ID") + parser.add_argument( + "--api-key", + default=os.environ.get("MISTRAL_API_KEY"), + help="Mistral API key", + ) + parser.add_argument( + "--base-url", + default=os.environ.get("MISTRAL_BASE_URL", "https://api.mistral.ai"), + help="API base URL (http/https/ws/wss)", + ) + parser.add_argument( + "--chunk-size", type=int, default=4096, help="Audio chunk size in bytes" + ) + parser.add_argument( + "--chunk-delay", + type=float, + default=0.01, + help="Delay between chunks in seconds", + ) + parser.add_argument( + "--no-convert", + action="store_true", + help="Skip ffmpeg conversion (input must be raw PCM)", + ) + return parser.parse_args() + + +async def main() -> int: + args = parse_args() + api_key = args.api_key or os.environ["MISTRAL_API_KEY"] + + pcm_path = args.file + temp_path = None + + if not args.no_convert and args.file.suffix.lower() not in (".pcm", ".raw"): + pcm_path = convert_audio_to_pcm(args.file) + temp_path = pcm_path + + client = Mistral(api_key=api_key, server_url=args.base_url) + + try: + async for event in client.audio.realtime.transcribe_stream( + audio_stream=aiter_audio_file( + pcm_path, + chunk_size=args.chunk_size, + chunk_delay=args.chunk_delay, + ), + model=args.model, + audio_format=AudioFormat(encoding="pcm_s16le", sample_rate=16000), + ): + if isinstance(event, TranscriptionStreamTextDelta): + print(event.text, end="", flush=True) + elif isinstance(event, TranscriptionStreamDone): + print() + break + elif isinstance(event, RealtimeTranscriptionError): + print(f"\nError: {event.error}", file=sys.stderr) + break + elif isinstance(event, UnknownRealtimeEvent): + # ignore future / unknown events; keep going + continue + + finally: + if temp_path is not None: + temp_path.unlink(missing_ok=True) + + return 0 + + +if __name__ == "__main__": + sys.exit(asyncio.run(main())) diff --git a/examples/mistral/audio/chat_base64.py b/examples/mistral/audio/chat_base64.py index ea5ea79a..8468fbfb 100755 --- a/examples/mistral/audio/chat_base64.py +++ b/examples/mistral/audio/chat_base64.py @@ -6,7 +6,6 @@ from mistralai.models import UserMessage - def main(): api_key = os.environ["MISTRAL_API_KEY"] model = "voxtral-small-latest" @@ -16,13 +15,17 @@ def main(): content = f.read() chat_response = client.chat.complete( model=model, - messages=[UserMessage(content=[ - {"type": "text", "text": "What's in this audio file?"}, - { - "type": "input_audio", - "input_audio": base64.b64encode(content).decode('utf-8'), - }, - ])], + messages=[ + UserMessage( + content=[ + {"type": "text", "text": "What's in 
this audio file?"}, + { + "type": "input_audio", + "input_audio": base64.b64encode(content).decode("utf-8"), + }, + ] + ) + ], ) print(chat_response.choices[0].message.content) diff --git a/examples/mistral/audio/chat_no_streaming.py b/examples/mistral/audio/chat_no_streaming.py index 2caebb25..f10240bd 100755 --- a/examples/mistral/audio/chat_no_streaming.py +++ b/examples/mistral/audio/chat_no_streaming.py @@ -6,7 +6,6 @@ from mistralai.models import UserMessage - def main(): api_key = os.environ["MISTRAL_API_KEY"] model = "voxtral-small-latest" @@ -15,13 +14,17 @@ def main(): chat_response = client.chat.complete( model=model, - messages=[UserMessage(content=[ - {"type": "text", "text": "What is this audio about?"}, - { - "type": "input_audio", - "input_audio": "https://docs.mistral.ai/audio/bcn_weather.mp3", - }, - ])], + messages=[ + UserMessage( + content=[ + {"type": "text", "text": "What is this audio about?"}, + { + "type": "input_audio", + "input_audio": "https://docs.mistral.ai/audio/bcn_weather.mp3", + }, + ] + ) + ], ) print(chat_response.choices[0].message.content) diff --git a/examples/mistral/audio/chat_streaming.py b/examples/mistral/audio/chat_streaming.py index 060bfdd9..f9c913a0 100755 --- a/examples/mistral/audio/chat_streaming.py +++ b/examples/mistral/audio/chat_streaming.py @@ -6,26 +6,31 @@ from mistralai.models import UserMessage - def main(): api_key = os.environ["MISTRAL_API_KEY"] model = "voxtral-small-latest" client = Mistral(api_key=api_key) with open("examples/fixtures/bcn_weather.mp3", "rb") as f: - file = client.files.upload(file=File(content=f, file_name=f.name), purpose="audio") + file = client.files.upload( + file=File(content=f, file_name=f.name), purpose="audio" + ) print(f"Uploaded audio file, id={file.id}") signed_url = client.files.get_signed_url(file_id=file.id) try: chat_response = client.chat.stream( model=model, - messages=[UserMessage(content=[ - {"type": "text", "text": "What is this audio about?"}, - { - "type": "input_audio", - "input_audio": signed_url.url, - }, - ])], + messages=[ + UserMessage( + content=[ + {"type": "text", "text": "What is this audio about?"}, + { + "type": "input_audio", + "input_audio": signed_url.url, + }, + ] + ) + ], ) for chunk in chat_response: print(chunk.data.choices[0].delta.content) @@ -33,5 +38,6 @@ def main(): client.files.delete(file_id=file.id) print(f"Deleted audio file, id={file.id}") + if __name__ == "__main__": main() diff --git a/examples/mistral/audio/transcription_diarize_async.py b/examples/mistral/audio/transcription_diarize_async.py new file mode 100644 index 00000000..ef5323f4 --- /dev/null +++ b/examples/mistral/audio/transcription_diarize_async.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python + +import os +import asyncio +from mistralai import Mistral, File + + +async def main(): + api_key = os.environ["MISTRAL_API_KEY"] + model = "voxtral-mini-2602" + + client = Mistral(api_key=api_key) + with open("examples/fixtures/bcn_weather.mp3", "rb") as f: + response = await client.audio.transcriptions.complete_async( + model=model, + file=File(content=f, file_name=f.name), + diarize=True, + timestamp_granularities=["segment"], + ) + for segment in response.segments: + speaker = segment.speaker_id or "unknown" + print( + f"[{segment.start:.1f}s → {segment.end:.1f}s] {speaker}: {segment.text.strip()}" + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/mistral/jobs/async_batch_job_chat_completion_inline.py b/examples/mistral/jobs/async_batch_job_chat_completion_inline.py index 
94a01c6f..e728b8fa 100644 --- a/examples/mistral/jobs/async_batch_job_chat_completion_inline.py +++ b/examples/mistral/jobs/async_batch_job_chat_completion_inline.py @@ -37,4 +37,3 @@ async def main(): if __name__ == "__main__": asyncio.run(main()) - diff --git a/pyproject.toml b/pyproject.toml index 680ae19b..dbb5d44a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,9 @@ agents = [ "griffe >=1.7.3,<2.0", "authlib >=1.5.2,<2.0", ] +realtime = [ + "websockets >=13.0", +] [project.urls] Repository = "https://github.com/mistralai/client-python.git" diff --git a/scripts/run_examples.sh b/scripts/run_examples.sh index 106c10b2..5191033a 100755 --- a/scripts/run_examples.sh +++ b/scripts/run_examples.sh @@ -40,6 +40,9 @@ exclude_files=( "examples/mistral/agents/async_conversation_run_stream.py" "examples/mistral/agents/async_conversation_run_mcp.py" "examples/mistral/agents/async_conversation_run_mcp_remote.py" + "examples/mistral/audio/async_realtime_transcription_microphone.py" + "examples/mistral/audio/async_realtime_transcription_stream.py" + "examples/mistral/audio/transcription_diarize_async.py" ) # Check if the no-extra-dep flag is set diff --git a/src/mistralai/audio.py b/src/mistralai/audio.py index 5687abdb..54430d49 100644 --- a/src/mistralai/audio.py +++ b/src/mistralai/audio.py @@ -1,5 +1,12 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# region imports +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from mistralai.extra.realtime import RealtimeTranscription +# endregion imports + from .basesdk import BaseSDK from .sdkconfiguration import SDKConfiguration from mistralai.transcriptions import Transcriptions @@ -21,3 +28,16 @@ def _init_sdks(self): self.transcriptions = Transcriptions( self.sdk_configuration, parent_ref=self.parent_ref ) + + # region sdk-class-body + @property + def realtime(self) -> "RealtimeTranscription": + """Returns a client for real-time audio transcription via WebSocket.""" + if not hasattr(self, "_realtime"): + from mistralai.extra.realtime import RealtimeTranscription + + self._realtime = RealtimeTranscription(self.sdk_configuration) + + return self._realtime + + # endregion sdk-class-body diff --git a/src/mistralai/extra/__init__.py b/src/mistralai/extra/__init__.py index d9a81d24..cabda728 100644 --- a/src/mistralai/extra/__init__.py +++ b/src/mistralai/extra/__init__.py @@ -1,3 +1,5 @@ +from typing import TYPE_CHECKING + from .struct_chat import ( ParsedChatCompletionResponse, convert_to_parsed_chat_completion_response, @@ -5,9 +7,55 @@ from .utils import response_format_from_pydantic_model from .utils.response_format import CustomPydanticModel +if TYPE_CHECKING: + from .realtime import ( + AudioEncoding, + AudioFormat, + RealtimeConnection, + RealtimeTranscriptionError, + RealtimeTranscriptionErrorDetail, + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionCreated, + RealtimeTranscriptionSessionUpdated, + RealtimeTranscription, + UnknownRealtimeEvent, + ) + +_REALTIME_EXPORTS = { + "RealtimeTranscription", + "RealtimeConnection", + "AudioEncoding", + "AudioFormat", + "UnknownRealtimeEvent", + "RealtimeTranscriptionError", + "RealtimeTranscriptionErrorDetail", + "RealtimeTranscriptionSession", + "RealtimeTranscriptionSessionCreated", + "RealtimeTranscriptionSessionUpdated", +} + + +def __getattr__(name: str): + if name in _REALTIME_EXPORTS: + from . 
import realtime + + return getattr(realtime, name) + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + __all__ = [ "convert_to_parsed_chat_completion_response", "response_format_from_pydantic_model", "CustomPydanticModel", "ParsedChatCompletionResponse", + "RealtimeTranscription", + "RealtimeConnection", + "AudioEncoding", + "AudioFormat", + "UnknownRealtimeEvent", + "RealtimeTranscriptionError", + "RealtimeTranscriptionErrorDetail", + "RealtimeTranscriptionSession", + "RealtimeTranscriptionSessionCreated", + "RealtimeTranscriptionSessionUpdated", ] diff --git a/src/mistralai/extra/exceptions.py b/src/mistralai/extra/exceptions.py index 7853ddc2..ee107698 100644 --- a/src/mistralai/extra/exceptions.py +++ b/src/mistralai/extra/exceptions.py @@ -1,14 +1,59 @@ +from typing import Optional, TYPE_CHECKING + +if TYPE_CHECKING: + from mistralai.models import RealtimeTranscriptionError + + class MistralClientException(Exception): - """Base exception for all the client errors.""" + """Base exception for client errors.""" class RunException(MistralClientException): - """Exception raised for errors during a conversation run.""" + """Conversation run errors.""" class MCPException(MistralClientException): - """Exception raised for errors related to MCP operations.""" + """MCP operation errors.""" class MCPAuthException(MCPException): - """Exception raised for authentication errors with an MCP server.""" + """MCP authentication errors.""" + + +class RealtimeTranscriptionException(MistralClientException): + """Base realtime transcription exception.""" + + def __init__( + self, + message: str, + *, + code: Optional[int] = None, + payload: Optional[object] = None, + ) -> None: + super().__init__(message) + self.code = code + self.payload = payload + + +class RealtimeTranscriptionWSError(RealtimeTranscriptionException): + def __init__( + self, + message: str, + *, + payload: Optional["RealtimeTranscriptionError"] = None, + raw: Optional[object] = None, + ) -> None: + code: Optional[int] = None + if payload is not None: + try: + maybe_code = getattr(payload.error, "code", None) + if isinstance(maybe_code, int): + code = maybe_code + except Exception: + code = None + + super().__init__( + message, code=code, payload=payload if payload is not None else raw + ) + self.payload_typed = payload + self.payload_raw = raw diff --git a/src/mistralai/extra/realtime/__init__.py b/src/mistralai/extra/realtime/__init__.py new file mode 100644 index 00000000..85bf1d88 --- /dev/null +++ b/src/mistralai/extra/realtime/__init__.py @@ -0,0 +1,25 @@ +from mistralai.models import ( + AudioEncoding, + AudioFormat, + RealtimeTranscriptionError, + RealtimeTranscriptionErrorDetail, + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionCreated, + RealtimeTranscriptionSessionUpdated, +) + +from .connection import UnknownRealtimeEvent, RealtimeConnection +from .transcription import RealtimeTranscription + +__all__ = [ + "AudioEncoding", + "AudioFormat", + "RealtimeTranscriptionError", + "RealtimeTranscriptionErrorDetail", + "RealtimeTranscriptionSession", + "RealtimeTranscriptionSessionCreated", + "RealtimeTranscriptionSessionUpdated", + "RealtimeConnection", + "RealtimeTranscription", + "UnknownRealtimeEvent", +] diff --git a/src/mistralai/extra/realtime/connection.py b/src/mistralai/extra/realtime/connection.py new file mode 100644 index 00000000..042854ab --- /dev/null +++ b/src/mistralai/extra/realtime/connection.py @@ -0,0 +1,207 @@ +from __future__ import annotations + +import base64 +import json 
+from asyncio import CancelledError +from collections import deque +from typing import Any, AsyncIterator, Deque, Optional, Union + +from pydantic import ValidationError, BaseModel + +try: + from websockets.asyncio.client import ClientConnection # websockets >= 13.0 +except ImportError as exc: + raise ImportError( + "The `websockets` package (>=13.0) is required for real-time transcription. " + "Install with: pip install 'mistralai[realtime]'" + ) from exc + +from mistralai.models import ( + AudioFormat, + RealtimeTranscriptionError, + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionCreated, + RealtimeTranscriptionSessionUpdated, + TranscriptionStreamDone, + TranscriptionStreamLanguage, + TranscriptionStreamSegmentDelta, + TranscriptionStreamTextDelta, +) + + +class UnknownRealtimeEvent(BaseModel): + """ + Forward-compat fallback event: + - unknown message type + - invalid JSON payload + - schema validation failure + """ + type: Optional[str] + content: Any + error: Optional[str] = None + + +RealtimeEvent = Union[ + # session lifecycle + RealtimeTranscriptionSessionCreated, + RealtimeTranscriptionSessionUpdated, + # server errors + RealtimeTranscriptionError, + # transcription events + TranscriptionStreamLanguage, + TranscriptionStreamSegmentDelta, + TranscriptionStreamTextDelta, + TranscriptionStreamDone, + # forward-compat fallback + UnknownRealtimeEvent, +] + + +_MESSAGE_MODELS: dict[str, Any] = { + "session.created": RealtimeTranscriptionSessionCreated, + "session.updated": RealtimeTranscriptionSessionUpdated, + "error": RealtimeTranscriptionError, + "transcription.language": TranscriptionStreamLanguage, + "transcription.segment": TranscriptionStreamSegmentDelta, + "transcription.text.delta": TranscriptionStreamTextDelta, + "transcription.done": TranscriptionStreamDone, +} + + +def parse_realtime_event(payload: Any) -> RealtimeEvent: + """ + Tolerant parser: + - unknown event type -> UnknownRealtimeEvent + - validation failures -> UnknownRealtimeEvent (includes error string) + - invalid payload -> UnknownRealtimeEvent + """ + if not isinstance(payload, dict): + return UnknownRealtimeEvent( + type=None, content=payload, error="expected JSON object" + ) + + msg_type = payload.get("type") + if not isinstance(msg_type, str): + return UnknownRealtimeEvent( + type=None, content=payload, error="missing/invalid 'type'" + ) + + model_cls = _MESSAGE_MODELS.get(msg_type) + if model_cls is None: + return UnknownRealtimeEvent( + type=msg_type, content=payload, error="unknown event type" + ) + try: + parsed = model_cls.model_validate(payload) + return parsed + except ValidationError as exc: + return UnknownRealtimeEvent(type=msg_type, content=payload, error=str(exc)) + + +class RealtimeConnection: + def __init__( + self, + websocket: ClientConnection, + session: RealtimeTranscriptionSession, + *, + initial_events: Optional[list[RealtimeEvent]] = None, + ) -> None: + self._websocket = websocket + self._session = session + self._audio_format = session.audio_format + self._closed = False + self._initial_events: Deque[RealtimeEvent] = deque(initial_events or []) + + @property + def request_id(self) -> str: + return self._session.request_id + + @property + def session(self) -> RealtimeTranscriptionSession: + return self._session + + @property + def audio_format(self) -> AudioFormat: + return self._audio_format + + @property + def is_closed(self) -> bool: + return self._closed + + async def send_audio( + self, audio_bytes: Union[bytes, bytearray, memoryview] + ) -> None: + if self._closed: + 
raise RuntimeError("Connection is closed") + + message = { + "type": "input_audio.append", + "audio": base64.b64encode(bytes(audio_bytes)).decode("ascii"), + } + await self._websocket.send(json.dumps(message)) + + async def update_session(self, audio_format: AudioFormat) -> None: + if self._closed: + raise RuntimeError("Connection is closed") + + self._audio_format = audio_format + message = { + "type": "session.update", + "session": {"audio_format": audio_format.model_dump(mode="json")}, + } + await self._websocket.send(json.dumps(message)) + + async def end_audio(self) -> None: + if self._closed: + return + await self._websocket.send(json.dumps({"type": "input_audio.end"})) + + async def close(self, *, code: int = 1000, reason: str = "") -> None: + if self._closed: + return + self._closed = True + await self._websocket.close(code=code, reason=reason) + + async def __aenter__(self) -> "RealtimeConnection": + return self + + async def __aexit__(self, exc_type, exc, tb) -> None: + await self.close() + + def __aiter__(self) -> AsyncIterator[RealtimeEvent]: + return self.events() + + async def events(self) -> AsyncIterator[RealtimeEvent]: + # replay any handshake/prelude events (including session.created) + while self._initial_events: + ev = self._initial_events.popleft() + self._apply_session_updates(ev) + yield ev + + try: + async for msg in self._websocket: + text = ( + msg.decode("utf-8", errors="replace") + if isinstance(msg, (bytes, bytearray)) + else msg + ) + try: + data = json.loads(text) + except Exception as exc: + yield UnknownRealtimeEvent( + type=None, content=text, error=f"invalid JSON: {exc}" + ) + continue + + ev = parse_realtime_event(data) + self._apply_session_updates(ev) + yield ev + except CancelledError: + pass + finally: + await self.close() + + def _apply_session_updates(self, ev: RealtimeEvent) -> None: + if isinstance(ev, RealtimeTranscriptionSessionCreated) or isinstance(ev, RealtimeTranscriptionSessionUpdated): + self._session = ev.session + self._audio_format = ev.session.audio_format diff --git a/src/mistralai/extra/realtime/transcription.py b/src/mistralai/extra/realtime/transcription.py new file mode 100644 index 00000000..de117645 --- /dev/null +++ b/src/mistralai/extra/realtime/transcription.py @@ -0,0 +1,271 @@ +from __future__ import annotations + +import asyncio +import json +import time +from typing import AsyncIterator, Mapping, Optional +from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse + +try: + from websockets.asyncio.client import ( + ClientConnection, + connect, + ) # websockets >= 13.0 +except ImportError as exc: + raise ImportError( + "The `websockets` package (>=13.0) is required for real-time transcription. 
" + "Install with: pip install 'mistralai[realtime]'" + ) from exc + +from mistralai import models, utils +from mistralai.models import ( + AudioFormat, + RealtimeTranscriptionError, + RealtimeTranscriptionSession, + RealtimeTranscriptionSessionCreated, +) +from mistralai.sdkconfiguration import SDKConfiguration +from mistralai.utils import generate_url, get_security, get_security_from_env + +from ..exceptions import RealtimeTranscriptionException, RealtimeTranscriptionWSError +from .connection import ( + RealtimeConnection, + RealtimeEvent, + UnknownRealtimeEvent, + parse_realtime_event, +) + + +class RealtimeTranscription: + """Client for realtime transcription over WebSocket (websockets >= 13.0).""" + + def __init__(self, sdk_config: SDKConfiguration) -> None: + self._sdk_config = sdk_config + + def _build_url( + self, + model: str, + *, + server_url: Optional[str], + query_params: Mapping[str, str], + ) -> str: + if server_url is not None: + base_url = utils.remove_suffix(server_url, "/") + else: + base_url, _ = self._sdk_config.get_server_details() + + url = generate_url(base_url, "/v1/audio/transcriptions/realtime", None) + + parsed = urlparse(url) + merged = dict(parse_qsl(parsed.query, keep_blank_values=True)) + merged["model"] = model + merged.update(dict(query_params)) + + return urlunparse(parsed._replace(query=urlencode(merged))) + + async def connect( + self, + model: str, + audio_format: Optional[AudioFormat] = None, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> RealtimeConnection: + if timeout_ms is None: + timeout_ms = self._sdk_config.timeout_ms + + security = self._sdk_config.security + if security is not None and callable(security): + security = security() + + resolved_security = get_security_from_env(security, models.Security) + + headers: dict[str, str] = {} + query_params: dict[str, str] = {} + + if resolved_security is not None: + security_headers, security_query = get_security(resolved_security) + headers |= security_headers + for key, values in security_query.items(): + if values: + query_params[key] = values[-1] + + if http_headers is not None: + headers |= dict(http_headers) + + url = self._build_url(model, server_url=server_url, query_params=query_params) + + parsed = urlparse(url) + if parsed.scheme == "https": + parsed = parsed._replace(scheme="wss") + elif parsed.scheme == "http": + parsed = parsed._replace(scheme="ws") + ws_url = urlunparse(parsed) + open_timeout = None if timeout_ms is None else timeout_ms / 1000.0 + user_agent = self._sdk_config.user_agent + + websocket: Optional[ClientConnection] = None + try: + websocket = await connect( + ws_url, + additional_headers=dict(headers), + open_timeout=open_timeout, + user_agent_header=user_agent, + ) + + session, initial_events = await _recv_handshake( + websocket, timeout_ms=timeout_ms + ) + connection = RealtimeConnection( + websocket=websocket, + session=session, + initial_events=initial_events, + ) + + if audio_format is not None: + await connection.update_session(audio_format) + + return connection + + except RealtimeTranscriptionException: + if websocket is not None: + await websocket.close() + raise + except Exception as exc: + if websocket is not None: + await websocket.close() + raise RealtimeTranscriptionException(f"Failed to connect: {exc}") from exc + + async def transcribe_stream( + self, + audio_stream: AsyncIterator[bytes], + model: str, + audio_format: Optional[AudioFormat] = None, + server_url: Optional[str] 
= None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> AsyncIterator[RealtimeEvent]: + """ + Flow + - opens connection + - streams audio in background + - yields events from the connection + """ + async with await self.connect( + model=model, + audio_format=audio_format, + server_url=server_url, + timeout_ms=timeout_ms, + http_headers=http_headers, + ) as connection: + + async def _send() -> None: + async for chunk in audio_stream: + if connection.is_closed: + break + await connection.send_audio(chunk) + await connection.end_audio() + + send_task = asyncio.create_task(_send()) + + try: + async for event in connection: + yield event + + # stop early (caller still sees the terminating event) + if isinstance(event, RealtimeTranscriptionError): + break + if getattr(event, "type", None) == "transcription.done": + break + finally: + send_task.cancel() + try: + await send_task + except asyncio.CancelledError: + pass + await connection.close() + + +def _extract_error_message(payload: dict) -> str: + err = payload.get("error") + if isinstance(err, dict): + msg = err.get("message") + if isinstance(msg, str): + return msg + if isinstance(msg, dict): + detail = msg.get("detail") + if isinstance(detail, str): + return detail + return "Realtime transcription error" + + +async def _recv_handshake( + websocket: ClientConnection, + *, + timeout_ms: Optional[int], +) -> tuple[RealtimeTranscriptionSession, list[RealtimeEvent]]: + """ + Read messages until session.created or error. + Replay all messages read during handshake as initial events (lossless). + """ + timeout_s = None if timeout_ms is None else timeout_ms / 1000.0 + deadline = None if timeout_s is None else (time.monotonic() + timeout_s) + + initial_events: list[RealtimeEvent] = [] + + def remaining() -> Optional[float]: + if deadline is None: + return None + return max(0.0, deadline - time.monotonic()) + + try: + while True: + raw = await asyncio.wait_for(websocket.recv(), timeout=remaining()) + text = ( + raw.decode("utf-8", errors="replace") + if isinstance(raw, (bytes, bytearray)) + else raw + ) + + try: + payload = json.loads(text) + except Exception as exc: + initial_events.append( + UnknownRealtimeEvent( + type=None, content=text, error=f"invalid JSON: {exc}" + ) + ) + continue + + msg_type = payload.get("type") if isinstance(payload, dict) else None + if msg_type == "error" and isinstance(payload, dict): + parsed = parse_realtime_event(payload) + initial_events.append(parsed) + if isinstance(parsed, RealtimeTranscriptionError): + raise RealtimeTranscriptionWSError( + _extract_error_message(payload), + payload=parsed, + raw=payload, + ) + raise RealtimeTranscriptionWSError( + _extract_error_message(payload), + payload=None, + raw=payload, + ) + + event = parse_realtime_event(payload) + initial_events.append(event) + + if isinstance(event, RealtimeTranscriptionSessionCreated): + return event.session, initial_events + + except asyncio.TimeoutError as exc: + raise RealtimeTranscriptionException( + "Timeout waiting for session creation." 
+ ) from exc + except RealtimeTranscriptionException: + raise + except Exception as exc: + raise RealtimeTranscriptionException( + f"Unexpected websocket handshake failure: {exc}" + ) from exc diff --git a/uv.lock b/uv.lock index efffa7ad..85e04bd9 100644 --- a/uv.lock +++ b/uv.lock @@ -589,6 +589,9 @@ gcp = [ { name = "google-auth" }, { name = "requests" }, ] +realtime = [ + { name = "websockets" }, +] [package.dev-dependencies] dev = [ @@ -627,8 +630,9 @@ requires-dist = [ { name = "pyyaml", specifier = ">=6.0.2,<7.0.0" }, { name = "requests", marker = "extra == 'gcp'", specifier = ">=2.32.3" }, { name = "typing-inspection", specifier = ">=0.4.0" }, + { name = "websockets", marker = "extra == 'realtime'", specifier = ">=13.0" }, ] -provides-extras = ["gcp", "agents"] +provides-extras = ["gcp", "agents", "realtime"] [package.metadata.requires-dev] dev = [ @@ -1562,6 +1566,74 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, ] +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/74/221f58decd852f4b59cc3354cccaf87e8ef695fede361d03dc9a7396573b/websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a", size = 177343, upload-time = "2026-01-10T09:22:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0", size = 175021, upload-time = "2026-01-10T09:22:22.696Z" }, + { url = "https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957", size = 175320, upload-time = "2026-01-10T09:22:23.94Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72", size = 183815, upload-time = "2026-01-10T09:22:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/26/d40eaa2a46d4302becec8d15b0fc5e45bdde05191e7628405a19cf491ccd/websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde", size = 185054, upload-time = "2026-01-10T09:22:27.101Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/6500a0efc94f7373ee8fefa8c271acdfd4dca8bd49a90d4be7ccabfc397e/websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3", size = 184565, upload-time = 
"2026-01-10T09:22:28.293Z" }, + { url = "https://files.pythonhosted.org/packages/04/b4/96bf2cee7c8d8102389374a2616200574f5f01128d1082f44102140344cc/websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3", size = 183848, upload-time = "2026-01-10T09:22:30.394Z" }, + { url = "https://files.pythonhosted.org/packages/02/8e/81f40fb00fd125357814e8c3025738fc4ffc3da4b6b4a4472a82ba304b41/websockets-16.0-cp310-cp310-win32.whl", hash = "sha256:37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9", size = 178249, upload-time = "2026-01-10T09:22:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/b4/5f/7e40efe8df57db9b91c88a43690ac66f7b7aa73a11aa6a66b927e44f26fa/websockets-16.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35", size = 178685, upload-time = "2026-01-10T09:22:33.345Z" }, + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, 
upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + [[package]] name = "zipp" version = "3.23.0"
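Usage sketch (informative, not part of the applied diff): the realtime transcription API added in PATCH 2/2 is exercised end to end by examples/mistral/audio/async_realtime_transcription_stream.py; the condensed flow below assumes a raw pcm_s16le, 16 kHz mono file on disk, and the audio path and model id are illustrative placeholders taken from the example defaults. The feature needs the optional dependency from this patch, installed with pip install 'mistralai[realtime]'.

import asyncio
import os
from typing import AsyncIterator

from mistralai import Mistral
from mistralai.extra.realtime import UnknownRealtimeEvent
from mistralai.models import (
    AudioFormat,
    RealtimeTranscriptionError,
    TranscriptionStreamDone,
    TranscriptionStreamTextDelta,
)


async def pcm_chunks(path: str, chunk_size: int = 4096) -> AsyncIterator[bytes]:
    # Yield raw PCM bytes from disk; a microphone feed can be streamed the same way.
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            yield chunk
            await asyncio.sleep(0.01)  # pace the upload roughly in real time


async def main() -> None:
    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])
    async for event in client.audio.realtime.transcribe_stream(
        audio_stream=pcm_chunks("audio.pcm"),  # placeholder path
        model="voxtral-mini-2601",  # placeholder model id (example default)
        audio_format=AudioFormat(encoding="pcm_s16le", sample_rate=16000),
    ):
        if isinstance(event, TranscriptionStreamTextDelta):
            print(event.text, end="", flush=True)
        elif isinstance(event, TranscriptionStreamDone):
            print()
            break
        elif isinstance(event, RealtimeTranscriptionError):
            raise RuntimeError(f"transcription error: {event.error}")
        elif isinstance(event, UnknownRealtimeEvent):
            continue  # tolerate unknown/future event types


if __name__ == "__main__":
    asyncio.run(main())

Per the transcribe_stream docstring in src/mistralai/extra/realtime/transcription.py, the helper opens the WebSocket connection, streams the audio chunks from a background task, and yields parsed events until a transcription.done or error event arrives.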