From 794344a0baf1d85554988aba346cbd47cd2d6c2d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 18 Oct 2025 02:07:59 +0000 Subject: [PATCH 1/6] chore: bump `httpx-aiohttp` version to 0.1.9 --- pyproject.toml | 2 +- requirements-dev.lock | 24 +-- uv.lock | 408 ++++++++++++++++++++++++------------------ 3 files changed, 247 insertions(+), 187 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b10fc11e..8574870f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ Homepage = "https://github.com/llamastack/llama-stack-client-python" Repository = "https://github.com/llamastack/llama-stack-client-python" [project.optional-dependencies] -aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.9"] [tool.uv] managed = true diff --git a/requirements-dev.lock b/requirements-dev.lock index 003a453c..d4264113 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -7,7 +7,7 @@ anyio==4.8.0 # via # httpx # llama-stack-client -black==25.1.0 +black==25.9.0 certifi==2024.12.14 # via # httpcore @@ -15,9 +15,9 @@ certifi==2024.12.14 # requests cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.3 +charset-normalizer==3.4.4 # via requests -click==8.2.1 +click==8.3.0 # via # black # llama-stack-client @@ -33,7 +33,7 @@ distro==1.9.0 # via llama-stack-client execnet==2.1.1 # via pytest-xdist -filelock==3.19.1 +filelock==3.20.0 # via virtualenv fire==0.7.1 # via llama-stack-client @@ -45,7 +45,7 @@ httpx==0.28.1 # via # llama-stack-client # respx -identify==2.6.14 +identify==2.6.15 # via pre-commit idna==3.10 # via @@ -68,17 +68,17 @@ nodeenv==1.9.1 # via # pre-commit # pyright -numpy==2.3.3 +numpy==2.3.4 # via pandas packaging==24.2 # via # black # pytest -pandas==2.3.2 +pandas==2.3.3 # via llama-stack-client pathspec==0.12.1 # via black -platformdirs==4.4.0 +platformdirs==4.5.0 # via # black # virtualenv @@ -108,9 +108,11 @@ python-dateutil==2.9.0.post0 # via # pandas # time-machine +pytokens==0.2.0 + # via black pytz==2024.2 # via pandas -pyyaml==6.0.2 +pyyaml==6.0.3 # via # pre-commit # pyaml @@ -148,9 +150,9 @@ tzdata==2025.2 # via pandas urllib3==2.5.0 # via requests -virtualenv==20.34.0 +virtualenv==20.35.3 # via pre-commit -wcwidth==0.2.13 +wcwidth==0.2.14 # via prompt-toolkit zipp==3.21.0 # via importlib-metadata diff --git a/uv.lock b/uv.lock index a3b5af5e..935254f6 100644 --- a/uv.lock +++ b/uv.lock @@ -113,7 +113,7 @@ wheels = [ [[package]] name = "black" -version = "25.1.0" +version = "25.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -121,18 +121,19 @@ dependencies = [ { name = "packaging" }, { name = "pathspec" }, { name = "platformdirs" }, + { name = "pytokens" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/43/20b5c90612d7bdb2bdbcceeb53d588acca3bb8f0e4c5d5c751a2c8fdd55a/black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619", size = 648393, upload-time = "2025-09-19T00:27:37.758Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash 
= "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, - { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, - { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, - { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, - { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, - { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, - { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, - { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, + { url = "https://files.pythonhosted.org/packages/fb/8e/319cfe6c82f7e2d5bfb4d3353c6cc85b523d677ff59edc61fdb9ee275234/black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0", size = 1742012, upload-time = "2025-09-19T00:33:08.678Z" }, + { url = "https://files.pythonhosted.org/packages/94/cc/f562fe5d0a40cd2a4e6ae3f685e4c36e365b1f7e494af99c26ff7f28117f/black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4", size = 1581421, upload-time = "2025-09-19T00:35:25.937Z" }, + { url = "https://files.pythonhosted.org/packages/84/67/6db6dff1ebc8965fd7661498aea0da5d7301074b85bba8606a28f47ede4d/black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e", size = 1655619, upload-time = "2025-09-19T00:30:49.241Z" }, + { url = "https://files.pythonhosted.org/packages/10/10/3faef9aa2a730306cf469d76f7f155a8cc1f66e74781298df0ba31f8b4c8/black-25.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:77e7060a00c5ec4b3367c55f39cf9b06e68965a4f2e61cecacd6d0d9b7ec945a", size = 1342481, upload-time = "2025-09-19T00:31:29.625Z" }, + { url = "https://files.pythonhosted.org/packages/48/99/3acfea65f5e79f45472c45f87ec13037b506522719cd9d4ac86484ff51ac/black-25.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0172a012f725b792c358d57fe7b6b6e8e67375dd157f64fa7a3097b3ed3e2175", size = 1742165, upload-time = "2025-09-19T00:34:10.402Z" }, + { url = "https://files.pythonhosted.org/packages/3a/18/799285282c8236a79f25d590f0222dbd6850e14b060dfaa3e720241fd772/black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f", size = 1581259, upload-time = "2025-09-19T00:32:49.685Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ce/883ec4b6303acdeca93ee06b7622f1fa383c6b3765294824165d49b1a86b/black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831", size = 1655583, upload-time = "2025-09-19T00:30:44.505Z" }, + { url = "https://files.pythonhosted.org/packages/21/17/5c253aa80a0639ccc427a5c7144534b661505ae2b5a10b77ebe13fa25334/black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357", size = 1343428, upload-time = "2025-09-19T00:32:13.839Z" }, + { url = "https://files.pythonhosted.org/packages/1b/46/863c90dcd3f9d41b109b7f19032ae0db021f0b2a81482ba0a1e28c84de86/black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae", size = 203363, upload-time = "2025-09-19T00:27:35.724Z" }, ] [[package]] @@ -155,56 +156,71 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, - { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, - { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, - { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, - { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, - { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, - { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, - { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, - { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, - { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, - { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, - { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, - { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, - { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, - { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, - { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, - { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, - { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, - { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, - { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, - { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, - { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, - { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, - { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, - { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, - { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, - { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = 
"sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = 
"sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, 
upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'group-18-llama-stack-client-pydantic-v1' and extra == 'group-18-llama-stack-client-pydantic-v2')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -254,11 +270,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.19.1" +version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] [[package]] @@ -372,24 +388,24 @@ wheels = [ [[package]] name = "httpx-aiohttp" -version = "0.1.8" +version = "0.1.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "httpx" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/37/19/ae2d2bf1f57fdd23c8ad83675599fb5c407fa13bc20e90f00cffa4dea3aa/httpx_aiohttp-0.1.8.tar.gz", hash = "sha256:756c5e74cdb568c3248ba63fe82bfe8bbe64b928728720f7eaac64b3cf46f308", size = 25401, upload-time = "2025-07-04T10:40:32.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/f2/9a86ce9bc48cf57dabb3a3160dfed26d8bbe5a2478a51f9d1dbf89f2f1fc/httpx_aiohttp-0.1.9.tar.gz", hash = "sha256:4ee8b22e6f2e7c80cd03be29eff98bfe7d89bd77f021ce0b578ee76b73b4bfe6", size = 206023, upload-time = "2025-10-15T08:52:57.475Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/7a/514c484b88cc4ebbcd2e27e92b86019c0c5bb920582f5fbb10b7e6c78574/httpx_aiohttp-0.1.8-py3-none-any.whl", hash = "sha256:b7bd958d1331f3759a38a0ba22ad29832cb63ca69498c17735228055bf78fa7e", size = 6180, upload-time = "2025-07-04T10:40:31.522Z" }, + { url = "https://files.pythonhosted.org/packages/a1/db/5cfa8254a86c34a1ab7fe0dbec9f81bb5ebd831cbdd65aa4be4f37027804/httpx_aiohttp-0.1.9-py3-none-any.whl", hash = 
"sha256:3dc2845568b07742588710fcf3d72db2cbcdf2acc93376edf85f789c4d8e5fda", size = 6180, upload-time = "2025-10-15T08:52:56.521Z" }, ] [[package]] name = "identify" -version = "2.6.14" +version = "2.6.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" }, + { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, ] [[package]] @@ -482,7 +498,7 @@ requires-dist = [ { name = "distro", specifier = ">=1.7.0,<2" }, { name = "fire" }, { name = "httpx", specifier = ">=0.23.0,<1" }, - { name = "httpx-aiohttp", marker = "extra == 'aiohttp'", specifier = ">=0.1.8" }, + { name = "httpx-aiohttp", marker = "extra == 'aiohttp'", specifier = ">=0.1.9" }, { name = "pandas" }, { name = "prompt-toolkit" }, { name = "pyaml" }, @@ -644,65 +660,65 @@ wheels = [ [[package]] name = "numpy" -version = "2.3.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" }, - { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" }, - { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" }, - { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" }, - { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = "2025-09-09T15:56:43.343Z" }, - { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" }, - { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" }, - { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" }, - { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" }, - { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" }, - { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" }, - { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" }, - { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" }, - { url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" }, - { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" }, - { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" }, - { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" }, - { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" }, - { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" }, - { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" }, - { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" }, - { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" }, - { url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" }, - { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" }, - { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" }, - { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" }, - { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" }, - { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" }, - { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" }, - { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" }, - { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" }, - { url = "https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" }, - { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" }, - { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" }, - { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" }, - { url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" }, - { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" }, - { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" }, - { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" }, - { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" }, - { url = "https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" }, - { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" }, - { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" }, - { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" }, - { url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" }, +version = "2.3.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" }, + { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" }, + { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" }, + { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" }, + { url = "https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, upload-time = "2025-10-15T16:16:03.938Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" }, + { url = "https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" }, + { url = "https://files.pythonhosted.org/packages/57/7e/b72610cc91edf138bc588df5150957a4937221ca6058b825b4725c27be62/numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966", size = 20950335, upload-time = "2025-10-15T16:16:10.304Z" }, + { url = "https://files.pythonhosted.org/packages/3e/46/bdd3370dcea2f95ef14af79dbf81e6927102ddf1cc54adc0024d61252fd9/numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3", size = 14179878, upload-time = "2025-10-15T16:16:12.595Z" }, + { url = "https://files.pythonhosted.org/packages/ac/01/5a67cb785bda60f45415d09c2bc245433f1c68dd82eef9c9002c508b5a65/numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197", size = 5108673, upload-time = "2025-10-15T16:16:14.877Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/cd/8428e23a9fcebd33988f4cb61208fda832800ca03781f471f3727a820704/numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e", size = 6641438, upload-time = "2025-10-15T16:16:16.805Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d1/913fe563820f3c6b079f992458f7331278dcd7ba8427e8e745af37ddb44f/numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7", size = 14281290, upload-time = "2025-10-15T16:16:18.764Z" }, + { url = "https://files.pythonhosted.org/packages/9e/7e/7d306ff7cb143e6d975cfa7eb98a93e73495c4deabb7d1b5ecf09ea0fd69/numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953", size = 16636543, upload-time = "2025-10-15T16:16:21.072Z" }, + { url = "https://files.pythonhosted.org/packages/47/6a/8cfc486237e56ccfb0db234945552a557ca266f022d281a2f577b98e955c/numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37", size = 16056117, upload-time = "2025-10-15T16:16:23.369Z" }, + { url = "https://files.pythonhosted.org/packages/b1/0e/42cb5e69ea901e06ce24bfcc4b5664a56f950a70efdcf221f30d9615f3f3/numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd", size = 18577788, upload-time = "2025-10-15T16:16:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/86/92/41c3d5157d3177559ef0a35da50f0cda7fa071f4ba2306dd36818591a5bc/numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646", size = 6282620, upload-time = "2025-10-15T16:16:29.811Z" }, + { url = "https://files.pythonhosted.org/packages/09/97/fd421e8bc50766665ad35536c2bb4ef916533ba1fdd053a62d96cc7c8b95/numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d", size = 12784672, upload-time = "2025-10-15T16:16:31.589Z" }, + { url = "https://files.pythonhosted.org/packages/ad/df/5474fb2f74970ca8eb978093969b125a84cc3d30e47f82191f981f13a8a0/numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc", size = 10196702, upload-time = "2025-10-15T16:16:33.902Z" }, + { url = "https://files.pythonhosted.org/packages/11/83/66ac031464ec1767ea3ed48ce40f615eb441072945e98693bec0bcd056cc/numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879", size = 21049003, upload-time = "2025-10-15T16:16:36.101Z" }, + { url = "https://files.pythonhosted.org/packages/5f/99/5b14e0e686e61371659a1d5bebd04596b1d72227ce36eed121bb0aeab798/numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562", size = 14302980, upload-time = "2025-10-15T16:16:39.124Z" }, + { url = "https://files.pythonhosted.org/packages/2c/44/e9486649cd087d9fc6920e3fc3ac2aba10838d10804b1e179fb7cbc4e634/numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a", size = 5231472, upload-time = "2025-10-15T16:16:41.168Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/51/902b24fa8887e5fe2063fd61b1895a476d0bbf46811ab0c7fdf4bd127345/numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6", size = 6739342, upload-time = "2025-10-15T16:16:43.777Z" }, + { url = "https://files.pythonhosted.org/packages/34/f1/4de9586d05b1962acdcdb1dc4af6646361a643f8c864cef7c852bf509740/numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7", size = 14354338, upload-time = "2025-10-15T16:16:46.081Z" }, + { url = "https://files.pythonhosted.org/packages/1f/06/1c16103b425de7969d5a76bdf5ada0804b476fed05d5f9e17b777f1cbefd/numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0", size = 16702392, upload-time = "2025-10-15T16:16:48.455Z" }, + { url = "https://files.pythonhosted.org/packages/34/b2/65f4dc1b89b5322093572b6e55161bb42e3e0487067af73627f795cc9d47/numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f", size = 16134998, upload-time = "2025-10-15T16:16:51.114Z" }, + { url = "https://files.pythonhosted.org/packages/d4/11/94ec578896cdb973aaf56425d6c7f2aff4186a5c00fac15ff2ec46998b46/numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64", size = 18651574, upload-time = "2025-10-15T16:16:53.429Z" }, + { url = "https://files.pythonhosted.org/packages/62/b7/7efa763ab33dbccf56dade36938a77345ce8e8192d6b39e470ca25ff3cd0/numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb", size = 6413135, upload-time = "2025-10-15T16:16:55.992Z" }, + { url = "https://files.pythonhosted.org/packages/43/70/aba4c38e8400abcc2f345e13d972fb36c26409b3e644366db7649015f291/numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c", size = 12928582, upload-time = "2025-10-15T16:16:57.943Z" }, + { url = "https://files.pythonhosted.org/packages/67/63/871fad5f0073fc00fbbdd7232962ea1ac40eeaae2bba66c76214f7954236/numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40", size = 10266691, upload-time = "2025-10-15T16:17:00.048Z" }, + { url = "https://files.pythonhosted.org/packages/72/71/ae6170143c115732470ae3a2d01512870dd16e0953f8a6dc89525696069b/numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e", size = 20955580, upload-time = "2025-10-15T16:17:02.509Z" }, + { url = "https://files.pythonhosted.org/packages/af/39/4be9222ffd6ca8a30eda033d5f753276a9c3426c397bb137d8e19dedd200/numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff", size = 14188056, upload-time = "2025-10-15T16:17:04.873Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3d/d85f6700d0a4aa4f9491030e1021c2b2b7421b2b38d01acd16734a2bfdc7/numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f", size = 5116555, upload-time = "2025-10-15T16:17:07.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/04/82c1467d86f47eee8a19a464c92f90a9bb68ccf14a54c5224d7031241ffb/numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b", size = 6643581, upload-time = "2025-10-15T16:17:09.774Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d3/c79841741b837e293f48bd7db89d0ac7a4f2503b382b78a790ef1dc778a5/numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7", size = 14299186, upload-time = "2025-10-15T16:17:11.937Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7e/4a14a769741fbf237eec5a12a2cbc7a4c4e061852b6533bcb9e9a796c908/numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2", size = 16638601, upload-time = "2025-10-15T16:17:14.391Z" }, + { url = "https://files.pythonhosted.org/packages/93/87/1c1de269f002ff0a41173fe01dcc925f4ecff59264cd8f96cf3b60d12c9b/numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52", size = 16074219, upload-time = "2025-10-15T16:17:17.058Z" }, + { url = "https://files.pythonhosted.org/packages/cd/28/18f72ee77408e40a76d691001ae599e712ca2a47ddd2c4f695b16c65f077/numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26", size = 18576702, upload-time = "2025-10-15T16:17:19.379Z" }, + { url = "https://files.pythonhosted.org/packages/c3/76/95650169b465ececa8cf4b2e8f6df255d4bf662775e797ade2025cc51ae6/numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc", size = 6337136, upload-time = "2025-10-15T16:17:22.886Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/a231a5c43ede5d6f77ba4a91e915a87dea4aeea76560ba4d2bf185c683f0/numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9", size = 12920542, upload-time = "2025-10-15T16:17:24.783Z" }, + { url = "https://files.pythonhosted.org/packages/0d/0c/ae9434a888f717c5ed2ff2393b3f344f0ff6f1c793519fa0c540461dc530/numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868", size = 10480213, upload-time = "2025-10-15T16:17:26.935Z" }, + { url = "https://files.pythonhosted.org/packages/83/4b/c4a5f0841f92536f6b9592694a5b5f68c9ab37b775ff342649eadf9055d3/numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec", size = 21052280, upload-time = "2025-10-15T16:17:29.638Z" }, + { url = "https://files.pythonhosted.org/packages/3e/80/90308845fc93b984d2cc96d83e2324ce8ad1fd6efea81b324cba4b673854/numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3", size = 14302930, upload-time = "2025-10-15T16:17:32.384Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4e/07439f22f2a3b247cec4d63a713faae55e1141a36e77fb212881f7cda3fb/numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365", size = 5231504, upload-time = "2025-10-15T16:17:34.515Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/de/1e11f2547e2fe3d00482b19721855348b94ada8359aef5d40dd57bfae9df/numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252", size = 6739405, upload-time = "2025-10-15T16:17:36.128Z" }, + { url = "https://files.pythonhosted.org/packages/3b/40/8cd57393a26cebe2e923005db5134a946c62fa56a1087dc7c478f3e30837/numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e", size = 14354866, upload-time = "2025-10-15T16:17:38.884Z" }, + { url = "https://files.pythonhosted.org/packages/93/39/5b3510f023f96874ee6fea2e40dfa99313a00bf3ab779f3c92978f34aace/numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0", size = 16703296, upload-time = "2025-10-15T16:17:41.564Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/19bb163617c8045209c1996c4e427bccbc4bbff1e2c711f39203c8ddbb4a/numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0", size = 16136046, upload-time = "2025-10-15T16:17:43.901Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c1/6dba12fdf68b02a21ac411c9df19afa66bed2540f467150ca64d246b463d/numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f", size = 18652691, upload-time = "2025-10-15T16:17:46.247Z" }, + { url = "https://files.pythonhosted.org/packages/f8/73/f85056701dbbbb910c51d846c58d29fd46b30eecd2b6ba760fc8b8a1641b/numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d", size = 6485782, upload-time = "2025-10-15T16:17:48.872Z" }, + { url = "https://files.pythonhosted.org/packages/17/90/28fa6f9865181cb817c2471ee65678afa8a7e2a1fb16141473d5fa6bacc3/numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6", size = 13113301, upload-time = "2025-10-15T16:17:50.938Z" }, + { url = "https://files.pythonhosted.org/packages/54/23/08c002201a8e7e1f9afba93b97deceb813252d9cfd0d3351caed123dcf97/numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29", size = 10547532, upload-time = "2025-10-15T16:17:53.48Z" }, ] [[package]] @@ -716,7 +732,7 @@ wheels = [ [[package]] name = "pandas" -version = "2.3.2" +version = "2.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, @@ -724,28 +740,41 @@ dependencies = [ { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/8e/0e90233ac205ad182bd6b422532695d2b9414944a280488105d598c70023/pandas-2.3.2.tar.gz", hash = "sha256:ab7b58f8f82706890924ccdfb5f48002b83d2b5a3845976a9fb705d36c34dcdb", size = 4488684, upload-time = "2025-08-21T10:28:29.257Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/db/614c20fb7a85a14828edd23f1c02db58a30abf3ce76f38806155d160313c/pandas-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fbb977f802156e7a3f829e9d1d5398f6192375a3e2d1a9ee0803e35fe70a2b9", size = 11587652, upload-time = "2025-08-21T10:27:15.888Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/b0/756e52f6582cade5e746f19bad0517ff27ba9c73404607c0306585c201b3/pandas-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b9b52693123dd234b7c985c68b709b0b009f4521000d0525f2b95c22f15944b", size = 10717686, upload-time = "2025-08-21T10:27:18.486Z" }, - { url = "https://files.pythonhosted.org/packages/37/4c/dd5ccc1e357abfeee8353123282de17997f90ff67855f86154e5a13b81e5/pandas-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd281310d4f412733f319a5bc552f86d62cddc5f51d2e392c8787335c994175", size = 11278722, upload-time = "2025-08-21T10:27:21.149Z" }, - { url = "https://files.pythonhosted.org/packages/d3/a4/f7edcfa47e0a88cda0be8b068a5bae710bf264f867edfdf7b71584ace362/pandas-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96d31a6b4354e3b9b8a2c848af75d31da390657e3ac6f30c05c82068b9ed79b9", size = 11987803, upload-time = "2025-08-21T10:27:23.767Z" }, - { url = "https://files.pythonhosted.org/packages/f6/61/1bce4129f93ab66f1c68b7ed1c12bac6a70b1b56c5dab359c6bbcd480b52/pandas-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df4df0b9d02bb873a106971bb85d448378ef14b86ba96f035f50bbd3688456b4", size = 12766345, upload-time = "2025-08-21T10:27:26.6Z" }, - { url = "https://files.pythonhosted.org/packages/8e/46/80d53de70fee835531da3a1dae827a1e76e77a43ad22a8cd0f8142b61587/pandas-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:213a5adf93d020b74327cb2c1b842884dbdd37f895f42dcc2f09d451d949f811", size = 13439314, upload-time = "2025-08-21T10:27:29.213Z" }, - { url = "https://files.pythonhosted.org/packages/28/30/8114832daff7489f179971dbc1d854109b7f4365a546e3ea75b6516cea95/pandas-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c13b81a9347eb8c7548f53fd9a4f08d4dfe996836543f805c987bafa03317ae", size = 10983326, upload-time = "2025-08-21T10:27:31.901Z" }, - { url = "https://files.pythonhosted.org/packages/27/64/a2f7bf678af502e16b472527735d168b22b7824e45a4d7e96a4fbb634b59/pandas-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c6ecbac99a354a051ef21c5307601093cb9e0f4b1855984a084bfec9302699e", size = 11531061, upload-time = "2025-08-21T10:27:34.647Z" }, - { url = "https://files.pythonhosted.org/packages/54/4c/c3d21b2b7769ef2f4c2b9299fcadd601efa6729f1357a8dbce8dd949ed70/pandas-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6f048aa0fd080d6a06cc7e7537c09b53be6642d330ac6f54a600c3ace857ee9", size = 10668666, upload-time = "2025-08-21T10:27:37.203Z" }, - { url = "https://files.pythonhosted.org/packages/50/e2/f775ba76ecfb3424d7f5862620841cf0edb592e9abd2d2a5387d305fe7a8/pandas-2.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0064187b80a5be6f2f9c9d6bdde29372468751dfa89f4211a3c5871854cfbf7a", size = 11332835, upload-time = "2025-08-21T10:27:40.188Z" }, - { url = "https://files.pythonhosted.org/packages/8f/52/0634adaace9be2d8cac9ef78f05c47f3a675882e068438b9d7ec7ef0c13f/pandas-2.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac8c320bded4718b298281339c1a50fb00a6ba78cb2a63521c39bec95b0209b", size = 12057211, upload-time = "2025-08-21T10:27:43.117Z" }, - { url = "https://files.pythonhosted.org/packages/0b/9d/2df913f14b2deb9c748975fdb2491da1a78773debb25abbc7cbc67c6b549/pandas-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:114c2fe4f4328cf98ce5716d1532f3ab79c5919f95a9cfee81d9140064a2e4d6", size = 12749277, upload-time = "2025-08-21T10:27:45.474Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/af/da1a2417026bd14d98c236dba88e39837182459d29dcfcea510b2ac9e8a1/pandas-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:48fa91c4dfb3b2b9bfdb5c24cd3567575f4e13f9636810462ffed8925352be5a", size = 13415256, upload-time = "2025-08-21T10:27:49.885Z" }, - { url = "https://files.pythonhosted.org/packages/22/3c/f2af1ce8840ef648584a6156489636b5692c162771918aa95707c165ad2b/pandas-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:12d039facec710f7ba305786837d0225a3444af7bbd9c15c32ca2d40d157ed8b", size = 10982579, upload-time = "2025-08-21T10:28:08.435Z" }, - { url = "https://files.pythonhosted.org/packages/f3/98/8df69c4097a6719e357dc249bf437b8efbde808038268e584421696cbddf/pandas-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c624b615ce97864eb588779ed4046186f967374185c047070545253a52ab2d57", size = 12028163, upload-time = "2025-08-21T10:27:52.232Z" }, - { url = "https://files.pythonhosted.org/packages/0e/23/f95cbcbea319f349e10ff90db488b905c6883f03cbabd34f6b03cbc3c044/pandas-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0cee69d583b9b128823d9514171cabb6861e09409af805b54459bd0c821a35c2", size = 11391860, upload-time = "2025-08-21T10:27:54.673Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1b/6a984e98c4abee22058aa75bfb8eb90dce58cf8d7296f8bc56c14bc330b0/pandas-2.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2319656ed81124982900b4c37f0e0c58c015af9a7bbc62342ba5ad07ace82ba9", size = 11309830, upload-time = "2025-08-21T10:27:56.957Z" }, - { url = "https://files.pythonhosted.org/packages/15/d5/f0486090eb18dd8710bf60afeaf638ba6817047c0c8ae5c6a25598665609/pandas-2.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b37205ad6f00d52f16b6d09f406434ba928c1a1966e2771006a9033c736d30d2", size = 11883216, upload-time = "2025-08-21T10:27:59.302Z" }, - { url = "https://files.pythonhosted.org/packages/10/86/692050c119696da19e20245bbd650d8dfca6ceb577da027c3a73c62a047e/pandas-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:837248b4fc3a9b83b9c6214699a13f069dc13510a6a6d7f9ba33145d2841a012", size = 12699743, upload-time = "2025-08-21T10:28:02.447Z" }, - { url = "https://files.pythonhosted.org/packages/cd/d7/612123674d7b17cf345aad0a10289b2a384bff404e0463a83c4a3a59d205/pandas-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d2c3554bd31b731cd6490d94a28f3abb8dd770634a9e06eb6d2911b9827db370", size = 13186141, upload-time = "2025-08-21T10:28:05.377Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, + { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" }, + { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, + { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, + { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, + { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, + { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, + { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" }, + { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" }, + { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" }, + { url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" }, + { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" }, + { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" }, + { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" }, + { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, ] [[package]] @@ -759,11 +788,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.4.0" +version = "4.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 
21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, ] [[package]] @@ -1030,6 +1059,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "pytokens" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/c2/dbadcdddb412a267585459142bfd7cc241e6276db69339353ae6e241ab2b/pytokens-0.2.0.tar.gz", hash = "sha256:532d6421364e5869ea57a9523bf385f02586d4662acbcc0342afd69511b4dd43", size = 15368, upload-time = "2025-10-15T08:02:42.738Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/5a/c269ea6b348b6f2c32686635df89f32dbe05df1088dd4579302a6f8f99af/pytokens-0.2.0-py3-none-any.whl", hash = "sha256:74d4b318c67f4295c13782ddd9abcb7e297ec5630ad060eb90abf7ebbefe59f8", size = 12038, upload-time = "2025-10-15T08:02:41.694Z" }, +] + [[package]] name = "pytz" version = "2024.2" @@ -1041,28 +1079,48 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = 
"https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = 
"2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] [[package]] @@ -1243,25 +1301,25 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.34.0" +version = "20.35.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a4/d5/b0ccd381d55c8f45d46f77df6ae59fbc23d19e901e2d523395598e5f4c93/virtualenv-20.35.3.tar.gz", hash = "sha256:4f1a845d131133bdff10590489610c98c168ff99dc75d6c96853801f7f67af44", size = 6002907, upload-time = "2025-10-10T21:23:33.178Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/d9a94da0e9d470a543c1b9d3ccbceb0f59455983088e727b8a1824ed90fb/virtualenv-20.35.3-py3-none-any.whl", hash = "sha256:63d106565078d8c8d0b206d48080f938a8b25361e19432d2c9db40d2899c810a", size = 5981061, upload-time = "2025-10-10T21:23:30.433Z" }, ] [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] From 88ed3fb387aa6a752fd04b3fd82c727dd95384a0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 20:15:13 +0000 Subject: [PATCH 2/6] feat(api): move datasets to beta, vector_db -> vector_store --- .stats.yml | 8 +- api.md | 77 +- src/llama_stack_client/_client.py | 114 +-- src/llama_stack_client/resources/__init__.py | 42 +- .../resources/beta/__init__.py | 33 + src/llama_stack_client/resources/beta/beta.py | 102 ++ .../resources/{ => beta}/datasets.py | 24 +- .../resources/conversations/conversations.py | 24 +- .../resources/conversations/items.py | 18 +- src/llama_stack_client/resources/telemetry.py | 871 ------------------ .../resources/tool_runtime/rag_tool.py | 24 +- src/llama_stack_client/resources/vector_io.py | 24 +- src/llama_stack_client/types/__init__.py | 24 - .../types/alpha/memory_retrieval_step.py | 2 +- src/llama_stack_client/types/beta/__init__.py | 12 + .../{ => beta}/dataset_appendrows_params.py | 0 .../{ => beta}/dataset_iterrows_params.py | 0 .../{ => beta}/dataset_iterrows_response.py | 2 +- .../types/{ => beta}/dataset_list_response.py | 4 +- .../{ => beta}/dataset_register_params.py | 0 .../{ => beta}/dataset_register_response.py | 4 +- .../{ => beta}/dataset_retrieve_response.py | 4 +- .../{ => beta}/list_datasets_response.py | 2 
+- .../types/conversation_create_params.py | 148 ++- .../types/conversations/item_create_params.py | 148 ++- .../conversations/item_create_response.py | 155 +++- .../types/conversations/item_get_response.py | 153 ++- .../types/conversations/item_list_response.py | 155 +++- .../types/query_condition_param.py | 25 - .../types/query_spans_response.py | 17 - .../types/response_create_params.py | 107 ++- .../types/response_list_response.py | 178 +++- .../types/response_object.py | 64 +- .../types/response_object_stream.py | 110 ++- .../responses/input_item_list_response.py | 114 ++- .../types/span_with_status.py | 41 - .../types/telemetry_get_span_response.py | 37 - .../types/telemetry_get_span_tree_params.py | 23 - .../types/telemetry_get_span_tree_response.py | 16 - .../types/telemetry_query_metrics_params.py | 42 - .../types/telemetry_query_metrics_response.py | 51 - .../types/telemetry_query_spans_params.py | 28 - .../types/telemetry_query_spans_response.py | 41 - .../types/telemetry_query_traces_params.py | 31 - .../types/telemetry_query_traces_response.py | 16 - .../telemetry_save_spans_to_dataset_params.py | 31 - .../tool_runtime/rag_tool_insert_params.py | 2 +- .../tool_runtime/rag_tool_query_params.py | 2 +- src/llama_stack_client/types/trace.py | 28 - .../types/vector_io_insert_params.py | 2 +- .../types/vector_io_query_params.py | 2 +- tests/api_resources/beta/__init__.py | 1 + .../api_resources/{ => beta}/test_datasets.py | 98 +- tests/api_resources/test_telemetry.py | 815 ---------------- tests/api_resources/test_vector_io.py | 32 +- .../tool_runtime/test_rag_tool.py | 28 +- 56 files changed, 1395 insertions(+), 2761 deletions(-) create mode 100644 src/llama_stack_client/resources/beta/__init__.py create mode 100644 src/llama_stack_client/resources/beta/beta.py rename src/llama_stack_client/resources/{ => beta}/datasets.py (97%) delete mode 100644 src/llama_stack_client/resources/telemetry.py create mode 100644 src/llama_stack_client/types/beta/__init__.py rename src/llama_stack_client/types/{ => beta}/dataset_appendrows_params.py (100%) rename src/llama_stack_client/types/{ => beta}/dataset_iterrows_params.py (100%) rename src/llama_stack_client/types/{ => beta}/dataset_iterrows_response.py (95%) rename src/llama_stack_client/types/{ => beta}/dataset_list_response.py (96%) rename src/llama_stack_client/types/{ => beta}/dataset_register_params.py (100%) rename src/llama_stack_client/types/{ => beta}/dataset_register_response.py (96%) rename src/llama_stack_client/types/{ => beta}/dataset_retrieve_response.py (96%) rename src/llama_stack_client/types/{ => beta}/list_datasets_response.py (93%) delete mode 100644 src/llama_stack_client/types/query_condition_param.py delete mode 100644 src/llama_stack_client/types/query_spans_response.py delete mode 100644 src/llama_stack_client/types/span_with_status.py delete mode 100644 src/llama_stack_client/types/telemetry_get_span_response.py delete mode 100644 src/llama_stack_client/types/telemetry_get_span_tree_params.py delete mode 100644 src/llama_stack_client/types/telemetry_get_span_tree_response.py delete mode 100644 src/llama_stack_client/types/telemetry_query_metrics_params.py delete mode 100644 src/llama_stack_client/types/telemetry_query_metrics_response.py delete mode 100644 src/llama_stack_client/types/telemetry_query_spans_params.py delete mode 100644 src/llama_stack_client/types/telemetry_query_spans_response.py delete mode 100644 src/llama_stack_client/types/telemetry_query_traces_params.py delete mode 100644 
src/llama_stack_client/types/telemetry_query_traces_response.py delete mode 100644 src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py delete mode 100644 src/llama_stack_client/types/trace.py create mode 100644 tests/api_resources/beta/__init__.py rename tests/api_resources/{ => beta}/test_datasets.py (84%) delete mode 100644 tests/api_resources/test_telemetry.py diff --git a/.stats.yml b/.stats.yml index bf6aaffb..89ced1ec 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 111 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-15a929a0b71de779accc56bd09d1e5f580e216affdb408cf9890bc7a37847e9e.yml -openapi_spec_hash: 5db9f7c7e80427cfa0298cbb01689559 -config_hash: 06758df5c4f261f9c97eafcef7e0028f +configured_endpoints: 104 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-2acd62f8d5c4523bad4ddb2cc50608135249858b7047a71b48e25befa1e1f43f.yml +openapi_spec_hash: 1ad726ff81dc21720c8c3443d33c0562 +config_hash: 6c5b61acff5bb6534edd57f93ed60106 diff --git a/api.md b/api.md index f0155d69..e28c1b59 100644 --- a/api.md +++ b/api.md @@ -135,29 +135,6 @@ Methods: - client.conversations.items.list(conversation_id, \*\*params) -> ItemListResponse - client.conversations.items.get(item_id, \*, conversation_id) -> ItemGetResponse -# Datasets - -Types: - -```python -from llama_stack_client.types import ( - ListDatasetsResponse, - DatasetRetrieveResponse, - DatasetListResponse, - DatasetIterrowsResponse, - DatasetRegisterResponse, -) -``` - -Methods: - -- client.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse -- client.datasets.list() -> DatasetListResponse -- client.datasets.appendrows(dataset_id, \*\*params) -> None -- client.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse -- client.datasets.register(\*\*params) -> DatasetRegisterResponse -- client.datasets.unregister(dataset_id) -> None - # Inspect Types: @@ -392,35 +369,6 @@ Methods: - client.synthetic_data_generation.generate(\*\*params) -> SyntheticDataGenerationResponse -# Telemetry - -Types: - -```python -from llama_stack_client.types import ( - Event, - QueryCondition, - QuerySpansResponse, - SpanWithStatus, - Trace, - TelemetryGetSpanResponse, - TelemetryGetSpanTreeResponse, - TelemetryQueryMetricsResponse, - TelemetryQuerySpansResponse, - TelemetryQueryTracesResponse, -) -``` - -Methods: - -- client.telemetry.get_span(span_id, \*, trace_id) -> TelemetryGetSpanResponse -- client.telemetry.get_span_tree(span_id, \*\*params) -> TelemetryGetSpanTreeResponse -- client.telemetry.get_trace(trace_id) -> Trace -- client.telemetry.query_metrics(metric_name, \*\*params) -> TelemetryQueryMetricsResponse -- client.telemetry.query_spans(\*\*params) -> TelemetryQuerySpansResponse -- client.telemetry.query_traces(\*\*params) -> TelemetryQueryTracesResponse -- client.telemetry.save_spans_to_dataset(\*\*params) -> None - # Scoring Types: @@ -483,6 +431,31 @@ Methods: - client.files.delete(file_id) -> DeleteFileResponse - client.files.content(file_id) -> object +# Beta + +## Datasets + +Types: + +```python +from llama_stack_client.types.beta import ( + ListDatasetsResponse, + DatasetRetrieveResponse, + DatasetListResponse, + DatasetIterrowsResponse, + DatasetRegisterResponse, +) +``` + +Methods: + +- client.beta.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse +- client.beta.datasets.list() -> DatasetListResponse +- client.beta.datasets.appendrows(dataset_id, 
\*\*params) -> None +- client.beta.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse +- client.beta.datasets.register(\*\*params) -> DatasetRegisterResponse +- client.beta.datasets.unregister(dataset_id) -> None + # Alpha ## Inference diff --git a/src/llama_stack_client/_client.py b/src/llama_stack_client/_client.py index 66096e21..a70956ab 100644 --- a/src/llama_stack_client/_client.py +++ b/src/llama_stack_client/_client.py @@ -39,6 +39,7 @@ if TYPE_CHECKING: from .resources import ( + beta, chat, alpha, files, @@ -49,10 +50,8 @@ inspect, scoring, shields, - datasets, providers, responses, - telemetry, vector_io, benchmarks, embeddings, @@ -72,10 +71,9 @@ from .resources.inspect import InspectResource, AsyncInspectResource from .resources.scoring import ScoringResource, AsyncScoringResource from .resources.shields import ShieldsResource, AsyncShieldsResource - from .resources.datasets import DatasetsResource, AsyncDatasetsResource + from .resources.beta.beta import BetaResource, AsyncBetaResource from .resources.chat.chat import ChatResource, AsyncChatResource from .resources.providers import ProvidersResource, AsyncProvidersResource - from .resources.telemetry import TelemetryResource, AsyncTelemetryResource from .resources.vector_io import VectorIoResource, AsyncVectorIoResource from .resources.benchmarks import BenchmarksResource, AsyncBenchmarksResource from .resources.embeddings import EmbeddingsResource, AsyncEmbeddingsResource @@ -193,12 +191,6 @@ def conversations(self) -> ConversationsResource: return ConversationsResource(self) - @cached_property - def datasets(self) -> DatasetsResource: - from .resources.datasets import DatasetsResource - - return DatasetsResource(self) - @cached_property def inspect(self) -> InspectResource: from .resources.inspect import InspectResource @@ -277,12 +269,6 @@ def synthetic_data_generation(self) -> SyntheticDataGenerationResource: return SyntheticDataGenerationResource(self) - @cached_property - def telemetry(self) -> TelemetryResource: - from .resources.telemetry import TelemetryResource - - return TelemetryResource(self) - @cached_property def scoring(self) -> ScoringResource: from .resources.scoring import ScoringResource @@ -307,6 +293,12 @@ def files(self) -> FilesResource: return FilesResource(self) + @cached_property + def beta(self) -> BetaResource: + from .resources.beta import BetaResource + + return BetaResource(self) + @cached_property def alpha(self) -> AlphaResource: from .resources.alpha import AlphaResource @@ -515,12 +507,6 @@ def conversations(self) -> AsyncConversationsResource: return AsyncConversationsResource(self) - @cached_property - def datasets(self) -> AsyncDatasetsResource: - from .resources.datasets import AsyncDatasetsResource - - return AsyncDatasetsResource(self) - @cached_property def inspect(self) -> AsyncInspectResource: from .resources.inspect import AsyncInspectResource @@ -599,12 +585,6 @@ def synthetic_data_generation(self) -> AsyncSyntheticDataGenerationResource: return AsyncSyntheticDataGenerationResource(self) - @cached_property - def telemetry(self) -> AsyncTelemetryResource: - from .resources.telemetry import AsyncTelemetryResource - - return AsyncTelemetryResource(self) - @cached_property def scoring(self) -> AsyncScoringResource: from .resources.scoring import AsyncScoringResource @@ -629,6 +609,12 @@ def files(self) -> AsyncFilesResource: return AsyncFilesResource(self) + @cached_property + def beta(self) -> AsyncBetaResource: + from .resources.beta import AsyncBetaResource + + 
return AsyncBetaResource(self) + @cached_property def alpha(self) -> AsyncAlphaResource: from .resources.alpha import AsyncAlphaResource @@ -786,12 +772,6 @@ def conversations(self) -> conversations.ConversationsResourceWithRawResponse: return ConversationsResourceWithRawResponse(self._client.conversations) - @cached_property - def datasets(self) -> datasets.DatasetsResourceWithRawResponse: - from .resources.datasets import DatasetsResourceWithRawResponse - - return DatasetsResourceWithRawResponse(self._client.datasets) - @cached_property def inspect(self) -> inspect.InspectResourceWithRawResponse: from .resources.inspect import InspectResourceWithRawResponse @@ -870,12 +850,6 @@ def synthetic_data_generation(self) -> synthetic_data_generation.SyntheticDataGe return SyntheticDataGenerationResourceWithRawResponse(self._client.synthetic_data_generation) - @cached_property - def telemetry(self) -> telemetry.TelemetryResourceWithRawResponse: - from .resources.telemetry import TelemetryResourceWithRawResponse - - return TelemetryResourceWithRawResponse(self._client.telemetry) - @cached_property def scoring(self) -> scoring.ScoringResourceWithRawResponse: from .resources.scoring import ScoringResourceWithRawResponse @@ -900,6 +874,12 @@ def files(self) -> files.FilesResourceWithRawResponse: return FilesResourceWithRawResponse(self._client.files) + @cached_property + def beta(self) -> beta.BetaResourceWithRawResponse: + from .resources.beta import BetaResourceWithRawResponse + + return BetaResourceWithRawResponse(self._client.beta) + @cached_property def alpha(self) -> alpha.AlphaResourceWithRawResponse: from .resources.alpha import AlphaResourceWithRawResponse @@ -943,12 +923,6 @@ def conversations(self) -> conversations.AsyncConversationsResourceWithRawRespon return AsyncConversationsResourceWithRawResponse(self._client.conversations) - @cached_property - def datasets(self) -> datasets.AsyncDatasetsResourceWithRawResponse: - from .resources.datasets import AsyncDatasetsResourceWithRawResponse - - return AsyncDatasetsResourceWithRawResponse(self._client.datasets) - @cached_property def inspect(self) -> inspect.AsyncInspectResourceWithRawResponse: from .resources.inspect import AsyncInspectResourceWithRawResponse @@ -1029,12 +1003,6 @@ def synthetic_data_generation( return AsyncSyntheticDataGenerationResourceWithRawResponse(self._client.synthetic_data_generation) - @cached_property - def telemetry(self) -> telemetry.AsyncTelemetryResourceWithRawResponse: - from .resources.telemetry import AsyncTelemetryResourceWithRawResponse - - return AsyncTelemetryResourceWithRawResponse(self._client.telemetry) - @cached_property def scoring(self) -> scoring.AsyncScoringResourceWithRawResponse: from .resources.scoring import AsyncScoringResourceWithRawResponse @@ -1059,6 +1027,12 @@ def files(self) -> files.AsyncFilesResourceWithRawResponse: return AsyncFilesResourceWithRawResponse(self._client.files) + @cached_property + def beta(self) -> beta.AsyncBetaResourceWithRawResponse: + from .resources.beta import AsyncBetaResourceWithRawResponse + + return AsyncBetaResourceWithRawResponse(self._client.beta) + @cached_property def alpha(self) -> alpha.AsyncAlphaResourceWithRawResponse: from .resources.alpha import AsyncAlphaResourceWithRawResponse @@ -1102,12 +1076,6 @@ def conversations(self) -> conversations.ConversationsResourceWithStreamingRespo return ConversationsResourceWithStreamingResponse(self._client.conversations) - @cached_property - def datasets(self) -> datasets.DatasetsResourceWithStreamingResponse: - 
from .resources.datasets import DatasetsResourceWithStreamingResponse - - return DatasetsResourceWithStreamingResponse(self._client.datasets) - @cached_property def inspect(self) -> inspect.InspectResourceWithStreamingResponse: from .resources.inspect import InspectResourceWithStreamingResponse @@ -1188,12 +1156,6 @@ def synthetic_data_generation( return SyntheticDataGenerationResourceWithStreamingResponse(self._client.synthetic_data_generation) - @cached_property - def telemetry(self) -> telemetry.TelemetryResourceWithStreamingResponse: - from .resources.telemetry import TelemetryResourceWithStreamingResponse - - return TelemetryResourceWithStreamingResponse(self._client.telemetry) - @cached_property def scoring(self) -> scoring.ScoringResourceWithStreamingResponse: from .resources.scoring import ScoringResourceWithStreamingResponse @@ -1218,6 +1180,12 @@ def files(self) -> files.FilesResourceWithStreamingResponse: return FilesResourceWithStreamingResponse(self._client.files) + @cached_property + def beta(self) -> beta.BetaResourceWithStreamingResponse: + from .resources.beta import BetaResourceWithStreamingResponse + + return BetaResourceWithStreamingResponse(self._client.beta) + @cached_property def alpha(self) -> alpha.AlphaResourceWithStreamingResponse: from .resources.alpha import AlphaResourceWithStreamingResponse @@ -1261,12 +1229,6 @@ def conversations(self) -> conversations.AsyncConversationsResourceWithStreaming return AsyncConversationsResourceWithStreamingResponse(self._client.conversations) - @cached_property - def datasets(self) -> datasets.AsyncDatasetsResourceWithStreamingResponse: - from .resources.datasets import AsyncDatasetsResourceWithStreamingResponse - - return AsyncDatasetsResourceWithStreamingResponse(self._client.datasets) - @cached_property def inspect(self) -> inspect.AsyncInspectResourceWithStreamingResponse: from .resources.inspect import AsyncInspectResourceWithStreamingResponse @@ -1347,12 +1309,6 @@ def synthetic_data_generation( return AsyncSyntheticDataGenerationResourceWithStreamingResponse(self._client.synthetic_data_generation) - @cached_property - def telemetry(self) -> telemetry.AsyncTelemetryResourceWithStreamingResponse: - from .resources.telemetry import AsyncTelemetryResourceWithStreamingResponse - - return AsyncTelemetryResourceWithStreamingResponse(self._client.telemetry) - @cached_property def scoring(self) -> scoring.AsyncScoringResourceWithStreamingResponse: from .resources.scoring import AsyncScoringResourceWithStreamingResponse @@ -1377,6 +1333,12 @@ def files(self) -> files.AsyncFilesResourceWithStreamingResponse: return AsyncFilesResourceWithStreamingResponse(self._client.files) + @cached_property + def beta(self) -> beta.AsyncBetaResourceWithStreamingResponse: + from .resources.beta import AsyncBetaResourceWithStreamingResponse + + return AsyncBetaResourceWithStreamingResponse(self._client.beta) + @cached_property def alpha(self) -> alpha.AsyncAlphaResourceWithStreamingResponse: from .resources.alpha import AsyncAlphaResourceWithStreamingResponse diff --git a/src/llama_stack_client/resources/__init__.py b/src/llama_stack_client/resources/__init__.py index d8c715f8..f09606b6 100644 --- a/src/llama_stack_client/resources/__init__.py +++ b/src/llama_stack_client/resources/__init__.py @@ -6,6 +6,14 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+from .beta import ( + BetaResource, + AsyncBetaResource, + BetaResourceWithRawResponse, + AsyncBetaResourceWithRawResponse, + BetaResourceWithStreamingResponse, + AsyncBetaResourceWithStreamingResponse, +) from .chat import ( ChatResource, AsyncChatResource, @@ -86,14 +94,6 @@ ShieldsResourceWithStreamingResponse, AsyncShieldsResourceWithStreamingResponse, ) -from .datasets import ( - DatasetsResource, - AsyncDatasetsResource, - DatasetsResourceWithRawResponse, - AsyncDatasetsResourceWithRawResponse, - DatasetsResourceWithStreamingResponse, - AsyncDatasetsResourceWithStreamingResponse, -) from .providers import ( ProvidersResource, AsyncProvidersResource, @@ -110,14 +110,6 @@ ResponsesResourceWithStreamingResponse, AsyncResponsesResourceWithStreamingResponse, ) -from .telemetry import ( - TelemetryResource, - AsyncTelemetryResource, - TelemetryResourceWithRawResponse, - AsyncTelemetryResourceWithRawResponse, - TelemetryResourceWithStreamingResponse, - AsyncTelemetryResourceWithStreamingResponse, -) from .vector_io import ( VectorIoResource, AsyncVectorIoResource, @@ -238,12 +230,6 @@ "AsyncConversationsResourceWithRawResponse", "ConversationsResourceWithStreamingResponse", "AsyncConversationsResourceWithStreamingResponse", - "DatasetsResource", - "AsyncDatasetsResource", - "DatasetsResourceWithRawResponse", - "AsyncDatasetsResourceWithRawResponse", - "DatasetsResourceWithStreamingResponse", - "AsyncDatasetsResourceWithStreamingResponse", "InspectResource", "AsyncInspectResource", "InspectResourceWithRawResponse", @@ -322,12 +308,6 @@ "AsyncSyntheticDataGenerationResourceWithRawResponse", "SyntheticDataGenerationResourceWithStreamingResponse", "AsyncSyntheticDataGenerationResourceWithStreamingResponse", - "TelemetryResource", - "AsyncTelemetryResource", - "TelemetryResourceWithRawResponse", - "AsyncTelemetryResourceWithRawResponse", - "TelemetryResourceWithStreamingResponse", - "AsyncTelemetryResourceWithStreamingResponse", "ScoringResource", "AsyncScoringResource", "ScoringResourceWithRawResponse", @@ -352,6 +332,12 @@ "AsyncFilesResourceWithRawResponse", "FilesResourceWithStreamingResponse", "AsyncFilesResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", "AlphaResource", "AsyncAlphaResource", "AlphaResourceWithRawResponse", diff --git a/src/llama_stack_client/resources/beta/__init__.py b/src/llama_stack_client/resources/beta/__init__.py new file mode 100644 index 00000000..6fd69c43 --- /dev/null +++ b/src/llama_stack_client/resources/beta/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .beta import ( + BetaResource, + AsyncBetaResource, + BetaResourceWithRawResponse, + AsyncBetaResourceWithRawResponse, + BetaResourceWithStreamingResponse, + AsyncBetaResourceWithStreamingResponse, +) +from .datasets import ( + DatasetsResource, + AsyncDatasetsResource, + DatasetsResourceWithRawResponse, + AsyncDatasetsResourceWithRawResponse, + DatasetsResourceWithStreamingResponse, + AsyncDatasetsResourceWithStreamingResponse, +) + +__all__ = [ + "DatasetsResource", + "AsyncDatasetsResource", + "DatasetsResourceWithRawResponse", + "AsyncDatasetsResourceWithRawResponse", + "DatasetsResourceWithStreamingResponse", + "AsyncDatasetsResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", +] diff --git a/src/llama_stack_client/resources/beta/beta.py b/src/llama_stack_client/resources/beta/beta.py new file mode 100644 index 00000000..7bf1c711 --- /dev/null +++ b/src/llama_stack_client/resources/beta/beta.py @@ -0,0 +1,102 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .datasets import ( + DatasetsResource, + AsyncDatasetsResource, + DatasetsResourceWithRawResponse, + AsyncDatasetsResourceWithRawResponse, + DatasetsResourceWithStreamingResponse, + AsyncDatasetsResourceWithStreamingResponse, +) +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource + +__all__ = ["BetaResource", "AsyncBetaResource"] + + +class BetaResource(SyncAPIResource): + @cached_property + def datasets(self) -> DatasetsResource: + return DatasetsResource(self._client) + + @cached_property + def with_raw_response(self) -> BetaResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers + """ + return BetaResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BetaResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response + """ + return BetaResourceWithStreamingResponse(self) + + +class AsyncBetaResource(AsyncAPIResource): + @cached_property + def datasets(self) -> AsyncDatasetsResource: + return AsyncDatasetsResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncBetaResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers + """ + return AsyncBetaResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBetaResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response + """ + return AsyncBetaResourceWithStreamingResponse(self) + + +class BetaResourceWithRawResponse: + def __init__(self, beta: BetaResource) -> None: + self._beta = beta + + @cached_property + def datasets(self) -> DatasetsResourceWithRawResponse: + return DatasetsResourceWithRawResponse(self._beta.datasets) + + +class AsyncBetaResourceWithRawResponse: + def __init__(self, beta: AsyncBetaResource) -> None: + self._beta = beta + + @cached_property + def datasets(self) -> AsyncDatasetsResourceWithRawResponse: + return AsyncDatasetsResourceWithRawResponse(self._beta.datasets) + + +class BetaResourceWithStreamingResponse: + def __init__(self, beta: BetaResource) -> None: + self._beta = beta + + @cached_property + def datasets(self) -> DatasetsResourceWithStreamingResponse: + return DatasetsResourceWithStreamingResponse(self._beta.datasets) + + +class AsyncBetaResourceWithStreamingResponse: + def __init__(self, beta: AsyncBetaResource) -> None: + self._beta = beta + + @cached_property + def datasets(self) -> AsyncDatasetsResourceWithStreamingResponse: + return AsyncDatasetsResourceWithStreamingResponse(self._beta.datasets) diff --git a/src/llama_stack_client/resources/datasets.py b/src/llama_stack_client/resources/beta/datasets.py similarity index 97% rename from src/llama_stack_client/resources/datasets.py rename to src/llama_stack_client/resources/beta/datasets.py index 3a632462..03321e48 100644 --- a/src/llama_stack_client/resources/datasets.py +++ b/src/llama_stack_client/resources/beta/datasets.py @@ -13,23 +13,23 @@ import httpx -from ..types import dataset_iterrows_params, dataset_register_params, dataset_appendrows_params -from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given -from .._utils import maybe_transform, async_maybe_transform -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( +from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( to_raw_response_wrapper, to_streamed_response_wrapper, async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._wrappers import DataWrapper -from .._base_client import make_request_options -from ..types.dataset_list_response import DatasetListResponse -from ..types.dataset_iterrows_response import DatasetIterrowsResponse -from ..types.dataset_register_response import DatasetRegisterResponse -from ..types.dataset_retrieve_response import DatasetRetrieveResponse +from ..._wrappers import DataWrapper +from ...types.beta import dataset_iterrows_params, dataset_register_params, dataset_appendrows_params +from ..._base_client import make_request_options +from ...types.beta.dataset_list_response import DatasetListResponse +from ...types.beta.dataset_iterrows_response import DatasetIterrowsResponse +from ...types.beta.dataset_register_response import DatasetRegisterResponse +from ...types.beta.dataset_retrieve_response import DatasetRetrieveResponse __all__ = ["DatasetsResource", "AsyncDatasetsResource"] diff --git a/src/llama_stack_client/resources/conversations/conversations.py b/src/llama_stack_client/resources/conversations/conversations.py index 00209df0..66dfe7d3 100644 --- 
a/src/llama_stack_client/resources/conversations/conversations.py +++ b/src/llama_stack_client/resources/conversations/conversations.py @@ -74,7 +74,8 @@ def create( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationObject: - """ + """Create a conversation. + Create a conversation. Args: @@ -116,7 +117,8 @@ def retrieve( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationObject: - """ + """Retrieve a conversation. + Get a conversation with the given ID. Args: @@ -150,7 +152,8 @@ def update( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationObject: - """ + """Update a conversation. + Update a conversation's metadata with the given ID. Args: @@ -186,7 +189,8 @@ def delete( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationDeleteResponse: - """ + """Delete a conversation. + Delete a conversation with the given ID. Args: @@ -245,7 +249,8 @@ async def create( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationObject: - """ + """Create a conversation. + Create a conversation. Args: @@ -287,7 +292,8 @@ async def retrieve( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationObject: - """ + """Retrieve a conversation. + Get a conversation with the given ID. Args: @@ -321,7 +327,8 @@ async def update( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationObject: - """ + """Update a conversation. + Update a conversation's metadata with the given ID. Args: @@ -359,7 +366,8 @@ async def delete( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ConversationDeleteResponse: - """ + """Delete a conversation. + Delete a conversation with the given ID. Args: diff --git a/src/llama_stack_client/resources/conversations/items.py b/src/llama_stack_client/resources/conversations/items.py index 6fefac49..598e3168 100644 --- a/src/llama_stack_client/resources/conversations/items.py +++ b/src/llama_stack_client/resources/conversations/items.py @@ -64,7 +64,8 @@ def create( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ItemCreateResponse: - """ + """Create items. + Create items in the conversation. Args: @@ -116,7 +117,8 @@ def list( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ItemListResponse: - """ + """List items. + List items in the conversation. Args: @@ -170,7 +172,8 @@ def get( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ItemGetResponse: - """ + """Retrieve an item. + Retrieve a conversation item. Args: @@ -230,7 +233,8 @@ async def create( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ItemCreateResponse: - """ + """Create items. + Create items in the conversation. Args: @@ -282,7 +286,8 @@ async def list( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ItemListResponse: - """ + """List items. + List items in the conversation. 
Args: @@ -336,7 +341,8 @@ async def get( extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> ItemGetResponse: - """ + """Retrieve an item. + Retrieve a conversation item. Args: diff --git a/src/llama_stack_client/resources/telemetry.py b/src/llama_stack_client/resources/telemetry.py deleted file mode 100644 index 81117934..00000000 --- a/src/llama_stack_client/resources/telemetry.py +++ /dev/null @@ -1,871 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Type, Iterable, cast -from typing_extensions import Literal - -import httpx - -from ..types import ( - telemetry_query_spans_params, - telemetry_query_traces_params, - telemetry_get_span_tree_params, - telemetry_query_metrics_params, - telemetry_save_spans_to_dataset_params, -) -from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given -from .._utils import maybe_transform, async_maybe_transform -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from .._wrappers import DataWrapper -from ..types.trace import Trace -from .._base_client import make_request_options -from ..types.query_condition_param import QueryConditionParam -from ..types.telemetry_get_span_response import TelemetryGetSpanResponse -from ..types.telemetry_query_spans_response import TelemetryQuerySpansResponse -from ..types.telemetry_query_traces_response import TelemetryQueryTracesResponse -from ..types.telemetry_get_span_tree_response import TelemetryGetSpanTreeResponse -from ..types.telemetry_query_metrics_response import TelemetryQueryMetricsResponse - -__all__ = ["TelemetryResource", "AsyncTelemetryResource"] - - -class TelemetryResource(SyncAPIResource): - @cached_property - def with_raw_response(self) -> TelemetryResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers - """ - return TelemetryResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> TelemetryResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response - """ - return TelemetryResourceWithStreamingResponse(self) - - def get_span( - self, - span_id: str, - *, - trace_id: str, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryGetSpanResponse: - """ - Get a span by its ID. 
- - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not trace_id: - raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}") - if not span_id: - raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") - return self._get( - f"/v1alpha/telemetry/traces/{trace_id}/spans/{span_id}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TelemetryGetSpanResponse, - ) - - def get_span_tree( - self, - span_id: str, - *, - attributes_to_return: SequenceNotStr[str] | Omit = omit, - max_depth: int | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryGetSpanTreeResponse: - """ - Get a span tree by its ID. - - Args: - attributes_to_return: The attributes to return in the tree. - - max_depth: The maximum depth of the tree. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not span_id: - raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") - return self._post( - f"/v1alpha/telemetry/spans/{span_id}/tree", - body=maybe_transform( - { - "attributes_to_return": attributes_to_return, - "max_depth": max_depth, - }, - telemetry_get_span_tree_params.TelemetryGetSpanTreeParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryGetSpanTreeResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryGetSpanTreeResponse], DataWrapper[TelemetryGetSpanTreeResponse]), - ) - - def get_trace( - self, - trace_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> Trace: - """ - Get a trace by its ID. 
- - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not trace_id: - raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}") - return self._get( - f"/v1alpha/telemetry/traces/{trace_id}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=Trace, - ) - - def query_metrics( - self, - metric_name: str, - *, - query_type: Literal["range", "instant"], - start_time: int, - end_time: int | Omit = omit, - granularity: str | Omit = omit, - label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryQueryMetricsResponse: - """ - Query metrics. - - Args: - query_type: The type of query to perform. - - start_time: The start time of the metric to query. - - end_time: The end time of the metric to query. - - granularity: The granularity of the metric to query. - - label_matchers: The label matchers to apply to the metric. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not metric_name: - raise ValueError(f"Expected a non-empty value for `metric_name` but received {metric_name!r}") - return self._post( - f"/v1alpha/telemetry/metrics/{metric_name}", - body=maybe_transform( - { - "query_type": query_type, - "start_time": start_time, - "end_time": end_time, - "granularity": granularity, - "label_matchers": label_matchers, - }, - telemetry_query_metrics_params.TelemetryQueryMetricsParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryQueryMetricsResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryQueryMetricsResponse], DataWrapper[TelemetryQueryMetricsResponse]), - ) - - def query_spans( - self, - *, - attribute_filters: Iterable[QueryConditionParam], - attributes_to_return: SequenceNotStr[str], - max_depth: int | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryQuerySpansResponse: - """ - Query spans. - - Args: - attribute_filters: The attribute filters to apply to the spans. - - attributes_to_return: The attributes to return in the spans. - - max_depth: The maximum depth of the tree. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/v1alpha/telemetry/spans", - body=maybe_transform( - { - "attribute_filters": attribute_filters, - "attributes_to_return": attributes_to_return, - "max_depth": max_depth, - }, - telemetry_query_spans_params.TelemetryQuerySpansParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryQuerySpansResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryQuerySpansResponse], DataWrapper[TelemetryQuerySpansResponse]), - ) - - def query_traces( - self, - *, - attribute_filters: Iterable[QueryConditionParam] | Omit = omit, - limit: int | Omit = omit, - offset: int | Omit = omit, - order_by: SequenceNotStr[str] | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryQueryTracesResponse: - """ - Query traces. - - Args: - attribute_filters: The attribute filters to apply to the traces. - - limit: The limit of traces to return. - - offset: The offset of the traces to return. - - order_by: The order by of the traces to return. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/v1alpha/telemetry/traces", - body=maybe_transform( - { - "attribute_filters": attribute_filters, - "limit": limit, - "offset": offset, - "order_by": order_by, - }, - telemetry_query_traces_params.TelemetryQueryTracesParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryQueryTracesResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryQueryTracesResponse], DataWrapper[TelemetryQueryTracesResponse]), - ) - - def save_spans_to_dataset( - self, - *, - attribute_filters: Iterable[QueryConditionParam], - attributes_to_save: SequenceNotStr[str], - dataset_id: str, - max_depth: int | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> None: - """ - Save spans to a dataset. - - Args: - attribute_filters: The attribute filters to apply to the spans. - - attributes_to_save: The attributes to save to the dataset. - - dataset_id: The ID of the dataset to save the spans to. - - max_depth: The maximum depth of the tree. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - extra_headers = {"Accept": "*/*", **(extra_headers or {})} - return self._post( - "/v1alpha/telemetry/spans/export", - body=maybe_transform( - { - "attribute_filters": attribute_filters, - "attributes_to_save": attributes_to_save, - "dataset_id": dataset_id, - "max_depth": max_depth, - }, - telemetry_save_spans_to_dataset_params.TelemetrySaveSpansToDatasetParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=NoneType, - ) - - -class AsyncTelemetryResource(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncTelemetryResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers - """ - return AsyncTelemetryResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncTelemetryResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response - """ - return AsyncTelemetryResourceWithStreamingResponse(self) - - async def get_span( - self, - span_id: str, - *, - trace_id: str, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryGetSpanResponse: - """ - Get a span by its ID. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not trace_id: - raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}") - if not span_id: - raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") - return await self._get( - f"/v1alpha/telemetry/traces/{trace_id}/spans/{span_id}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TelemetryGetSpanResponse, - ) - - async def get_span_tree( - self, - span_id: str, - *, - attributes_to_return: SequenceNotStr[str] | Omit = omit, - max_depth: int | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryGetSpanTreeResponse: - """ - Get a span tree by its ID. 
- - Args: - attributes_to_return: The attributes to return in the tree. - - max_depth: The maximum depth of the tree. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not span_id: - raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") - return await self._post( - f"/v1alpha/telemetry/spans/{span_id}/tree", - body=await async_maybe_transform( - { - "attributes_to_return": attributes_to_return, - "max_depth": max_depth, - }, - telemetry_get_span_tree_params.TelemetryGetSpanTreeParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryGetSpanTreeResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryGetSpanTreeResponse], DataWrapper[TelemetryGetSpanTreeResponse]), - ) - - async def get_trace( - self, - trace_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> Trace: - """ - Get a trace by its ID. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not trace_id: - raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}") - return await self._get( - f"/v1alpha/telemetry/traces/{trace_id}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=Trace, - ) - - async def query_metrics( - self, - metric_name: str, - *, - query_type: Literal["range", "instant"], - start_time: int, - end_time: int | Omit = omit, - granularity: str | Omit = omit, - label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryQueryMetricsResponse: - """ - Query metrics. - - Args: - query_type: The type of query to perform. - - start_time: The start time of the metric to query. - - end_time: The end time of the metric to query. - - granularity: The granularity of the metric to query. - - label_matchers: The label matchers to apply to the metric. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not metric_name: - raise ValueError(f"Expected a non-empty value for `metric_name` but received {metric_name!r}") - return await self._post( - f"/v1alpha/telemetry/metrics/{metric_name}", - body=await async_maybe_transform( - { - "query_type": query_type, - "start_time": start_time, - "end_time": end_time, - "granularity": granularity, - "label_matchers": label_matchers, - }, - telemetry_query_metrics_params.TelemetryQueryMetricsParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryQueryMetricsResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryQueryMetricsResponse], DataWrapper[TelemetryQueryMetricsResponse]), - ) - - async def query_spans( - self, - *, - attribute_filters: Iterable[QueryConditionParam], - attributes_to_return: SequenceNotStr[str], - max_depth: int | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryQuerySpansResponse: - """ - Query spans. - - Args: - attribute_filters: The attribute filters to apply to the spans. - - attributes_to_return: The attributes to return in the spans. - - max_depth: The maximum depth of the tree. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/v1alpha/telemetry/spans", - body=await async_maybe_transform( - { - "attribute_filters": attribute_filters, - "attributes_to_return": attributes_to_return, - "max_depth": max_depth, - }, - telemetry_query_spans_params.TelemetryQuerySpansParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryQuerySpansResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryQuerySpansResponse], DataWrapper[TelemetryQuerySpansResponse]), - ) - - async def query_traces( - self, - *, - attribute_filters: Iterable[QueryConditionParam] | Omit = omit, - limit: int | Omit = omit, - offset: int | Omit = omit, - order_by: SequenceNotStr[str] | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> TelemetryQueryTracesResponse: - """ - Query traces. - - Args: - attribute_filters: The attribute filters to apply to the traces. - - limit: The limit of traces to return. - - offset: The offset of the traces to return. 
- - order_by: The order by of the traces to return. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/v1alpha/telemetry/traces", - body=await async_maybe_transform( - { - "attribute_filters": attribute_filters, - "limit": limit, - "offset": offset, - "order_by": order_by, - }, - telemetry_query_traces_params.TelemetryQueryTracesParams, - ), - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - post_parser=DataWrapper[TelemetryQueryTracesResponse]._unwrapper, - ), - cast_to=cast(Type[TelemetryQueryTracesResponse], DataWrapper[TelemetryQueryTracesResponse]), - ) - - async def save_spans_to_dataset( - self, - *, - attribute_filters: Iterable[QueryConditionParam], - attributes_to_save: SequenceNotStr[str], - dataset_id: str, - max_depth: int | Omit = omit, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> None: - """ - Save spans to a dataset. - - Args: - attribute_filters: The attribute filters to apply to the spans. - - attributes_to_save: The attributes to save to the dataset. - - dataset_id: The ID of the dataset to save the spans to. - - max_depth: The maximum depth of the tree. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - extra_headers = {"Accept": "*/*", **(extra_headers or {})} - return await self._post( - "/v1alpha/telemetry/spans/export", - body=await async_maybe_transform( - { - "attribute_filters": attribute_filters, - "attributes_to_save": attributes_to_save, - "dataset_id": dataset_id, - "max_depth": max_depth, - }, - telemetry_save_spans_to_dataset_params.TelemetrySaveSpansToDatasetParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=NoneType, - ) - - -class TelemetryResourceWithRawResponse: - def __init__(self, telemetry: TelemetryResource) -> None: - self._telemetry = telemetry - - self.get_span = to_raw_response_wrapper( - telemetry.get_span, - ) - self.get_span_tree = to_raw_response_wrapper( - telemetry.get_span_tree, - ) - self.get_trace = to_raw_response_wrapper( - telemetry.get_trace, - ) - self.query_metrics = to_raw_response_wrapper( - telemetry.query_metrics, - ) - self.query_spans = to_raw_response_wrapper( - telemetry.query_spans, - ) - self.query_traces = to_raw_response_wrapper( - telemetry.query_traces, - ) - self.save_spans_to_dataset = to_raw_response_wrapper( - telemetry.save_spans_to_dataset, - ) - - -class AsyncTelemetryResourceWithRawResponse: - def __init__(self, telemetry: AsyncTelemetryResource) -> None: - self._telemetry = telemetry - - self.get_span = async_to_raw_response_wrapper( - telemetry.get_span, - ) - self.get_span_tree = async_to_raw_response_wrapper( - 
telemetry.get_span_tree, - ) - self.get_trace = async_to_raw_response_wrapper( - telemetry.get_trace, - ) - self.query_metrics = async_to_raw_response_wrapper( - telemetry.query_metrics, - ) - self.query_spans = async_to_raw_response_wrapper( - telemetry.query_spans, - ) - self.query_traces = async_to_raw_response_wrapper( - telemetry.query_traces, - ) - self.save_spans_to_dataset = async_to_raw_response_wrapper( - telemetry.save_spans_to_dataset, - ) - - -class TelemetryResourceWithStreamingResponse: - def __init__(self, telemetry: TelemetryResource) -> None: - self._telemetry = telemetry - - self.get_span = to_streamed_response_wrapper( - telemetry.get_span, - ) - self.get_span_tree = to_streamed_response_wrapper( - telemetry.get_span_tree, - ) - self.get_trace = to_streamed_response_wrapper( - telemetry.get_trace, - ) - self.query_metrics = to_streamed_response_wrapper( - telemetry.query_metrics, - ) - self.query_spans = to_streamed_response_wrapper( - telemetry.query_spans, - ) - self.query_traces = to_streamed_response_wrapper( - telemetry.query_traces, - ) - self.save_spans_to_dataset = to_streamed_response_wrapper( - telemetry.save_spans_to_dataset, - ) - - -class AsyncTelemetryResourceWithStreamingResponse: - def __init__(self, telemetry: AsyncTelemetryResource) -> None: - self._telemetry = telemetry - - self.get_span = async_to_streamed_response_wrapper( - telemetry.get_span, - ) - self.get_span_tree = async_to_streamed_response_wrapper( - telemetry.get_span_tree, - ) - self.get_trace = async_to_streamed_response_wrapper( - telemetry.get_trace, - ) - self.query_metrics = async_to_streamed_response_wrapper( - telemetry.query_metrics, - ) - self.query_spans = async_to_streamed_response_wrapper( - telemetry.query_spans, - ) - self.query_traces = async_to_streamed_response_wrapper( - telemetry.query_traces, - ) - self.save_spans_to_dataset = async_to_streamed_response_wrapper( - telemetry.save_spans_to_dataset, - ) diff --git a/src/llama_stack_client/resources/tool_runtime/rag_tool.py b/src/llama_stack_client/resources/tool_runtime/rag_tool.py index af4a7d64..7db478f7 100644 --- a/src/llama_stack_client/resources/tool_runtime/rag_tool.py +++ b/src/llama_stack_client/resources/tool_runtime/rag_tool.py @@ -57,7 +57,7 @@ def insert( *, chunk_size_in_tokens: int, documents: Iterable[Document], - vector_db_id: str, + vector_store_id: str, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -73,7 +73,7 @@ def insert( documents: List of documents to index in the RAG system - vector_db_id: ID of the vector database to store the document embeddings + vector_store_id: ID of the vector database to store the document embeddings extra_headers: Send extra headers @@ -90,7 +90,7 @@ def insert( { "chunk_size_in_tokens": chunk_size_in_tokens, "documents": documents, - "vector_db_id": vector_db_id, + "vector_store_id": vector_store_id, }, rag_tool_insert_params.RagToolInsertParams, ), @@ -104,7 +104,7 @@ def query( self, *, content: InterleavedContent, - vector_db_ids: SequenceNotStr[str], + vector_store_ids: SequenceNotStr[str], query_config: QueryConfig | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
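The hunks above rename `vector_db_id`/`vector_db_ids` to `vector_store_id`/`vector_store_ids` on the RAG tool resource. A minimal migration sketch, assuming a locally running stack at the default port; the store ID and document fields below are illustrative, not prescriptive:

from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local server

# Index a document, using the renamed keyword argument.
client.tool_runtime.rag_tool.insert(
    documents=[
        {
            "document_id": "doc-1",  # hypothetical document
            "content": "Llama Stack ships a telemetry API.",
            "mime_type": "text/plain",
            "metadata": {},
        }
    ],
    chunk_size_in_tokens=512,
    vector_store_id="vs_123",  # previously: vector_db_id="vs_123"
)

# Query across one or more stores, using the renamed plural argument.
result = client.tool_runtime.rag_tool.query(
    content="What does the telemetry API do?",
    vector_store_ids=["vs_123"],  # previously: vector_db_ids=["vs_123"]
)
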
@@ -119,7 +119,7 @@ def query( Args: content: The query content to search for in the indexed documents - vector_db_ids: List of vector database IDs to search within + vector_store_ids: List of vector database IDs to search within query_config: (Optional) Configuration parameters for the query operation @@ -136,7 +136,7 @@ def query( body=maybe_transform( { "content": content, - "vector_db_ids": vector_db_ids, + "vector_store_ids": vector_store_ids, "query_config": query_config, }, rag_tool_query_params.RagToolQueryParams, @@ -173,7 +173,7 @@ async def insert( *, chunk_size_in_tokens: int, documents: Iterable[Document], - vector_db_id: str, + vector_store_id: str, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -189,7 +189,7 @@ async def insert( documents: List of documents to index in the RAG system - vector_db_id: ID of the vector database to store the document embeddings + vector_store_id: ID of the vector database to store the document embeddings extra_headers: Send extra headers @@ -206,7 +206,7 @@ async def insert( { "chunk_size_in_tokens": chunk_size_in_tokens, "documents": documents, - "vector_db_id": vector_db_id, + "vector_store_id": vector_store_id, }, rag_tool_insert_params.RagToolInsertParams, ), @@ -220,7 +220,7 @@ async def query( self, *, content: InterleavedContent, - vector_db_ids: SequenceNotStr[str], + vector_store_ids: SequenceNotStr[str], query_config: QueryConfig | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -235,7 +235,7 @@ async def query( Args: content: The query content to search for in the indexed documents - vector_db_ids: List of vector database IDs to search within + vector_store_ids: List of vector database IDs to search within query_config: (Optional) Configuration parameters for the query operation @@ -252,7 +252,7 @@ async def query( body=await async_maybe_transform( { "content": content, - "vector_db_ids": vector_db_ids, + "vector_store_ids": vector_store_ids, "query_config": query_config, }, rag_tool_query_params.RagToolQueryParams, diff --git a/src/llama_stack_client/resources/vector_io.py b/src/llama_stack_client/resources/vector_io.py index 2659c139..dda04f33 100644 --- a/src/llama_stack_client/resources/vector_io.py +++ b/src/llama_stack_client/resources/vector_io.py @@ -54,7 +54,7 @@ def insert( self, *, chunks: Iterable[vector_io_insert_params.Chunk], - vector_db_id: str, + vector_store_id: str, ttl_seconds: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -74,7 +74,7 @@ def insert( configure how Llama Stack formats the chunk during generation. If `embedding` is not provided, it will be computed later. - vector_db_id: The identifier of the vector database to insert the chunks into. + vector_store_id: The identifier of the vector database to insert the chunks into. ttl_seconds: The time to live of the chunks. 
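The same rename applies to vector_io.insert, whose docstring above also describes the per-chunk fields. A sketch under the same assumptions; the chunk shape follows that docstring (embedding omitted so it is computed later), and the metadata key is an assumption:

from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local server

client.vector_io.insert(
    chunks=[
        {
            "content": "Chunk text to embed and store.",
            "metadata": {"document_id": "doc-1"},  # assumed metadata key
            # "embedding" omitted: per the docstring it will be computed later
        }
    ],
    vector_store_id="vs_123",  # previously: vector_db_id
    ttl_seconds=3600,  # optional expiry for the inserted chunks
)
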
@@ -92,7 +92,7 @@ def insert( body=maybe_transform( { "chunks": chunks, - "vector_db_id": vector_db_id, + "vector_store_id": vector_store_id, "ttl_seconds": ttl_seconds, }, vector_io_insert_params.VectorIoInsertParams, @@ -107,7 +107,7 @@ def query( self, *, query: InterleavedContent, - vector_db_id: str, + vector_store_id: str, params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -122,7 +122,7 @@ def query( Args: query: The query to search for. - vector_db_id: The identifier of the vector database to query. + vector_store_id: The identifier of the vector database to query. params: The parameters of the query. @@ -139,7 +139,7 @@ def query( body=maybe_transform( { "query": query, - "vector_db_id": vector_db_id, + "vector_store_id": vector_store_id, "params": params, }, vector_io_query_params.VectorIoQueryParams, @@ -175,7 +175,7 @@ async def insert( self, *, chunks: Iterable[vector_io_insert_params.Chunk], - vector_db_id: str, + vector_store_id: str, ttl_seconds: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -195,7 +195,7 @@ async def insert( configure how Llama Stack formats the chunk during generation. If `embedding` is not provided, it will be computed later. - vector_db_id: The identifier of the vector database to insert the chunks into. + vector_store_id: The identifier of the vector database to insert the chunks into. ttl_seconds: The time to live of the chunks. @@ -213,7 +213,7 @@ async def insert( body=await async_maybe_transform( { "chunks": chunks, - "vector_db_id": vector_db_id, + "vector_store_id": vector_store_id, "ttl_seconds": ttl_seconds, }, vector_io_insert_params.VectorIoInsertParams, @@ -228,7 +228,7 @@ async def query( self, *, query: InterleavedContent, - vector_db_id: str, + vector_store_id: str, params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -243,7 +243,7 @@ async def query( Args: query: The query to search for. - vector_db_id: The identifier of the vector database to query. + vector_store_id: The identifier of the vector database to query. params: The parameters of the query. 
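And likewise for vector_io.query, which now takes vector_store_id. The params dict is backend-defined; the key below is a hypothetical example rather than a documented contract, and the response iteration assumes the QueryChunksResponse shape exported from the types package:

from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local server

response = client.vector_io.query(
    query="telemetry spans",
    vector_store_id="vs_123",  # previously: vector_db_id
    params={"max_chunks": 5},  # hypothetical backend-specific parameter
)
for chunk in response.chunks:  # QueryChunksResponse pairs chunks with scores
    print(chunk.content)
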
@@ -260,7 +260,7 @@ async def query( body=await async_maybe_transform( { "query": query, - "vector_db_id": vector_db_id, + "vector_store_id": vector_store_id, "params": params, }, vector_io_query_params.VectorIoQueryParams, diff --git a/src/llama_stack_client/types/__init__.py b/src/llama_stack_client/types/__init__.py index f36ab479..b03446ec 100644 --- a/src/llama_stack_client/types/__init__.py +++ b/src/llama_stack_client/types/__init__.py @@ -10,7 +10,6 @@ from .file import File as File from .model import Model as Model -from .trace import Trace as Trace from .shared import ( Message as Message, Document as Document, @@ -44,7 +43,6 @@ from .create_response import CreateResponse as CreateResponse from .response_object import ResponseObject as ResponseObject from .file_list_params import FileListParams as FileListParams -from .span_with_status import SpanWithStatus as SpanWithStatus from .tool_list_params import ToolListParams as ToolListParams from .scoring_fn_params import ScoringFnParams as ScoringFnParams from .file_create_params import FileCreateParams as FileCreateParams @@ -57,17 +55,13 @@ from .delete_file_response import DeleteFileResponse as DeleteFileResponse from .list_models_response import ListModelsResponse as ListModelsResponse from .list_routes_response import ListRoutesResponse as ListRoutesResponse -from .query_spans_response import QuerySpansResponse as QuerySpansResponse from .response_list_params import ResponseListParams as ResponseListParams from .scoring_score_params import ScoringScoreParams as ScoringScoreParams from .shield_list_response import ShieldListResponse as ShieldListResponse from .chat_completion_chunk import ChatCompletionChunk as ChatCompletionChunk -from .dataset_list_response import DatasetListResponse as DatasetListResponse from .list_shields_response import ListShieldsResponse as ListShieldsResponse from .model_register_params import ModelRegisterParams as ModelRegisterParams from .query_chunks_response import QueryChunksResponse as QueryChunksResponse -from .query_condition_param import QueryConditionParam as QueryConditionParam -from .list_datasets_response import ListDatasetsResponse as ListDatasetsResponse from .provider_list_response import ProviderListResponse as ProviderListResponse from .response_create_params import ResponseCreateParams as ResponseCreateParams from .response_list_response import ResponseListResponse as ResponseListResponse @@ -77,8 +71,6 @@ from .tool_invocation_result import ToolInvocationResult as ToolInvocationResult from .vector_io_query_params import VectorIoQueryParams as VectorIoQueryParams from .benchmark_list_response import BenchmarkListResponse as BenchmarkListResponse -from .dataset_iterrows_params import DatasetIterrowsParams as DatasetIterrowsParams -from .dataset_register_params import DatasetRegisterParams as DatasetRegisterParams from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams from .list_providers_response import ListProvidersResponse as ListProvidersResponse from .scoring_fn_params_param import ScoringFnParamsParam as ScoringFnParamsParam @@ -91,10 +83,6 @@ from .safety_run_shield_params import SafetyRunShieldParams as SafetyRunShieldParams from .vector_store_list_params import VectorStoreListParams as VectorStoreListParams from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams -from .dataset_appendrows_params import DatasetAppendrowsParams as DatasetAppendrowsParams -from .dataset_iterrows_response import DatasetIterrowsResponse 
as DatasetIterrowsResponse -from .dataset_register_response import DatasetRegisterResponse as DatasetRegisterResponse -from .dataset_retrieve_response import DatasetRetrieveResponse as DatasetRetrieveResponse from .list_tool_groups_response import ListToolGroupsResponse as ListToolGroupsResponse from .toolgroup_register_params import ToolgroupRegisterParams as ToolgroupRegisterParams from .completion_create_response import CompletionCreateResponse as CompletionCreateResponse @@ -106,29 +94,17 @@ from .vector_store_search_params import VectorStoreSearchParams as VectorStoreSearchParams from .vector_store_update_params import VectorStoreUpdateParams as VectorStoreUpdateParams from .list_vector_stores_response import ListVectorStoresResponse as ListVectorStoresResponse -from .telemetry_get_span_response import TelemetryGetSpanResponse as TelemetryGetSpanResponse from .conversation_delete_response import ConversationDeleteResponse as ConversationDeleteResponse from .scoring_score_batch_response import ScoringScoreBatchResponse as ScoringScoreBatchResponse -from .telemetry_query_spans_params import TelemetryQuerySpansParams as TelemetryQuerySpansParams from .vector_store_delete_response import VectorStoreDeleteResponse as VectorStoreDeleteResponse from .vector_store_search_response import VectorStoreSearchResponse as VectorStoreSearchResponse -from .telemetry_query_traces_params import TelemetryQueryTracesParams as TelemetryQueryTracesParams from .scoring_function_list_response import ScoringFunctionListResponse as ScoringFunctionListResponse -from .telemetry_get_span_tree_params import TelemetryGetSpanTreeParams as TelemetryGetSpanTreeParams -from .telemetry_query_metrics_params import TelemetryQueryMetricsParams as TelemetryQueryMetricsParams -from .telemetry_query_spans_response import TelemetryQuerySpansResponse as TelemetryQuerySpansResponse from .tool_runtime_list_tools_params import ToolRuntimeListToolsParams as ToolRuntimeListToolsParams from .list_scoring_functions_response import ListScoringFunctionsResponse as ListScoringFunctionsResponse -from .telemetry_query_traces_response import TelemetryQueryTracesResponse as TelemetryQueryTracesResponse from .tool_runtime_invoke_tool_params import ToolRuntimeInvokeToolParams as ToolRuntimeInvokeToolParams from .scoring_function_register_params import ScoringFunctionRegisterParams as ScoringFunctionRegisterParams -from .telemetry_get_span_tree_response import TelemetryGetSpanTreeResponse as TelemetryGetSpanTreeResponse -from .telemetry_query_metrics_response import TelemetryQueryMetricsResponse as TelemetryQueryMetricsResponse from .tool_runtime_list_tools_response import ToolRuntimeListToolsResponse as ToolRuntimeListToolsResponse from .synthetic_data_generation_response import SyntheticDataGenerationResponse as SyntheticDataGenerationResponse -from .telemetry_save_spans_to_dataset_params import ( - TelemetrySaveSpansToDatasetParams as TelemetrySaveSpansToDatasetParams, -) from .synthetic_data_generation_generate_params import ( SyntheticDataGenerationGenerateParams as SyntheticDataGenerationGenerateParams, ) diff --git a/src/llama_stack_client/types/alpha/memory_retrieval_step.py b/src/llama_stack_client/types/alpha/memory_retrieval_step.py index 727c0ec0..1b5708ce 100644 --- a/src/llama_stack_client/types/alpha/memory_retrieval_step.py +++ b/src/llama_stack_client/types/alpha/memory_retrieval_step.py @@ -29,7 +29,7 @@ class MemoryRetrievalStep(BaseModel): turn_id: str """The ID of the turn.""" - vector_db_ids: str + vector_store_ids: str 
"""The IDs of the vector databases to retrieve context from.""" completed_at: Optional[datetime] = None diff --git a/src/llama_stack_client/types/beta/__init__.py b/src/llama_stack_client/types/beta/__init__.py new file mode 100644 index 00000000..aab8d1b8 --- /dev/null +++ b/src/llama_stack_client/types/beta/__init__.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .dataset_list_response import DatasetListResponse as DatasetListResponse +from .list_datasets_response import ListDatasetsResponse as ListDatasetsResponse +from .dataset_iterrows_params import DatasetIterrowsParams as DatasetIterrowsParams +from .dataset_register_params import DatasetRegisterParams as DatasetRegisterParams +from .dataset_appendrows_params import DatasetAppendrowsParams as DatasetAppendrowsParams +from .dataset_iterrows_response import DatasetIterrowsResponse as DatasetIterrowsResponse +from .dataset_register_response import DatasetRegisterResponse as DatasetRegisterResponse +from .dataset_retrieve_response import DatasetRetrieveResponse as DatasetRetrieveResponse diff --git a/src/llama_stack_client/types/dataset_appendrows_params.py b/src/llama_stack_client/types/beta/dataset_appendrows_params.py similarity index 100% rename from src/llama_stack_client/types/dataset_appendrows_params.py rename to src/llama_stack_client/types/beta/dataset_appendrows_params.py diff --git a/src/llama_stack_client/types/dataset_iterrows_params.py b/src/llama_stack_client/types/beta/dataset_iterrows_params.py similarity index 100% rename from src/llama_stack_client/types/dataset_iterrows_params.py rename to src/llama_stack_client/types/beta/dataset_iterrows_params.py diff --git a/src/llama_stack_client/types/dataset_iterrows_response.py b/src/llama_stack_client/types/beta/dataset_iterrows_response.py similarity index 95% rename from src/llama_stack_client/types/dataset_iterrows_response.py rename to src/llama_stack_client/types/beta/dataset_iterrows_response.py index ad25c87e..5b23d46d 100644 --- a/src/llama_stack_client/types/dataset_iterrows_response.py +++ b/src/llama_stack_client/types/beta/dataset_iterrows_response.py @@ -8,7 +8,7 @@ from typing import Dict, List, Union, Optional -from .._models import BaseModel +from ..._models import BaseModel __all__ = ["DatasetIterrowsResponse"] diff --git a/src/llama_stack_client/types/dataset_list_response.py b/src/llama_stack_client/types/beta/dataset_list_response.py similarity index 96% rename from src/llama_stack_client/types/dataset_list_response.py rename to src/llama_stack_client/types/beta/dataset_list_response.py index cfad32d5..7e6c1141 100644 --- a/src/llama_stack_client/types/dataset_list_response.py +++ b/src/llama_stack_client/types/beta/dataset_list_response.py @@ -9,8 +9,8 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal, Annotated, TypeAlias -from .._utils import PropertyInfo -from .._models import BaseModel +from ..._utils import PropertyInfo +from ..._models import BaseModel __all__ = [ "DatasetListResponse", diff --git a/src/llama_stack_client/types/dataset_register_params.py b/src/llama_stack_client/types/beta/dataset_register_params.py similarity index 100% rename from src/llama_stack_client/types/dataset_register_params.py rename to src/llama_stack_client/types/beta/dataset_register_params.py diff --git a/src/llama_stack_client/types/dataset_register_response.py b/src/llama_stack_client/types/beta/dataset_register_response.py 
similarity index 96% rename from src/llama_stack_client/types/dataset_register_response.py rename to src/llama_stack_client/types/beta/dataset_register_response.py index 196aa695..e9bb82d2 100644 --- a/src/llama_stack_client/types/dataset_register_response.py +++ b/src/llama_stack_client/types/beta/dataset_register_response.py @@ -9,8 +9,8 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal, Annotated, TypeAlias -from .._utils import PropertyInfo -from .._models import BaseModel +from ..._utils import PropertyInfo +from ..._models import BaseModel __all__ = ["DatasetRegisterResponse", "Source", "SourceUriDataSource", "SourceRowsDataSource"] diff --git a/src/llama_stack_client/types/dataset_retrieve_response.py b/src/llama_stack_client/types/beta/dataset_retrieve_response.py similarity index 96% rename from src/llama_stack_client/types/dataset_retrieve_response.py rename to src/llama_stack_client/types/beta/dataset_retrieve_response.py index 95b6543c..3358288d 100644 --- a/src/llama_stack_client/types/dataset_retrieve_response.py +++ b/src/llama_stack_client/types/beta/dataset_retrieve_response.py @@ -9,8 +9,8 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal, Annotated, TypeAlias -from .._utils import PropertyInfo -from .._models import BaseModel +from ..._utils import PropertyInfo +from ..._models import BaseModel __all__ = ["DatasetRetrieveResponse", "Source", "SourceUriDataSource", "SourceRowsDataSource"] diff --git a/src/llama_stack_client/types/list_datasets_response.py b/src/llama_stack_client/types/beta/list_datasets_response.py similarity index 93% rename from src/llama_stack_client/types/list_datasets_response.py rename to src/llama_stack_client/types/beta/list_datasets_response.py index 976e6560..4f71ae16 100644 --- a/src/llama_stack_client/types/list_datasets_response.py +++ b/src/llama_stack_client/types/beta/list_datasets_response.py @@ -6,7 +6,7 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
-from .._models import BaseModel +from ..._models import BaseModel from .dataset_list_response import DatasetListResponse __all__ = ["ListDatasetsResponse"] diff --git a/src/llama_stack_client/types/conversation_create_params.py b/src/llama_stack_client/types/conversation_create_params.py index c743c0ce..c51245dd 100644 --- a/src/llama_stack_client/types/conversation_create_params.py +++ b/src/llama_stack_client/types/conversation_create_params.py @@ -21,15 +21,20 @@ "ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "ItemOpenAIResponseMessageContentUnionMember2", - "ItemOpenAIResponseMessageContentUnionMember2Annotation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", - "ItemOpenAIResponseOutputMessageFunctionToolCall", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", + "ItemOpenAIResponseOutputMessageWebSearchToolCall", "ItemOpenAIResponseOutputMessageFileSearchToolCall", "ItemOpenAIResponseOutputMessageFileSearchToolCallResult", - "ItemOpenAIResponseOutputMessageWebSearchToolCall", + "ItemOpenAIResponseOutputMessageFunctionToolCall", + "ItemOpenAIResponseInputFunctionToolCallOutput", + "ItemOpenAIResponseMcpApprovalRequest", + "ItemOpenAIResponseMcpApprovalResponse", "ItemOpenAIResponseOutputMessageMcpCall", "ItemOpenAIResponseOutputMessageMcpListTools", "ItemOpenAIResponseOutputMessageMcpListToolsTool", @@ -69,7 +74,7 @@ class ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont ] -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation( +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( TypedDict, total=False ): file_id: Required[str] @@ -85,7 +90,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """Annotation type identifier, always "file_citation" """ -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(TypedDict, total=False): +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + TypedDict, total=False +): end_index: Required[int] """End position of the citation span in the content""" @@ -102,7 +109,7 @@ class 
ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """URL of the referenced web resource""" -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation( +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( TypedDict, total=False ): container_id: Required[str] @@ -118,7 +125,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Required[Literal["container_file_citation"]] -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(TypedDict, total=False): +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + TypedDict, total=False +): file_id: Required[str] index: Required[int] @@ -126,22 +135,38 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Required[Literal["file_path"]] -ItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Union[ - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, +ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Union[ + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ] -class ItemOpenAIResponseMessageContentUnionMember2(TypedDict, total=False): - annotations: Required[Iterable[ItemOpenAIResponseMessageContentUnionMember2Annotation]] +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(TypedDict, total=False): + annotations: Required[ + Iterable[ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation] + ] text: Required[str] type: Required[Literal["output_text"]] +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(TypedDict, total=False): + refusal: Required[str] + """Refusal text supplied by the model""" + + type: Required[Literal["refusal"]] + """Content part type identifier, always "refusal" """ + + +ItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Union[ + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, +] + + class ItemOpenAIResponseMessage(TypedDict, total=False): content: Required[ Union[ @@ -160,24 +185,15 @@ class ItemOpenAIResponseMessage(TypedDict, total=False): status: str -class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False): - arguments: Required[str] - """JSON string containing the function arguments""" - - call_id: Required[str] - 
"""Unique identifier for the function call""" - - name: Required[str] - """Name of the function being called""" - - type: Required[Literal["function_call"]] - """Tool call type identifier, always "function_call" """ +class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False): + id: Required[str] + """Unique identifier for this tool call""" - id: str - """(Optional) Additional identifier for the tool call""" + status: Required[str] + """Current status of the web search operation""" - status: str - """(Optional) Current status of the function call execution""" + type: Required[Literal["web_search_call"]] + """Tool call type identifier, always "web_search_call" """ class ItemOpenAIResponseOutputMessageFileSearchToolCallResult(TypedDict, total=False): @@ -214,15 +230,60 @@ class ItemOpenAIResponseOutputMessageFileSearchToolCall(TypedDict, total=False): """(Optional) Search results returned by the file search operation""" -class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False): +class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False): + arguments: Required[str] + """JSON string containing the function arguments""" + + call_id: Required[str] + """Unique identifier for the function call""" + + name: Required[str] + """Name of the function being called""" + + type: Required[Literal["function_call"]] + """Tool call type identifier, always "function_call" """ + + id: str + """(Optional) Additional identifier for the tool call""" + + status: str + """(Optional) Current status of the function call execution""" + + +class ItemOpenAIResponseInputFunctionToolCallOutput(TypedDict, total=False): + call_id: Required[str] + + output: Required[str] + + type: Required[Literal["function_call_output"]] + + id: str + + status: str + + +class ItemOpenAIResponseMcpApprovalRequest(TypedDict, total=False): id: Required[str] - """Unique identifier for this tool call""" - status: Required[str] - """Current status of the web search operation""" + arguments: Required[str] - type: Required[Literal["web_search_call"]] - """Tool call type identifier, always "web_search_call" """ + name: Required[str] + + server_label: Required[str] + + type: Required[Literal["mcp_approval_request"]] + + +class ItemOpenAIResponseMcpApprovalResponse(TypedDict, total=False): + approval_request_id: Required[str] + + approve: Required[bool] + + type: Required[Literal["mcp_approval_response"]] + + id: str + + reason: str class ItemOpenAIResponseOutputMessageMcpCall(TypedDict, total=False): @@ -275,9 +336,12 @@ class ItemOpenAIResponseOutputMessageMcpListTools(TypedDict, total=False): Item: TypeAlias = Union[ ItemOpenAIResponseMessage, - ItemOpenAIResponseOutputMessageFunctionToolCall, - ItemOpenAIResponseOutputMessageFileSearchToolCall, ItemOpenAIResponseOutputMessageWebSearchToolCall, + ItemOpenAIResponseOutputMessageFileSearchToolCall, + ItemOpenAIResponseOutputMessageFunctionToolCall, + ItemOpenAIResponseInputFunctionToolCallOutput, + ItemOpenAIResponseMcpApprovalRequest, + ItemOpenAIResponseMcpApprovalResponse, ItemOpenAIResponseOutputMessageMcpCall, ItemOpenAIResponseOutputMessageMcpListTools, ] diff --git a/src/llama_stack_client/types/conversations/item_create_params.py b/src/llama_stack_client/types/conversations/item_create_params.py index 1331e6fb..8df31144 100644 --- a/src/llama_stack_client/types/conversations/item_create_params.py +++ b/src/llama_stack_client/types/conversations/item_create_params.py @@ -21,15 +21,20 @@ 
"ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "ItemOpenAIResponseMessageContentUnionMember2", - "ItemOpenAIResponseMessageContentUnionMember2Annotation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", - "ItemOpenAIResponseOutputMessageFunctionToolCall", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", + "ItemOpenAIResponseOutputMessageWebSearchToolCall", "ItemOpenAIResponseOutputMessageFileSearchToolCall", "ItemOpenAIResponseOutputMessageFileSearchToolCallResult", - "ItemOpenAIResponseOutputMessageWebSearchToolCall", + "ItemOpenAIResponseOutputMessageFunctionToolCall", + "ItemOpenAIResponseInputFunctionToolCallOutput", + "ItemOpenAIResponseMcpApprovalRequest", + "ItemOpenAIResponseMcpApprovalResponse", "ItemOpenAIResponseOutputMessageMcpCall", "ItemOpenAIResponseOutputMessageMcpListTools", "ItemOpenAIResponseOutputMessageMcpListToolsTool", @@ -66,7 +71,7 @@ class ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont ] -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation( +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( TypedDict, total=False ): file_id: Required[str] @@ -82,7 +87,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """Annotation type identifier, always "file_citation" """ -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(TypedDict, total=False): +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + TypedDict, total=False +): end_index: Required[int] """End position of the citation span in the content""" @@ -99,7 +106,7 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """URL of the referenced web resource""" -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation( +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( TypedDict, total=False ): container_id: Required[str] @@ -115,7 +122,9 @@ class 
ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Required[Literal["container_file_citation"]] -class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(TypedDict, total=False): +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + TypedDict, total=False +): file_id: Required[str] index: Required[int] @@ -123,22 +132,38 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Required[Literal["file_path"]] -ItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Union[ - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, +ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Union[ + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ] -class ItemOpenAIResponseMessageContentUnionMember2(TypedDict, total=False): - annotations: Required[Iterable[ItemOpenAIResponseMessageContentUnionMember2Annotation]] +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(TypedDict, total=False): + annotations: Required[ + Iterable[ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation] + ] text: Required[str] type: Required[Literal["output_text"]] +class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(TypedDict, total=False): + refusal: Required[str] + """Refusal text supplied by the model""" + + type: Required[Literal["refusal"]] + """Content part type identifier, always "refusal" """ + + +ItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Union[ + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, +] + + class ItemOpenAIResponseMessage(TypedDict, total=False): content: Required[ Union[ @@ -157,24 +182,15 @@ class ItemOpenAIResponseMessage(TypedDict, total=False): status: str -class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False): - arguments: Required[str] - """JSON string containing the function arguments""" - - call_id: Required[str] - """Unique identifier for the function call""" - - name: Required[str] - """Name of the function being called""" - - type: Required[Literal["function_call"]] - """Tool call type identifier, always "function_call" """ +class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False): + id: Required[str] + """Unique identifier for this tool call""" - id: str - """(Optional) Additional identifier for the tool call""" + status: Required[str] + 
"""Current status of the web search operation""" - status: str - """(Optional) Current status of the function call execution""" + type: Required[Literal["web_search_call"]] + """Tool call type identifier, always "web_search_call" """ class ItemOpenAIResponseOutputMessageFileSearchToolCallResult(TypedDict, total=False): @@ -211,15 +227,60 @@ class ItemOpenAIResponseOutputMessageFileSearchToolCall(TypedDict, total=False): """(Optional) Search results returned by the file search operation""" -class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False): +class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False): + arguments: Required[str] + """JSON string containing the function arguments""" + + call_id: Required[str] + """Unique identifier for the function call""" + + name: Required[str] + """Name of the function being called""" + + type: Required[Literal["function_call"]] + """Tool call type identifier, always "function_call" """ + + id: str + """(Optional) Additional identifier for the tool call""" + + status: str + """(Optional) Current status of the function call execution""" + + +class ItemOpenAIResponseInputFunctionToolCallOutput(TypedDict, total=False): + call_id: Required[str] + + output: Required[str] + + type: Required[Literal["function_call_output"]] + + id: str + + status: str + + +class ItemOpenAIResponseMcpApprovalRequest(TypedDict, total=False): id: Required[str] - """Unique identifier for this tool call""" - status: Required[str] - """Current status of the web search operation""" + arguments: Required[str] - type: Required[Literal["web_search_call"]] - """Tool call type identifier, always "web_search_call" """ + name: Required[str] + + server_label: Required[str] + + type: Required[Literal["mcp_approval_request"]] + + +class ItemOpenAIResponseMcpApprovalResponse(TypedDict, total=False): + approval_request_id: Required[str] + + approve: Required[bool] + + type: Required[Literal["mcp_approval_response"]] + + id: str + + reason: str class ItemOpenAIResponseOutputMessageMcpCall(TypedDict, total=False): @@ -272,9 +333,12 @@ class ItemOpenAIResponseOutputMessageMcpListTools(TypedDict, total=False): Item: TypeAlias = Union[ ItemOpenAIResponseMessage, - ItemOpenAIResponseOutputMessageFunctionToolCall, - ItemOpenAIResponseOutputMessageFileSearchToolCall, ItemOpenAIResponseOutputMessageWebSearchToolCall, + ItemOpenAIResponseOutputMessageFileSearchToolCall, + ItemOpenAIResponseOutputMessageFunctionToolCall, + ItemOpenAIResponseInputFunctionToolCallOutput, + ItemOpenAIResponseMcpApprovalRequest, + ItemOpenAIResponseMcpApprovalResponse, ItemOpenAIResponseOutputMessageMcpCall, ItemOpenAIResponseOutputMessageMcpListTools, ] diff --git a/src/llama_stack_client/types/conversations/item_create_response.py b/src/llama_stack_client/types/conversations/item_create_response.py index 61e52d3e..c382e2b9 100644 --- a/src/llama_stack_client/types/conversations/item_create_response.py +++ b/src/llama_stack_client/types/conversations/item_create_response.py @@ -20,15 +20,20 @@ "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "DataOpenAIResponseMessageContentUnionMember2", - "DataOpenAIResponseMessageContentUnionMember2Annotation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - 
"DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", - "DataOpenAIResponseOutputMessageFunctionToolCall", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", + "DataOpenAIResponseOutputMessageWebSearchToolCall", "DataOpenAIResponseOutputMessageFileSearchToolCall", "DataOpenAIResponseOutputMessageFileSearchToolCallResult", - "DataOpenAIResponseOutputMessageWebSearchToolCall", + "DataOpenAIResponseOutputMessageFunctionToolCall", + "DataOpenAIResponseInputFunctionToolCallOutput", + "DataOpenAIResponseMcpApprovalRequest", + "DataOpenAIResponseMcpApprovalResponse", "DataOpenAIResponseOutputMessageMcpCall", "DataOpenAIResponseOutputMessageMcpListTools", "DataOpenAIResponseOutputMessageMcpListToolsTool", @@ -63,7 +68,9 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont ] -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -77,7 +84,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """Annotation type identifier, always "file_citation" """ -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -94,7 +103,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """URL of the referenced web resource""" -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -108,7 +119,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Literal["container_file_citation"] -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -116,25 +129,44 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: 
Literal["file_path"] -DataOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class DataOpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[DataOpenAIResponseMessageContentUnionMember2Annotation] +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[ + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] text: str type: Literal["output_text"] +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +DataOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class DataOpenAIResponseMessage(BaseModel): content: Union[ str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2] @@ -149,24 +181,15 @@ class DataOpenAIResponseMessage(BaseModel): status: Optional[str] = None -class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel): - arguments: str - """JSON string containing the function arguments""" - - call_id: str - """Unique identifier for the function call""" - - name: str - """Name of the function being called""" - - type: Literal["function_call"] - """Tool call type identifier, always "function_call" """ +class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel): + id: str + """Unique identifier for this tool call""" - id: Optional[str] = None - """(Optional) Additional identifier for the tool call""" + status: str + """Current status of the web search operation""" - status: Optional[str] = None - """(Optional) Current status of the function call execution""" + type: Literal["web_search_call"] + """Tool call type identifier, always "web_search_call" """ class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel): @@ -203,15 +226,60 @@ class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel): """(Optional) Search results returned by the file search operation""" -class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel): +class 
DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel): + arguments: str + """JSON string containing the function arguments""" + + call_id: str + """Unique identifier for the function call""" + + name: str + """Name of the function being called""" + + type: Literal["function_call"] + """Tool call type identifier, always "function_call" """ + + id: Optional[str] = None + """(Optional) Additional identifier for the tool call""" + + status: Optional[str] = None + """(Optional) Current status of the function call execution""" + + +class DataOpenAIResponseInputFunctionToolCallOutput(BaseModel): + call_id: str + + output: str + + type: Literal["function_call_output"] + + id: Optional[str] = None + + status: Optional[str] = None + + +class DataOpenAIResponseMcpApprovalRequest(BaseModel): id: str - """Unique identifier for this tool call""" - status: str - """Current status of the web search operation""" + arguments: str - type: Literal["web_search_call"] - """Tool call type identifier, always "web_search_call" """ + name: str + + server_label: str + + type: Literal["mcp_approval_request"] + + +class DataOpenAIResponseMcpApprovalResponse(BaseModel): + approval_request_id: str + + approve: bool + + type: Literal["mcp_approval_response"] + + id: Optional[str] = None + + reason: Optional[str] = None class DataOpenAIResponseOutputMessageMcpCall(BaseModel): @@ -265,9 +333,12 @@ class DataOpenAIResponseOutputMessageMcpListTools(BaseModel): Data: TypeAlias = Annotated[ Union[ DataOpenAIResponseMessage, - DataOpenAIResponseOutputMessageFunctionToolCall, - DataOpenAIResponseOutputMessageFileSearchToolCall, DataOpenAIResponseOutputMessageWebSearchToolCall, + DataOpenAIResponseOutputMessageFileSearchToolCall, + DataOpenAIResponseOutputMessageFunctionToolCall, + DataOpenAIResponseInputFunctionToolCallOutput, + DataOpenAIResponseMcpApprovalRequest, + DataOpenAIResponseMcpApprovalResponse, DataOpenAIResponseOutputMessageMcpCall, DataOpenAIResponseOutputMessageMcpListTools, ], diff --git a/src/llama_stack_client/types/conversations/item_get_response.py b/src/llama_stack_client/types/conversations/item_get_response.py index f3be9240..9f8d4bda 100644 --- a/src/llama_stack_client/types/conversations/item_get_response.py +++ b/src/llama_stack_client/types/conversations/item_get_response.py @@ -19,15 +19,20 @@ "OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "OpenAIResponseMessageContentUnionMember2", - "OpenAIResponseMessageContentUnionMember2Annotation", - "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", - "OpenAIResponseOutputMessageFunctionToolCall", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + 
"OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", + "OpenAIResponseOutputMessageWebSearchToolCall", "OpenAIResponseOutputMessageFileSearchToolCall", "OpenAIResponseOutputMessageFileSearchToolCallResult", - "OpenAIResponseOutputMessageWebSearchToolCall", + "OpenAIResponseOutputMessageFunctionToolCall", + "OpenAIResponseInputFunctionToolCallOutput", + "OpenAIResponseMcpApprovalRequest", + "OpenAIResponseMcpApprovalResponse", "OpenAIResponseOutputMessageMcpCall", "OpenAIResponseOutputMessageMcpListTools", "OpenAIResponseOutputMessageMcpListToolsTool", @@ -62,7 +67,9 @@ class OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentI ] -class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -76,7 +83,9 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation """Annotation type identifier, always "file_citation" """ -class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -93,7 +102,9 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation """URL of the referenced web resource""" -class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -107,7 +118,9 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation type: Literal["container_file_citation"] -class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -115,25 +128,42 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation type: Literal["file_path"] -OpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + 
OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class OpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[OpenAIResponseMessageContentUnionMember2Annotation] +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation] text: str type: Literal["output_text"] +class OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +OpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class OpenAIResponseMessage(BaseModel): content: Union[str, List[OpenAIResponseMessageContentUnionMember1], List[OpenAIResponseMessageContentUnionMember2]] @@ -146,24 +176,15 @@ class OpenAIResponseMessage(BaseModel): status: Optional[str] = None -class OpenAIResponseOutputMessageFunctionToolCall(BaseModel): - arguments: str - """JSON string containing the function arguments""" - - call_id: str - """Unique identifier for the function call""" - - name: str - """Name of the function being called""" - - type: Literal["function_call"] - """Tool call type identifier, always "function_call" """ +class OpenAIResponseOutputMessageWebSearchToolCall(BaseModel): + id: str + """Unique identifier for this tool call""" - id: Optional[str] = None - """(Optional) Additional identifier for the tool call""" + status: str + """Current status of the web search operation""" - status: Optional[str] = None - """(Optional) Current status of the function call execution""" + type: Literal["web_search_call"] + """Tool call type identifier, always "web_search_call" """ class OpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel): @@ -200,15 +221,60 @@ class OpenAIResponseOutputMessageFileSearchToolCall(BaseModel): """(Optional) Search results returned by the file search operation""" -class OpenAIResponseOutputMessageWebSearchToolCall(BaseModel): +class OpenAIResponseOutputMessageFunctionToolCall(BaseModel): + arguments: str + """JSON string containing the function arguments""" + + call_id: str + """Unique identifier for the function call""" + + name: str + """Name of the function being called""" + + type: Literal["function_call"] + """Tool call type identifier, always "function_call" """ + + id: Optional[str] = None + """(Optional) Additional identifier for the tool call""" + + status: Optional[str] = None + """(Optional) Current status of the function call execution""" + + +class OpenAIResponseInputFunctionToolCallOutput(BaseModel): + call_id: str + + output: str + + type: Literal["function_call_output"] + + id: Optional[str] = None + + status: Optional[str] = None + + +class OpenAIResponseMcpApprovalRequest(BaseModel): id: str - """Unique identifier for this tool call""" - 
status: str - """Current status of the web search operation""" + arguments: str + + name: str + + server_label: str + + type: Literal["mcp_approval_request"] - type: Literal["web_search_call"] - """Tool call type identifier, always "web_search_call" """ + +class OpenAIResponseMcpApprovalResponse(BaseModel): + approval_request_id: str + + approve: bool + + type: Literal["mcp_approval_response"] + + id: Optional[str] = None + + reason: Optional[str] = None class OpenAIResponseOutputMessageMcpCall(BaseModel): @@ -262,9 +328,12 @@ class OpenAIResponseOutputMessageMcpListTools(BaseModel): ItemGetResponse: TypeAlias = Annotated[ Union[ OpenAIResponseMessage, - OpenAIResponseOutputMessageFunctionToolCall, - OpenAIResponseOutputMessageFileSearchToolCall, OpenAIResponseOutputMessageWebSearchToolCall, + OpenAIResponseOutputMessageFileSearchToolCall, + OpenAIResponseOutputMessageFunctionToolCall, + OpenAIResponseInputFunctionToolCallOutput, + OpenAIResponseMcpApprovalRequest, + OpenAIResponseMcpApprovalResponse, OpenAIResponseOutputMessageMcpCall, OpenAIResponseOutputMessageMcpListTools, ], diff --git a/src/llama_stack_client/types/conversations/item_list_response.py b/src/llama_stack_client/types/conversations/item_list_response.py index 8d1d73af..414e4f76 100644 --- a/src/llama_stack_client/types/conversations/item_list_response.py +++ b/src/llama_stack_client/types/conversations/item_list_response.py @@ -20,15 +20,20 @@ "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "DataOpenAIResponseMessageContentUnionMember2", - "DataOpenAIResponseMessageContentUnionMember2Annotation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", - "DataOpenAIResponseOutputMessageFunctionToolCall", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", + "DataOpenAIResponseOutputMessageWebSearchToolCall", "DataOpenAIResponseOutputMessageFileSearchToolCall", "DataOpenAIResponseOutputMessageFileSearchToolCallResult", - "DataOpenAIResponseOutputMessageWebSearchToolCall", + "DataOpenAIResponseOutputMessageFunctionToolCall", + "DataOpenAIResponseInputFunctionToolCallOutput", + "DataOpenAIResponseMcpApprovalRequest", + "DataOpenAIResponseMcpApprovalResponse", "DataOpenAIResponseOutputMessageMcpCall", "DataOpenAIResponseOutputMessageMcpListTools", "DataOpenAIResponseOutputMessageMcpListToolsTool", @@ -63,7 
+68,9 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont ] -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -77,7 +84,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """Annotation type identifier, always "file_citation" """ -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -94,7 +103,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """URL of the referenced web resource""" -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -108,7 +119,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Literal["container_file_citation"] -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -116,25 +129,44 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Literal["file_path"] -DataOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class DataOpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[DataOpenAIResponseMessageContentUnionMember2Annotation] +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[ + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] text: str type: Literal["output_text"] +class 
DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +DataOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class DataOpenAIResponseMessage(BaseModel): content: Union[ str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2] @@ -149,24 +181,15 @@ class DataOpenAIResponseMessage(BaseModel): status: Optional[str] = None -class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel): - arguments: str - """JSON string containing the function arguments""" - - call_id: str - """Unique identifier for the function call""" - - name: str - """Name of the function being called""" - - type: Literal["function_call"] - """Tool call type identifier, always "function_call" """ +class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel): + id: str + """Unique identifier for this tool call""" - id: Optional[str] = None - """(Optional) Additional identifier for the tool call""" + status: str + """Current status of the web search operation""" - status: Optional[str] = None - """(Optional) Current status of the function call execution""" + type: Literal["web_search_call"] + """Tool call type identifier, always "web_search_call" """ class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel): @@ -203,15 +226,60 @@ class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel): """(Optional) Search results returned by the file search operation""" -class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel): +class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel): + arguments: str + """JSON string containing the function arguments""" + + call_id: str + """Unique identifier for the function call""" + + name: str + """Name of the function being called""" + + type: Literal["function_call"] + """Tool call type identifier, always "function_call" """ + + id: Optional[str] = None + """(Optional) Additional identifier for the tool call""" + + status: Optional[str] = None + """(Optional) Current status of the function call execution""" + + +class DataOpenAIResponseInputFunctionToolCallOutput(BaseModel): + call_id: str + + output: str + + type: Literal["function_call_output"] + + id: Optional[str] = None + + status: Optional[str] = None + + +class DataOpenAIResponseMcpApprovalRequest(BaseModel): id: str - """Unique identifier for this tool call""" - status: str - """Current status of the web search operation""" + arguments: str - type: Literal["web_search_call"] - """Tool call type identifier, always "web_search_call" """ + name: str + + server_label: str + + type: Literal["mcp_approval_request"] + + +class DataOpenAIResponseMcpApprovalResponse(BaseModel): + approval_request_id: str + + approve: bool + + type: Literal["mcp_approval_response"] + + id: Optional[str] = None + + reason: Optional[str] = None class DataOpenAIResponseOutputMessageMcpCall(BaseModel): @@ -265,9 +333,12 @@ class DataOpenAIResponseOutputMessageMcpListTools(BaseModel): Data: TypeAlias = Annotated[ Union[ DataOpenAIResponseMessage, - DataOpenAIResponseOutputMessageFunctionToolCall, - DataOpenAIResponseOutputMessageFileSearchToolCall, 
DataOpenAIResponseOutputMessageWebSearchToolCall, + DataOpenAIResponseOutputMessageFileSearchToolCall, + DataOpenAIResponseOutputMessageFunctionToolCall, + DataOpenAIResponseInputFunctionToolCallOutput, + DataOpenAIResponseMcpApprovalRequest, + DataOpenAIResponseMcpApprovalResponse, DataOpenAIResponseOutputMessageMcpCall, DataOpenAIResponseOutputMessageMcpListTools, ], diff --git a/src/llama_stack_client/types/query_condition_param.py b/src/llama_stack_client/types/query_condition_param.py deleted file mode 100644 index 649be818..00000000 --- a/src/llama_stack_client/types/query_condition_param.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Union, Iterable -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["QueryConditionParam"] - - -class QueryConditionParam(TypedDict, total=False): - key: Required[str] - """The attribute key to filter on""" - - op: Required[Literal["eq", "ne", "gt", "lt"]] - """The comparison operator to apply""" - - value: Required[Union[bool, float, str, Iterable[object], object, None]] - """The value to compare against""" diff --git a/src/llama_stack_client/types/query_spans_response.py b/src/llama_stack_client/types/query_spans_response.py deleted file mode 100644 index 11d5714a..00000000 --- a/src/llama_stack_client/types/query_spans_response.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from .._models import BaseModel -from .telemetry_query_spans_response import TelemetryQuerySpansResponse - -__all__ = ["QuerySpansResponse"] - - -class QuerySpansResponse(BaseModel): - data: TelemetryQuerySpansResponse - """List of spans matching the query criteria""" diff --git a/src/llama_stack_client/types/response_create_params.py b/src/llama_stack_client/types/response_create_params.py index fdee8a70..87c8ab68 100644 --- a/src/llama_stack_client/types/response_create_params.py +++ b/src/llama_stack_client/types/response_create_params.py @@ -23,16 +23,21 @@ "InputUnionMember1OpenAIResponseInputFunctionToolCallOutput", "InputUnionMember1OpenAIResponseMcpApprovalRequest", "InputUnionMember1OpenAIResponseMcpApprovalResponse", + "InputUnionMember1OpenAIResponseOutputMessageMcpCall", + "InputUnionMember1OpenAIResponseOutputMessageMcpListTools", + "InputUnionMember1OpenAIResponseOutputMessageMcpListToolsTool", "InputUnionMember1OpenAIResponseMessage", "InputUnionMember1OpenAIResponseMessageContentUnionMember1", "InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "InputUnionMember1OpenAIResponseMessageContentUnionMember2", - "InputUnionMember1OpenAIResponseMessageContentUnionMember2Annotation", - "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", "Text", "TextFormat", "Tool", @@ -189,6 +194,54 @@ class InputUnionMember1OpenAIResponseMcpApprovalResponse(TypedDict, total=False) reason: str +class InputUnionMember1OpenAIResponseOutputMessageMcpCall(TypedDict, total=False): + id: Required[str] + """Unique identifier for this MCP call""" + + arguments: Required[str] + """JSON string containing the MCP call arguments""" + + name: Required[str] + """Name of the MCP method being called""" + + server_label: Required[str] + """Label identifying the MCP server handling the call""" + + type: Required[Literal["mcp_call"]] + """Tool call type identifier, always "mcp_call" """ + + error: str + """(Optional) Error message if the MCP call failed""" + + output: str + """(Optional) Output result from the successful MCP call""" + + +class 
InputUnionMember1OpenAIResponseOutputMessageMcpListToolsTool(TypedDict, total=False): + input_schema: Required[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] + """JSON schema defining the tool's input parameters""" + + name: Required[str] + """Name of the tool""" + + description: str + """(Optional) Description of what the tool does""" + + +class InputUnionMember1OpenAIResponseOutputMessageMcpListTools(TypedDict, total=False): + id: Required[str] + """Unique identifier for this MCP list tools operation""" + + server_label: Required[str] + """Label identifying the MCP server providing the tools""" + + tools: Required[Iterable[InputUnionMember1OpenAIResponseOutputMessageMcpListToolsTool]] + """List of available tools provided by the MCP server""" + + type: Required[Literal["mcp_list_tools"]] + """Tool call type identifier, always "mcp_list_tools" """ + + class InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText( TypedDict, total=False ): @@ -218,7 +271,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInp ] -class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation( +class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( TypedDict, total=False ): file_id: Required[str] @@ -234,7 +287,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR """Annotation type identifier, always "file_citation" """ -class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation( +class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( TypedDict, total=False ): end_index: Required[int] @@ -253,7 +306,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR """URL of the referenced web resource""" -class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation( +class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( TypedDict, total=False ): container_id: Required[str] @@ -269,7 +322,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR type: Required[Literal["container_file_citation"]] -class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath( +class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( TypedDict, total=False ): file_id: Required[str] @@ -279,22 +332,42 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR type: Required[Literal["file_path"]] -InputUnionMember1OpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Union[ - InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, 
+InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Union[ + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ] -class InputUnionMember1OpenAIResponseMessageContentUnionMember2(TypedDict, total=False): - annotations: Required[Iterable[InputUnionMember1OpenAIResponseMessageContentUnionMember2Annotation]] +class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText( + TypedDict, total=False +): + annotations: Required[ + Iterable[ + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] + ] text: Required[str] type: Required[Literal["output_text"]] +class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(TypedDict, total=False): + refusal: Required[str] + """Refusal text supplied by the model""" + + type: Required[Literal["refusal"]] + """Content part type identifier, always "refusal" """ + + +InputUnionMember1OpenAIResponseMessageContentUnionMember2: TypeAlias = Union[ + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, +] + + class InputUnionMember1OpenAIResponseMessage(TypedDict, total=False): content: Required[ Union[ @@ -320,6 +393,8 @@ class InputUnionMember1OpenAIResponseMessage(TypedDict, total=False): InputUnionMember1OpenAIResponseInputFunctionToolCallOutput, InputUnionMember1OpenAIResponseMcpApprovalRequest, InputUnionMember1OpenAIResponseMcpApprovalResponse, + InputUnionMember1OpenAIResponseOutputMessageMcpCall, + InputUnionMember1OpenAIResponseOutputMessageMcpListTools, InputUnionMember1OpenAIResponseMessage, ] diff --git a/src/llama_stack_client/types/response_list_response.py b/src/llama_stack_client/types/response_list_response.py index a3c94f65..8a091316 100644 --- a/src/llama_stack_client/types/response_list_response.py +++ b/src/llama_stack_client/types/response_list_response.py @@ -24,27 +24,34 @@ "InputOpenAIResponseInputFunctionToolCallOutput", "InputOpenAIResponseMcpApprovalRequest", "InputOpenAIResponseMcpApprovalResponse", + "InputOpenAIResponseOutputMessageMcpCall", + "InputOpenAIResponseOutputMessageMcpListTools", + "InputOpenAIResponseOutputMessageMcpListToolsTool", "InputOpenAIResponseMessage", "InputOpenAIResponseMessageContentUnionMember1", "InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "InputOpenAIResponseMessageContentUnionMember2", - "InputOpenAIResponseMessageContentUnionMember2Annotation", - "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - 
"InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", "Output", "OutputOpenAIResponseMessage", "OutputOpenAIResponseMessageContentUnionMember1", "OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "OutputOpenAIResponseMessageContentUnionMember2", - "OutputOpenAIResponseMessageContentUnionMember2Annotation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", "OutputOpenAIResponseOutputMessageWebSearchToolCall", "OutputOpenAIResponseOutputMessageFileSearchToolCall", "OutputOpenAIResponseOutputMessageFileSearchToolCallResult", @@ -171,6 +178,54 @@ class InputOpenAIResponseMcpApprovalResponse(BaseModel): reason: Optional[str] = None +class InputOpenAIResponseOutputMessageMcpCall(BaseModel): + id: str + """Unique identifier for this MCP call""" + + arguments: str + """JSON string containing the MCP call arguments""" + + name: str + """Name of the MCP method being called""" + + server_label: str + """Label identifying the MCP server handling the call""" + + type: Literal["mcp_call"] + """Tool call type identifier, always "mcp_call" """ + + error: Optional[str] = None + """(Optional) Error message if the MCP call failed""" + + output: Optional[str] = None + """(Optional) Output result from the successful MCP call""" + + +class 
InputOpenAIResponseOutputMessageMcpListToolsTool(BaseModel): + input_schema: Dict[str, Union[bool, float, str, List[object], object, None]] + """JSON schema defining the tool's input parameters""" + + name: str + """Name of the tool""" + + description: Optional[str] = None + """(Optional) Description of what the tool does""" + + +class InputOpenAIResponseOutputMessageMcpListTools(BaseModel): + id: str + """Unique identifier for this MCP list tools operation""" + + server_label: str + """Label identifying the MCP server providing the tools""" + + tools: List[InputOpenAIResponseOutputMessageMcpListToolsTool] + """List of available tools provided by the MCP server""" + + type: Literal["mcp_list_tools"] + """Tool call type identifier, always "mcp_list_tools" """ + + class InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel): text: str """The text content of the input message""" @@ -199,7 +254,9 @@ class InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCon ] -class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -213,7 +270,9 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot """Annotation type identifier, always "file_citation" """ -class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -230,7 +289,9 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot """URL of the referenced web resource""" -class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -244,7 +305,9 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot type: Literal["container_file_citation"] -class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -252,25 +315,44 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot type: Literal["file_path"] -InputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + 
InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class InputOpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[InputOpenAIResponseMessageContentUnionMember2Annotation] +class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[ + InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] text: str type: Literal["output_text"] +class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +InputOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + InputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class InputOpenAIResponseMessage(BaseModel): content: Union[ str, List[InputOpenAIResponseMessageContentUnionMember1], List[InputOpenAIResponseMessageContentUnionMember2] @@ -292,6 +374,8 @@ class InputOpenAIResponseMessage(BaseModel): InputOpenAIResponseInputFunctionToolCallOutput, InputOpenAIResponseMcpApprovalRequest, InputOpenAIResponseMcpApprovalResponse, + InputOpenAIResponseOutputMessageMcpCall, + InputOpenAIResponseOutputMessageMcpListTools, InputOpenAIResponseMessage, ] @@ -324,7 +408,9 @@ class OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCo ] -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -338,7 +424,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno """Annotation type identifier, always "file_citation" """ -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -355,7 +443,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno """URL of the referenced web resource""" -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -369,7 +459,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno type: 
Literal["container_file_citation"] -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -377,25 +469,44 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno type: Literal["file_path"] -OutputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class OutputOpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[OutputOpenAIResponseMessageContentUnionMember2Annotation] +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[ + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] text: str type: Literal["output_text"] +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +OutputOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class OutputOpenAIResponseMessage(BaseModel): content: Union[ str, List[OutputOpenAIResponseMessageContentUnionMember1], List[OutputOpenAIResponseMessageContentUnionMember2] @@ -724,6 +835,9 @@ class ResponseListResponse(BaseModel): error: Optional[Error] = None """(Optional) Error details if the response generation failed""" + instructions: Optional[str] = None + """(Optional) System message inserted into the model's context""" + previous_response_id: Optional[str] = None """(Optional) ID of the previous response in a conversation""" diff --git a/src/llama_stack_client/types/response_object.py b/src/llama_stack_client/types/response_object.py index f890f74e..57f708ce 100644 --- a/src/llama_stack_client/types/response_object.py +++ b/src/llama_stack_client/types/response_object.py @@ -22,11 +22,13 @@ "OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", 
"OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "OutputOpenAIResponseMessageContentUnionMember2", - "OutputOpenAIResponseMessageContentUnionMember2Annotation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", "OutputOpenAIResponseOutputMessageWebSearchToolCall", "OutputOpenAIResponseOutputMessageFileSearchToolCall", "OutputOpenAIResponseOutputMessageFileSearchToolCallResult", @@ -80,7 +82,9 @@ class OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCo ] -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -94,7 +98,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno """Annotation type identifier, always "file_citation" """ -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -111,7 +117,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno """URL of the referenced web resource""" -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -125,7 +133,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno type: Literal["container_file_citation"] -class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -133,25 +143,44 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno type: 
Literal["file_path"] -OutputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class OutputOpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[OutputOpenAIResponseMessageContentUnionMember2Annotation] +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[ + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] text: str type: Literal["output_text"] +class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +OutputOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class OutputOpenAIResponseMessage(BaseModel): content: Union[ str, List[OutputOpenAIResponseMessageContentUnionMember1], List[OutputOpenAIResponseMessageContentUnionMember2] @@ -487,6 +516,9 @@ def output_text(self) -> str: error: Optional[Error] = None """(Optional) Error details if the response generation failed""" + instructions: Optional[str] = None + """(Optional) System message inserted into the model's context""" + previous_response_id: Optional[str] = None """(Optional) ID of the previous response in a conversation""" diff --git a/src/llama_stack_client/types/response_object_stream.py b/src/llama_stack_client/types/response_object_stream.py index 189eaa5c..a75ac721 100644 --- a/src/llama_stack_client/types/response_object_stream.py +++ b/src/llama_stack_client/types/response_object_stream.py @@ -24,11 +24,13 @@ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2", - "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2Annotation", - 
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseOutputMessageWebSearchToolCall", "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseOutputMessageFileSearchToolCall", "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseOutputMessageFileSearchToolCallResult", @@ -44,11 +46,13 @@ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2", - "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2Annotation", - "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + 
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseOutputMessageWebSearchToolCall", "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseOutputMessageFileSearchToolCall", "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseOutputMessageFileSearchToolCallResult", @@ -168,7 +172,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage ] -class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation( +class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( BaseModel ): file_id: str @@ -184,7 +188,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage """Annotation type identifier, always "file_citation" """ -class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation( +class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( BaseModel ): end_index: int @@ -203,7 +207,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage """URL of the referenced web resource""" -class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation( +class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( BaseModel ): container_id: str @@ -219,7 +223,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage type: Literal["container_file_citation"] -class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath( +class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( BaseModel ): file_id: str @@ -229,20 +233,22 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage type: Literal["file_path"] -OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ 
+OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2(BaseModel): +class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText( + BaseModel +): annotations: List[ - OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2Annotation + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation ] text: str @@ -250,6 +256,25 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage type: Literal["output_text"] +class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal( + BaseModel +): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage(BaseModel): content: Union[ str, @@ -458,7 +483,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC ] -class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation( +class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( BaseModel ): 
file_id: str @@ -474,7 +499,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC """Annotation type identifier, always "file_citation" """ -class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation( +class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( BaseModel ): end_index: int @@ -493,7 +518,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC """URL of the referenced web resource""" -class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation( +class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( BaseModel ): container_id: str @@ -509,7 +534,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC type: Literal["container_file_citation"] -class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath( +class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( BaseModel ): file_id: str @@ -519,20 +544,22 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC type: Literal["file_path"] -OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2(BaseModel): +class 
OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText( + BaseModel +): annotations: List[ - OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2Annotation + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation ] text: str @@ -540,6 +567,25 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC type: Literal["output_text"] +class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal( + BaseModel +): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessage(BaseModel): content: Union[ str, diff --git a/src/llama_stack_client/types/responses/input_item_list_response.py b/src/llama_stack_client/types/responses/input_item_list_response.py index 30a92ebf..3cb26346 100644 --- a/src/llama_stack_client/types/responses/input_item_list_response.py +++ b/src/llama_stack_client/types/responses/input_item_list_response.py @@ -22,16 +22,21 @@ "DataOpenAIResponseInputFunctionToolCallOutput", "DataOpenAIResponseMcpApprovalRequest", "DataOpenAIResponseMcpApprovalResponse", + "DataOpenAIResponseOutputMessageMcpCall", + "DataOpenAIResponseOutputMessageMcpListTools", + "DataOpenAIResponseOutputMessageMcpListToolsTool", "DataOpenAIResponseMessage", "DataOpenAIResponseMessageContentUnionMember1", "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", "DataOpenAIResponseMessageContentUnionMember2", - "DataOpenAIResponseMessageContentUnionMember2Annotation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation", - "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + 
"DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", ] @@ -136,6 +141,54 @@ class DataOpenAIResponseMcpApprovalResponse(BaseModel): reason: Optional[str] = None +class DataOpenAIResponseOutputMessageMcpCall(BaseModel): + id: str + """Unique identifier for this MCP call""" + + arguments: str + """JSON string containing the MCP call arguments""" + + name: str + """Name of the MCP method being called""" + + server_label: str + """Label identifying the MCP server handling the call""" + + type: Literal["mcp_call"] + """Tool call type identifier, always "mcp_call" """ + + error: Optional[str] = None + """(Optional) Error message if the MCP call failed""" + + output: Optional[str] = None + """(Optional) Output result from the successful MCP call""" + + +class DataOpenAIResponseOutputMessageMcpListToolsTool(BaseModel): + input_schema: Dict[str, Union[bool, float, str, List[object], object, None]] + """JSON schema defining the tool's input parameters""" + + name: str + """Name of the tool""" + + description: Optional[str] = None + """(Optional) Description of what the tool does""" + + +class DataOpenAIResponseOutputMessageMcpListTools(BaseModel): + id: str + """Unique identifier for this MCP list tools operation""" + + server_label: str + """Label identifying the MCP server providing the tools""" + + tools: List[DataOpenAIResponseOutputMessageMcpListToolsTool] + """List of available tools provided by the MCP server""" + + type: Literal["mcp_list_tools"] + """Tool call type identifier, always "mcp_list_tools" """ + + class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel): text: str """The text content of the input message""" @@ -164,7 +217,9 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont ] -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( + BaseModel +): file_id: str """Unique identifier of the referenced file""" @@ -178,7 +233,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """Annotation type identifier, always "file_citation" """ -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( + BaseModel +): end_index: int """End position of the citation span in the content""" @@ -195,7 +252,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota """URL of the referenced web resource""" -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( + BaseModel +): container_id: str end_index: int @@ -209,7 +268,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Literal["container_file_citation"] -class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel): +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( + BaseModel +): file_id: str index: int @@ -217,25 +278,44 @@ class 
DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota type: Literal["file_path"] -DataOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[ +DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation, - DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class DataOpenAIResponseMessageContentUnionMember2(BaseModel): - annotations: List[DataOpenAIResponseMessageContentUnionMember2Annotation] +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[ + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation + ] text: str type: Literal["output_text"] +class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): + refusal: str + """Refusal text supplied by the model""" + + type: Literal["refusal"] + """Content part type identifier, always "refusal" """ + + +DataOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ + Union[ + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + ], + PropertyInfo(discriminator="type"), +] + + class DataOpenAIResponseMessage(BaseModel): content: Union[ str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2] @@ -257,6 +337,8 @@ class DataOpenAIResponseMessage(BaseModel): DataOpenAIResponseInputFunctionToolCallOutput, DataOpenAIResponseMcpApprovalRequest, DataOpenAIResponseMcpApprovalResponse, + DataOpenAIResponseOutputMessageMcpCall, + DataOpenAIResponseOutputMessageMcpListTools, DataOpenAIResponseMessage, ] diff --git a/src/llama_stack_client/types/span_with_status.py b/src/llama_stack_client/types/span_with_status.py deleted file mode 100644 index caebfd91..00000000 --- a/src/llama_stack_client/types/span_with_status.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
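# --- Illustrative sketch, not part of the generated patch above ---
# A minimal example of how a caller might consume the reworked content union
# from input_item_list_response.py: pydantic resolves the union member via the
# `type` discriminator declared with PropertyInfo(discriminator="type"), so
# narrowing on `part.type` distinguishes `output_text` parts (which carry
# annotations) from the new `refusal` parts, and `item.type` likewise picks out
# the new `mcp_call` items. The client construction, the
# `responses.input_items.list(...)` call shape, and the "resp_123" id are
# assumptions for illustration, not confirmed API surface.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://127.0.0.1:8321")  # assumed local server

items = client.responses.input_items.list("resp_123")  # hypothetical response id
for item in items.data:
    if item.type == "mcp_call":  # new item variant added in this patch
        print(f"MCP call {item.name} on {item.server_label}: {item.output or item.error}")
    content = getattr(item, "content", None)  # only message items carry content
    if isinstance(content, list):
        for part in content:
            if part.type == "output_text":
                # annotations may carry file/URL citations (see the union above)
                print(part.text, f"({len(part.annotations)} annotations)")
            elif part.type == "refusal":
                print("model refused:", part.refusal)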
- -from typing import Dict, List, Union, Optional -from datetime import datetime -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["SpanWithStatus"] - - -class SpanWithStatus(BaseModel): - name: str - """Human-readable name describing the operation this span represents""" - - span_id: str - """Unique identifier for the span""" - - start_time: datetime - """Timestamp when the operation began""" - - trace_id: str - """Unique identifier for the trace this span belongs to""" - - attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None - """(Optional) Key-value pairs containing additional metadata about the span""" - - end_time: Optional[datetime] = None - """(Optional) Timestamp when the operation finished, if completed""" - - parent_span_id: Optional[str] = None - """(Optional) Unique identifier for the parent span, if this is a child span""" - - status: Optional[Literal["ok", "error"]] = None - """(Optional) The current status of the span""" diff --git a/src/llama_stack_client/types/telemetry_get_span_response.py b/src/llama_stack_client/types/telemetry_get_span_response.py deleted file mode 100644 index ae984875..00000000 --- a/src/llama_stack_client/types/telemetry_get_span_response.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict, List, Union, Optional -from datetime import datetime - -from .._models import BaseModel - -__all__ = ["TelemetryGetSpanResponse"] - - -class TelemetryGetSpanResponse(BaseModel): - name: str - """Human-readable name describing the operation this span represents""" - - span_id: str - """Unique identifier for the span""" - - start_time: datetime - """Timestamp when the operation began""" - - trace_id: str - """Unique identifier for the trace this span belongs to""" - - attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None - """(Optional) Key-value pairs containing additional metadata about the span""" - - end_time: Optional[datetime] = None - """(Optional) Timestamp when the operation finished, if completed""" - - parent_span_id: Optional[str] = None - """(Optional) Unique identifier for the parent span, if this is a child span""" diff --git a/src/llama_stack_client/types/telemetry_get_span_tree_params.py b/src/llama_stack_client/types/telemetry_get_span_tree_params.py deleted file mode 100644 index 9e94ba69..00000000 --- a/src/llama_stack_client/types/telemetry_get_span_tree_params.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from typing_extensions import TypedDict - -from .._types import SequenceNotStr - -__all__ = ["TelemetryGetSpanTreeParams"] - - -class TelemetryGetSpanTreeParams(TypedDict, total=False): - attributes_to_return: SequenceNotStr[str] - """The attributes to return in the tree.""" - - max_depth: int - """The maximum depth of the tree.""" diff --git a/src/llama_stack_client/types/telemetry_get_span_tree_response.py b/src/llama_stack_client/types/telemetry_get_span_tree_response.py deleted file mode 100644 index 5506aed6..00000000 --- a/src/llama_stack_client/types/telemetry_get_span_tree_response.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict -from typing_extensions import TypeAlias - -from .span_with_status import SpanWithStatus - -__all__ = ["TelemetryGetSpanTreeResponse"] - -TelemetryGetSpanTreeResponse: TypeAlias = Dict[str, SpanWithStatus] diff --git a/src/llama_stack_client/types/telemetry_query_metrics_params.py b/src/llama_stack_client/types/telemetry_query_metrics_params.py deleted file mode 100644 index e36a771e..00000000 --- a/src/llama_stack_client/types/telemetry_query_metrics_params.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Iterable -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["TelemetryQueryMetricsParams", "LabelMatcher"] - - -class TelemetryQueryMetricsParams(TypedDict, total=False): - query_type: Required[Literal["range", "instant"]] - """The type of query to perform.""" - - start_time: Required[int] - """The start time of the metric to query.""" - - end_time: int - """The end time of the metric to query.""" - - granularity: str - """The granularity of the metric to query.""" - - label_matchers: Iterable[LabelMatcher] - """The label matchers to apply to the metric.""" - - -class LabelMatcher(TypedDict, total=False): - name: Required[str] - """The name of the label to match""" - - operator: Required[Literal["=", "!=", "=~", "!~"]] - """The comparison operator to use for matching""" - - value: Required[str] - """The value to match against""" diff --git a/src/llama_stack_client/types/telemetry_query_metrics_response.py b/src/llama_stack_client/types/telemetry_query_metrics_response.py deleted file mode 100644 index 17d03301..00000000 --- a/src/llama_stack_client/types/telemetry_query_metrics_response.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import List -from typing_extensions import TypeAlias - -from .._models import BaseModel - -__all__ = [ - "TelemetryQueryMetricsResponse", - "TelemetryQueryMetricsResponseItem", - "TelemetryQueryMetricsResponseItemLabel", - "TelemetryQueryMetricsResponseItemValue", -] - - -class TelemetryQueryMetricsResponseItemLabel(BaseModel): - name: str - """The name of the label""" - - value: str - """The value of the label""" - - -class TelemetryQueryMetricsResponseItemValue(BaseModel): - timestamp: int - """Unix timestamp when the metric value was recorded""" - - unit: str - - value: float - """The numeric value of the metric at this timestamp""" - - -class TelemetryQueryMetricsResponseItem(BaseModel): - labels: List[TelemetryQueryMetricsResponseItemLabel] - """List of labels associated with this metric series""" - - metric: str - """The name of the metric""" - - values: List[TelemetryQueryMetricsResponseItemValue] - """List of data points in chronological order""" - - -TelemetryQueryMetricsResponse: TypeAlias = List[TelemetryQueryMetricsResponseItem] diff --git a/src/llama_stack_client/types/telemetry_query_spans_params.py b/src/llama_stack_client/types/telemetry_query_spans_params.py deleted file mode 100644 index 305e0d22..00000000 --- a/src/llama_stack_client/types/telemetry_query_spans_params.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Iterable -from typing_extensions import Required, TypedDict - -from .._types import SequenceNotStr -from .query_condition_param import QueryConditionParam - -__all__ = ["TelemetryQuerySpansParams"] - - -class TelemetryQuerySpansParams(TypedDict, total=False): - attribute_filters: Required[Iterable[QueryConditionParam]] - """The attribute filters to apply to the spans.""" - - attributes_to_return: Required[SequenceNotStr[str]] - """The attributes to return in the spans.""" - - max_depth: int - """The maximum depth of the tree.""" diff --git a/src/llama_stack_client/types/telemetry_query_spans_response.py b/src/llama_stack_client/types/telemetry_query_spans_response.py deleted file mode 100644 index 49a53808..00000000 --- a/src/llama_stack_client/types/telemetry_query_spans_response.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import Dict, List, Union, Optional -from datetime import datetime -from typing_extensions import TypeAlias - -from .._models import BaseModel - -__all__ = ["TelemetryQuerySpansResponse", "TelemetryQuerySpansResponseItem"] - - -class TelemetryQuerySpansResponseItem(BaseModel): - name: str - """Human-readable name describing the operation this span represents""" - - span_id: str - """Unique identifier for the span""" - - start_time: datetime - """Timestamp when the operation began""" - - trace_id: str - """Unique identifier for the trace this span belongs to""" - - attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None - """(Optional) Key-value pairs containing additional metadata about the span""" - - end_time: Optional[datetime] = None - """(Optional) Timestamp when the operation finished, if completed""" - - parent_span_id: Optional[str] = None - """(Optional) Unique identifier for the parent span, if this is a child span""" - - -TelemetryQuerySpansResponse: TypeAlias = List[TelemetryQuerySpansResponseItem] diff --git a/src/llama_stack_client/types/telemetry_query_traces_params.py b/src/llama_stack_client/types/telemetry_query_traces_params.py deleted file mode 100644 index 03ef4984..00000000 --- a/src/llama_stack_client/types/telemetry_query_traces_params.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Iterable -from typing_extensions import TypedDict - -from .._types import SequenceNotStr -from .query_condition_param import QueryConditionParam - -__all__ = ["TelemetryQueryTracesParams"] - - -class TelemetryQueryTracesParams(TypedDict, total=False): - attribute_filters: Iterable[QueryConditionParam] - """The attribute filters to apply to the traces.""" - - limit: int - """The limit of traces to return.""" - - offset: int - """The offset of the traces to return.""" - - order_by: SequenceNotStr[str] - """The order by of the traces to return.""" diff --git a/src/llama_stack_client/types/telemetry_query_traces_response.py b/src/llama_stack_client/types/telemetry_query_traces_response.py deleted file mode 100644 index a058570d..00000000 --- a/src/llama_stack_client/types/telemetry_query_traces_response.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List -from typing_extensions import TypeAlias - -from .trace import Trace - -__all__ = ["TelemetryQueryTracesResponse"] - -TelemetryQueryTracesResponse: TypeAlias = List[Trace] diff --git a/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py b/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py deleted file mode 100644 index c3dd5158..00000000 --- a/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. 
-# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Iterable -from typing_extensions import Required, TypedDict - -from .._types import SequenceNotStr -from .query_condition_param import QueryConditionParam - -__all__ = ["TelemetrySaveSpansToDatasetParams"] - - -class TelemetrySaveSpansToDatasetParams(TypedDict, total=False): - attribute_filters: Required[Iterable[QueryConditionParam]] - """The attribute filters to apply to the spans.""" - - attributes_to_save: Required[SequenceNotStr[str]] - """The attributes to save to the dataset.""" - - dataset_id: Required[str] - """The ID of the dataset to save the spans to.""" - - max_depth: int - """The maximum depth of the tree.""" diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py index 095a2a69..d65980c5 100644 --- a/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py +++ b/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py @@ -23,5 +23,5 @@ class RagToolInsertParams(TypedDict, total=False): documents: Required[Iterable[Document]] """List of documents to index in the RAG system""" - vector_db_id: Required[str] + vector_store_id: Required[str] """ID of the vector database to store the document embeddings""" diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py index 08d1f998..6e8fa8ce 100644 --- a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py +++ b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py @@ -21,7 +21,7 @@ class RagToolQueryParams(TypedDict, total=False): content: Required[InterleavedContent] """The query content to search for in the indexed documents""" - vector_db_ids: Required[SequenceNotStr[str]] + vector_store_ids: Required[SequenceNotStr[str]] """List of vector database IDs to search within""" query_config: QueryConfig diff --git a/src/llama_stack_client/types/trace.py b/src/llama_stack_client/types/trace.py deleted file mode 100644 index 65570f78..00000000 --- a/src/llama_stack_client/types/trace.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Optional -from datetime import datetime - -from .._models import BaseModel - -__all__ = ["Trace"] - - -class Trace(BaseModel): - root_span_id: str - """Unique identifier for the root span that started this trace""" - - start_time: datetime - """Timestamp when the trace began""" - - trace_id: str - """Unique identifier for the trace""" - - end_time: Optional[datetime] = None - """(Optional) Timestamp when the trace finished, if completed""" diff --git a/src/llama_stack_client/types/vector_io_insert_params.py b/src/llama_stack_client/types/vector_io_insert_params.py index 5613251f..1584f807 100644 --- a/src/llama_stack_client/types/vector_io_insert_params.py +++ b/src/llama_stack_client/types/vector_io_insert_params.py @@ -27,7 +27,7 @@ class VectorIoInsertParams(TypedDict, total=False): later. 
""" - vector_db_id: Required[str] + vector_store_id: Required[str] """The identifier of the vector database to insert the chunks into.""" ttl_seconds: int diff --git a/src/llama_stack_client/types/vector_io_query_params.py b/src/llama_stack_client/types/vector_io_query_params.py index a2fdc561..538604ac 100644 --- a/src/llama_stack_client/types/vector_io_query_params.py +++ b/src/llama_stack_client/types/vector_io_query_params.py @@ -20,7 +20,7 @@ class VectorIoQueryParams(TypedDict, total=False): query: Required[InterleavedContent] """The query to search for.""" - vector_db_id: Required[str] + vector_store_id: Required[str] """The identifier of the vector database to query.""" params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] diff --git a/tests/api_resources/beta/__init__.py b/tests/api_resources/beta/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/api_resources/beta/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/test_datasets.py b/tests/api_resources/beta/test_datasets.py similarity index 84% rename from tests/api_resources/test_datasets.py rename to tests/api_resources/beta/test_datasets.py index 98f0f0b8..7a6fc7c9 100644 --- a/tests/api_resources/test_datasets.py +++ b/tests/api_resources/beta/test_datasets.py @@ -15,7 +15,7 @@ from tests.utils import assert_matches_type from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient -from llama_stack_client.types import ( +from llama_stack_client.types.beta import ( DatasetListResponse, DatasetIterrowsResponse, DatasetRegisterResponse, @@ -30,14 +30,14 @@ class TestDatasets: @parametrize def test_method_retrieve(self, client: LlamaStackClient) -> None: - dataset = client.datasets.retrieve( + dataset = client.beta.datasets.retrieve( "dataset_id", ) assert_matches_type(DatasetRetrieveResponse, dataset, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: LlamaStackClient) -> None: - response = client.datasets.with_raw_response.retrieve( + response = client.beta.datasets.with_raw_response.retrieve( "dataset_id", ) @@ -48,7 +48,7 @@ def test_raw_response_retrieve(self, client: LlamaStackClient) -> None: @parametrize def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None: - with client.datasets.with_streaming_response.retrieve( + with client.beta.datasets.with_streaming_response.retrieve( "dataset_id", ) as response: assert not response.is_closed @@ -62,18 +62,18 @@ def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None: @parametrize def test_path_params_retrieve(self, client: LlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - client.datasets.with_raw_response.retrieve( + client.beta.datasets.with_raw_response.retrieve( "", ) @parametrize def test_method_list(self, client: LlamaStackClient) -> None: - dataset = client.datasets.list() + dataset = client.beta.datasets.list() assert_matches_type(DatasetListResponse, dataset, path=["response"]) @parametrize def test_raw_response_list(self, client: LlamaStackClient) -> None: - response = client.datasets.with_raw_response.list() + response = client.beta.datasets.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -82,7 +82,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None: 
@parametrize def test_streaming_response_list(self, client: LlamaStackClient) -> None: - with client.datasets.with_streaming_response.list() as response: + with client.beta.datasets.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -93,7 +93,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None: @parametrize def test_method_appendrows(self, client: LlamaStackClient) -> None: - dataset = client.datasets.appendrows( + dataset = client.beta.datasets.appendrows( dataset_id="dataset_id", rows=[{"foo": True}], ) @@ -101,7 +101,7 @@ def test_method_appendrows(self, client: LlamaStackClient) -> None: @parametrize def test_raw_response_appendrows(self, client: LlamaStackClient) -> None: - response = client.datasets.with_raw_response.appendrows( + response = client.beta.datasets.with_raw_response.appendrows( dataset_id="dataset_id", rows=[{"foo": True}], ) @@ -113,7 +113,7 @@ def test_raw_response_appendrows(self, client: LlamaStackClient) -> None: @parametrize def test_streaming_response_appendrows(self, client: LlamaStackClient) -> None: - with client.datasets.with_streaming_response.appendrows( + with client.beta.datasets.with_streaming_response.appendrows( dataset_id="dataset_id", rows=[{"foo": True}], ) as response: @@ -128,21 +128,21 @@ def test_streaming_response_appendrows(self, client: LlamaStackClient) -> None: @parametrize def test_path_params_appendrows(self, client: LlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - client.datasets.with_raw_response.appendrows( + client.beta.datasets.with_raw_response.appendrows( dataset_id="", rows=[{"foo": True}], ) @parametrize def test_method_iterrows(self, client: LlamaStackClient) -> None: - dataset = client.datasets.iterrows( + dataset = client.beta.datasets.iterrows( dataset_id="dataset_id", ) assert_matches_type(DatasetIterrowsResponse, dataset, path=["response"]) @parametrize def test_method_iterrows_with_all_params(self, client: LlamaStackClient) -> None: - dataset = client.datasets.iterrows( + dataset = client.beta.datasets.iterrows( dataset_id="dataset_id", limit=0, start_index=0, @@ -151,7 +151,7 @@ def test_method_iterrows_with_all_params(self, client: LlamaStackClient) -> None @parametrize def test_raw_response_iterrows(self, client: LlamaStackClient) -> None: - response = client.datasets.with_raw_response.iterrows( + response = client.beta.datasets.with_raw_response.iterrows( dataset_id="dataset_id", ) @@ -162,7 +162,7 @@ def test_raw_response_iterrows(self, client: LlamaStackClient) -> None: @parametrize def test_streaming_response_iterrows(self, client: LlamaStackClient) -> None: - with client.datasets.with_streaming_response.iterrows( + with client.beta.datasets.with_streaming_response.iterrows( dataset_id="dataset_id", ) as response: assert not response.is_closed @@ -176,13 +176,13 @@ def test_streaming_response_iterrows(self, client: LlamaStackClient) -> None: @parametrize def test_path_params_iterrows(self, client: LlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - client.datasets.with_raw_response.iterrows( + client.beta.datasets.with_raw_response.iterrows( dataset_id="", ) @parametrize def test_method_register(self, client: LlamaStackClient) -> None: - dataset = client.datasets.register( + dataset = client.beta.datasets.register( 
purpose="post-training/messages", source={ "type": "uri", @@ -193,7 +193,7 @@ def test_method_register(self, client: LlamaStackClient) -> None: @parametrize def test_method_register_with_all_params(self, client: LlamaStackClient) -> None: - dataset = client.datasets.register( + dataset = client.beta.datasets.register( purpose="post-training/messages", source={ "type": "uri", @@ -206,7 +206,7 @@ def test_method_register_with_all_params(self, client: LlamaStackClient) -> None @parametrize def test_raw_response_register(self, client: LlamaStackClient) -> None: - response = client.datasets.with_raw_response.register( + response = client.beta.datasets.with_raw_response.register( purpose="post-training/messages", source={ "type": "uri", @@ -221,7 +221,7 @@ def test_raw_response_register(self, client: LlamaStackClient) -> None: @parametrize def test_streaming_response_register(self, client: LlamaStackClient) -> None: - with client.datasets.with_streaming_response.register( + with client.beta.datasets.with_streaming_response.register( purpose="post-training/messages", source={ "type": "uri", @@ -238,14 +238,14 @@ def test_streaming_response_register(self, client: LlamaStackClient) -> None: @parametrize def test_method_unregister(self, client: LlamaStackClient) -> None: - dataset = client.datasets.unregister( + dataset = client.beta.datasets.unregister( "dataset_id", ) assert dataset is None @parametrize def test_raw_response_unregister(self, client: LlamaStackClient) -> None: - response = client.datasets.with_raw_response.unregister( + response = client.beta.datasets.with_raw_response.unregister( "dataset_id", ) @@ -256,7 +256,7 @@ def test_raw_response_unregister(self, client: LlamaStackClient) -> None: @parametrize def test_streaming_response_unregister(self, client: LlamaStackClient) -> None: - with client.datasets.with_streaming_response.unregister( + with client.beta.datasets.with_streaming_response.unregister( "dataset_id", ) as response: assert not response.is_closed @@ -270,7 +270,7 @@ def test_streaming_response_unregister(self, client: LlamaStackClient) -> None: @parametrize def test_path_params_unregister(self, client: LlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - client.datasets.with_raw_response.unregister( + client.beta.datasets.with_raw_response.unregister( "", ) @@ -282,14 +282,14 @@ class TestAsyncDatasets: @parametrize async def test_method_retrieve(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.retrieve( + dataset = await async_client.beta.datasets.retrieve( "dataset_id", ) assert_matches_type(DatasetRetrieveResponse, dataset, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.datasets.with_raw_response.retrieve( + response = await async_client.beta.datasets.with_raw_response.retrieve( "dataset_id", ) @@ -300,7 +300,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.datasets.with_streaming_response.retrieve( + async with async_client.beta.datasets.with_streaming_response.retrieve( "dataset_id", ) as response: assert not response.is_closed @@ -314,18 +314,18 @@ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackCl @parametrize async def 
test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - await async_client.datasets.with_raw_response.retrieve( + await async_client.beta.datasets.with_raw_response.retrieve( "", ) @parametrize async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.list() + dataset = await async_client.beta.datasets.list() assert_matches_type(DatasetListResponse, dataset, path=["response"]) @parametrize async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.datasets.with_raw_response.list() + response = await async_client.beta.datasets.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -334,7 +334,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N @parametrize async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.datasets.with_streaming_response.list() as response: + async with async_client.beta.datasets.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -345,7 +345,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient @parametrize async def test_method_appendrows(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.appendrows( + dataset = await async_client.beta.datasets.appendrows( dataset_id="dataset_id", rows=[{"foo": True}], ) @@ -353,7 +353,7 @@ async def test_method_appendrows(self, async_client: AsyncLlamaStackClient) -> N @parametrize async def test_raw_response_appendrows(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.datasets.with_raw_response.appendrows( + response = await async_client.beta.datasets.with_raw_response.appendrows( dataset_id="dataset_id", rows=[{"foo": True}], ) @@ -365,7 +365,7 @@ async def test_raw_response_appendrows(self, async_client: AsyncLlamaStackClient @parametrize async def test_streaming_response_appendrows(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.datasets.with_streaming_response.appendrows( + async with async_client.beta.datasets.with_streaming_response.appendrows( dataset_id="dataset_id", rows=[{"foo": True}], ) as response: @@ -380,21 +380,21 @@ async def test_streaming_response_appendrows(self, async_client: AsyncLlamaStack @parametrize async def test_path_params_appendrows(self, async_client: AsyncLlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - await async_client.datasets.with_raw_response.appendrows( + await async_client.beta.datasets.with_raw_response.appendrows( dataset_id="", rows=[{"foo": True}], ) @parametrize async def test_method_iterrows(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.iterrows( + dataset = await async_client.beta.datasets.iterrows( dataset_id="dataset_id", ) assert_matches_type(DatasetIterrowsResponse, dataset, path=["response"]) @parametrize async def test_method_iterrows_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.iterrows( + dataset = await async_client.beta.datasets.iterrows( 
dataset_id="dataset_id", limit=0, start_index=0, @@ -403,7 +403,7 @@ async def test_method_iterrows_with_all_params(self, async_client: AsyncLlamaSta @parametrize async def test_raw_response_iterrows(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.datasets.with_raw_response.iterrows( + response = await async_client.beta.datasets.with_raw_response.iterrows( dataset_id="dataset_id", ) @@ -414,7 +414,7 @@ async def test_raw_response_iterrows(self, async_client: AsyncLlamaStackClient) @parametrize async def test_streaming_response_iterrows(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.datasets.with_streaming_response.iterrows( + async with async_client.beta.datasets.with_streaming_response.iterrows( dataset_id="dataset_id", ) as response: assert not response.is_closed @@ -428,13 +428,13 @@ async def test_streaming_response_iterrows(self, async_client: AsyncLlamaStackCl @parametrize async def test_path_params_iterrows(self, async_client: AsyncLlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - await async_client.datasets.with_raw_response.iterrows( + await async_client.beta.datasets.with_raw_response.iterrows( dataset_id="", ) @parametrize async def test_method_register(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.register( + dataset = await async_client.beta.datasets.register( purpose="post-training/messages", source={ "type": "uri", @@ -445,7 +445,7 @@ async def test_method_register(self, async_client: AsyncLlamaStackClient) -> Non @parametrize async def test_method_register_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.register( + dataset = await async_client.beta.datasets.register( purpose="post-training/messages", source={ "type": "uri", @@ -458,7 +458,7 @@ async def test_method_register_with_all_params(self, async_client: AsyncLlamaSta @parametrize async def test_raw_response_register(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.datasets.with_raw_response.register( + response = await async_client.beta.datasets.with_raw_response.register( purpose="post-training/messages", source={ "type": "uri", @@ -473,7 +473,7 @@ async def test_raw_response_register(self, async_client: AsyncLlamaStackClient) @parametrize async def test_streaming_response_register(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.datasets.with_streaming_response.register( + async with async_client.beta.datasets.with_streaming_response.register( purpose="post-training/messages", source={ "type": "uri", @@ -490,14 +490,14 @@ async def test_streaming_response_register(self, async_client: AsyncLlamaStackCl @parametrize async def test_method_unregister(self, async_client: AsyncLlamaStackClient) -> None: - dataset = await async_client.datasets.unregister( + dataset = await async_client.beta.datasets.unregister( "dataset_id", ) assert dataset is None @parametrize async def test_raw_response_unregister(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.datasets.with_raw_response.unregister( + response = await async_client.beta.datasets.with_raw_response.unregister( "dataset_id", ) @@ -508,7 +508,7 @@ async def test_raw_response_unregister(self, async_client: AsyncLlamaStackClient @parametrize async def test_streaming_response_unregister(self, async_client: AsyncLlamaStackClient) 
-> None: - async with async_client.datasets.with_streaming_response.unregister( + async with async_client.beta.datasets.with_streaming_response.unregister( "dataset_id", ) as response: assert not response.is_closed @@ -522,6 +522,6 @@ async def test_streaming_response_unregister(self, async_client: AsyncLlamaStack @parametrize async def test_path_params_unregister(self, async_client: AsyncLlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"): - await async_client.datasets.with_raw_response.unregister( + await async_client.beta.datasets.with_raw_response.unregister( "", ) diff --git a/tests/api_resources/test_telemetry.py b/tests/api_resources/test_telemetry.py deleted file mode 100644 index 483c724f..00000000 --- a/tests/api_resources/test_telemetry.py +++ /dev/null @@ -1,815 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from tests.utils import assert_matches_type -from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient -from llama_stack_client.types import ( - Trace, - TelemetryGetSpanResponse, - TelemetryQuerySpansResponse, - TelemetryGetSpanTreeResponse, - TelemetryQueryTracesResponse, - TelemetryQueryMetricsResponse, -) - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestTelemetry: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @parametrize - def test_method_get_span(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.get_span( - span_id="span_id", - trace_id="trace_id", - ) - assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"]) - - @parametrize - def test_raw_response_get_span(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.get_span( - span_id="span_id", - trace_id="trace_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"]) - - @parametrize - def test_streaming_response_get_span(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.get_span( - span_id="span_id", - trace_id="trace_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = response.parse() - assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_path_params_get_span(self, client: LlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"): - client.telemetry.with_raw_response.get_span( - span_id="span_id", - trace_id="", - ) - - with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): - client.telemetry.with_raw_response.get_span( - span_id="", - trace_id="trace_id", - ) - - @parametrize - def test_method_get_span_tree(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.get_span_tree( 
- span_id="span_id", - ) - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - @parametrize - def test_method_get_span_tree_with_all_params(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.get_span_tree( - span_id="span_id", - attributes_to_return=["string"], - max_depth=0, - ) - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - @parametrize - def test_raw_response_get_span_tree(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.get_span_tree( - span_id="span_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - @parametrize - def test_streaming_response_get_span_tree(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.get_span_tree( - span_id="span_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = response.parse() - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_path_params_get_span_tree(self, client: LlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): - client.telemetry.with_raw_response.get_span_tree( - span_id="", - ) - - @parametrize - def test_method_get_trace(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.get_trace( - "trace_id", - ) - assert_matches_type(Trace, telemetry, path=["response"]) - - @parametrize - def test_raw_response_get_trace(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.get_trace( - "trace_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert_matches_type(Trace, telemetry, path=["response"]) - - @parametrize - def test_streaming_response_get_trace(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.get_trace( - "trace_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = response.parse() - assert_matches_type(Trace, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_path_params_get_trace(self, client: LlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"): - client.telemetry.with_raw_response.get_trace( - "", - ) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_method_query_metrics(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - ) - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_method_query_metrics_with_all_params(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - end_time=0, - granularity="granularity", - label_matchers=[ - { - 
"name": "name", - "operator": "=", - "value": "value", - } - ], - ) - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_raw_response_query_metrics(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_streaming_response_query_metrics(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = response.parse() - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_path_params_query_metrics(self, client: LlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `metric_name` but received ''"): - client.telemetry.with_raw_response.query_metrics( - metric_name="", - query_type="range", - start_time=0, - ) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_method_query_spans(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - ) - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_method_query_spans_with_all_params(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - max_depth=0, - ) - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_raw_response_query_spans(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_streaming_response_query_spans(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - 
telemetry = response.parse() - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_method_query_traces(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.query_traces() - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_method_query_traces_with_all_params(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.query_traces( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - limit=0, - offset=0, - order_by=["string"], - ) - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_raw_response_query_traces(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.query_traces() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - def test_streaming_response_query_traces(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.query_traces() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = response.parse() - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - def test_method_save_spans_to_dataset(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - ) - assert telemetry is None - - @parametrize - def test_method_save_spans_to_dataset_with_all_params(self, client: LlamaStackClient) -> None: - telemetry = client.telemetry.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - max_depth=0, - ) - assert telemetry is None - - @parametrize - def test_raw_response_save_spans_to_dataset(self, client: LlamaStackClient) -> None: - response = client.telemetry.with_raw_response.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = response.parse() - assert telemetry is None - - @parametrize - def test_streaming_response_save_spans_to_dataset(self, client: LlamaStackClient) -> None: - with client.telemetry.with_streaming_response.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = response.parse() - assert telemetry is None - - assert 
cast(Any, response.is_closed) is True - - -class TestAsyncTelemetry: - parametrize = pytest.mark.parametrize( - "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] - ) - - @parametrize - async def test_method_get_span(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.get_span( - span_id="span_id", - trace_id="trace_id", - ) - assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"]) - - @parametrize - async def test_raw_response_get_span(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.get_span( - span_id="span_id", - trace_id="trace_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"]) - - @parametrize - async def test_streaming_response_get_span(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.get_span( - span_id="span_id", - trace_id="trace_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_path_params_get_span(self, async_client: AsyncLlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"): - await async_client.telemetry.with_raw_response.get_span( - span_id="span_id", - trace_id="", - ) - - with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): - await async_client.telemetry.with_raw_response.get_span( - span_id="", - trace_id="trace_id", - ) - - @parametrize - async def test_method_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.get_span_tree( - span_id="span_id", - ) - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - @parametrize - async def test_method_get_span_tree_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.get_span_tree( - span_id="span_id", - attributes_to_return=["string"], - max_depth=0, - ) - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - @parametrize - async def test_raw_response_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.get_span_tree( - span_id="span_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - @parametrize - async def test_streaming_response_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.get_span_tree( - span_id="span_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - 
- @parametrize - async def test_path_params_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): - await async_client.telemetry.with_raw_response.get_span_tree( - span_id="", - ) - - @parametrize - async def test_method_get_trace(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.get_trace( - "trace_id", - ) - assert_matches_type(Trace, telemetry, path=["response"]) - - @parametrize - async def test_raw_response_get_trace(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.get_trace( - "trace_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert_matches_type(Trace, telemetry, path=["response"]) - - @parametrize - async def test_streaming_response_get_trace(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.get_trace( - "trace_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert_matches_type(Trace, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_path_params_get_trace(self, async_client: AsyncLlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"): - await async_client.telemetry.with_raw_response.get_trace( - "", - ) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_method_query_metrics(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - ) - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_method_query_metrics_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - end_time=0, - granularity="granularity", - label_matchers=[ - { - "name": "name", - "operator": "=", - "value": "value", - } - ], - ) - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_raw_response_query_metrics(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_streaming_response_query_metrics(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.query_metrics( - metric_name="metric_name", - query_type="range", - start_time=0, - ) as response: - assert not 
response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_path_params_query_metrics(self, async_client: AsyncLlamaStackClient) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `metric_name` but received ''"): - await async_client.telemetry.with_raw_response.query_metrics( - metric_name="", - query_type="range", - start_time=0, - ) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_method_query_spans(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - ) - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_method_query_spans_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - max_depth=0, - ) - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_raw_response_query_spans(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_streaming_response_query_spans(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.query_spans( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_return=["string"], - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_method_query_traces(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.query_traces() - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_method_query_traces_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.query_traces( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - limit=0, - offset=0, - 
order_by=["string"], - ) - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_raw_response_query_traces(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.query_traces() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - @pytest.mark.skip(reason="unsupported query params in java / kotlin") - @parametrize - async def test_streaming_response_query_traces(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.query_traces() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @parametrize - async def test_method_save_spans_to_dataset(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - ) - assert telemetry is None - - @parametrize - async def test_method_save_spans_to_dataset_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: - telemetry = await async_client.telemetry.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - max_depth=0, - ) - assert telemetry is None - - @parametrize - async def test_raw_response_save_spans_to_dataset(self, async_client: AsyncLlamaStackClient) -> None: - response = await async_client.telemetry.with_raw_response.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - telemetry = await response.parse() - assert telemetry is None - - @parametrize - async def test_streaming_response_save_spans_to_dataset(self, async_client: AsyncLlamaStackClient) -> None: - async with async_client.telemetry.with_streaming_response.save_spans_to_dataset( - attribute_filters=[ - { - "key": "key", - "op": "eq", - "value": True, - } - ], - attributes_to_save=["string"], - dataset_id="dataset_id", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - telemetry = await response.parse() - assert telemetry is None - - assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_vector_io.py b/tests/api_resources/test_vector_io.py index 2e37edcf..9adf721a 100644 --- a/tests/api_resources/test_vector_io.py +++ b/tests/api_resources/test_vector_io.py @@ -32,7 +32,7 @@ def test_method_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert vector_io is None @@ -60,7 +60,7 @@ def test_method_insert_with_all_params(self, client: LlamaStackClient) -> None: "stored_chunk_id": 
"stored_chunk_id", } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ttl_seconds=0, ) assert vector_io is None @@ -74,7 +74,7 @@ def test_raw_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert response.is_closed is True @@ -91,7 +91,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -105,7 +105,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: def test_method_query(self, client: LlamaStackClient) -> None: vector_io = client.vector_io.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -113,7 +113,7 @@ def test_method_query(self, client: LlamaStackClient) -> None: def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: vector_io = client.vector_io.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", params={"foo": True}, ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -122,7 +122,7 @@ def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: def test_raw_response_query(self, client: LlamaStackClient) -> None: response = client.vector_io.with_raw_response.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert response.is_closed is True @@ -134,7 +134,7 @@ def test_raw_response_query(self, client: LlamaStackClient) -> None: def test_streaming_response_query(self, client: LlamaStackClient) -> None: with client.vector_io.with_streaming_response.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -159,7 +159,7 @@ async def test_method_insert(self, async_client: AsyncLlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert vector_io is None @@ -187,7 +187,7 @@ async def test_method_insert_with_all_params(self, async_client: AsyncLlamaStack "stored_chunk_id": "stored_chunk_id", } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ttl_seconds=0, ) assert vector_io is None @@ -201,7 +201,7 @@ async def test_raw_response_insert(self, async_client: AsyncLlamaStackClient) -> "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert response.is_closed is True @@ -218,7 +218,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -232,7 +232,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: vector_io = await async_client.vector_io.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert_matches_type(QueryChunksResponse, 
vector_io, path=["response"]) @@ -240,7 +240,7 @@ async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: vector_io = await async_client.vector_io.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", params={"foo": True}, ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -249,7 +249,7 @@ async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackC async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> None: response = await async_client.vector_io.with_raw_response.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert response.is_closed is True @@ -261,7 +261,7 @@ async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> async def test_streaming_response_query(self, async_client: AsyncLlamaStackClient) -> None: async with async_client.vector_io.with_streaming_response.query( query="string", - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/tool_runtime/test_rag_tool.py b/tests/api_resources/tool_runtime/test_rag_tool.py index 8dd7e752..fd743058 100644 --- a/tests/api_resources/tool_runtime/test_rag_tool.py +++ b/tests/api_resources/tool_runtime/test_rag_tool.py @@ -34,7 +34,7 @@ def test_method_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert rag_tool is None @@ -49,7 +49,7 @@ def test_raw_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert response.is_closed is True @@ -68,7 +68,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -82,7 +82,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: def test_method_query(self, client: LlamaStackClient) -> None: rag_tool = client.tool_runtime.rag_tool.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], ) assert_matches_type(QueryResult, rag_tool, path=["response"]) @@ -90,7 +90,7 @@ def test_method_query(self, client: LlamaStackClient) -> None: def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: rag_tool = client.tool_runtime.rag_tool.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], query_config={ "chunk_template": "chunk_template", "max_chunks": 0, @@ -112,7 +112,7 @@ def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: def test_raw_response_query(self, client: LlamaStackClient) -> None: response = client.tool_runtime.rag_tool.with_raw_response.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], ) assert response.is_closed is True @@ -124,7 +124,7 @@ def test_raw_response_query(self, client: LlamaStackClient) -> None: def test_streaming_response_query(self, client: LlamaStackClient) -> None: with 
client.tool_runtime.rag_tool.with_streaming_response.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -151,7 +151,7 @@ async def test_method_insert(self, async_client: AsyncLlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert rag_tool is None @@ -166,7 +166,7 @@ async def test_raw_response_insert(self, async_client: AsyncLlamaStackClient) -> "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) assert response.is_closed is True @@ -185,7 +185,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie "metadata": {"foo": True}, } ], - vector_db_id="vector_db_id", + vector_store_id="vector_store_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -199,7 +199,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: rag_tool = await async_client.tool_runtime.rag_tool.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], ) assert_matches_type(QueryResult, rag_tool, path=["response"]) @@ -207,7 +207,7 @@ async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: rag_tool = await async_client.tool_runtime.rag_tool.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], query_config={ "chunk_template": "chunk_template", "max_chunks": 0, @@ -229,7 +229,7 @@ async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackC async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> None: response = await async_client.tool_runtime.rag_tool.with_raw_response.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], ) assert response.is_closed is True @@ -241,7 +241,7 @@ async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> async def test_streaming_response_query(self, async_client: AsyncLlamaStackClient) -> None: async with async_client.tool_runtime.rag_tool.with_streaming_response.query( content="string", - vector_db_ids=["string"], + vector_store_ids=["string"], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" From 44e5decf4c417d7d55fd5a7ce33c1daac7ee0922 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 21 Oct 2025 18:37:36 +0000 Subject: [PATCH 3/6] feat(api): sync --- .stats.yml | 6 +- api.md | 50 ++++++------- src/llama_stack_client/_client.py | 72 +++++++++---------- src/llama_stack_client/resources/__init__.py | 12 ++-- .../resources/tool_runtime/rag_tool.py | 24 +++---- src/llama_stack_client/resources/vector_io.py | 24 +++---- .../types/alpha/memory_retrieval_step.py | 2 +- .../tool_runtime/rag_tool_insert_params.py | 2 +- .../tool_runtime/rag_tool_query_params.py | 2 +- .../types/vector_io_insert_params.py | 2 +- .../types/vector_io_query_params.py | 2 +- tests/api_resources/test_vector_io.py | 32 ++++----- .../tool_runtime/test_rag_tool.py | 28 ++++---- 13 files changed, 129 insertions(+), 129 deletions(-) 
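Note for reviewers: this sync reverses the parameter rename from the previous patch, so the RAG-tool and vector-IO call sites go back to the `vector_db_*` names, and the `beta` resource group now sorts after `alpha` throughout the client. A minimal sketch of the affected call sites follows, reusing the placeholder values from the SDK's own tests; the base URL mirrors the tests' `TEST_API_BASE_URL` default and is an assumption here, not a documented server address.

```python
from llama_stack_client import LlamaStackClient

# Assumed local server URL, taken from the test suite's default.
client = LlamaStackClient(base_url="http://127.0.0.1:4010")

# Ingest chunks; after this patch the parameter is `vector_db_id` again
# (the previous patch had briefly renamed it to `vector_store_id`).
client.vector_io.insert(
    chunks=[{"content": "string", "metadata": {"foo": True}}],
    vector_db_id="vector_db_id",
)

# Query the same vector database back.
chunks = client.vector_io.query(
    query="string",
    vector_db_id="vector_db_id",
)

# The RAG tool searches across databases, so it takes a list of IDs.
result = client.tool_runtime.rag_tool.query(
    content="string",
    vector_db_ids=["string"],
)
```

Note the asymmetry preserved by the rename: the `vector_io` routes address a single database via `vector_db_id`, while `tool_runtime.rag_tool.query` accepts a plural `vector_db_ids` list.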
diff --git a/.stats.yml b/.stats.yml index 89ced1ec..77e48a77 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 104 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-2acd62f8d5c4523bad4ddb2cc50608135249858b7047a71b48e25befa1e1f43f.yml -openapi_spec_hash: 1ad726ff81dc21720c8c3443d33c0562 -config_hash: 6c5b61acff5bb6534edd57f93ed60106 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-fcaa960dc2de2029f4f67f13ff1d0fc1ff70e683810ed9739be805debef1673d.yml +openapi_spec_hash: a25e7616ad6230f872b46c2cb6fa0a96 +config_hash: ff421daf28f90ad4bd4e13f374b18a00 diff --git a/api.md b/api.md index e28c1b59..4c078d02 100644 --- a/api.md +++ b/api.md @@ -431,31 +431,6 @@ Methods: - client.files.delete(file_id) -> DeleteFileResponse - client.files.content(file_id) -> object -# Beta - -## Datasets - -Types: - -```python -from llama_stack_client.types.beta import ( - ListDatasetsResponse, - DatasetRetrieveResponse, - DatasetListResponse, - DatasetIterrowsResponse, - DatasetRegisterResponse, -) -``` - -Methods: - -- client.beta.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse -- client.beta.datasets.list() -> DatasetListResponse -- client.beta.datasets.appendrows(dataset_id, \*\*params) -> None -- client.beta.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse -- client.beta.datasets.register(\*\*params) -> DatasetRegisterResponse -- client.beta.datasets.unregister(dataset_id) -> None - # Alpha ## Inference @@ -601,3 +576,28 @@ Methods: - client.alpha.agents.turn.create(session_id, \*, agent_id, \*\*params) -> Turn - client.alpha.agents.turn.retrieve(turn_id, \*, agent_id, session_id) -> Turn - client.alpha.agents.turn.resume(turn_id, \*, agent_id, session_id, \*\*params) -> Turn + +# Beta + +## Datasets + +Types: + +```python +from llama_stack_client.types.beta import ( + ListDatasetsResponse, + DatasetRetrieveResponse, + DatasetListResponse, + DatasetIterrowsResponse, + DatasetRegisterResponse, +) +``` + +Methods: + +- client.beta.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse +- client.beta.datasets.list() -> DatasetListResponse +- client.beta.datasets.appendrows(dataset_id, \*\*params) -> None +- client.beta.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse +- client.beta.datasets.register(\*\*params) -> DatasetRegisterResponse +- client.beta.datasets.unregister(dataset_id) -> None diff --git a/src/llama_stack_client/_client.py b/src/llama_stack_client/_client.py index a70956ab..2c96b202 100644 --- a/src/llama_stack_client/_client.py +++ b/src/llama_stack_client/_client.py @@ -293,18 +293,18 @@ def files(self) -> FilesResource: return FilesResource(self) - @cached_property - def beta(self) -> BetaResource: - from .resources.beta import BetaResource - - return BetaResource(self) - @cached_property def alpha(self) -> AlphaResource: from .resources.alpha import AlphaResource return AlphaResource(self) + @cached_property + def beta(self) -> BetaResource: + from .resources.beta import BetaResource + + return BetaResource(self) + @cached_property def with_raw_response(self) -> LlamaStackClientWithRawResponse: return LlamaStackClientWithRawResponse(self) @@ -609,18 +609,18 @@ def files(self) -> AsyncFilesResource: return AsyncFilesResource(self) - @cached_property - def beta(self) -> AsyncBetaResource: - from .resources.beta import AsyncBetaResource - - return AsyncBetaResource(self) - @cached_property def 
alpha(self) -> AsyncAlphaResource: from .resources.alpha import AsyncAlphaResource return AsyncAlphaResource(self) + @cached_property + def beta(self) -> AsyncBetaResource: + from .resources.beta import AsyncBetaResource + + return AsyncBetaResource(self) + @cached_property def with_raw_response(self) -> AsyncLlamaStackClientWithRawResponse: return AsyncLlamaStackClientWithRawResponse(self) @@ -874,18 +874,18 @@ def files(self) -> files.FilesResourceWithRawResponse: return FilesResourceWithRawResponse(self._client.files) - @cached_property - def beta(self) -> beta.BetaResourceWithRawResponse: - from .resources.beta import BetaResourceWithRawResponse - - return BetaResourceWithRawResponse(self._client.beta) - @cached_property def alpha(self) -> alpha.AlphaResourceWithRawResponse: from .resources.alpha import AlphaResourceWithRawResponse return AlphaResourceWithRawResponse(self._client.alpha) + @cached_property + def beta(self) -> beta.BetaResourceWithRawResponse: + from .resources.beta import BetaResourceWithRawResponse + + return BetaResourceWithRawResponse(self._client.beta) + class AsyncLlamaStackClientWithRawResponse: _client: AsyncLlamaStackClient @@ -1027,18 +1027,18 @@ def files(self) -> files.AsyncFilesResourceWithRawResponse: return AsyncFilesResourceWithRawResponse(self._client.files) - @cached_property - def beta(self) -> beta.AsyncBetaResourceWithRawResponse: - from .resources.beta import AsyncBetaResourceWithRawResponse - - return AsyncBetaResourceWithRawResponse(self._client.beta) - @cached_property def alpha(self) -> alpha.AsyncAlphaResourceWithRawResponse: from .resources.alpha import AsyncAlphaResourceWithRawResponse return AsyncAlphaResourceWithRawResponse(self._client.alpha) + @cached_property + def beta(self) -> beta.AsyncBetaResourceWithRawResponse: + from .resources.beta import AsyncBetaResourceWithRawResponse + + return AsyncBetaResourceWithRawResponse(self._client.beta) + class LlamaStackClientWithStreamedResponse: _client: LlamaStackClient @@ -1180,18 +1180,18 @@ def files(self) -> files.FilesResourceWithStreamingResponse: return FilesResourceWithStreamingResponse(self._client.files) - @cached_property - def beta(self) -> beta.BetaResourceWithStreamingResponse: - from .resources.beta import BetaResourceWithStreamingResponse - - return BetaResourceWithStreamingResponse(self._client.beta) - @cached_property def alpha(self) -> alpha.AlphaResourceWithStreamingResponse: from .resources.alpha import AlphaResourceWithStreamingResponse return AlphaResourceWithStreamingResponse(self._client.alpha) + @cached_property + def beta(self) -> beta.BetaResourceWithStreamingResponse: + from .resources.beta import BetaResourceWithStreamingResponse + + return BetaResourceWithStreamingResponse(self._client.beta) + class AsyncLlamaStackClientWithStreamedResponse: _client: AsyncLlamaStackClient @@ -1333,18 +1333,18 @@ def files(self) -> files.AsyncFilesResourceWithStreamingResponse: return AsyncFilesResourceWithStreamingResponse(self._client.files) - @cached_property - def beta(self) -> beta.AsyncBetaResourceWithStreamingResponse: - from .resources.beta import AsyncBetaResourceWithStreamingResponse - - return AsyncBetaResourceWithStreamingResponse(self._client.beta) - @cached_property def alpha(self) -> alpha.AsyncAlphaResourceWithStreamingResponse: from .resources.alpha import AsyncAlphaResourceWithStreamingResponse return AsyncAlphaResourceWithStreamingResponse(self._client.alpha) + @cached_property + def beta(self) -> beta.AsyncBetaResourceWithStreamingResponse: + from .resources.beta 
import AsyncBetaResourceWithStreamingResponse + + return AsyncBetaResourceWithStreamingResponse(self._client.beta) + Client = LlamaStackClient diff --git a/src/llama_stack_client/resources/__init__.py b/src/llama_stack_client/resources/__init__.py index f09606b6..751e9303 100644 --- a/src/llama_stack_client/resources/__init__.py +++ b/src/llama_stack_client/resources/__init__.py @@ -332,16 +332,16 @@ "AsyncFilesResourceWithRawResponse", "FilesResourceWithStreamingResponse", "AsyncFilesResourceWithStreamingResponse", - "BetaResource", - "AsyncBetaResource", - "BetaResourceWithRawResponse", - "AsyncBetaResourceWithRawResponse", - "BetaResourceWithStreamingResponse", - "AsyncBetaResourceWithStreamingResponse", "AlphaResource", "AsyncAlphaResource", "AlphaResourceWithRawResponse", "AsyncAlphaResourceWithRawResponse", "AlphaResourceWithStreamingResponse", "AsyncAlphaResourceWithStreamingResponse", + "BetaResource", + "AsyncBetaResource", + "BetaResourceWithRawResponse", + "AsyncBetaResourceWithRawResponse", + "BetaResourceWithStreamingResponse", + "AsyncBetaResourceWithStreamingResponse", ] diff --git a/src/llama_stack_client/resources/tool_runtime/rag_tool.py b/src/llama_stack_client/resources/tool_runtime/rag_tool.py index 7db478f7..af4a7d64 100644 --- a/src/llama_stack_client/resources/tool_runtime/rag_tool.py +++ b/src/llama_stack_client/resources/tool_runtime/rag_tool.py @@ -57,7 +57,7 @@ def insert( *, chunk_size_in_tokens: int, documents: Iterable[Document], - vector_store_id: str, + vector_db_id: str, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -73,7 +73,7 @@ def insert( documents: List of documents to index in the RAG system - vector_store_id: ID of the vector database to store the document embeddings + vector_db_id: ID of the vector database to store the document embeddings extra_headers: Send extra headers @@ -90,7 +90,7 @@ def insert( { "chunk_size_in_tokens": chunk_size_in_tokens, "documents": documents, - "vector_store_id": vector_store_id, + "vector_db_id": vector_db_id, }, rag_tool_insert_params.RagToolInsertParams, ), @@ -104,7 +104,7 @@ def query( self, *, content: InterleavedContent, - vector_store_ids: SequenceNotStr[str], + vector_db_ids: SequenceNotStr[str], query_config: QueryConfig | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -119,7 +119,7 @@ def query( Args: content: The query content to search for in the indexed documents - vector_store_ids: List of vector database IDs to search within + vector_db_ids: List of vector database IDs to search within query_config: (Optional) Configuration parameters for the query operation @@ -136,7 +136,7 @@ def query( body=maybe_transform( { "content": content, - "vector_store_ids": vector_store_ids, + "vector_db_ids": vector_db_ids, "query_config": query_config, }, rag_tool_query_params.RagToolQueryParams, @@ -173,7 +173,7 @@ async def insert( *, chunk_size_in_tokens: int, documents: Iterable[Document], - vector_store_id: str, + vector_db_id: str, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -189,7 +189,7 @@ async def insert( documents: List of documents to index in the RAG system - vector_store_id: ID of the vector database to store the document embeddings + vector_db_id: ID of the vector database to store the document embeddings extra_headers: Send extra headers @@ -206,7 +206,7 @@ async def insert( { "chunk_size_in_tokens": chunk_size_in_tokens, "documents": documents, - "vector_store_id": vector_store_id, + "vector_db_id": vector_db_id, }, rag_tool_insert_params.RagToolInsertParams, ), @@ -220,7 +220,7 @@ async def query( self, *, content: InterleavedContent, - vector_store_ids: SequenceNotStr[str], + vector_db_ids: SequenceNotStr[str], query_config: QueryConfig | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -235,7 +235,7 @@ async def query( Args: content: The query content to search for in the indexed documents - vector_store_ids: List of vector database IDs to search within + vector_db_ids: List of vector database IDs to search within query_config: (Optional) Configuration parameters for the query operation @@ -252,7 +252,7 @@ async def query( body=await async_maybe_transform( { "content": content, - "vector_store_ids": vector_store_ids, + "vector_db_ids": vector_db_ids, "query_config": query_config, }, rag_tool_query_params.RagToolQueryParams, diff --git a/src/llama_stack_client/resources/vector_io.py b/src/llama_stack_client/resources/vector_io.py index dda04f33..2659c139 100644 --- a/src/llama_stack_client/resources/vector_io.py +++ b/src/llama_stack_client/resources/vector_io.py @@ -54,7 +54,7 @@ def insert( self, *, chunks: Iterable[vector_io_insert_params.Chunk], - vector_store_id: str, + vector_db_id: str, ttl_seconds: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -74,7 +74,7 @@ def insert( configure how Llama Stack formats the chunk during generation. If `embedding` is not provided, it will be computed later. - vector_store_id: The identifier of the vector database to insert the chunks into. + vector_db_id: The identifier of the vector database to insert the chunks into. ttl_seconds: The time to live of the chunks. @@ -92,7 +92,7 @@ def insert( body=maybe_transform( { "chunks": chunks, - "vector_store_id": vector_store_id, + "vector_db_id": vector_db_id, "ttl_seconds": ttl_seconds, }, vector_io_insert_params.VectorIoInsertParams, @@ -107,7 +107,7 @@ def query( self, *, query: InterleavedContent, - vector_store_id: str, + vector_db_id: str, params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -122,7 +122,7 @@ def query( Args: query: The query to search for. - vector_store_id: The identifier of the vector database to query. + vector_db_id: The identifier of the vector database to query. params: The parameters of the query. 
@@ -139,7 +139,7 @@ def query( body=maybe_transform( { "query": query, - "vector_store_id": vector_store_id, + "vector_db_id": vector_db_id, "params": params, }, vector_io_query_params.VectorIoQueryParams, @@ -175,7 +175,7 @@ async def insert( self, *, chunks: Iterable[vector_io_insert_params.Chunk], - vector_store_id: str, + vector_db_id: str, ttl_seconds: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -195,7 +195,7 @@ async def insert( configure how Llama Stack formats the chunk during generation. If `embedding` is not provided, it will be computed later. - vector_store_id: The identifier of the vector database to insert the chunks into. + vector_db_id: The identifier of the vector database to insert the chunks into. ttl_seconds: The time to live of the chunks. @@ -213,7 +213,7 @@ async def insert( body=await async_maybe_transform( { "chunks": chunks, - "vector_store_id": vector_store_id, + "vector_db_id": vector_db_id, "ttl_seconds": ttl_seconds, }, vector_io_insert_params.VectorIoInsertParams, @@ -228,7 +228,7 @@ async def query( self, *, query: InterleavedContent, - vector_store_id: str, + vector_db_id: str, params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -243,7 +243,7 @@ async def query( Args: query: The query to search for. - vector_store_id: The identifier of the vector database to query. + vector_db_id: The identifier of the vector database to query. params: The parameters of the query. 
@@ -260,7 +260,7 @@ async def query( body=await async_maybe_transform( { "query": query, - "vector_store_id": vector_store_id, + "vector_db_id": vector_db_id, "params": params, }, vector_io_query_params.VectorIoQueryParams, diff --git a/src/llama_stack_client/types/alpha/memory_retrieval_step.py b/src/llama_stack_client/types/alpha/memory_retrieval_step.py index 1b5708ce..727c0ec0 100644 --- a/src/llama_stack_client/types/alpha/memory_retrieval_step.py +++ b/src/llama_stack_client/types/alpha/memory_retrieval_step.py @@ -29,7 +29,7 @@ class MemoryRetrievalStep(BaseModel): turn_id: str """The ID of the turn.""" - vector_store_ids: str + vector_db_ids: str """The IDs of the vector databases to retrieve context from.""" completed_at: Optional[datetime] = None diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py index d65980c5..095a2a69 100644 --- a/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py +++ b/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py @@ -23,5 +23,5 @@ class RagToolInsertParams(TypedDict, total=False): documents: Required[Iterable[Document]] """List of documents to index in the RAG system""" - vector_store_id: Required[str] + vector_db_id: Required[str] """ID of the vector database to store the document embeddings""" diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py index 6e8fa8ce..08d1f998 100644 --- a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py +++ b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py @@ -21,7 +21,7 @@ class RagToolQueryParams(TypedDict, total=False): content: Required[InterleavedContent] """The query content to search for in the indexed documents""" - vector_store_ids: Required[SequenceNotStr[str]] + vector_db_ids: Required[SequenceNotStr[str]] """List of vector database IDs to search within""" query_config: QueryConfig diff --git a/src/llama_stack_client/types/vector_io_insert_params.py b/src/llama_stack_client/types/vector_io_insert_params.py index 1584f807..5613251f 100644 --- a/src/llama_stack_client/types/vector_io_insert_params.py +++ b/src/llama_stack_client/types/vector_io_insert_params.py @@ -27,7 +27,7 @@ class VectorIoInsertParams(TypedDict, total=False): later. 
""" - vector_store_id: Required[str] + vector_db_id: Required[str] """The identifier of the vector database to insert the chunks into.""" ttl_seconds: int diff --git a/src/llama_stack_client/types/vector_io_query_params.py b/src/llama_stack_client/types/vector_io_query_params.py index 538604ac..a2fdc561 100644 --- a/src/llama_stack_client/types/vector_io_query_params.py +++ b/src/llama_stack_client/types/vector_io_query_params.py @@ -20,7 +20,7 @@ class VectorIoQueryParams(TypedDict, total=False): query: Required[InterleavedContent] """The query to search for.""" - vector_store_id: Required[str] + vector_db_id: Required[str] """The identifier of the vector database to query.""" params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] diff --git a/tests/api_resources/test_vector_io.py b/tests/api_resources/test_vector_io.py index 9adf721a..2e37edcf 100644 --- a/tests/api_resources/test_vector_io.py +++ b/tests/api_resources/test_vector_io.py @@ -32,7 +32,7 @@ def test_method_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert vector_io is None @@ -60,7 +60,7 @@ def test_method_insert_with_all_params(self, client: LlamaStackClient) -> None: "stored_chunk_id": "stored_chunk_id", } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ttl_seconds=0, ) assert vector_io is None @@ -74,7 +74,7 @@ def test_raw_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert response.is_closed is True @@ -91,7 +91,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -105,7 +105,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: def test_method_query(self, client: LlamaStackClient) -> None: vector_io = client.vector_io.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -113,7 +113,7 @@ def test_method_query(self, client: LlamaStackClient) -> None: def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: vector_io = client.vector_io.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", params={"foo": True}, ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -122,7 +122,7 @@ def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: def test_raw_response_query(self, client: LlamaStackClient) -> None: response = client.vector_io.with_raw_response.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert response.is_closed is True @@ -134,7 +134,7 @@ def test_raw_response_query(self, client: LlamaStackClient) -> None: def test_streaming_response_query(self, client: LlamaStackClient) -> None: with client.vector_io.with_streaming_response.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -159,7 +159,7 @@ async def test_method_insert(self, async_client: AsyncLlamaStackClient) -> 
None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert vector_io is None @@ -187,7 +187,7 @@ async def test_method_insert_with_all_params(self, async_client: AsyncLlamaStack "stored_chunk_id": "stored_chunk_id", } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ttl_seconds=0, ) assert vector_io is None @@ -201,7 +201,7 @@ async def test_raw_response_insert(self, async_client: AsyncLlamaStackClient) -> "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert response.is_closed is True @@ -218,7 +218,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -232,7 +232,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: vector_io = await async_client.vector_io.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -240,7 +240,7 @@ async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: vector_io = await async_client.vector_io.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", params={"foo": True}, ) assert_matches_type(QueryChunksResponse, vector_io, path=["response"]) @@ -249,7 +249,7 @@ async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackC async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> None: response = await async_client.vector_io.with_raw_response.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert response.is_closed is True @@ -261,7 +261,7 @@ async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> async def test_streaming_response_query(self, async_client: AsyncLlamaStackClient) -> None: async with async_client.vector_io.with_streaming_response.query( query="string", - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/tool_runtime/test_rag_tool.py b/tests/api_resources/tool_runtime/test_rag_tool.py index fd743058..8dd7e752 100644 --- a/tests/api_resources/tool_runtime/test_rag_tool.py +++ b/tests/api_resources/tool_runtime/test_rag_tool.py @@ -34,7 +34,7 @@ def test_method_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert rag_tool is None @@ -49,7 +49,7 @@ def test_raw_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert response.is_closed is True @@ -68,7 +68,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) as response: assert not response.is_closed assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -82,7 +82,7 @@ def test_streaming_response_insert(self, client: LlamaStackClient) -> None: def test_method_query(self, client: LlamaStackClient) -> None: rag_tool = client.tool_runtime.rag_tool.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], ) assert_matches_type(QueryResult, rag_tool, path=["response"]) @@ -90,7 +90,7 @@ def test_method_query(self, client: LlamaStackClient) -> None: def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: rag_tool = client.tool_runtime.rag_tool.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], query_config={ "chunk_template": "chunk_template", "max_chunks": 0, @@ -112,7 +112,7 @@ def test_method_query_with_all_params(self, client: LlamaStackClient) -> None: def test_raw_response_query(self, client: LlamaStackClient) -> None: response = client.tool_runtime.rag_tool.with_raw_response.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], ) assert response.is_closed is True @@ -124,7 +124,7 @@ def test_raw_response_query(self, client: LlamaStackClient) -> None: def test_streaming_response_query(self, client: LlamaStackClient) -> None: with client.tool_runtime.rag_tool.with_streaming_response.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -151,7 +151,7 @@ async def test_method_insert(self, async_client: AsyncLlamaStackClient) -> None: "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert rag_tool is None @@ -166,7 +166,7 @@ async def test_raw_response_insert(self, async_client: AsyncLlamaStackClient) -> "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) assert response.is_closed is True @@ -185,7 +185,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie "metadata": {"foo": True}, } ], - vector_store_id="vector_store_id", + vector_db_id="vector_db_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -199,7 +199,7 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStackClie async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: rag_tool = await async_client.tool_runtime.rag_tool.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], ) assert_matches_type(QueryResult, rag_tool, path=["response"]) @@ -207,7 +207,7 @@ async def test_method_query(self, async_client: AsyncLlamaStackClient) -> None: async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: rag_tool = await async_client.tool_runtime.rag_tool.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], query_config={ "chunk_template": "chunk_template", "max_chunks": 0, @@ -229,7 +229,7 @@ async def test_method_query_with_all_params(self, async_client: AsyncLlamaStackC async def test_raw_response_query(self, async_client: AsyncLlamaStackClient) -> None: response = await async_client.tool_runtime.rag_tool.with_raw_response.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], ) assert response.is_closed is True @@ -241,7 +241,7 @@ async def test_raw_response_query(self, async_client: 
AsyncLlamaStackClient) -> async def test_streaming_response_query(self, async_client: AsyncLlamaStackClient) -> None: async with async_client.tool_runtime.rag_tool.with_streaming_response.query( content="string", - vector_store_ids=["string"], + vector_db_ids=["string"], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" From a7d787fd746e66208fd2855d1ed310cd8005d5ee Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 20:11:27 +0000 Subject: [PATCH 4/6] feat(api): manual updates --- .stats.yml | 6 +- api.md | 2 +- .../resources/conversations/items.py | 87 +++++----- .../resources/models/models.py | 4 +- .../types/conversations/item_list_params.py | 32 ++-- .../types/conversations/item_list_response.py | 149 ++++++++---------- src/llama_stack_client/types/model.py | 2 +- .../types/model_register_params.py | 2 +- .../api_resources/conversations/test_items.py | 59 +++---- 9 files changed, 157 insertions(+), 186 deletions(-) diff --git a/.stats.yml b/.stats.yml index 77e48a77..b72d7751 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 104 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-fcaa960dc2de2029f4f67f13ff1d0fc1ff70e683810ed9739be805debef1673d.yml -openapi_spec_hash: a25e7616ad6230f872b46c2cb6fa0a96 -config_hash: ff421daf28f90ad4bd4e13f374b18a00 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-97b91eca4a3ff251edc02636b1a638866675d6c1abd46cd9fc18bc50a1de9656.yml +openapi_spec_hash: 7302f1aa50090e3de78e34c184371267 +config_hash: 364c8cc514fe5aa69bd24c7f3721985b diff --git a/api.md b/api.md index 4c078d02..6fc13146 100644 --- a/api.md +++ b/api.md @@ -132,7 +132,7 @@ from llama_stack_client.types.conversations import ( Methods: - client.conversations.items.create(conversation_id, \*\*params) -> ItemCreateResponse -- client.conversations.items.list(conversation_id, \*\*params) -> ItemListResponse +- client.conversations.items.list(conversation_id, \*\*params) -> SyncOpenAICursorPage[ItemListResponse] - client.conversations.items.get(item_id, \*, conversation_id) -> ItemGetResponse # Inspect diff --git a/src/llama_stack_client/resources/conversations/items.py b/src/llama_stack_client/resources/conversations/items.py index 598e3168..4850adf8 100644 --- a/src/llama_stack_client/resources/conversations/items.py +++ b/src/llama_stack_client/resources/conversations/items.py @@ -8,12 +8,12 @@ from __future__ import annotations -from typing import Any, List, Union, Iterable, cast +from typing import Any, List, Iterable, cast from typing_extensions import Literal import httpx -from ..._types import Body, Query, Headers, NotGiven, not_given +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -23,7 +23,8 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import make_request_options +from ...pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage +from ..._base_client import AsyncPaginator, make_request_options from ...types.conversations import item_list_params, item_create_params from ...types.conversations.item_get_response import ItemGetResponse from 
...types.conversations.item_list_response import ItemListResponse @@ -94,29 +95,28 @@ def list( self, conversation_id: str, *, - after: Union[str, object], - include: Union[ - List[ - Literal[ - "code_interpreter_call.outputs", - "computer_call_output.output.image_url", - "file_search_call.results", - "message.input_image.image_url", - "message.output_text.logprobs", - "reasoning.encrypted_content", - ] - ], - object, - ], - limit: Union[int, object], - order: Union[Literal["asc", "desc"], object], + after: str | Omit = omit, + include: List[ + Literal[ + "web_search_call.action.sources", + "code_interpreter_call.outputs", + "computer_call_output.output.image_url", + "file_search_call.results", + "message.input_image.image_url", + "message.output_text.logprobs", + "reasoning.encrypted_content", + ] + ] + | Omit = omit, + limit: int | Omit = omit, + order: Literal["asc", "desc"] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> ItemListResponse: + ) -> SyncOpenAICursorPage[ItemListResponse]: """List items. List items in the conversation. @@ -140,8 +140,9 @@ def list( """ if not conversation_id: raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}") - return self._get( + return self._get_api_list( f"/v1/conversations/{conversation_id}/items", + page=SyncOpenAICursorPage[ItemListResponse], options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -157,7 +158,7 @@ def list( item_list_params.ItemListParams, ), ), - cast_to=ItemListResponse, + model=cast(Any, ItemListResponse), # Union types cannot be passed in as arguments in the type system ) def get( @@ -259,33 +260,32 @@ async def create( cast_to=ItemCreateResponse, ) - async def list( + def list( self, conversation_id: str, *, - after: Union[str, object], - include: Union[ - List[ - Literal[ - "code_interpreter_call.outputs", - "computer_call_output.output.image_url", - "file_search_call.results", - "message.input_image.image_url", - "message.output_text.logprobs", - "reasoning.encrypted_content", - ] - ], - object, - ], - limit: Union[int, object], - order: Union[Literal["asc", "desc"], object], + after: str | Omit = omit, + include: List[ + Literal[ + "web_search_call.action.sources", + "code_interpreter_call.outputs", + "computer_call_output.output.image_url", + "file_search_call.results", + "message.input_image.image_url", + "message.output_text.logprobs", + "reasoning.encrypted_content", + ] + ] + | Omit = omit, + limit: int | Omit = omit, + order: Literal["asc", "desc"] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = not_given, - ) -> ItemListResponse: + ) -> AsyncPaginator[ItemListResponse, AsyncOpenAICursorPage[ItemListResponse]]: """List items. List items in the conversation. 
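
For reference, a minimal sketch of how the cursor-paginated `items.list` reads from calling code once this change lands. The base URL and conversation ID below are placeholders, it assumes a reachable server, and iteration relies on the standard Stainless page behavior of fetching subsequent pages on demand:

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

# All filter arguments are now optional (Omit-defaulted), so a bare call works too.
page = client.conversations.items.list("conv_123", limit=20, order="desc")
for item in page:  # SyncOpenAICursorPage lazily fetches further pages
    print(item.type)
```
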
@@ -309,14 +309,15 @@ async def list( """ if not conversation_id: raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}") - return await self._get( + return self._get_api_list( f"/v1/conversations/{conversation_id}/items", + page=AsyncOpenAICursorPage[ItemListResponse], options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout, - query=await async_maybe_transform( + query=maybe_transform( { "after": after, "include": include, @@ -326,7 +327,7 @@ async def list( item_list_params.ItemListParams, ), ), - cast_to=ItemListResponse, + model=cast(Any, ItemListResponse), # Union types cannot be passed in as arguments in the type system ) async def get( diff --git a/src/llama_stack_client/resources/models/models.py b/src/llama_stack_client/resources/models/models.py index e7b33aa5..99ebccdd 100644 --- a/src/llama_stack_client/resources/models/models.py +++ b/src/llama_stack_client/resources/models/models.py @@ -126,7 +126,7 @@ def register( *, model_id: str, metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit, - model_type: Literal["llm", "embedding"] | Omit = omit, + model_type: Literal["llm", "embedding", "rerank"] | Omit = omit, provider_id: str | Omit = omit, provider_model_id: str | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -299,7 +299,7 @@ async def register( *, model_id: str, metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit, - model_type: Literal["llm", "embedding"] | Omit = omit, + model_type: Literal["llm", "embedding", "rerank"] | Omit = omit, provider_id: str | Omit = omit, provider_model_id: str | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
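
Similarly, a small sketch of the widened `model_type` enum: `"rerank"` is now accepted alongside `"llm"` and `"embedding"`. The identifiers below are placeholders, and the call assumes a provider that actually serves a reranking model:

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

client.models.register(
    model_id="my-reranker",          # placeholder model name
    model_type="rerank",             # newly permitted literal value
    provider_id="example-provider",  # assumes a configured rerank provider
)
```
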
diff --git a/src/llama_stack_client/types/conversations/item_list_params.py b/src/llama_stack_client/types/conversations/item_list_params.py index f81fc929..768a91b7 100644 --- a/src/llama_stack_client/types/conversations/item_list_params.py +++ b/src/llama_stack_client/types/conversations/item_list_params.py @@ -8,35 +8,31 @@ from __future__ import annotations -from typing import List, Union -from typing_extensions import Literal, Required, TypedDict +from typing import List +from typing_extensions import Literal, TypedDict __all__ = ["ItemListParams"] class ItemListParams(TypedDict, total=False): - after: Required[Union[str, object]] + after: str """An item ID to list items after, used in pagination.""" - include: Required[ - Union[ - List[ - Literal[ - "code_interpreter_call.outputs", - "computer_call_output.output.image_url", - "file_search_call.results", - "message.input_image.image_url", - "message.output_text.logprobs", - "reasoning.encrypted_content", - ] - ], - object, + include: List[ + Literal[ + "web_search_call.action.sources", + "code_interpreter_call.outputs", + "computer_call_output.output.image_url", + "file_search_call.results", + "message.input_image.image_url", + "message.output_text.logprobs", + "reasoning.encrypted_content", ] ] """Specify additional output data to include in the response.""" - limit: Required[Union[int, object]] + limit: int """A limit on the number of objects to be returned (1-100, default 20).""" - order: Required[Union[Literal["asc", "desc"], object]] + order: Literal["asc", "desc"] """The order to return items in (asc or desc, default desc).""" diff --git a/src/llama_stack_client/types/conversations/item_list_response.py b/src/llama_stack_client/types/conversations/item_list_response.py index 414e4f76..b95f56fb 100644 --- a/src/llama_stack_client/types/conversations/item_list_response.py +++ b/src/llama_stack_client/types/conversations/item_list_response.py @@ -14,33 +14,32 @@ __all__ = [ "ItemListResponse", - "Data", - "DataOpenAIResponseMessage", - "DataOpenAIResponseMessageContentUnionMember1", - "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", - "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", - "DataOpenAIResponseMessageContentUnionMember2", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", - "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", - "DataOpenAIResponseOutputMessageWebSearchToolCall", - "DataOpenAIResponseOutputMessageFileSearchToolCall", - "DataOpenAIResponseOutputMessageFileSearchToolCallResult", - "DataOpenAIResponseOutputMessageFunctionToolCall", - "DataOpenAIResponseInputFunctionToolCallOutput", - "DataOpenAIResponseMcpApprovalRequest", - "DataOpenAIResponseMcpApprovalResponse", - "DataOpenAIResponseOutputMessageMcpCall", - 
"DataOpenAIResponseOutputMessageMcpListTools", - "DataOpenAIResponseOutputMessageMcpListToolsTool", + "OpenAIResponseMessage", + "OpenAIResponseMessageContentUnionMember1", + "OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText", + "OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage", + "OpenAIResponseMessageContentUnionMember2", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath", + "OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal", + "OpenAIResponseOutputMessageWebSearchToolCall", + "OpenAIResponseOutputMessageFileSearchToolCall", + "OpenAIResponseOutputMessageFileSearchToolCallResult", + "OpenAIResponseOutputMessageFunctionToolCall", + "OpenAIResponseInputFunctionToolCallOutput", + "OpenAIResponseMcpApprovalRequest", + "OpenAIResponseMcpApprovalResponse", + "OpenAIResponseOutputMessageMcpCall", + "OpenAIResponseOutputMessageMcpListTools", + "OpenAIResponseOutputMessageMcpListToolsTool", ] -class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel): +class OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel): text: str """The text content of the input message""" @@ -48,7 +47,7 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont """Content type identifier, always "input_text" """ -class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel): +class OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel): detail: Literal["low", "high", "auto"] """Level of detail for image processing, can be "low", "high", or "auto" """ @@ -59,16 +58,16 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont """(Optional) URL of the image content""" -DataOpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[ +OpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText, - DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage, + OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText, + OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage, ], PropertyInfo(discriminator="type"), ] -class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation( BaseModel ): file_id: str @@ -84,7 +83,7 @@ class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageCon """Annotation type identifier, always "file_citation" """ -class 
DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation( BaseModel ): end_index: int @@ -103,7 +102,7 @@ class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageCon """URL of the referenced web resource""" -class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation( BaseModel ): container_id: str @@ -119,7 +118,7 @@ class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageCon type: Literal["container_file_citation"] -class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath( BaseModel ): file_id: str @@ -129,28 +128,26 @@ class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageCon type: Literal["file_path"] -DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ +OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath, ], PropertyInfo(discriminator="type"), ] -class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): - annotations: List[ - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation - ] +class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel): + annotations: List[OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation] text: str type: Literal["output_text"] -class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): +class OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel): refusal: str """Refusal text supplied by the model""" @@ -158,19 +155,17 @@ class 
DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefus """Content part type identifier, always "refusal" """ -DataOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ +OpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, - DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, + OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText, + OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal, ], PropertyInfo(discriminator="type"), ] -class DataOpenAIResponseMessage(BaseModel): - content: Union[ - str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2] - ] +class OpenAIResponseMessage(BaseModel): + content: Union[str, List[OpenAIResponseMessageContentUnionMember1], List[OpenAIResponseMessageContentUnionMember2]] role: Literal["system", "developer", "user", "assistant"] @@ -181,7 +176,7 @@ class DataOpenAIResponseMessage(BaseModel): status: Optional[str] = None -class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel): +class OpenAIResponseOutputMessageWebSearchToolCall(BaseModel): id: str """Unique identifier for this tool call""" @@ -192,7 +187,7 @@ class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel): """Tool call type identifier, always "web_search_call" """ -class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel): +class OpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel): attributes: Dict[str, Union[bool, float, str, List[object], object, None]] """(Optional) Key-value attributes associated with the file""" @@ -209,7 +204,7 @@ class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel): """Text content of the search result""" -class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel): +class OpenAIResponseOutputMessageFileSearchToolCall(BaseModel): id: str """Unique identifier for this tool call""" @@ -222,11 +217,11 @@ class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel): type: Literal["file_search_call"] """Tool call type identifier, always "file_search_call" """ - results: Optional[List[DataOpenAIResponseOutputMessageFileSearchToolCallResult]] = None + results: Optional[List[OpenAIResponseOutputMessageFileSearchToolCallResult]] = None """(Optional) Search results returned by the file search operation""" -class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel): +class OpenAIResponseOutputMessageFunctionToolCall(BaseModel): arguments: str """JSON string containing the function arguments""" @@ -246,7 +241,7 @@ class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel): """(Optional) Current status of the function call execution""" -class DataOpenAIResponseInputFunctionToolCallOutput(BaseModel): +class OpenAIResponseInputFunctionToolCallOutput(BaseModel): call_id: str output: str @@ -258,7 +253,7 @@ class DataOpenAIResponseInputFunctionToolCallOutput(BaseModel): status: Optional[str] = None -class DataOpenAIResponseMcpApprovalRequest(BaseModel): +class OpenAIResponseMcpApprovalRequest(BaseModel): id: str arguments: str @@ -270,7 +265,7 @@ class DataOpenAIResponseMcpApprovalRequest(BaseModel): type: Literal["mcp_approval_request"] -class DataOpenAIResponseMcpApprovalResponse(BaseModel): +class OpenAIResponseMcpApprovalResponse(BaseModel): approval_request_id: str approve: bool @@ -282,7 +277,7 @@ class 
DataOpenAIResponseMcpApprovalResponse(BaseModel): reason: Optional[str] = None -class DataOpenAIResponseOutputMessageMcpCall(BaseModel): +class OpenAIResponseOutputMessageMcpCall(BaseModel): id: str """Unique identifier for this MCP call""" @@ -305,7 +300,7 @@ class DataOpenAIResponseOutputMessageMcpCall(BaseModel): """(Optional) Output result from the successful MCP call""" -class DataOpenAIResponseOutputMessageMcpListToolsTool(BaseModel): +class OpenAIResponseOutputMessageMcpListToolsTool(BaseModel): input_schema: Dict[str, Union[bool, float, str, List[object], object, None]] """JSON schema defining the tool's input parameters""" @@ -316,43 +311,31 @@ class DataOpenAIResponseOutputMessageMcpListToolsTool(BaseModel): """(Optional) Description of what the tool does""" -class DataOpenAIResponseOutputMessageMcpListTools(BaseModel): +class OpenAIResponseOutputMessageMcpListTools(BaseModel): id: str """Unique identifier for this MCP list tools operation""" server_label: str """Label identifying the MCP server providing the tools""" - tools: List[DataOpenAIResponseOutputMessageMcpListToolsTool] + tools: List[OpenAIResponseOutputMessageMcpListToolsTool] """List of available tools provided by the MCP server""" type: Literal["mcp_list_tools"] """Tool call type identifier, always "mcp_list_tools" """ -Data: TypeAlias = Annotated[ +ItemListResponse: TypeAlias = Annotated[ Union[ - DataOpenAIResponseMessage, - DataOpenAIResponseOutputMessageWebSearchToolCall, - DataOpenAIResponseOutputMessageFileSearchToolCall, - DataOpenAIResponseOutputMessageFunctionToolCall, - DataOpenAIResponseInputFunctionToolCallOutput, - DataOpenAIResponseMcpApprovalRequest, - DataOpenAIResponseMcpApprovalResponse, - DataOpenAIResponseOutputMessageMcpCall, - DataOpenAIResponseOutputMessageMcpListTools, + OpenAIResponseMessage, + OpenAIResponseOutputMessageWebSearchToolCall, + OpenAIResponseOutputMessageFileSearchToolCall, + OpenAIResponseOutputMessageFunctionToolCall, + OpenAIResponseInputFunctionToolCallOutput, + OpenAIResponseMcpApprovalRequest, + OpenAIResponseMcpApprovalResponse, + OpenAIResponseOutputMessageMcpCall, + OpenAIResponseOutputMessageMcpListTools, ], PropertyInfo(discriminator="type"), ] - - -class ItemListResponse(BaseModel): - data: List[Data] - - has_more: bool - - object: str - - first_id: Optional[str] = None - - last_id: Optional[str] = None diff --git a/src/llama_stack_client/types/model.py b/src/llama_stack_client/types/model.py index 538547fa..5807b0ed 100644 --- a/src/llama_stack_client/types/model.py +++ b/src/llama_stack_client/types/model.py @@ -23,7 +23,7 @@ class Model(BaseModel): metadata: Dict[str, Union[bool, float, str, List[object], object, None]] """Any additional metadata for this model""" - api_model_type: Literal["llm", "embedding"] = FieldInfo(alias="model_type") + api_model_type: Literal["llm", "embedding", "rerank"] = FieldInfo(alias="model_type") """The type of model (LLM or embedding model)""" provider_id: str diff --git a/src/llama_stack_client/types/model_register_params.py b/src/llama_stack_client/types/model_register_params.py index f0a235aa..7fdda65d 100644 --- a/src/llama_stack_client/types/model_register_params.py +++ b/src/llama_stack_client/types/model_register_params.py @@ -21,7 +21,7 @@ class ModelRegisterParams(TypedDict, total=False): metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] """Any additional metadata for this model.""" - model_type: Literal["llm", "embedding"] + model_type: Literal["llm", "embedding", "rerank"] """The type of model 
to register.""" provider_id: str diff --git a/tests/api_resources/conversations/test_items.py b/tests/api_resources/conversations/test_items.py index 3556f887..2d2ce821 100644 --- a/tests/api_resources/conversations/test_items.py +++ b/tests/api_resources/conversations/test_items.py @@ -15,6 +15,7 @@ from tests.utils import assert_matches_type from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient +from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage from llama_stack_client.types.conversations import ( ItemGetResponse, ItemListResponse, @@ -97,42 +98,41 @@ def test_path_params_create(self, client: LlamaStackClient) -> None: def test_method_list(self, client: LlamaStackClient) -> None: item = client.conversations.items.list( conversation_id="conversation_id", - after="string", - include=["code_interpreter_call.outputs"], + ) + assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"]) + + @parametrize + def test_method_list_with_all_params(self, client: LlamaStackClient) -> None: + item = client.conversations.items.list( + conversation_id="conversation_id", + after="after", + include=["web_search_call.action.sources"], limit=0, order="asc", ) - assert_matches_type(ItemListResponse, item, path=["response"]) + assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"]) @parametrize def test_raw_response_list(self, client: LlamaStackClient) -> None: response = client.conversations.items.with_raw_response.list( conversation_id="conversation_id", - after="string", - include=["code_interpreter_call.outputs"], - limit=0, - order="asc", ) assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" item = response.parse() - assert_matches_type(ItemListResponse, item, path=["response"]) + assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"]) @parametrize def test_streaming_response_list(self, client: LlamaStackClient) -> None: with client.conversations.items.with_streaming_response.list( conversation_id="conversation_id", - after="string", - include=["code_interpreter_call.outputs"], - limit=0, - order="asc", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" item = response.parse() - assert_matches_type(ItemListResponse, item, path=["response"]) + assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"]) assert cast(Any, response.is_closed) is True @@ -141,10 +141,6 @@ def test_path_params_list(self, client: LlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `conversation_id` but received ''"): client.conversations.items.with_raw_response.list( conversation_id="", - after="string", - include=["code_interpreter_call.outputs"], - limit=0, - order="asc", ) @parametrize @@ -271,42 +267,41 @@ async def test_path_params_create(self, async_client: AsyncLlamaStackClient) -> async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None: item = await async_client.conversations.items.list( conversation_id="conversation_id", - after="string", - include=["code_interpreter_call.outputs"], + ) + assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"]) + + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None: + item = await async_client.conversations.items.list( + conversation_id="conversation_id", + 
after="after", + include=["web_search_call.action.sources"], limit=0, order="asc", ) - assert_matches_type(ItemListResponse, item, path=["response"]) + assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"]) @parametrize async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None: response = await async_client.conversations.items.with_raw_response.list( conversation_id="conversation_id", - after="string", - include=["code_interpreter_call.outputs"], - limit=0, - order="asc", ) assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" item = await response.parse() - assert_matches_type(ItemListResponse, item, path=["response"]) + assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"]) @parametrize async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None: async with async_client.conversations.items.with_streaming_response.list( conversation_id="conversation_id", - after="string", - include=["code_interpreter_call.outputs"], - limit=0, - order="asc", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" item = await response.parse() - assert_matches_type(ItemListResponse, item, path=["response"]) + assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"]) assert cast(Any, response.is_closed) is True @@ -315,10 +310,6 @@ async def test_path_params_list(self, async_client: AsyncLlamaStackClient) -> No with pytest.raises(ValueError, match=r"Expected a non-empty value for `conversation_id` but received ''"): await async_client.conversations.items.with_raw_response.list( conversation_id="", - after="string", - include=["code_interpreter_call.outputs"], - limit=0, - order="asc", ) @parametrize From 5b288dc7f1f1bf1ade741b2e7789077046e00a0e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 22:14:56 +0000 Subject: [PATCH 5/6] feat(api): manual updates --- .stats.yml | 2 +- api.md | 28 +++++------ src/llama_stack_client/_client.py | 38 --------------- src/llama_stack_client/resources/__init__.py | 14 ------ .../resources/alpha/__init__.py | 14 ++++++ .../resources/alpha/alpha.py | 32 +++++++++++++ .../resources/{ => alpha}/benchmarks.py | 20 ++++---- src/llama_stack_client/types/__init__.py | 4 -- .../types/alpha/__init__.py | 4 ++ .../types/{ => alpha}/benchmark.py | 2 +- .../{ => alpha}/benchmark_list_response.py | 0 .../{ => alpha}/benchmark_register_params.py | 2 +- .../{ => alpha}/list_benchmarks_response.py | 2 +- .../{ => alpha}/test_benchmarks.py | 46 +++++++++---------- 14 files changed, 101 insertions(+), 107 deletions(-) rename src/llama_stack_client/resources/{ => alpha}/benchmarks.py (96%) rename src/llama_stack_client/types/{ => alpha}/benchmark.py (96%) rename src/llama_stack_client/types/{ => alpha}/benchmark_list_response.py (100%) rename src/llama_stack_client/types/{ => alpha}/benchmark_register_params.py (96%) rename src/llama_stack_client/types/{ => alpha}/list_benchmarks_response.py (93%) rename tests/api_resources/{ => alpha}/test_benchmarks.py (84%) diff --git a/.stats.yml b/.stats.yml index b72d7751..aadaf6e2 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 104 openapi_spec_url: 
https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-97b91eca4a3ff251edc02636b1a638866675d6c1abd46cd9fc18bc50a1de9656.yml openapi_spec_hash: 7302f1aa50090e3de78e34c184371267 -config_hash: 364c8cc514fe5aa69bd24c7f3721985b +config_hash: a3829dbdaa491194d01f399784d532cd diff --git a/api.md b/api.md index 6fc13146..5f7a90a2 100644 --- a/api.md +++ b/api.md @@ -401,20 +401,6 @@ Methods: - client.scoring_functions.list() -> ScoringFunctionListResponse - client.scoring_functions.register(\*\*params) -> None -# Benchmarks - -Types: - -```python -from llama_stack_client.types import Benchmark, ListBenchmarksResponse, BenchmarkListResponse -``` - -Methods: - -- client.benchmarks.retrieve(benchmark_id) -> Benchmark -- client.benchmarks.list() -> BenchmarkListResponse -- client.benchmarks.register(\*\*params) -> None - # Files Types: @@ -481,6 +467,20 @@ Methods: - client.alpha.post_training.job.cancel(\*\*params) -> None - client.alpha.post_training.job.status(\*\*params) -> JobStatusResponse +## Benchmarks + +Types: + +```python +from llama_stack_client.types.alpha import Benchmark, ListBenchmarksResponse, BenchmarkListResponse +``` + +Methods: + +- client.alpha.benchmarks.retrieve(benchmark_id) -> Benchmark +- client.alpha.benchmarks.list() -> BenchmarkListResponse +- client.alpha.benchmarks.register(\*\*params) -> None + ## Eval Types: diff --git a/src/llama_stack_client/_client.py b/src/llama_stack_client/_client.py index 2c96b202..34de181a 100644 --- a/src/llama_stack_client/_client.py +++ b/src/llama_stack_client/_client.py @@ -53,7 +53,6 @@ providers, responses, vector_io, - benchmarks, embeddings, toolgroups, completions, @@ -75,7 +74,6 @@ from .resources.chat.chat import ChatResource, AsyncChatResource from .resources.providers import ProvidersResource, AsyncProvidersResource from .resources.vector_io import VectorIoResource, AsyncVectorIoResource - from .resources.benchmarks import BenchmarksResource, AsyncBenchmarksResource from .resources.embeddings import EmbeddingsResource, AsyncEmbeddingsResource from .resources.toolgroups import ToolgroupsResource, AsyncToolgroupsResource from .resources.alpha.alpha import AlphaResource, AsyncAlphaResource @@ -281,12 +279,6 @@ def scoring_functions(self) -> ScoringFunctionsResource: return ScoringFunctionsResource(self) - @cached_property - def benchmarks(self) -> BenchmarksResource: - from .resources.benchmarks import BenchmarksResource - - return BenchmarksResource(self) - @cached_property def files(self) -> FilesResource: from .resources.files import FilesResource @@ -597,12 +589,6 @@ def scoring_functions(self) -> AsyncScoringFunctionsResource: return AsyncScoringFunctionsResource(self) - @cached_property - def benchmarks(self) -> AsyncBenchmarksResource: - from .resources.benchmarks import AsyncBenchmarksResource - - return AsyncBenchmarksResource(self) - @cached_property def files(self) -> AsyncFilesResource: from .resources.files import AsyncFilesResource @@ -862,12 +848,6 @@ def scoring_functions(self) -> scoring_functions.ScoringFunctionsResourceWithRaw return ScoringFunctionsResourceWithRawResponse(self._client.scoring_functions) - @cached_property - def benchmarks(self) -> benchmarks.BenchmarksResourceWithRawResponse: - from .resources.benchmarks import BenchmarksResourceWithRawResponse - - return BenchmarksResourceWithRawResponse(self._client.benchmarks) - @cached_property def files(self) -> files.FilesResourceWithRawResponse: from .resources.files import FilesResourceWithRawResponse @@ -1015,12 +995,6 
@@ def scoring_functions(self) -> scoring_functions.AsyncScoringFunctionsResourceWi return AsyncScoringFunctionsResourceWithRawResponse(self._client.scoring_functions) - @cached_property - def benchmarks(self) -> benchmarks.AsyncBenchmarksResourceWithRawResponse: - from .resources.benchmarks import AsyncBenchmarksResourceWithRawResponse - - return AsyncBenchmarksResourceWithRawResponse(self._client.benchmarks) - @cached_property def files(self) -> files.AsyncFilesResourceWithRawResponse: from .resources.files import AsyncFilesResourceWithRawResponse @@ -1168,12 +1142,6 @@ def scoring_functions(self) -> scoring_functions.ScoringFunctionsResourceWithStr return ScoringFunctionsResourceWithStreamingResponse(self._client.scoring_functions) - @cached_property - def benchmarks(self) -> benchmarks.BenchmarksResourceWithStreamingResponse: - from .resources.benchmarks import BenchmarksResourceWithStreamingResponse - - return BenchmarksResourceWithStreamingResponse(self._client.benchmarks) - @cached_property def files(self) -> files.FilesResourceWithStreamingResponse: from .resources.files import FilesResourceWithStreamingResponse @@ -1321,12 +1289,6 @@ def scoring_functions(self) -> scoring_functions.AsyncScoringFunctionsResourceWi return AsyncScoringFunctionsResourceWithStreamingResponse(self._client.scoring_functions) - @cached_property - def benchmarks(self) -> benchmarks.AsyncBenchmarksResourceWithStreamingResponse: - from .resources.benchmarks import AsyncBenchmarksResourceWithStreamingResponse - - return AsyncBenchmarksResourceWithStreamingResponse(self._client.benchmarks) - @cached_property def files(self) -> files.AsyncFilesResourceWithStreamingResponse: from .resources.files import AsyncFilesResourceWithStreamingResponse diff --git a/src/llama_stack_client/resources/__init__.py b/src/llama_stack_client/resources/__init__.py index 751e9303..60b18979 100644 --- a/src/llama_stack_client/resources/__init__.py +++ b/src/llama_stack_client/resources/__init__.py @@ -118,14 +118,6 @@ VectorIoResourceWithStreamingResponse, AsyncVectorIoResourceWithStreamingResponse, ) -from .benchmarks import ( - BenchmarksResource, - AsyncBenchmarksResource, - BenchmarksResourceWithRawResponse, - AsyncBenchmarksResourceWithRawResponse, - BenchmarksResourceWithStreamingResponse, - AsyncBenchmarksResourceWithStreamingResponse, -) from .embeddings import ( EmbeddingsResource, AsyncEmbeddingsResource, @@ -320,12 +312,6 @@ "AsyncScoringFunctionsResourceWithRawResponse", "ScoringFunctionsResourceWithStreamingResponse", "AsyncScoringFunctionsResourceWithStreamingResponse", - "BenchmarksResource", - "AsyncBenchmarksResource", - "BenchmarksResourceWithRawResponse", - "AsyncBenchmarksResourceWithRawResponse", - "BenchmarksResourceWithStreamingResponse", - "AsyncBenchmarksResourceWithStreamingResponse", "FilesResource", "AsyncFilesResource", "FilesResourceWithRawResponse", diff --git a/src/llama_stack_client/resources/alpha/__init__.py b/src/llama_stack_client/resources/alpha/__init__.py index dadd6d3e..ae13bed1 100644 --- a/src/llama_stack_client/resources/alpha/__init__.py +++ b/src/llama_stack_client/resources/alpha/__init__.py @@ -38,6 +38,14 @@ InferenceResourceWithStreamingResponse, AsyncInferenceResourceWithStreamingResponse, ) +from .benchmarks import ( + BenchmarksResource, + AsyncBenchmarksResource, + BenchmarksResourceWithRawResponse, + AsyncBenchmarksResourceWithRawResponse, + BenchmarksResourceWithStreamingResponse, + AsyncBenchmarksResourceWithStreamingResponse, +) from .post_training import ( 
PostTrainingResource, AsyncPostTrainingResource, @@ -60,6 +68,12 @@ "AsyncPostTrainingResourceWithRawResponse", "PostTrainingResourceWithStreamingResponse", "AsyncPostTrainingResourceWithStreamingResponse", + "BenchmarksResource", + "AsyncBenchmarksResource", + "BenchmarksResourceWithRawResponse", + "AsyncBenchmarksResourceWithRawResponse", + "BenchmarksResourceWithStreamingResponse", + "AsyncBenchmarksResourceWithStreamingResponse", "EvalResource", "AsyncEvalResource", "EvalResourceWithRawResponse", diff --git a/src/llama_stack_client/resources/alpha/alpha.py b/src/llama_stack_client/resources/alpha/alpha.py index a3e5e544..9ba65570 100644 --- a/src/llama_stack_client/resources/alpha/alpha.py +++ b/src/llama_stack_client/resources/alpha/alpha.py @@ -25,6 +25,14 @@ InferenceResourceWithStreamingResponse, AsyncInferenceResourceWithStreamingResponse, ) +from .benchmarks import ( + BenchmarksResource, + AsyncBenchmarksResource, + BenchmarksResourceWithRawResponse, + AsyncBenchmarksResourceWithRawResponse, + BenchmarksResourceWithStreamingResponse, + AsyncBenchmarksResourceWithStreamingResponse, +) from ..._resource import SyncAPIResource, AsyncAPIResource from .agents.agents import ( AgentsResource, @@ -55,6 +63,10 @@ def inference(self) -> InferenceResource: def post_training(self) -> PostTrainingResource: return PostTrainingResource(self._client) + @cached_property + def benchmarks(self) -> BenchmarksResource: + return BenchmarksResource(self._client) + @cached_property def eval(self) -> EvalResource: return EvalResource(self._client) @@ -92,6 +104,10 @@ def inference(self) -> AsyncInferenceResource: def post_training(self) -> AsyncPostTrainingResource: return AsyncPostTrainingResource(self._client) + @cached_property + def benchmarks(self) -> AsyncBenchmarksResource: + return AsyncBenchmarksResource(self._client) + @cached_property def eval(self) -> AsyncEvalResource: return AsyncEvalResource(self._client) @@ -132,6 +148,10 @@ def inference(self) -> InferenceResourceWithRawResponse: def post_training(self) -> PostTrainingResourceWithRawResponse: return PostTrainingResourceWithRawResponse(self._alpha.post_training) + @cached_property + def benchmarks(self) -> BenchmarksResourceWithRawResponse: + return BenchmarksResourceWithRawResponse(self._alpha.benchmarks) + @cached_property def eval(self) -> EvalResourceWithRawResponse: return EvalResourceWithRawResponse(self._alpha.eval) @@ -153,6 +173,10 @@ def inference(self) -> AsyncInferenceResourceWithRawResponse: def post_training(self) -> AsyncPostTrainingResourceWithRawResponse: return AsyncPostTrainingResourceWithRawResponse(self._alpha.post_training) + @cached_property + def benchmarks(self) -> AsyncBenchmarksResourceWithRawResponse: + return AsyncBenchmarksResourceWithRawResponse(self._alpha.benchmarks) + @cached_property def eval(self) -> AsyncEvalResourceWithRawResponse: return AsyncEvalResourceWithRawResponse(self._alpha.eval) @@ -174,6 +198,10 @@ def inference(self) -> InferenceResourceWithStreamingResponse: def post_training(self) -> PostTrainingResourceWithStreamingResponse: return PostTrainingResourceWithStreamingResponse(self._alpha.post_training) + @cached_property + def benchmarks(self) -> BenchmarksResourceWithStreamingResponse: + return BenchmarksResourceWithStreamingResponse(self._alpha.benchmarks) + @cached_property def eval(self) -> EvalResourceWithStreamingResponse: return EvalResourceWithStreamingResponse(self._alpha.eval) @@ -195,6 +223,10 @@ def inference(self) -> AsyncInferenceResourceWithStreamingResponse: def 
post_training(self) -> AsyncPostTrainingResourceWithStreamingResponse: return AsyncPostTrainingResourceWithStreamingResponse(self._alpha.post_training) + @cached_property + def benchmarks(self) -> AsyncBenchmarksResourceWithStreamingResponse: + return AsyncBenchmarksResourceWithStreamingResponse(self._alpha.benchmarks) + @cached_property def eval(self) -> AsyncEvalResourceWithStreamingResponse: return AsyncEvalResourceWithStreamingResponse(self._alpha.eval) diff --git a/src/llama_stack_client/resources/benchmarks.py b/src/llama_stack_client/resources/alpha/benchmarks.py similarity index 96% rename from src/llama_stack_client/resources/benchmarks.py rename to src/llama_stack_client/resources/alpha/benchmarks.py index 354d3015..dc74cc85 100644 --- a/src/llama_stack_client/resources/benchmarks.py +++ b/src/llama_stack_client/resources/alpha/benchmarks.py @@ -12,21 +12,21 @@ import httpx -from ..types import benchmark_register_params -from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given -from .._utils import maybe_transform, async_maybe_transform -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( +from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( to_raw_response_wrapper, to_streamed_response_wrapper, async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._wrappers import DataWrapper -from .._base_client import make_request_options -from ..types.benchmark import Benchmark -from ..types.benchmark_list_response import BenchmarkListResponse +from ..._wrappers import DataWrapper +from ...types.alpha import benchmark_register_params +from ..._base_client import make_request_options +from ...types.alpha.benchmark import Benchmark +from ...types.alpha.benchmark_list_response import BenchmarkListResponse __all__ = ["BenchmarksResource", "AsyncBenchmarksResource"] diff --git a/src/llama_stack_client/types/__init__.py b/src/llama_stack_client/types/__init__.py index b03446ec..173a1e03 100644 --- a/src/llama_stack_client/types/__init__.py +++ b/src/llama_stack_client/types/__init__.py @@ -31,7 +31,6 @@ ) from .shield import Shield as Shield from .tool_def import ToolDef as ToolDef -from .benchmark import Benchmark as Benchmark from .route_info import RouteInfo as RouteInfo from .scoring_fn import ScoringFn as ScoringFn from .tool_group import ToolGroup as ToolGroup @@ -70,19 +69,16 @@ from .shield_register_params import ShieldRegisterParams as ShieldRegisterParams from .tool_invocation_result import ToolInvocationResult as ToolInvocationResult from .vector_io_query_params import VectorIoQueryParams as VectorIoQueryParams -from .benchmark_list_response import BenchmarkListResponse as BenchmarkListResponse from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams from .list_providers_response import ListProvidersResponse as ListProvidersResponse from .scoring_fn_params_param import ScoringFnParamsParam as ScoringFnParamsParam from .toolgroup_list_response import ToolgroupListResponse as ToolgroupListResponse from .vector_io_insert_params import VectorIoInsertParams as VectorIoInsertParams from .completion_create_params import CompletionCreateParams as CompletionCreateParams -from 
.list_benchmarks_response import ListBenchmarksResponse as ListBenchmarksResponse from .moderation_create_params import ModerationCreateParams as ModerationCreateParams from .response_delete_response import ResponseDeleteResponse as ResponseDeleteResponse from .safety_run_shield_params import SafetyRunShieldParams as SafetyRunShieldParams from .vector_store_list_params import VectorStoreListParams as VectorStoreListParams -from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams from .list_tool_groups_response import ListToolGroupsResponse as ListToolGroupsResponse from .toolgroup_register_params import ToolgroupRegisterParams as ToolgroupRegisterParams from .completion_create_response import CompletionCreateResponse as CompletionCreateResponse diff --git a/src/llama_stack_client/types/alpha/__init__.py b/src/llama_stack_client/types/alpha/__init__.py index a28cad3a..61e02a4e 100644 --- a/src/llama_stack_client/types/alpha/__init__.py +++ b/src/llama_stack_client/types/alpha/__init__.py @@ -9,6 +9,7 @@ from __future__ import annotations from .job import Job as Job +from .benchmark import Benchmark as Benchmark from .tool_response import ToolResponse as ToolResponse from .inference_step import InferenceStep as InferenceStep from .shield_call_step import ShieldCallStep as ShieldCallStep @@ -25,7 +26,10 @@ from .algorithm_config_param import AlgorithmConfigParam as AlgorithmConfigParam from .benchmark_config_param import BenchmarkConfigParam as BenchmarkConfigParam from .agent_retrieve_response import AgentRetrieveResponse as AgentRetrieveResponse +from .benchmark_list_response import BenchmarkListResponse as BenchmarkListResponse from .inference_rerank_params import InferenceRerankParams as InferenceRerankParams +from .list_benchmarks_response import ListBenchmarksResponse as ListBenchmarksResponse +from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams from .eval_evaluate_rows_params import EvalEvaluateRowsParams as EvalEvaluateRowsParams from .inference_rerank_response import InferenceRerankResponse as InferenceRerankResponse from .eval_run_eval_alpha_params import EvalRunEvalAlphaParams as EvalRunEvalAlphaParams diff --git a/src/llama_stack_client/types/benchmark.py b/src/llama_stack_client/types/alpha/benchmark.py similarity index 96% rename from src/llama_stack_client/types/benchmark.py rename to src/llama_stack_client/types/alpha/benchmark.py index 3e23f7a8..b70c8f28 100644 --- a/src/llama_stack_client/types/benchmark.py +++ b/src/llama_stack_client/types/alpha/benchmark.py @@ -9,7 +9,7 @@ from typing import Dict, List, Union, Optional from typing_extensions import Literal -from .._models import BaseModel +from ..._models import BaseModel __all__ = ["Benchmark"] diff --git a/src/llama_stack_client/types/benchmark_list_response.py b/src/llama_stack_client/types/alpha/benchmark_list_response.py similarity index 100% rename from src/llama_stack_client/types/benchmark_list_response.py rename to src/llama_stack_client/types/alpha/benchmark_list_response.py diff --git a/src/llama_stack_client/types/benchmark_register_params.py b/src/llama_stack_client/types/alpha/benchmark_register_params.py similarity index 96% rename from src/llama_stack_client/types/benchmark_register_params.py rename to src/llama_stack_client/types/alpha/benchmark_register_params.py index 656d2705..84be3786 100644 --- a/src/llama_stack_client/types/benchmark_register_params.py +++ b/src/llama_stack_client/types/alpha/benchmark_register_params.py @@ -11,7 
+11,7 @@ from typing import Dict, Union, Iterable from typing_extensions import Required, TypedDict -from .._types import SequenceNotStr +from ..._types import SequenceNotStr __all__ = ["BenchmarkRegisterParams"] diff --git a/src/llama_stack_client/types/list_benchmarks_response.py b/src/llama_stack_client/types/alpha/list_benchmarks_response.py similarity index 93% rename from src/llama_stack_client/types/list_benchmarks_response.py rename to src/llama_stack_client/types/alpha/list_benchmarks_response.py index ec867db1..8ea3b963 100644 --- a/src/llama_stack_client/types/list_benchmarks_response.py +++ b/src/llama_stack_client/types/alpha/list_benchmarks_response.py @@ -6,7 +6,7 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from .._models import BaseModel +from ..._models import BaseModel from .benchmark_list_response import BenchmarkListResponse __all__ = ["ListBenchmarksResponse"] diff --git a/tests/api_resources/test_benchmarks.py b/tests/api_resources/alpha/test_benchmarks.py similarity index 84% rename from tests/api_resources/test_benchmarks.py rename to tests/api_resources/alpha/test_benchmarks.py index a54c48a5..71ad6bc2 100644 --- a/tests/api_resources/test_benchmarks.py +++ b/tests/api_resources/alpha/test_benchmarks.py @@ -15,7 +15,7 @@ from tests.utils import assert_matches_type from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient -from llama_stack_client.types import Benchmark, BenchmarkListResponse +from llama_stack_client.types.alpha import Benchmark, BenchmarkListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -25,14 +25,14 @@ class TestBenchmarks: @parametrize def test_method_retrieve(self, client: LlamaStackClient) -> None: - benchmark = client.benchmarks.retrieve( + benchmark = client.alpha.benchmarks.retrieve( "benchmark_id", ) assert_matches_type(Benchmark, benchmark, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: LlamaStackClient) -> None: - response = client.benchmarks.with_raw_response.retrieve( + response = client.alpha.benchmarks.with_raw_response.retrieve( "benchmark_id", ) @@ -43,7 +43,7 @@ def test_raw_response_retrieve(self, client: LlamaStackClient) -> None: @parametrize def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None: - with client.benchmarks.with_streaming_response.retrieve( + with client.alpha.benchmarks.with_streaming_response.retrieve( "benchmark_id", ) as response: assert not response.is_closed @@ -57,18 +57,18 @@ def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None: @parametrize def test_path_params_retrieve(self, client: LlamaStackClient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `benchmark_id` but received ''"): - client.benchmarks.with_raw_response.retrieve( + client.alpha.benchmarks.with_raw_response.retrieve( "", ) @parametrize def test_method_list(self, client: LlamaStackClient) -> None: - benchmark = client.benchmarks.list() + benchmark = client.alpha.benchmarks.list() assert_matches_type(BenchmarkListResponse, benchmark, path=["response"]) @parametrize def test_raw_response_list(self, client: LlamaStackClient) -> None: - response = client.benchmarks.with_raw_response.list() + response = client.alpha.benchmarks.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -77,7 +77,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None: 
 
     @parametrize
     def test_streaming_response_list(self, client: LlamaStackClient) -> None:
-        with client.benchmarks.with_streaming_response.list() as response:
+        with client.alpha.benchmarks.with_streaming_response.list() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -88,7 +88,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
 
     @parametrize
     def test_method_register(self, client: LlamaStackClient) -> None:
-        benchmark = client.benchmarks.register(
+        benchmark = client.alpha.benchmarks.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -97,7 +97,7 @@ def test_method_register(self, client: LlamaStackClient) -> None:
 
     @parametrize
     def test_method_register_with_all_params(self, client: LlamaStackClient) -> None:
-        benchmark = client.benchmarks.register(
+        benchmark = client.alpha.benchmarks.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -109,7 +109,7 @@ def test_method_register_with_all_params(self, client: LlamaStackClient) -> None
 
     @parametrize
     def test_raw_response_register(self, client: LlamaStackClient) -> None:
-        response = client.benchmarks.with_raw_response.register(
+        response = client.alpha.benchmarks.with_raw_response.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -122,7 +122,7 @@ def test_raw_response_register(self, client: LlamaStackClient) -> None:
 
     @parametrize
     def test_streaming_response_register(self, client: LlamaStackClient) -> None:
-        with client.benchmarks.with_streaming_response.register(
+        with client.alpha.benchmarks.with_streaming_response.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -143,14 +143,14 @@ class TestAsyncBenchmarks:
 
     @parametrize
     async def test_method_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
-        benchmark = await async_client.benchmarks.retrieve(
+        benchmark = await async_client.alpha.benchmarks.retrieve(
             "benchmark_id",
         )
         assert_matches_type(Benchmark, benchmark, path=["response"])
 
     @parametrize
     async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
-        response = await async_client.benchmarks.with_raw_response.retrieve(
+        response = await async_client.alpha.benchmarks.with_raw_response.retrieve(
             "benchmark_id",
         )
 
@@ -161,7 +161,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient)
 
     @parametrize
     async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
-        async with async_client.benchmarks.with_streaming_response.retrieve(
+        async with async_client.alpha.benchmarks.with_streaming_response.retrieve(
             "benchmark_id",
         ) as response:
             assert not response.is_closed
@@ -175,18 +175,18 @@ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackCl
 
     @parametrize
     async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `benchmark_id` but received ''"):
-            await async_client.benchmarks.with_raw_response.retrieve(
+            await async_client.alpha.benchmarks.with_raw_response.retrieve(
                 "",
             )
 
     @parametrize
     async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
-        benchmark = await async_client.benchmarks.list()
+        benchmark = await async_client.alpha.benchmarks.list()
         assert_matches_type(BenchmarkListResponse, benchmark, path=["response"])
 
     @parametrize
     async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
-        response = await async_client.benchmarks.with_raw_response.list()
+        response = await async_client.alpha.benchmarks.with_raw_response.list()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -195,7 +195,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
 
     @parametrize
     async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
-        async with async_client.benchmarks.with_streaming_response.list() as response:
+        async with async_client.alpha.benchmarks.with_streaming_response.list() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -206,7 +206,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
 
     @parametrize
     async def test_method_register(self, async_client: AsyncLlamaStackClient) -> None:
-        benchmark = await async_client.benchmarks.register(
+        benchmark = await async_client.alpha.benchmarks.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -215,7 +215,7 @@ async def test_method_register(self, async_client: AsyncLlamaStackClient) -> Non
 
     @parametrize
     async def test_method_register_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
-        benchmark = await async_client.benchmarks.register(
+        benchmark = await async_client.alpha.benchmarks.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -227,7 +227,7 @@ async def test_method_register_with_all_params(self, async_client: AsyncLlamaSta
 
     @parametrize
     async def test_raw_response_register(self, async_client: AsyncLlamaStackClient) -> None:
-        response = await async_client.benchmarks.with_raw_response.register(
+        response = await async_client.alpha.benchmarks.with_raw_response.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],
@@ -240,7 +240,7 @@ async def test_raw_response_register(self, async_client: AsyncLlamaStackClient)
 
     @parametrize
     async def test_streaming_response_register(self, async_client: AsyncLlamaStackClient) -> None:
-        async with async_client.benchmarks.with_streaming_response.register(
+        async with async_client.alpha.benchmarks.with_streaming_response.register(
             benchmark_id="benchmark_id",
             dataset_id="dataset_id",
             scoring_functions=["string"],

From 7f1769e193f244832d41f1221d160a525aeae31f Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 22 Oct 2025 22:15:20 +0000
Subject: [PATCH 6/6] release: 0.3.1-alpha.1

---
 .release-please-manifest.json |  2 +-
 CHANGELOG.md                  | 21 +++++++++++++++++++++
 pyproject.toml                |  2 +-
 3 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index f8093838..193b35f2 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.3.0-alpha.6"
+  ".": "0.3.1-alpha.1"
 }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cc3fcc10..24d4ea22 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,26 @@
 # Changelog
 
+## 0.3.1-alpha.1 (2025-10-22)
+
+Full Changelog: [v0.3.0-alpha.6...v0.3.1-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.3.0-alpha.6...v0.3.1-alpha.1)
+
+### Features
+
+* **api:** manual updates ([5b288dc](https://github.com/llamastack/llama-stack-client-python/commit/5b288dc7f1f1bf1ade741b2e7789077046e00a0e))
+* **api:** manual updates ([a7d787f](https://github.com/llamastack/llama-stack-client-python/commit/a7d787fd746e66208fd2855d1ed310cd8005d5ee))
+* **api:** move datasets to beta, vector_db -> vector_store ([88ed3fb](https://github.com/llamastack/llama-stack-client-python/commit/88ed3fb387aa6a752fd04b3fd82c727dd95384a0))
+* **api:** sync ([44e5dec](https://github.com/llamastack/llama-stack-client-python/commit/44e5decf4c417d7d55fd5a7ce33c1daac7ee0922))
+
+
+### Chores
+
+* bump `httpx-aiohttp` version to 0.1.9 ([794344a](https://github.com/llamastack/llama-stack-client-python/commit/794344a0baf1d85554988aba346cbd47cd2d6c2d))
+
+
+### Build System
+
+* Bump version to 0.3.0 ([fb7fa38](https://github.com/llamastack/llama-stack-client-python/commit/fb7fa389f583767bf33e6836dec2e50b842a8567))
+
 ## 0.3.0-alpha.6 (2025-10-13)
 
 Full Changelog: [v0.3.0-alpha.5...v0.3.0-alpha.6](https://github.com/llamastack/llama-stack-client-python/compare/v0.3.0-alpha.5...v0.3.0-alpha.6)
diff --git a/pyproject.toml b/pyproject.toml
index 8574870f..447a59ae 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "llama_stack_client"
-version = "0.3.0"
+version = "0.3.1-alpha.1"
 description = "The official Python library for the llama-stack-client API"
 dynamic = ["readme"]
 license = "MIT"
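
Usage note: after the benchmark move in the patches above, the `Benchmark` types and the benchmarks resource are addressed through the `alpha` namespace (`llama_stack_client.types.alpha`, `client.alpha.benchmarks`), mirroring the stability grouping the changelog describes for the datasets move to beta. Below is a minimal sketch of the new call sites, assuming a Llama Stack server reachable at the same base URL the test suite defaults to; the benchmark and dataset IDs are illustrative, not real fixtures:

    import os

    from llama_stack_client import LlamaStackClient
    from llama_stack_client.types.alpha import Benchmark, BenchmarkListResponse

    # The tests above default to a local mock server; any running
    # Llama Stack endpoint works the same way.
    client = LlamaStackClient(
        base_url=os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010"),
    )

    # Register a benchmark against an existing dataset (illustrative IDs).
    client.alpha.benchmarks.register(
        benchmark_id="my-benchmark",
        dataset_id="my-dataset",
        scoring_functions=["string"],
    )

    # List and retrieve now live under the same alpha namespace.
    benchmarks: BenchmarkListResponse = client.alpha.benchmarks.list()
    benchmark: Benchmark = client.alpha.benchmarks.retrieve("my-benchmark")

Pre-move call sites (`client.benchmarks.…`, `from llama_stack_client.types import Benchmark`) will raise attribute/import errors against 0.3.1-alpha.1 and should be updated as in the test diff above.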