chore(python-deps): bump llama-api-client from 0.1.2 to 0.2.0 (#3173)

Bumps [llama-api-client](https://github.com/meta-llama/llama-api-python)
from 0.1.2 to 0.2.0.
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/meta-llama/llama-api-python/releases">llama-api-client's
releases</a>.</em></p>
<blockquote>
<h2>v0.2.0</h2>
<h2>0.2.0 (2025-08-07)</h2>
<p>Full Changelog: <a
href="https://github.com/meta-llama/llama-api-python/compare/v0.1.2...v0.2.0">v0.1.2...v0.2.0</a></p>
<h3>Features</h3>
<ul>
<li>clean up environment call outs (<a
href="4afbd01ed7">4afbd01</a>)</li>
<li><strong>client:</strong> support file upload requests (<a
href="ec42e80b62">ec42e80</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li><strong>api:</strong> remove chat completion request model (<a
href="94c4e9fd50">94c4e9f</a>)</li>
<li><strong>client:</strong> don't send Content-Type header on GET
requests (<a
href="efec88aa51">efec88a</a>)</li>
<li><strong>parsing:</strong> correctly handle nested discriminated
unions (<a
href="b6276863be">b627686</a>)</li>
<li><strong>parsing:</strong> ignore empty metadata (<a
href="d6ee85101e">d6ee851</a>)</li>
<li><strong>parsing:</strong> parse extra field types (<a
href="f03ca22860">f03ca22</a>)</li>
</ul>
<h3>Chores</h3>
<ul>
<li>add examples (<a
href="abfa065721">abfa065</a>)</li>
<li><strong>internal:</strong> bump pinned h11 dep (<a
href="d40e1b1d73">d40e1b1</a>)</li>
<li><strong>internal:</strong> fix ruff target version (<a
href="c900ebc528">c900ebc</a>)</li>
<li><strong>package:</strong> mark python 3.13 as supported (<a
href="ef5bc36693">ef5bc36</a>)</li>
<li><strong>project:</strong> add settings file for vscode (<a
href="e3103801d6">e310380</a>)</li>
<li><strong>readme:</strong> fix version rendering on pypi (<a
href="786f9fbdb7">786f9fb</a>)</li>
<li>sync repo (<a
href="7e697f6550">7e697f6</a>)</li>
<li>update SDK settings (<a
href="de22c0ece7">de22c0e</a>)</li>
</ul>
<h3>Documentation</h3>
<ul>
<li>code of conduct (<a
href="efe1af28fb">efe1af2</a>)</li>
<li>readme and license (<a
href="d53eafd104">d53eafd</a>)</li>
</ul>
</blockquote>
</details>
<details>
<summary>Changelog</summary>
<p><em>Sourced from <a
href="https://github.com/meta-llama/llama-api-python/blob/main/CHANGELOG.md">llama-api-client's
changelog</a>.</em></p>
<blockquote>
<h2>0.2.0 (2025-08-07)</h2>
<p>Full Changelog: <a
href="https://github.com/meta-llama/llama-api-python/compare/v0.1.2...v0.2.0">v0.1.2...v0.2.0</a></p>
<h3>Features</h3>
<ul>
<li>clean up environment call outs (<a
href="4afbd01ed7">4afbd01</a>)</li>
<li><strong>client:</strong> support file upload requests (<a
href="ec42e80b62">ec42e80</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li><strong>api:</strong> remove chat completion request model (<a
href="94c4e9fd50">94c4e9f</a>)</li>
<li><strong>client:</strong> don't send Content-Type header on GET
requests (<a
href="efec88aa51">efec88a</a>)</li>
<li><strong>parsing:</strong> correctly handle nested discriminated
unions (<a
href="b6276863be">b627686</a>)</li>
<li><strong>parsing:</strong> ignore empty metadata (<a
href="d6ee85101e">d6ee851</a>)</li>
<li><strong>parsing:</strong> parse extra field types (<a
href="f03ca22860">f03ca22</a>)</li>
</ul>
<h3>Chores</h3>
<ul>
<li>add examples (<a
href="abfa065721">abfa065</a>)</li>
<li><strong>internal:</strong> bump pinned h11 dep (<a
href="d40e1b1d73">d40e1b1</a>)</li>
<li><strong>internal:</strong> fix ruff target version (<a
href="c900ebc528">c900ebc</a>)</li>
<li><strong>package:</strong> mark python 3.13 as supported (<a
href="ef5bc36693">ef5bc36</a>)</li>
<li><strong>project:</strong> add settings file for vscode (<a
href="e3103801d6">e310380</a>)</li>
<li><strong>readme:</strong> fix version rendering on pypi (<a
href="786f9fbdb7">786f9fb</a>)</li>
<li>sync repo (<a
href="7e697f6550">7e697f6</a>)</li>
<li>update SDK settings (<a
href="de22c0ece7">de22c0e</a>)</li>
</ul>
<h3>Documentation</h3>
<ul>
<li>code of conduct (<a
href="efe1af28fb">efe1af2</a>)</li>
<li>readme and license (<a
href="d53eafd104">d53eafd</a>)</li>
</ul>
</blockquote>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="7a8c5838af"><code>7a8c583</code></a>
release: 0.2.0</li>
<li><a
href="4f1a04e5c1"><code>4f1a04e</code></a>
chore(internal): fix ruff target version</li>
<li><a
href="06485e995a"><code>06485e9</code></a>
feat(client): support file upload requests</li>
<li><a
href="131b474ad1"><code>131b474</code></a>
chore(project): add settings file for vscode</li>
<li><a
href="ef4cee6d8b"><code>ef4cee6</code></a>
fix(parsing): parse extra field types</li>
<li><a
href="fcbc699718"><code>fcbc699</code></a>
fix(parsing): ignore empty metadata</li>
<li><a
href="b6656cd0b8"><code>b6656cd</code></a>
fix(api): remove chat completion request model</li>
<li><a
href="0deda5590c"><code>0deda55</code></a>
feat: clean up environment call outs</li>
<li><a
href="ecf91026ac"><code>ecf9102</code></a>
fix(client): don't send Content-Type header on GET requests</li>
<li><a
href="0ac6285cbe"><code>0ac6285</code></a>
chore(readme): fix version rendering on pypi</li>
<li>Additional commits viewable in <a
href="https://github.com/meta-llama/llama-api-python/compare/v0.1.2...v0.2.0">compare
view</a></li>
</ul>
</details>
<br />


[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=llama-api-client&package-manager=uv&previous-version=0.1.2&new-version=0.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

<details>
<summary>Dependabot commands and options</summary>
<br />

You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)


</details>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
This commit is contained in:
dependabot[bot] 2025-08-20 16:50:34 -07:00 committed by GitHub
parent 886af85e0c
commit bd1a794add
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

6
uv.lock generated
View file

@@ -1689,7 +1689,7 @@ wheels = [
[[package]]
name = "llama-api-client"
version = "0.1.2"
version = "0.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -1699,9 +1699,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d0/78/875de3a16efd0442718ac47cc27319cd80cc5f38e12298e454e08611acc4/llama_api_client-0.1.2.tar.gz", hash = "sha256:709011f2d506009b1b3b3bceea1c84f2a3a7600df1420fb256e680fcd7251387", size = 113695, upload-time = "2025-06-27T19:56:14.057Z" }
sdist = { url = "https://files.pythonhosted.org/packages/59/41/fa8521a0faff96bf5f810e2ab5b78c638f5ba44afd09aa86f94b6a1226ad/llama_api_client-0.2.0.tar.gz", hash = "sha256:b9bd5f5ad332b9133f0775a105f0940f057cbb311891f1d4487247d001c31f17", size = 117108, upload-time = "2025-08-12T17:07:07.734Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/08/5d7e6e7e6af5353391376288c200acacebb8e6b156d3636eae598a451673/llama_api_client-0.1.2-py3-none-any.whl", hash = "sha256:8ad6e10726f74b2302bfd766c61c41355a9ecf60f57cde2961882d22af998941", size = 84091, upload-time = "2025-06-27T19:56:12.8Z" },
{ url = "https://files.pythonhosted.org/packages/1d/11/198e65c1a50d9e839b4e3d346b4bd0f624e532446e468d1aba6c74ed7484/llama_api_client-0.2.0-py3-none-any.whl", hash = "sha256:50614ed991e1a72439e6a624a97e6000615ada1b9e2046ecc026fe62f107663c", size = 85002, upload-time = "2025-08-12T17:07:06.293Z" },
]
[[package]]