Skip to content

feat: provider update #219

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/llama_stack_client/lib/cli/providers/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from .list import list_providers
from .inspect import inspect_provider
from .update import update_provider


@click.group()
Expand All @@ -13,3 +14,4 @@ def providers():
# Register each subcommand on the `providers` click group so it is exposed
# as a CLI command (list / inspect / update).
providers.add_command(list_providers)
providers.add_command(inspect_provider)
providers.add_command(update_provider)
35 changes: 35 additions & 0 deletions src/llama_stack_client/lib/cli/providers/update.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import click
import yaml
from rich.console import Console
from ..common.utils import handle_client_errors


@click.command(name="update")
@click.argument("api")
@click.argument("provider_id")
@click.argument("provider_type")
@click.argument("config")
@click.pass_context
@handle_client_errors("update providers")
def update_provider(ctx, api, provider_id, provider_type, config):
"""Show available providers on distribution endpoint"""
client = ctx.obj["client"]
console = Console()

import ast

config = ast.literal_eval(config)

providers_response = client.providers.update(
provider_id=provider_id, provider_type=provider_type, api=api, config=config
)

if not providers_response:
click.secho("Provider not found", fg="red")
raise click.exceptions.Exit(1)

console.print(f"provider_id={providers_response.provider_id}")
console.print(f"provider_type={providers_response.provider_type}")
console.print("config:")
for line in yaml.dump(providers_response.config, indent=2).split("\n"):
console.print(line)
96 changes: 95 additions & 1 deletion src/llama_stack_client/resources/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Type, cast
from typing import Type, cast, Dict, Any

import httpx

Expand All @@ -15,10 +15,15 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from .._utils import (
maybe_transform,
async_maybe_transform,
)
from .._wrappers import DataWrapper
from .._base_client import make_request_options
from ..types.provider_info import ProviderInfo
from ..types.provider_list_response import ProviderListResponse
from ..types import provider_update_params

__all__ = ["ProvidersResource", "AsyncProvidersResource"]

Expand All @@ -42,6 +47,44 @@ def with_streaming_response(self) -> ProvidersResourceWithStreamingResponse:
For more information, see https://www.github.com/stainless-sdks/llama-stack-python#with_streaming_response
"""
return ProvidersResourceWithStreamingResponse(self)

def update(
    self,
    api: str,
    provider_id: str,
    provider_type: str,
    *,
    config: Dict[str, Any],
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ProviderInfo:
    """
    Update the configuration of a registered provider.

    Args:
      api: API the provider implements (URL path parameter).

      provider_id: ID of the provider to update (URL path parameter).

      provider_type: Type of the provider (URL path parameter).

      config: New provider configuration, sent as the PUT request body.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # Guard the path parameters: an empty segment would build a malformed
    # URL (e.g. `/v1/providers//.../...`) and hit the wrong route. This
    # mirrors the validation other generated resource methods perform.
    if not api:
        raise ValueError(f"Expected a non-empty value for `api` but received {api!r}")
    if not provider_id:
        raise ValueError(f"Expected a non-empty value for `provider_id` but received {provider_id!r}")
    if not provider_type:
        raise ValueError(f"Expected a non-empty value for `provider_type` but received {provider_type!r}")
    return self._put(
        f"/v1/providers/{api}/{provider_id}/{provider_type}",
        body=maybe_transform(
            {
                "config": config,
            },
            provider_update_params.UpdateProviderRequest,
        ),
        options=make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        ),
        cast_to=ProviderInfo,
    )

def retrieve(
self,
Expand Down Expand Up @@ -116,6 +159,45 @@ def with_streaming_response(self) -> AsyncProvidersResourceWithStreamingResponse
For more information, see https://www.github.com/stainless-sdks/llama-stack-python#with_streaming_response
"""
return AsyncProvidersResourceWithStreamingResponse(self)


async def update(
    self,
    api: str,
    provider_id: str,
    provider_type: str,
    *,
    config: Dict[str, Any],
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ProviderInfo:
    """
    Update the configuration of a registered provider.

    Args:
      api: API the provider implements (URL path parameter).

      provider_id: ID of the provider to update (URL path parameter).

      provider_type: Type of the provider (URL path parameter).

      config: New provider configuration, sent as the PUT request body.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # Guard the path parameters: an empty segment would build a malformed
    # URL (e.g. `/v1/providers//.../...`) and hit the wrong route.
    if not api:
        raise ValueError(f"Expected a non-empty value for `api` but received {api!r}")
    if not provider_id:
        raise ValueError(f"Expected a non-empty value for `provider_id` but received {provider_id!r}")
    if not provider_type:
        raise ValueError(f"Expected a non-empty value for `provider_type` but received {provider_type!r}")
    return await self._put(
        f"/v1/providers/{api}/{provider_id}/{provider_type}",
        # BUG FIX: async_maybe_transform is a coroutine function and must be
        # awaited; the original passed the un-awaited coroutine as the body.
        body=await async_maybe_transform(
            {
                "config": config,
            },
            provider_update_params.UpdateProviderRequest,
        ),
        options=make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        ),
        cast_to=ProviderInfo,
    )

async def retrieve(
self,
Expand Down Expand Up @@ -175,6 +257,9 @@ class ProvidersResourceWithRawResponse:
def __init__(self, providers: ProvidersResource) -> None:
self._providers = providers

self.update = to_raw_response_wrapper(
providers.update,
)
self.retrieve = to_raw_response_wrapper(
providers.retrieve,
)
Expand All @@ -187,6 +272,9 @@ class AsyncProvidersResourceWithRawResponse:
def __init__(self, providers: AsyncProvidersResource) -> None:
self._providers = providers

self.update = async_to_raw_response_wrapper(
providers.update,
)
self.retrieve = async_to_raw_response_wrapper(
providers.retrieve,
)
Expand All @@ -199,6 +287,9 @@ class ProvidersResourceWithStreamingResponse:
def __init__(self, providers: ProvidersResource) -> None:
self._providers = providers

self.update = to_streamed_response_wrapper(
providers.update,
)
self.retrieve = to_streamed_response_wrapper(
providers.retrieve,
)
Expand All @@ -211,6 +302,9 @@ class AsyncProvidersResourceWithStreamingResponse:
def __init__(self, providers: AsyncProvidersResource) -> None:
self._providers = providers

self.update = async_to_streamed_response_wrapper(
providers.update,
)
self.retrieve = async_to_streamed_response_wrapper(
providers.retrieve,
)
Expand Down
12 changes: 12 additions & 0 deletions src/llama_stack_client/types/provider_update_params.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Dict, Union, Iterable, Any
from typing_extensions import Literal, Required, TypedDict

__all__ = ["UpdateProviderRequest"]


class UpdateProviderRequest(TypedDict, total=False):
config: Dict[str, Any]
Loading