
pixano.app.routers.inference.models

delete_model(model_name, settings) async

Delete a model from the Pixano inference client.

Source code in pixano/app/routers/inference/models.py
@router.delete("/delete/{model_name}")
async def delete_model(
    model_name: str,
    settings: Annotated[Settings, Depends(get_settings)],
) -> None:
    """Delete a model from pixano inference client."""
    client = get_client_from_settings(settings)
    return await client.delete_model(model_name=model_name)
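
A client could call this endpoint as in the sketch below. The base URL and the "/models" router prefix are assumptions about how this router is mounted; only the "/delete/{model_name}" path comes from the decorator above, and the model name used is illustrative.

import httpx

# Assumption: local Pixano app with this router mounted under "/models".
BASE_URL = "http://localhost:8000/models"

def delete_model_example(model_name: str) -> None:
    # DELETE /delete/{model_name} asks the inference client to remove the model.
    response = httpx.delete(f"{BASE_URL}/delete/{model_name}")
    response.raise_for_status()

delete_model_example("sam2")  # "sam2" is an illustrative model name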

instantiate_model(config, provider, settings) async

Instantiate a model from the Pixano inference client.

Source code in pixano/app/routers/inference/models.py
@router.post("/instantiate")
async def instantiate_model(
    config: Annotated[ModelConfig, Body(embed=True)],
    provider: Annotated[str, Body(embed=True)],
    settings: Annotated[Settings, Depends(get_settings)],
) -> ModelInfo:
    """Instantiate a model from pixano inference client."""
    client = get_client_from_settings(settings)
    return await client.instantiate_model(provider=provider, config=config)
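
Because both parameters are declared with Body(embed=True), the request body must carry "config" and "provider" as top-level JSON keys. The sketch below assumes the same base URL and "/models" prefix as above; the ModelConfig fields and the provider name shown are placeholders, not the real pixano-inference schema.

import httpx

# Assumption: local Pixano app with this router mounted under "/models".
BASE_URL = "http://localhost:8000/models"

payload = {
    "provider": "transformers",      # illustrative provider name
    "config": {"name": "my-model"},  # placeholder; actual fields come from ModelConfig
}

# POST /instantiate returns a ModelInfo describing the instantiated model.
response = httpx.post(f"{BASE_URL}/instantiate", json=payload)
response.raise_for_status()
model_info = response.json()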

list_models(settings, task=None) async

List all models from the Pixano inference client.

Source code in pixano/app/routers/inference/models.py
@router.get("/list/", response_model=list[ModelInfo])
async def list_models(
    settings: Annotated[Settings, Depends(get_settings)], task: str | None = None
) -> list[ModelInfo]:
    """List all models from pixano inference client."""
    client = get_client_from_settings(settings)
    models = await client.list_models()
    if task is None:
        return models
    return [m for m in models if task == m.task]
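
The optional "task" query parameter filters the returned models on ModelInfo.task; without it, every registered model is returned. A minimal sketch, again assuming the base URL and "/models" prefix, with an illustrative task value:

import httpx

# Assumption: local Pixano app with this router mounted under "/models".
BASE_URL = "http://localhost:8000/models"

# All registered models.
all_models = httpx.get(f"{BASE_URL}/list/").json()

# Only models whose task matches the query parameter
# ("mask_generation" is an illustrative task value).
filtered = httpx.get(f"{BASE_URL}/list/", params={"task": "mask_generation"}).json()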