Skip to content

pixano_inference.settings

Settings for the Pixano Inference API.

Settings(**data)

Bases: BaseSettings

Application settings.

Attributes:

Name Type Description
app_name str

The name of the application.

app_version str

The version of the application.

app_description str

A description of the application.

num_cpus int

The number of CPUs accessible to the application.

num_gpus int

The number of GPUs available for inference.

num_nodes int

The number of nodes available for inference.

gpus_used list[int]

The list of GPUs used by the application.

Source code in pixano_inference/settings.py
def __init__(self, **data: Any):
    """Initialize the settings, filling in hardware defaults.

    Defaults applied when not provided explicitly:
    - ``num_cpus``: ``os.cpu_count()``.
    - ``num_gpus``: the CUDA device count when torch is installed and
      CUDA is available, otherwise 0.

    Args:
        data: Keyword settings forwarded to ``BaseSettings.__init__``.
    """
    if "num_cpus" not in data:
        data["num_cpus"] = os.cpu_count()
    if "num_gpus" not in data:
        # Short-circuit keeps torch from being touched when it is not
        # installed; any other situation means no usable GPU.
        if is_torch_installed() and torch.cuda.is_available():
            data["num_gpus"] = torch.cuda.device_count()
        else:
            data["num_gpus"] = 0

    super().__init__(**data)

gpus_available property

Return the available GPUs.

add_model(model, task)

Add a model.

Source code in pixano_inference/settings.py
def add_model(self, model: str, task: str) -> int | None:
    """Add a model."""
    if model in self.models:
        raise ValueError(f"Model {model} already registered.")
    gpu = self.assign_model_gpu(model)
    self.models_to_task[model] = task
    self.models.append(model)
    return gpu

assign_model_gpu(model)

Assign a model to a GPU.

Parameters:

Name Type Description Default
model str

The model name.

required

Returns:

Type Description
int | None

The GPU index, or None if no GPU is available.

Source code in pixano_inference/settings.py
def assign_model_gpu(self, model: str) -> int | None:
    """Assign a model to a gpu.

    Args:
        model: The model name.

    Returns:
        The gpu index. If no gpu available, returns None.
    """
    gpu = self.reserve_gpu()
    if gpu is not None:
        self.gpu_to_model[gpu] = model
    return gpu

free_gpu(gpu)

Free a GPU if used.

Source code in pixano_inference/settings.py
def free_gpu(self, gpu: int) -> None:
    """Free a GPU if used.

    Freeing a GPU that is not tracked is a no-op for both the
    used-GPU list and the GPU-to-model mapping.

    Args:
        gpu: Index of the GPU to release.
    """
    try:
        self.gpus_used.remove(gpu)
    except ValueError:
        pass
    # Pop with a default so an untracked GPU does not raise KeyError,
    # matching the tolerant removal from gpus_used above.
    self.gpu_to_model.pop(gpu, None)
    return

remove_model(model)

Remove a model.

Source code in pixano_inference/settings.py
def remove_model(self, model: str) -> None:
    """Remove a model and free the GPU it occupied, if any.

    Args:
        model: The model name.

    Raises:
        ValueError: If the model is not registered.
    """
    self.models.remove(model)
    self.models_to_task.pop(model, None)
    # Locate the GPU first, then free it: free_gpu() mutates
    # gpu_to_model, so it must not run while iterating the dict.
    gpu = next(
        (g for g, stored in self.gpu_to_model.items() if stored == model),
        None,
    )
    if gpu is not None:
        self.free_gpu(gpu)
    return

reserve_gpu()

Reserve a GPU, if any is available.

Source code in pixano_inference/settings.py
def reserve_gpu(self) -> int | None:
    """Reserve a gpu if any available."""
    gpus = self.gpus_available
    if len(gpus) == 0:
        return None
    selected_gpu = gpus[0]
    self.gpus_used.append(selected_gpu)
    return selected_gpu

get_pixano_inference_settings()

Return the settings.

Source code in pixano_inference/settings.py
def get_pixano_inference_settings() -> Settings:
    """Return the settings.

    Returns the module-level ``PIXANO_INFERENCE_SETTINGS`` instance,
    shared by all callers.
    """
    return PIXANO_INFERENCE_SETTINGS