@client
def text_completion(
    prompt: Union[
        str, List[Union[str, List[Union[str, List[int]]]]]
    ],  # Required: The prompt(s) to generate completions for. May be a string,
    # a list of strings, or token-id lists (OpenAI completions-style input).
    model: Optional[str] = None,  # Optional: either `model` or `engine` can be set
    *args,
    **kwargs,
):
    """Adapt an OpenAI-style *text completion* request onto the chat `completion()` API.

    Resolves the provider for `model`, then — in the default (non-OpenAI) path
    shown here — wraps the raw prompt in a single chat message and delegates to
    `completion()`. Extra positional/keyword arguments are forwarded unchanged.

    NOTE(review): large portions of this function's body are elided in this view
    (`# ... more code ...`); `custom_llm_provider`, `api_base`, and
    `optional_params` are presumably bound there — verify against the full file.
    """
    # ... more code ...
    # Resolve provider routing for `model`; only `custom_llm_provider`,
    # `api_base` are reused below in this view. The dynamic API key is discarded.
    _, custom_llm_provider, dynamic_api_key, api_base = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider, api_base=api_base)  
    # type: ignore
    # NOTE(review): the `# type: ignore` above sits on its own line, so it does
    # not suppress anything for the `get_llm_provider` call — likely meant to be
    # a trailing comment on the previous line; confirm and consolidate.
    # ... more code ...

    # default case, non OpenAI requests go through here
    # NOTE(review): the prompt is sent with role "system", not "user" — confirm
    # this is intentional for the downstream providers; many chat backends
    # expect the user's text under role "user".
    messages = [{"role": "system", "content": prompt}]
    # Drop `prompt` from kwargs so it is not forwarded twice (it is already
    # embedded in `messages`).
    kwargs.pop("prompt", None)
    response = completion(
        model=model,
        messages=messages,
        *args,
        **kwargs,
        **optional_params,  # assumed built in the elided code above — TODO confirm
    )