From ad7ca84bb2c3dfab3a182ef21f953e9767ac086f Mon Sep 17 00:00:00 2001
From: Adam Jones
Date: Sun, 6 Oct 2024 00:54:27 +0100
Subject: [PATCH] server types: Move 'model' parameter to clarify it is used

The 'model' parameter has been supported since
https://github.com/abetlen/llama-cpp-python/pull/931. Its placement in
the 'ignored or currently unsupported' section was copied from an older
version of the file, and hasn't been corrected since.

Correcting this will make it clearer what parameters are supported by
llama-cpp-python.
---
 llama_cpp/server/types.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama_cpp/server/types.py b/llama_cpp/server/types.py
index fdd164456..eda2b9280 100644
--- a/llama_cpp/server/types.py
+++ b/llama_cpp/server/types.py
@@ -107,6 +107,7 @@
 
 
 class CreateCompletionRequest(BaseModel):
+    model: Optional[str] = model_field
     prompt: Union[str, List[str]] = Field(
         default="", description="The prompt to generate completions for."
     )
@@ -138,7 +139,6 @@ class CreateCompletionRequest(BaseModel):
     seed: Optional[int] = Field(None)
 
     # ignored or currently unsupported
-    model: Optional[str] = model_field
     n: Optional[int] = 1
     best_of: Optional[int] = 1
     user: Optional[str] = Field(default=None)
@@ -190,6 +190,7 @@ class ChatCompletionRequestMessage(BaseModel):
 
 
 class CreateChatCompletionRequest(BaseModel):
+    model: Optional[str] = model_field
     messages: List[llama_cpp.ChatCompletionRequestMessage] = Field(
         default=[], description="A list of messages to generate completions for."
     )
@@ -237,7 +238,6 @@ class CreateChatCompletionRequest(BaseModel):
     )
 
     # ignored or currently unsupported
-    model: Optional[str] = model_field
     n: Optional[int] = 1
     user: Optional[str] = Field(None)