Skip to content

Commit 9270a99

Browse files
committed
add in capabilities to set full power for config
1 parent b11af25 commit 9270a99

File tree

1 file changed

+11
-2
lines changed

1 file changed

+11
-2
lines changed

llama_cpp/server/settings.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,12 +67,12 @@ class ModelSettings(BaseSettings):
6767
n_threads: int = Field(
6868
default=max(multiprocessing.cpu_count() // 2, 1),
6969
ge=1,
70-
description="The number of threads to use.",
70+
description="The number of threads to use. Use -1 for max cpu threads",
7171
)
7272
n_threads_batch: int = Field(
7373
default=max(multiprocessing.cpu_count() // 2, 1),
7474
ge=0,
75-
description="The number of threads to use when batch processing.",
75+
description="The number of threads to use when batch processing. Use -1 for max cpu threads",
7676
)
7777
rope_scaling_type: int = Field(
7878
default=llama_cpp.LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED
@@ -163,6 +163,15 @@ class ModelSettings(BaseSettings):
163163
verbose: bool = Field(
164164
default=True, description="Whether to print debug information."
165165
)
166+
@root_validator(pre=True)  # pre=True: must run before the ge=1 / ge=0 field constraints reject -1
def set_dynamic_defaults(cls, values):
    """Resolve the -1 sentinel for thread counts to the machine's CPU count.

    If ``n_threads`` or ``n_threads_batch`` is -1, replace it with
    ``multiprocessing.cpu_count()`` so the later per-field validation
    (``ge=1`` / ``ge=0``) passes.

    NOTE(review): because this runs with ``pre=True`` on a ``BaseSettings``
    class, values sourced from environment variables arrive as raw
    *strings* before type coercion — so the string ``"-1"`` must be
    recognized as the sentinel too, or env-configured ``-1`` would slip
    past this validator and then fail the ``ge`` constraint.
    """
    cpu_count = multiprocessing.cpu_count()
    for key in ("n_threads", "n_threads_batch"):
        # Accept both int -1 (programmatic) and "-1" (environment string).
        if values.get(key, 0) in (-1, "-1"):
            values[key] = cpu_count
    return values
166175

167176

168177
class ServerSettings(BaseSettings):

0 commit comments

Comments
 (0)