
Commit a65e2ed

Committed by dhn
1 parent: c7c700b
File tree: 1 file changed (+4, -0 lines)


llama_cpp/llama.py

Lines changed: 4 additions & 0 deletions
@@ -262,6 +262,10 @@ def __init__(
 
         self.params = llama_cpp.llama_context_default_params()
         self.params.n_ctx = n_ctx
+        if n_gqa is not None:
+            self.params.n_gqa = n_gqa
+        if rms_norm_eps is not None:
+            self.params.rms_norm_eps = rms_norm_eps
         self.params.n_gpu_layers = n_gpu_layers
         self.params.seed = seed
         self.params.f16_kv = f16_kv
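
For context, the change forwards two new optional overrides, n_gqa (grouped-query attention) and rms_norm_eps (RMSNorm epsilon), to the underlying llama.cpp context parameters only when they are explicitly set. Below is a minimal usage sketch; it assumes the patched __init__ belongs to the Llama class and exposes these as keyword arguments, and the model path and values shown are illustrative only, not part of this commit.

    # Minimal usage sketch (assumptions noted above); path and values are illustrative.
    from llama_cpp import Llama

    llm = Llama(
        model_path="./models/llama-2-70b.ggmlv3.q4_0.bin",  # hypothetical path
        n_ctx=2048,
        n_gqa=8,            # grouped-query attention override
        rms_norm_eps=1e-5,  # RMSNorm epsilon override
    )
    # When either argument is left as None, the corresponding llama.cpp context
    # default is untouched, matching the `if ... is not None` guards in the patch.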
