From 010d4c2f399850ac8c56957a2dbaa671d9f6d2c1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?St=C3=A9phane=20du=20Hamel?=
Date: Fri, 11 Apr 2025 16:37:51 +0200
Subject: [PATCH] model: support T5 with llama.cpp naming convention

---
 model.cpp | 58 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 58 insertions(+)

diff --git a/model.cpp b/model.cpp
index 24da39f6d..66a87a660 100644
--- a/model.cpp
+++ b/model.cpp
@@ -177,6 +177,64 @@ std::unordered_map<std::string, std::string> pmid_v2_name_map = {
 std::string convert_open_clip_to_hf_clip(const std::string& name) {
     std::string new_name = name;
     std::string prefix;
+    if (contains(new_name, ".enc.")) {
+        // llama.cpp naming convention for T5
+        size_t pos = new_name.find(".enc.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 5, ".encoder.");
+        }
+        pos = new_name.find("blk.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 4, "block.");
+        }
+        pos = new_name.find("output_norm.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 12, "final_layer_norm.");
+        }
+        pos = new_name.find("attn_k.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.k.");
+        }
+        pos = new_name.find("attn_v.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.v.");
+        }
+        pos = new_name.find("attn_o.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.o.");
+        }
+        pos = new_name.find("attn_q.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.q.");
+        }
+        pos = new_name.find("attn_norm.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 10, "layer.0.layer_norm.");
+        }
+        pos = new_name.find("ffn_norm.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 9, "layer.1.layer_norm.");
+        }
+        pos = new_name.find("ffn_up.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.1.DenseReluDense.wi_1.");
+        }
+        pos = new_name.find("ffn_down.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 9, "layer.1.DenseReluDense.wo.");
+        }
+        pos = new_name.find("ffn_gate.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 9, "layer.1.DenseReluDense.wi_0.");
+        }
+        pos = new_name.find("attn_rel_b.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 11, "layer.0.SelfAttention.relative_attention_bias.");
+        }
+    } else if (name == "text_encoders.t5xxl.transformer.token_embd.weight") {
+        new_name = "text_encoders.t5xxl.transformer.shared.weight";
+    }
+
     if (starts_with(new_name, "conditioner.embedders.0.open_clip.")) {
         prefix = "cond_stage_model.";
         new_name = new_name.substr(strlen("conditioner.embedders.0.open_clip."));
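
Note (illustrative only, not part of the patch): the new branch rewrites llama.cpp-convention T5 tensor names into the HF-style names the rest of model.cpp expects. For example, assuming the "text_encoders.t5xxl.transformer." prefix used by the token_embd special case above, the name

    text_encoders.t5xxl.transformer.enc.blk.0.attn_q.weight

would leave convert_open_clip_to_hf_clip() as

    text_encoders.t5xxl.transformer.encoder.block.0.layer.0.SelfAttention.q.weight

via ".enc." -> ".encoder.", "blk." -> "block.", and "attn_q." -> "layer.0.SelfAttention.q.". The integer passed to std::string::replace in each case is the length of the substring being searched for, e.g. 5 for ".enc." and 11 for "attn_rel_b.".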