+#include "llama-arch.h"
+#include "llama-impl.h"
+
+#include <map>
+
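+// human-readable name for each supported architecture; llm_arch_from_string() maps these strings back to the enum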
+static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
+    { LLM_ARCH_LLAMA, "llama" },
+    { LLM_ARCH_LLAMA4, "llama4" },
+    { LLM_ARCH_DECI, "deci" },
+    { LLM_ARCH_FALCON, "falcon" },
+    { LLM_ARCH_GROK, "grok" },
+    { LLM_ARCH_GPT2, "gpt2" },
+    { LLM_ARCH_GPTJ, "gptj" },
+    { LLM_ARCH_GPTNEOX, "gptneox" },
+    { LLM_ARCH_MPT, "mpt" },
+    { LLM_ARCH_BAICHUAN, "baichuan" },
+    { LLM_ARCH_STARCODER, "starcoder" },
+    { LLM_ARCH_REFACT, "refact" },
+    { LLM_ARCH_BERT, "bert" },
+    { LLM_ARCH_NOMIC_BERT, "nomic-bert" },
+    { LLM_ARCH_JINA_BERT_V2, "jina-bert-v2" },
+    { LLM_ARCH_BLOOM, "bloom" },
+    { LLM_ARCH_STABLELM, "stablelm" },
+    { LLM_ARCH_QWEN, "qwen" },
+    { LLM_ARCH_QWEN2, "qwen2" },
+    { LLM_ARCH_QWEN2MOE, "qwen2moe" },
+    { LLM_ARCH_QWEN2VL, "qwen2vl" },
+    { LLM_ARCH_QWEN3, "qwen3" },
+    { LLM_ARCH_QWEN3MOE, "qwen3moe" },
+    { LLM_ARCH_PHI2, "phi2" },
+    { LLM_ARCH_PHI3, "phi3" },
+    { LLM_ARCH_PLAMO, "plamo" },
+    { LLM_ARCH_CODESHELL, "codeshell" },
+    { LLM_ARCH_ORION, "orion" },
+    { LLM_ARCH_INTERNLM2, "internlm2" },
+    { LLM_ARCH_MINICPM, "minicpm" },
+    { LLM_ARCH_GEMMA, "gemma" },
+    { LLM_ARCH_GEMMA2, "gemma2" },
+    { LLM_ARCH_GEMMA3, "gemma3" },
+    { LLM_ARCH_STARCODER2, "starcoder2" },
+    { LLM_ARCH_MAMBA, "mamba" },
+    { LLM_ARCH_XVERSE, "xverse" },
+    { LLM_ARCH_COMMAND_R, "command-r" },
+    { LLM_ARCH_DBRX, "dbrx" },
+    { LLM_ARCH_OLMO, "olmo" },
+    { LLM_ARCH_OPENELM, "openelm" },
+    { LLM_ARCH_ARCTIC, "arctic" },
+    { LLM_ARCH_DEEPSEEK2, "deepseek2" },
+    { LLM_ARCH_CHATGLM, "chatglm" },
+    { LLM_ARCH_GLM4, "glm4" },
+    { LLM_ARCH_GLM4_MOE, "glm4moe" },
+    { LLM_ARCH_BITNET, "bitnet" },
+    { LLM_ARCH_BITNET_25, "bitnet-25" },
+    { LLM_ARCH_BITNET_B158, "bitnet-b1.58" },
+    { LLM_ARCH_T5, "t5" },
+    { LLM_ARCH_T5ENCODER, "t5encoder" },
+    { LLM_ARCH_JAIS, "jais" },
+    { LLM_ARCH_GRANITE, "granite" },
+    { LLM_ARCH_GRANITE_MOE, "granitemoe" },
+    { LLM_ARCH_COHERE2, "cohere2" },
+    { LLM_ARCH_DOTS1, "dots1" },
+    { LLM_ARCH_ERNIE4_5, "ernie4_5" },
+    { LLM_ARCH_ERNIE4_5_MOE, "ernie4_5-moe" },
+    { LLM_ARCH_HUNYUAN_MOE, "hunyuan-moe" },
+    { LLM_ARCH_OPENAI_MOE, "gpt-oss" },
+    { LLM_ARCH_UNKNOWN, "(unknown)" },
+};
+
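+// resolve an architecture name to its enum value; unrecognized names return LLM_ARCH_UNKNOWN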
+llm_arch llm_arch_from_string(const std::string & name) {
+    for (const auto & kv : LLM_ARCH_NAMES) { // NOLINT
+        if (kv.second == name) {
+            return kv.first;
+        }
+    }
+
+    return LLM_ARCH_UNKNOWN;
+}
+
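+// metadata key name templates; "%s" placeholders are expanded with the architecture name by LLM_KV::operator()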
+static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
+    { LLM_KV_GENERAL_TYPE, "general.type" },
+    { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
+    { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
+    { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
+    { LLM_KV_GENERAL_NAME, "general.name" },
+    { LLM_KV_GENERAL_AUTHOR, "general.author" },
+    { LLM_KV_GENERAL_VERSION, "general.version" },
+    { LLM_KV_GENERAL_URL, "general.url" },
+    { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
+    { LLM_KV_GENERAL_LICENSE, "general.license" },
+    { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
+    { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
+
+    { LLM_KV_VOCAB_SIZE, "%s.vocab_size" },
+    { LLM_KV_CONTEXT_LENGTH, "%s.context_length" },
+    { LLM_KV_EMBEDDING_LENGTH, "%s.embedding_length" },
+    { LLM_KV_BLOCK_COUNT, "%s.block_count" },
+    { LLM_KV_LEADING_DENSE_BLOCK_COUNT, "%s.leading_dense_block_count" },
+    { LLM_KV_FEED_FORWARD_LENGTH, "%s.feed_forward_length" },
+    { LLM_KV_EXPERT_FEED_FORWARD_LENGTH, "%s.expert_feed_forward_length" },
+    { LLM_KV_EXPERT_SHARED_FEED_FORWARD_LENGTH, "%s.expert_shared_feed_forward_length" },
+    { LLM_KV_USE_PARALLEL_RESIDUAL, "%s.use_parallel_residual" },
+    { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" },
+    { LLM_KV_EXPERT_COUNT, "%s.expert_count" },
+    { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" },
+    { LLM_KV_EXPERT_SHARED_COUNT, "%s.expert_shared_count" },
+    { LLM_KV_EXPERT_WEIGHTS_SCALE, "%s.expert_weights_scale" },
+    { LLM_KV_EXPERT_WEIGHTS_NORM, "%s.expert_weights_norm" },
+    { LLM_KV_EXPERT_GATING_FUNC, "%s.expert_gating_func" },
+    { LLM_KV_NEXTN_PREDICT_LAYERS, "%s.nextn_predict_layers" },
+    { LLM_KV_POOLING_TYPE, "%s.pooling_type" },
+    { LLM_KV_LOGIT_SCALE, "%s.logit_scale" },
+    { LLM_KV_DECODER_START_TOKEN_ID, "%s.decoder_start_token_id" },
+    { LLM_KV_ATTN_LOGIT_SOFTCAPPING, "%s.attn_logit_softcapping" },
+    { LLM_KV_ROUTER_LOGIT_SOFTCAPPING, "%s.router_logit_softcapping" },
+    { LLM_KV_FINAL_LOGIT_SOFTCAPPING, "%s.final_logit_softcapping" },
+    { LLM_KV_RESIDUAL_SCALE, "%s.residual_scale" },
+    { LLM_KV_EMBEDDING_SCALE, "%s.embedding_scale" },
+    { LLM_KV_TOKEN_SHIFT_COUNT, "%s.token_shift_count" },
+    { LLM_KV_INTERLEAVE_MOE_LAYER_STEP, "%s.interleave_moe_layer_step" },
+
+    { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" },
+    { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" },
+    { LLM_KV_ATTENTION_MAX_ALIBI_BIAS, "%s.attention.max_alibi_bias" },
+    { LLM_KV_ATTENTION_CLAMP_KQV, "%s.attention.clamp_kqv" },
+    { LLM_KV_ATTENTION_KEY_LENGTH, "%s.attention.key_length" },
+    { LLM_KV_ATTENTION_VALUE_LENGTH, "%s.attention.value_length" },
+    { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" },
+    { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" },
+    { LLM_KV_ATTENTION_CAUSAL, "%s.attention.causal" },
+    { LLM_KV_ATTENTION_Q_LORA_RANK, "%s.attention.q_lora_rank" },
+    { LLM_KV_ATTENTION_KV_LORA_RANK, "%s.attention.kv_lora_rank" },
+    { LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT, "%s.attention.relative_buckets_count" },
+    { LLM_KV_ATTENTION_SLIDING_WINDOW, "%s.attention.sliding_window" },
+    { LLM_KV_ATTENTION_SCALE, "%s.attention.scale" },
+    { LLM_KV_ATTENTION_OUTPUT_SCALE, "%s.attention.output_scale" },
+    { LLM_KV_ATTENTION_TEMPERATURE_LENGTH, "%s.attention.temperature_length" },
+
+    { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
+    { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
+    { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
+    { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
+    { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
+    { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
+    { LLM_KV_ROPE_SCALING_ATTN_FACTOR, "%s.rope.scaling.attn_factor" },
+    { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" },
+    { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" },
+    { LLM_KV_ROPE_SCALING_YARN_LOG_MUL, "%s.rope.scaling.yarn_log_multiplier" },
+    { LLM_KV_ROPE_SCALING_YARN_EXT_FACTOR, "%s.rope.scaling.yarn_ext_factor" },
+    { LLM_KV_ROPE_SCALING_YARN_ATTN_FACTOR, "%s.rope.scaling.yarn_attn_factor" },
+    { LLM_KV_ROPE_SCALING_YARN_BETA_FAST, "%s.rope.scaling.yarn_beta_fast" },
+    { LLM_KV_ROPE_SCALING_YARN_BETA_SLOW, "%s.rope.scaling.yarn_beta_slow" },
+
+    { LLM_KV_SPLIT_NO, "split.no" },
+    { LLM_KV_SPLIT_COUNT, "split.count" },
+    { LLM_KV_SPLIT_TENSORS_COUNT, "split.tensors.count" },
+
+    { LLM_KV_SSM_CONV_KERNEL, "%s.ssm.conv_kernel" },
+    { LLM_KV_SSM_INNER_SIZE, "%s.ssm.inner_size" },
+    { LLM_KV_SSM_STATE_SIZE, "%s.ssm.state_size" },
+    { LLM_KV_SSM_TIME_STEP_RANK, "%s.ssm.time_step_rank" },
+
+    { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" },
+    { LLM_KV_TOKENIZER_PRE, "tokenizer.ggml.pre" },
+    { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" },
+    { LLM_KV_TOKENIZER_TOKEN_TYPE, "tokenizer.ggml.token_type" },
+    { LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, "tokenizer.ggml.token_type_count" },
+    { LLM_KV_TOKENIZER_SCORES, "tokenizer.ggml.scores" },
+    { LLM_KV_TOKENIZER_MERGES, "tokenizer.ggml.merges" },
+    { LLM_KV_TOKENIZER_BOS_ID, "tokenizer.ggml.bos_token_id" },
+    { LLM_KV_TOKENIZER_EOS_ID, "tokenizer.ggml.eos_token_id" },
+    { LLM_KV_TOKENIZER_UNK_ID, "tokenizer.ggml.unknown_token_id" },
+    { LLM_KV_TOKENIZER_SEP_ID, "tokenizer.ggml.seperator_token_id" },
+    { LLM_KV_TOKENIZER_PAD_ID, "tokenizer.ggml.padding_token_id" },
+    { LLM_KV_TOKENIZER_CLS_ID, "tokenizer.ggml.cls_token_id" },
+    { LLM_KV_TOKENIZER_MASK_ID, "tokenizer.ggml.mask_token_id" },
+    { LLM_KV_TOKENIZER_ADD_BOS, "tokenizer.ggml.add_bos_token" },
+    { LLM_KV_TOKENIZER_ADD_EOS, "tokenizer.ggml.add_eos_token" },
+    { LLM_KV_TOKENIZER_ADD_SEP, "tokenizer.ggml.add_sep_token" },
+    { LLM_KV_TOKENIZER_ADD_PREFIX, "tokenizer.ggml.add_space_prefix" },
+    { LLM_KV_TOKENIZER_REMOVE_EXTRA_WS, "tokenizer.ggml.remove_extra_whitespaces" },
+    { LLM_KV_TOKENIZER_PRECOMPILED_CHARSMAP, "tokenizer.ggml.precompiled_charsmap" },
+    { LLM_KV_TOKENIZER_HF_JSON, "tokenizer.huggingface.json" },
+    { LLM_KV_TOKENIZER_RWKV, "tokenizer.rwkv.world" },
+    { LLM_KV_TOKENIZER_CHAT_TEMPLATE, "tokenizer.chat_template" },
+    { LLM_KV_TOKENIZER_CHAT_TEMPLATE_N, "tokenizer.chat_template.%s" },
+    { LLM_KV_TOKENIZER_FIM_PRE_ID, "tokenizer.ggml.fim_pre_token_id" },
+    { LLM_KV_TOKENIZER_FIM_SUF_ID, "tokenizer.ggml.fim_suf_token_id" },
+    { LLM_KV_TOKENIZER_FIM_MID_ID, "tokenizer.ggml.fim_mid_token_id" },
+    { LLM_KV_TOKENIZER_FIM_PAD_ID, "tokenizer.ggml.fim_pad_token_id" },
+    { LLM_KV_TOKENIZER_FIM_REP_ID, "tokenizer.ggml.fim_rep_token_id" },
+    { LLM_KV_TOKENIZER_FIM_SEP_ID, "tokenizer.ggml.fim_sep_token_id" },
+
+    { LLM_KV_TOKENIZER_PREFIX_ID, "tokenizer.ggml.prefix_token_id" },
+    { LLM_KV_TOKENIZER_SUFFIX_ID, "tokenizer.ggml.suffix_token_id" },
+    { LLM_KV_TOKENIZER_MIDDLE_ID, "tokenizer.ggml.middle_token_id" },
+    { LLM_KV_TOKENIZER_EOT_ID, "tokenizer.ggml.eot_token_id" },
+    { LLM_KV_TOKENIZER_EOM_ID, "tokenizer.ggml.eom_token_id" },
+
+    { LLM_KV_ADAPTER_TYPE, "adapter.type" },
+    { LLM_KV_ADAPTER_LORA_ALPHA, "adapter.lora.alpha" },
+};
+
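+// expand a key template for the configured architecture, e.g.
+// LLM_KV(LLM_ARCH_LLAMA, nullptr)(LLM_KV_CONTEXT_LENGTH) yields "llama.context_length"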
+LLM_KV::LLM_KV(llm_arch arch, const char * suffix) : arch(arch), suffix(suffix) {}
+
+std::string LLM_KV::operator()(llm_kv kv) const {
+    return suffix ? ::format(LLM_KV_NAMES.at(kv), LLM_ARCH_NAMES.at(arch), suffix)
+                  : ::format(LLM_KV_NAMES.at(kv), LLM_ARCH_NAMES.at(arch));
+}
+
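+// printable name for an architecture; falls back to "unknown" for values missing from the table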
+const char * llama_model_arch_name(llm_arch arch) {
+    auto it = LLM_ARCH_NAMES.find(arch);
+    if (it == LLM_ARCH_NAMES.end()) {
+        return "unknown";
+    }
+    return it->second;
+}
+