@@ -25,7 +25,6 @@
 
 # check for any of the given keys in the dictionary and return the value of the first key found
 def get_key_opts(d, keys):
-    vals = []
     for k in keys:
         if k in d:
            return d[k]
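
The removed vals = [] assignment was dead code: get_key_opts simply walks the candidate keys and returns the value of the first one present in the dict, which is what lets the converter accept both GPT-2-style and HF-style hparam names. A small usage sketch, with made-up hparam values purely for illustration:

# illustrative only: two config dicts using different naming conventions
hparams_gpt2 = {"n_embd": 2560, "n_head": 32}
hparams_hf   = {"hidden_size": 2560, "num_attention_heads": 32}

print(get_key_opts(hparams_gpt2, ["n_embd", "hidden_size"]))  # 2560, "n_embd" matches first
print(get_key_opts(hparams_hf,   ["n_embd", "hidden_size"]))  # 2560, falls through to "hidden_size"
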
@@ -267,7 +266,6 @@ def _set_vocab_gpt2(self):
                 toktypes.append(gguf.TokenType.USER_DEFINED)
             elif reverse_vocab[i] in added_vocab:
                 tokens.append(reverse_vocab[i])
-                # check if tokenizer has added_tokens_decoder
                 if hasattr(tokenizer, "added_tokens_decoder"):
                     if tokenizer.added_tokens_decoder[i].special:
                         toktypes.append(gguf.TokenType.CONTROL)
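
For context on the hunk above: in recent transformers releases, tokenizer.added_tokens_decoder maps token ids to AddedToken objects whose .special flag marks control tokens, which is why the hasattr guard plus the nested check can route special added tokens to gguf.TokenType.CONTROL. Below is a minimal sketch of that classification using stand-in objects rather than a real tokenizer; the fallback to USER_DEFINED for non-special added tokens is assumed from the surrounding code and is not shown in this hunk:

from types import SimpleNamespace

# stand-in for tokenizer.added_tokens_decoder: token id -> object with a .special flag
added_tokens_decoder = {
    50256: SimpleNamespace(special=True),   # e.g. an end-of-text control token
    50295: SimpleNamespace(special=False),  # e.g. a plain user-added token
}

for i, tok in added_tokens_decoder.items():
    kind = "CONTROL" if tok.special else "USER_DEFINED"
    print(i, kind)
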
@@ -1092,7 +1090,9 @@ def set_gguf_parameters(self):
         self.gguf_writer.add_head_count_kv(get_key_opts(self.hparams, ["n_head", "num_attention_heads"]))
         self.gguf_writer.add_layer_norm_eps(get_key_opts(self.hparams, ["layer_norm_epsilon", "layer_norm_eps"]))
         self.gguf_writer.add_rope_dimension_count(
-            int(get_key_opts(self.hparams, ["partial_rotary_factor"]) * get_key_opts(self.hparams, ["n_embd", "hidden_size"])) // get_key_opts(self.hparams, ["n_head", "num_attention_heads"]))
+            int(get_key_opts(self.hparams, ["partial_rotary_factor"]) *
+                get_key_opts(self.hparams, ["n_embd", "hidden_size"])) //
+            get_key_opts(self.hparams, ["n_head", "num_attention_heads"]))
         self.gguf_writer.add_file_type(self.ftype)
         self.gguf_writer.add_add_bos_token(False)
 
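
The reflowed expression keeps the original grouping: the rotary dimension count is int(partial_rotary_factor * hidden_size) // n_head, i.e. the per-head dimension scaled by the rotary fraction. A worked example with Phi-2-style numbers, included purely for illustration:

partial_rotary_factor = 0.4   # fraction of each head dimension that gets rotary embeddings
hidden_size = 2560            # n_embd / hidden_size
n_head = 32                   # n_head / num_attention_heads

rope_dims = int(partial_rotary_factor * hidden_size) // n_head
print(rope_dims)  # int(0.4 * 2560) // 32 == 1024 // 32 == 32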