Skip to content

Commit ecf8043

Browse files
author
katsu560
committed
yolo: add gguf-addfile.py, a script for adding files to a GGUF file
1 parent 661588c commit ecf8043

File tree

1 file changed

+278
-0
lines changed

1 file changed

+278
-0
lines changed

examples/yolo/gguf-addfile.py

Lines changed: 278 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,278 @@
1+
#!/usr/bin/env python3
2+
# gguf-addfile.py srcfile dstfile addfiles ...
3+
4+
from __future__ import annotations
5+
6+
import logging
7+
import argparse
8+
import os
9+
import sys
10+
from pathlib import Path
11+
from typing import Any
12+
13+
import numpy as np
14+
15+
# Necessary to load the local gguf package
16+
if "NO_LOCAL_GGUF" not in os.environ and (Path(__file__).parent.parent.parent / 'gguf-py').exists():
17+
#print("add path", str(Path(__file__).parent.parent))
18+
sys.path.insert(0, str(Path(__file__).parent.parent))
19+
20+
from gguf import GGUFReader, GGUFWriter, ReaderField, GGUFEndian, GGUFValueType, Keys, NamedObject # noqa: E402
21+
22+
logger = logging.getLogger("gguf-addfile")
23+
24+
25+
def get_file_host_endian(reader: GGUFReader) -> tuple[str, str]:
    """Return ``(host_endian, file_endian)`` as ``'LITTLE'``/``'BIG'`` strings.

    The reader reports ``byte_order == 'S'`` when the file's byte order is
    swapped relative to the host.
    """
    # Use sys.byteorder instead of np.uint32(1).newbyteorder("<"):
    # .newbyteorder() on numpy scalars was removed in NumPy 2.0.
    host_endian = 'LITTLE' if sys.byteorder == 'little' else 'BIG'
    if reader.byte_order == 'S':
        # 'S' = swapped: the file's endianness is the opposite of the host's.
        file_endian = 'BIG' if host_endian == 'LITTLE' else 'LITTLE'
    else:
        file_endian = host_endian
    return (host_endian, file_endian)
32+
33+
34+
# For more information about what field.parts and field.data represent,
# please see the comments in the modify_gguf.py example.
def dump_metadata(reader: GGUFReader, args: argparse.Namespace) -> None:
    """Pretty-print all key/value pairs (and, unless --no-tensors, all
    tensor descriptors) of the GGUF file behind *reader* to stdout."""
    host_endian, file_endian = get_file_host_endian(reader)
    print(f'* File is {file_endian} endian, script is running on a {host_endian} endian host.')
    print(f'\n* Dumping {len(reader.fields)} key/value pair(s)')
    for n, field in enumerate(reader.fields.values(), 1):
        # Build a human-readable type label, e.g. "[STRING]" for an array
        # of strings (one pair of brackets per nesting level).
        if not field.types:
            pretty_type = 'N/A'
        elif field.types[0] == GGUFValueType.ARRAY:
            nest_count = len(field.types) - 1
            pretty_type = '[' * nest_count + str(field.types[-1].name) + ']' * nest_count
        else:
            pretty_type = str(field.types[-1].name)
        print(f' {n:5}: {pretty_type:11} | {len(field.data):8} | {field.name}', end = '')
        # Only scalar (non-array) fields get their value printed inline.
        if len(field.types) == 1:
            curr_type = field.types[0]
            if curr_type == GGUFValueType.STRING:
                # Truncate long strings to 60 characters for display.
                print(' = {0}'.format(repr(str(bytes(field.parts[-1]), encoding='utf8')[:60])), end = '')
            elif curr_type == GGUFValueType.NAMEDOBJECT:
                # parts[4] is the object name, parts[5] its length
                # (see decode_field below) — TODO confirm against gguf-py.
                print(' = {0}'.format(repr(str(bytes(field.parts[4]), encoding='utf8')[:60])), end = '')
                print(', {0}'.format(int(field.parts[5]))[:20], end = '')
            elif field.types[0] in reader.gguf_scalar_to_np:
                print(' = {0}'.format(field.parts[-1][0]), end = '')
        print()
    if args.no_tensors:
        return
    print(f'\n* Dumping {len(reader.tensors)} tensor(s)')
    for n, tensor in enumerate(reader.tensors, 1):
        # Pad the shape out to 4 dimensions with 1s so columns line up.
        prettydims = ', '.join('{0:5}'.format(d) for d in list(tensor.shape) + [1] * (4 - len(tensor.shape)))
        print(f' {n:5}: {tensor.n_elements:10} | {prettydims} | {tensor.tensor_type.name:7} | {tensor.name}')
65+
66+
67+
def dump_metadata_json(reader: GGUFReader, args: argparse.Namespace) -> None:
    """Dump the file's metadata (and, unless --no-tensors, its tensor
    descriptors) as a single JSON document to stdout.

    Array values are included only when --json-array is given, since they
    can be very large.
    """
    import json
    host_endian, file_endian = get_file_host_endian(reader)
    metadata: dict[str, Any] = {}
    tensors: dict[str, Any] = {}
    result = {
        "filename": args.input,
        "endian": file_endian,
        "metadata": metadata,
        "tensors": tensors,
    }
    for idx, field in enumerate(reader.fields.values()):
        curr: dict[str, Any] = {
            "index": idx,
            "type": field.types[0].name if field.types else 'UNKNOWN',
            "offset": field.offset,
        }
        metadata[field.name] = curr
        if field.types[:1] == [GGUFValueType.ARRAY]:
            curr["array_types"] = [t.name for t in field.types][1:]
            if not args.json_array:
                continue
            itype = field.types[-1]
            # STRING and NAMEDOBJECT array elements are both decoded as
            # UTF-8 strings from their parts.
            if itype == GGUFValueType.STRING or itype == GGUFValueType.NAMEDOBJECT:
                curr["value"] = [str(bytes(field.parts[idx]), encoding="utf-8") for idx in field.data]
            else:
                curr["value"] = [pv for idx in field.data for pv in field.parts[idx].tolist()]
        elif field.types[0] == GGUFValueType.STRING:
            curr["value"] = str(bytes(field.parts[-1]), encoding="utf-8")
        elif field.types[0] == GGUFValueType.NAMEDOBJECT:
            # BUG FIX: the original assigned curr["value"] twice, so the
            # object's name was immediately overwritten by its length.
            # Keep the name as the value and expose the length separately.
            curr["value"] = str(bytes(field.parts[4]), encoding="utf-8")
            curr["length"] = int(field.parts[5])
        else:
            curr["value"] = field.parts[-1].tolist()[0]
    if not args.no_tensors:
        for idx, tensor in enumerate(reader.tensors):
            tensors[tensor.name] = {
                "index": idx,
                "shape": tensor.shape.tolist(),
                "type": tensor.tensor_type.name,
                "offset": tensor.field.offset,
            }
    json.dump(result, sys.stdout)
112+
113+
114+
def get_byteorder(reader: GGUFReader) -> GGUFEndian:
    """Return the GGUFEndian of the file backing *reader*."""
    # Use sys.byteorder instead of np.uint32(1).newbyteorder("<"):
    # .newbyteorder() on numpy scalars was removed in NumPy 2.0.
    if sys.byteorder == 'little':
        # Host is little endian
        host_endian = GGUFEndian.LITTLE
        swapped_endian = GGUFEndian.BIG
    else:
        # Sorry PDP or other weird systems that don't use BE or LE.
        host_endian = GGUFEndian.BIG
        swapped_endian = GGUFEndian.LITTLE

    # byte_order 'S' means the file is byte-swapped relative to the host.
    if reader.byte_order == "S":
        return swapped_endian
    else:
        return host_endian
128+
129+
130+
def decode_field(field: ReaderField) -> Any:
    """Decode a ReaderField into a plain Python value.

    Returns a list for arrays, a str for strings and named objects (the
    object's name), a scalar otherwise, or None for an empty/absent field.
    """
    if field and field.types:
        main_type = field.types[0]

        if main_type == GGUFValueType.ARRAY:
            sub_type = field.types[-1]

            # STRING and NAMEDOBJECT elements were handled by two identical
            # branches in the original; both decode parts as UTF-8 strings.
            if sub_type in (GGUFValueType.STRING, GGUFValueType.NAMEDOBJECT):
                return [str(bytes(field.parts[idx]), encoding='utf8') for idx in field.data]
            return [pv for idx in field.data for pv in field.parts[idx].tolist()]
        if main_type == GGUFValueType.STRING:
            return str(bytes(field.parts[-1]), encoding='utf8')
        if main_type == GGUFValueType.NAMEDOBJECT:
            # parts[4] holds the named object's name — TODO confirm layout
            # against gguf-py's NAMEDOBJECT encoding.
            return str(bytes(field.parts[4]), encoding='utf8')
        return field.parts[-1][0]

    return None
151+
152+
153+
def get_field_data(reader: GGUFReader, key: str) -> Any:
    """Look up *key* in the reader's metadata and return its decoded value
    (None when the field is absent or empty)."""
    return decode_field(reader.get_field(key))
157+
158+
159+
def copy_with_new_metadata(reader: GGUFReader, writer: GGUFWriter, new_metadata: dict[str, str], array: list[NamedObject] | None = None) -> None:
    """Copy every field and tensor from *reader* to *writer*, merging in
    *new_metadata*.

    *new_metadata* maps key -> filename.  Keys that already exist in the
    file replace the old value; the remainder are appended as named objects
    (or, when *array* is given, as entries of a named-object array).  The
    mapping is consumed: entries are deleted as they are written.

    NOTE: the original annotations referenced an unbound ``gguf`` module
    and an unimported ``Mapping``; they are corrected here.  ``dict`` is
    the honest type since the function mutates the mapping via ``del``.
    """
    for field in reader.fields.values():
        # Suppress virtual fields and fields written by GGUFWriter
        if field.name == Keys.General.ARCHITECTURE or field.name.startswith('GGUF.'):
            logger.debug(f'Suppressing {field.name}')
            continue

        # Skip old chat templates if we have new ones
        if field.name.startswith(Keys.Tokenizer.CHAT_TEMPLATE) and Keys.Tokenizer.CHAT_TEMPLATE in new_metadata:
            logger.debug(f'Skipping {field.name}')
            continue

        old_val = decode_field(field)
        val = new_metadata.get(field.name, old_val)

        if field.name in new_metadata:
            logger.debug(f'Modifying {field.name}: "{old_val}" -> "{val}"')
            del new_metadata[field.name]
        elif val is not None:
            logger.debug(f'Copying {field.name}')

        if val is not None:
            writer.add_key(field.name)
            writer.add_val(val, field.types[0])

    if Keys.Tokenizer.CHAT_TEMPLATE in new_metadata:
        logger.debug('Adding chat template(s)')
        writer.add_chat_template(new_metadata[Keys.Tokenizer.CHAT_TEMPLATE])
        del new_metadata[Keys.Tokenizer.CHAT_TEMPLATE]

    if array is None:
        for key, name in new_metadata.items():
            logger.debug(f'Adding {key}: {name}')
            # Embed the file's contents as a single named object.
            with open(name, "rb") as f:
                val = f.read()
            writer.add_namedobject(key, val, name)
    else:
        for key, name in new_metadata.items():
            logger.debug(f'Adding array {key}: {name}')
            # The file contents already live in *array*; 'val' is a dummy.
            writer.add_namedobject(key, 'val', name, array=array)

    for tensor in reader.tensors:
        # Dimensions are written in reverse order, so flip them first
        shape = np.flipud(tensor.shape)
        writer.add_tensor_info(tensor.name, shape, tensor.data.dtype, tensor.data.nbytes, tensor.tensor_type)

    writer.write_header_to_file()
    writer.write_kv_data_to_file()
    writer.write_ti_data_to_file()

    for tensor in reader.tensors:
        writer.write_tensor_data(tensor.data)

    writer.close()
215+
216+
217+
def main() -> None:
    """Parse the command line, then copy the input GGUF file to the output
    file with the given files embedded as named-object metadata, and dump
    the metadata of both files."""
    parser = argparse.ArgumentParser(description="Add files to GGUF file metadata")
    parser.add_argument("input", type=str, help="GGUF format model input filename")
    parser.add_argument("output", type=str, help="GGUF format model output filename")
    parser.add_argument("addfiles", type=str, nargs='+', help="add filenames ...")
    parser.add_argument("--array", action="store_true", help="add files to namedobject array")
    parser.add_argument("--no-tensors", action="store_true", help="Don't dump tensor metadata")
    parser.add_argument("--json", action="store_true", help="Produce JSON output")
    parser.add_argument("--json-array", action="store_true", help="Include full array values in JSON output (long)")
    parser.add_argument("--verbose", action="store_true", help="Increase output verbosity")
    # With no arguments at all, show the help text instead of erroring.
    args = parser.parse_args(None if len(sys.argv) > 1 else ["--help"])
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    logger.info(f'* Loading: {args.input}')
    reader = GGUFReader(args.input, 'r')
    arch = get_field_data(reader, Keys.General.ARCHITECTURE)
    endianess = get_byteorder(reader)

    logger.info(f'* Writing: {args.output}')
    writer = GGUFWriter(args.output, arch=arch, endianess=endianess)

    alignment = get_field_data(reader, Keys.General.ALIGNMENT)
    if alignment is not None:
        logger.debug(f'Setting custom alignment: {alignment}')
        writer.data_alignment = alignment

    logger.info(f'* Adding: {args.addfiles}')
    new_metadata: dict[str, str] = {}
    if not args.array:
        # One named-object key per file, numbered from 1.
        for count, path in enumerate(args.addfiles, 1):
            key = Keys.General.NAMEDOBJECT + Keys.General.CONNECT + str(count)
            new_metadata[key] = path
            logger.info(f'* Adding: {key} = {path}')
        copy_with_new_metadata(reader, writer, new_metadata)
    else:
        key = Keys.General.NAMEDOBJECT
        # The metadata value is a dummy; the real data goes into the array.
        new_metadata[key] = 'array'
        files = []
        for count, path in enumerate(args.addfiles):
            with open(path, "rb") as f:
                val = f.read()
            files.append(NamedObject(path, val))
            logger.info(f'* Adding: {key}[{count}] = {path}')
        copy_with_new_metadata(reader, writer, new_metadata, array=files)

    if args.json:
        dump_metadata_json(reader, args)
    else:
        dump_metadata(reader, args)

    # Re-open the freshly written file and dump it to verify the result.
    logger.info(f'* Reading: {args.output}')
    reader = GGUFReader(args.output, 'r')
    dump_metadata(reader, args)


if __name__ == '__main__':
    main()

0 commit comments

Comments
 (0)