2 | 2 | # -*- coding: utf-8 -*- |
3 | 3 | from __future__ import absolute_import, division, print_function, unicode_literals |
4 | 4 |
5 | | -"""PyNeoFile CLI (core-only) |
6 | | -- Uses only the `pyneofile` core module. |
7 | | -- Supports --no-json fast path for scans. |
8 | | -- Adds verbose (-d) printing during CREATE (-c), mirroring original output. |
9 | | -""" |
10 | | - |
11 | 5 | import sys, os, io, argparse |
12 | | -import pyneofile as P |
13 | | - |
14 | | -def _read_input_bytes(path): |
15 | | - if path in (None, '-', b'-'): |
16 | | - return getattr(sys.stdin, 'buffer', sys.stdin).read() |
17 | | - with io.open(path, 'rb') as fp: return fp.read() |
18 | | - |
19 | | -def _write_output_bytes(path, data): |
20 | | - if isinstance(data, str): data = data.encode('utf-8') |
21 | | - if path in (None, '-', b'-'): |
22 | | - getattr(sys.stdout, 'buffer', sys.stdout).write(data); return |
23 | | - d = os.path.dirname(path); (os.makedirs(d) if d and not os.path.isdir(d) else None) |
24 | | - with io.open(path, 'wb') as fp: fp.write(data) |
25 | | - |
26 | | -def main(argv=None): |
27 | | - p = argparse.ArgumentParser(prog="neofile", description="PyNeoFile CLI (core-only)") |
| 6 | + |
| 7 | +try: |
| 8 | + import pyneofile as P # core must provide *_neo functions |
| 9 | +except Exception as e: |
| 10 | + sys.stderr.write("Failed to import core module 'pyneofile': %s\n" % (e,)) |
| 11 | + sys.exit(2) |
| 12 | + |
| 13 | + |
| 14 | +def _expand_combined_short_opts(argv): |
| 15 | + out = [argv[0]] |
| 16 | + i = 1 |
| 17 | + while i < len(argv): |
| 18 | + a = argv[i] |
| 19 | + if a.startswith("--") or not (a.startswith("-") and len(a) > 2): |
| 20 | + out.append(a); i += 1; continue |
| 21 | + for ch in a[1:]: |
| 22 | + out.append("-" + ch) |
| 23 | + i += 1 |
| 24 | + return out |
| 25 | + |
| 26 | + |
| 27 | +def main(): |
| 28 | + argv = _expand_combined_short_opts(sys.argv) |
| 29 | + |
| 30 | + p = argparse.ArgumentParser( |
| 31 | + description="PyNeoFile CLI (uses pyneofile core)") |
28 | 32 | g = p.add_mutually_exclusive_group(required=True) |
29 | | - g.add_argument('-l', '--list', action='store_true') |
30 | | - g.add_argument('-e', '--extract', action='store_true') |
31 | | - g.add_argument('-c', '--create', action='store_true') |
32 | | - g.add_argument('-r', '--repack', action='store_true') |
33 | | - g.add_argument('--validate', action='store_true') |
34 | | - g.add_argument('-t', '--convert', action='store_true') |
35 | | - p.add_argument('-i','--input'); p.add_argument('-o','--output') |
36 | | - p.add_argument('-P','--compression', default='auto') |
37 | | - p.add_argument('-L','--level', default=None, type=int) |
38 | | - p.add_argument('--skipchecksum', action='store_true') |
39 | | - p.add_argument('-d','--verbose', action='store_true') |
40 | | - p.add_argument('--no-json', action='store_true') |
41 | | - a = p.parse_args(argv) |
42 | | - |
43 | | - if a.list: |
44 | | - src = a.input |
45 | | - if src in (None, '-', b'-'): |
46 | | - data = _read_input_bytes(src) |
47 | | - entries = P.archivefilelistfiles_neo(data, advanced=a.verbose, include_dirs=True, skipjson=True if a.no_json else True) |
| 33 | + g.add_argument("-l", "--list", action="store_true", help="List archive") |
| 34 | + g.add_argument("-e", "--extract", action="store_true", help="Extract archive") |
| 35 | + g.add_argument("-c", "--create", action="store_true", help="Create archive from path") |
| 36 | + g.add_argument("-r", "--repack", action="store_true", help="Repack (recompress) an archive") |
| 37 | + g.add_argument("-E", "--empty", action="store_true", help="Create an empty archive") |
| 38 | + |
| 39 | + p.add_argument("-i", "--input", help="Input file/path", nargs="*") |
| 40 | + p.add_argument("-o", "--output", help="Output file/dir (or '-' for stdout)") |
| 41 | + p.add_argument("-d", "--verbose", action="store_true", help="Verbose/detailed listing") |
| 42 | + p.add_argument("--no-json", action="store_true", help="Skip JSON parsing on read (faster)") |
| 43 | + p.add_argument("-P", "--compression", default="auto", help="Compression algo (auto, none, zlib, gzip, bz2, lzma)") |
| 44 | + p.add_argument("-L", "--level", type=int, default=None, help="Compression level/preset") |
| 45 | + p.add_argument("--checksum", default="crc32", help="Checksum type for header/content/json (default: crc32)") |
| 46 | + |
| 47 | + args = p.parse_args(argv[1:]) |
| 48 | + |
| 49 | + src = None |
| 50 | + if args.input: |
| 51 | + if isinstance(args.input, list) and len(args.input) == 1: |
| 52 | + src = args.input[0] |
| 53 | + elif isinstance(args.input, list) and len(args.input) > 1: |
| 54 | + src = args.input[0] |
48 | 55 | else: |
49 | | - entries = P.archivefilelistfiles_neo(src, advanced=a.verbose, include_dirs=True, skipjson=a.no_json) |
50 | | - if a.verbose: |
| 56 | + src = args.input |
| 57 | + |
| 58 | + if args.empty: |
| 59 | + dst = args.output or "-" |
| 60 | + blob_or_true = P.make_empty_file_neo(dst, fmttype="auto", checksumtype=args.checksum, formatspecs=None, encoding="UTF-8", returnfp=False) |
| 61 | + if dst in (None, "-"): |
| 62 | + data = blob_or_true if isinstance(blob_or_true, (bytes, bytearray)) else b"" |
| 63 | + if hasattr(sys.stdout, "buffer"): |
| 64 | + sys.stdout.buffer.write(data) |
| 65 | + else: |
| 66 | + sys.stdout.write(data.decode("latin1")) |
| 67 | + return 0 |
| 68 | + |
| 69 | + if args.list: |
| 70 | + if not src: |
| 71 | + p.error("list requires -i <archive>") |
| 72 | + entries = P.archivefilelistfiles_neo(src, advanced=args.verbose, include_dirs=True, skipjson=args.no_json) |
| 73 | + if args.verbose: |
51 | 74 | for e in entries: |
52 | | - if isinstance(e, dict): print("{type}\t{compression}\t{size}\t{name}".format(**e)) |
53 | | - else: print(e) |
| 75 | + if isinstance(e, dict): |
| 76 | + print("{type}\t{compression}\t{size}\t{name}".format(**e)) |
| 77 | + else: |
| 78 | + print(e) |
54 | 79 | else: |
55 | | - for e in entries: print(e['name'] if isinstance(e, dict) else e) |
| 80 | + for n in entries: |
| 81 | + print(n if isinstance(n, str) else n.get("name")) |
56 | 82 | return 0 |
57 | 83 |
58 | | - if a.validate: |
59 | | - src = a.input |
60 | | - if src in (None, '-', b'-'): |
61 | | - data = _read_input_bytes(src) |
62 | | - ok, details = P.archivefilevalidate_neo(data, verbose=a.verbose, return_details=True, skipjson=a.no_json) |
63 | | - else: |
64 | | - ok, details = P.archivefilevalidate_neo(src, verbose=a.verbose, return_details=True, skipjson=a.no_json) |
65 | | - print("OK" if ok else "BAD") |
66 | | - if a.verbose: |
67 | | - for d in details: print("{index}\t{name}\t{header_ok}\t{json_ok}\t{content_ok}".format(**d)) |
68 | | - return 0 if ok else 2 |
69 | | - |
70 | | - if a.extract: |
71 | | - src = a.input; outdir = a.output or '.' |
72 | | - if src in (None, '-', b'-'): |
73 | | - data = _read_input_bytes(src); ok = P.unpack_neo(data, outdir, skipchecksum=a.skipchecksum, uncompress=True) |
74 | | - else: |
75 | | - ok = P.unpack_neo(src, outdir, skipchecksum=a.skipchecksum, uncompress=True) |
| 84 | + if args.extract: |
| 85 | + if not src: |
| 86 | + p.error("extract requires -i <archive>") |
| 87 | + outdir = args.output or "." |
| 88 | + ok = P.unpack_neo(src, outdir, formatspecs=None, skipchecksum=False, uncompress=True) |
76 | 89 | return 0 if ok else 1 |
77 | 90 |
78 | | - if a.create: |
79 | | - dst = a.output or '-'; src_path = a.input |
80 | | - if src_path in (None, '-', b'-'): |
81 | | - data = _read_input_bytes(src_path); payload = {"stdin.bin": data} |
82 | | - blob = P.pack_neo(payload, outfile=None, checksumtypes=('crc32','crc32','crc32'), |
83 | | - encoding='UTF-8', compression=a.compression, compression_level=a.level) |
84 | | - _write_output_bytes(dst, blob); return 0 |
85 | | - if a.verbose: |
86 | | - norm = os.path.normpath(src_path) |
87 | | - if os.path.isfile(norm): |
88 | | - base = os.path.basename(norm).replace('\\','/'); print('./' + base) |
| 91 | + if args.create: |
| 92 | + if not src: |
| 93 | + p.error("create requires -i <path>") |
| 94 | + if args.verbose: |
| 95 | + walkroot = src |
| 96 | + if os.path.isdir(walkroot): |
| 97 | + print(walkroot) |
| 98 | + for root, dirs, files in os.walk(walkroot): |
| 99 | + relroot = root if root.startswith("./") else "./" + root.replace("\\", "/") |
| 100 | + if root != walkroot: |
| 101 | + print(relroot) |
| 102 | + for name in sorted(files): |
| 103 | + path = os.path.join(root, name).replace("\\", "/") |
| 104 | + if not path.startswith("./"): |
| 105 | + path = "./" + path |
| 106 | + print(path) |
89 | 107 | else: |
90 | | - base = os.path.basename(norm).replace('\\','/') |
91 | | - for root, dirs, files in os.walk(norm, topdown=True): |
92 | | - rel = base if root == norm else base + '/' + os.path.relpath(root, norm).replace('\\','/') |
93 | | - print('./' + rel) |
94 | | - for fname in sorted(files): print('./' + rel + '/' + fname) |
95 | | - res = P.pack_neo(src_path, outfile=dst, checksumtypes=('crc32','crc32','crc32'), |
96 | | - encoding='UTF-8', compression=a.compression, compression_level=a.level) |
97 | | - if isinstance(res, (bytes, bytearray)): _write_output_bytes(dst, res) |
98 | | - return 0 |
| 108 | + path = src if src.startswith("./") else "./" + src |
| 109 | + print(path) |
99 | 110 |
100 | | - if a.repack: |
101 | | - src = a.input; dst = a.output or '-' |
102 | | - data_or_path = src if src not in (None, '-', b'-') else _read_input_bytes(src) |
103 | | - res = P.repack_neo(data_or_path, outfile=dst, checksumtypes=('crc32','crc32','crc32'), |
104 | | - compression=a.compression, compression_level=a.level) |
105 | | - if isinstance(res, (bytes, bytearray)): _write_output_bytes(dst, res) |
106 | | - return 0 |
| 111 | + outpath = args.output or "-" |
| 112 | + ok = P.pack_neo(src, outpath, formatspecs=None, checksumtypes=("crc32","crc32","crc32"), |
| 113 | + encoding="UTF-8", compression=args.compression, compression_level=args.level) |
| 114 | + if outpath in (None, "-") and isinstance(ok, (bytes, bytearray)): |
| 115 | + if hasattr(sys.stdout, "buffer"): |
| 116 | + sys.stdout.buffer.write(ok) |
| 117 | + else: |
| 118 | + sys.stdout.write(ok.decode("latin1")) |
| 119 | + return 0 |
| 120 | + return 0 if ok else 1 |
| 121 | + |
| 122 | + if args.repack: |
| 123 | + if not src: |
| 124 | + p.error("repack requires -i <archive>") |
| 125 | + outpath = args.output or "-" |
| 126 | + ok = P.repack_neo(src, outpath, formatspecs=None, checksumtypes=("crc32","crc32","crc32"), |
| 127 | + compression=args.compression, compression_level=args.level) |
| 128 | + if outpath in (None, "-") and isinstance(ok, (bytes, bytearray)): |
| 129 | + if hasattr(sys.stdout, "buffer"): |
| 130 | + sys.stdout.buffer.write(ok) |
| 131 | + else: |
| 132 | + sys.stdout.write(ok.decode("latin1")) |
| 133 | + return 0 |
| 134 | + return 0 if ok else 1 |
| 135 | + |
| 136 | + return 0 |
107 | 137 |
108 | | - if a.convert: |
109 | | - src = a.input; dst = a.output or '-' |
110 | | - if src in (None, '-', b'-'): raise SystemExit("convert requires a path input (zip/tar)") |
111 | | - res = P.convert_foreign_to_neo(src, outfile=dst, checksumtypes=('crc32','crc32','crc32'), |
112 | | - compression=a.compression, compression_level=a.level) |
113 | | - if isinstance(res, (bytes, bytearray)): _write_output_bytes(dst, res) |
114 | | - return 0 |
115 | 138 |
116 | 139 | if __name__ == "__main__": |
117 | 140 | sys.exit(main()) |
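
For reference, a short usage sketch of the rewritten CLI, driven from Python. This is illustrative only: the script filename ("neofile_cli.py") and the input/output paths are assumptions, not names defined by this change; the flags themselves are the ones registered with argparse above.

import subprocess
import sys

def run(*cli_args):
    # Invoke the CLI with the current interpreter; raise if it exits non-zero.
    subprocess.run([sys.executable, "neofile_cli.py"] + list(cli_args), check=True)

run("-c", "-i", "somedir", "-o", "out.neo", "-P", "zlib", "-L", "9")   # create a zlib-compressed archive
run("-ld", "--no-json", "-i", "out.neo")                               # "-ld" is split into "-l -d": detailed listing, JSON parsing skipped
run("-e", "-i", "out.neo", "-o", "restored")                           # extract into ./restored
run("-r", "-i", "out.neo", "-o", "repacked.neo", "-P", "bz2")          # repack with bz2 compression

Note that when -o is omitted for create, repack, or empty, the code above defaults the output to "-" and writes the archive bytes to stdout instead of a file.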