
Commit bfb6fb5

Author: Kazuki Suzuki Przyborowski
Commit message: Update pyarchivefile.py
1 parent 9c21864, commit bfb6fb5

File tree: 1 file changed (+93, -31 lines)


pyarchivefile.py

Lines changed: 93 additions & 31 deletions
@@ -2088,10 +2088,10 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         if(fjsonsize > 0):
             try:
-                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent).decode("UTF-8"))
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                 try:
-                    fjsoncontent = json.loads(fprejsoncontent.decode("UTF-8"))
+                    fjsoncontent = json.loads(fprejsoncontent)
                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                     fprejsoncontent = ""
                     fjsoncontent = {}
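
The hunk above makes the base64 path symmetric on Python 3: the JSON text read from the header is a str, so it is re-encoded to bytes before base64.b64decode(), and the plain-text fallback no longer calls .decode() on a str. A minimal, self-contained sketch of the same try-base64-then-plain-JSON fallback (the helper and variable names here are illustrative, not part of pyarchivefile.py):

    import base64
    import binascii
    import json

    def parse_header_json(prejson_text):
        # First assume the field holds base64-encoded JSON text.
        try:
            return json.loads(base64.b64decode(prejson_text.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            # Fall back to treating the field as plain JSON text.
            try:
                return json.loads(prejson_text)
            except (json.decoder.JSONDecodeError, UnicodeDecodeError):
                # Neither form parsed; mirror the diff by returning an empty dict.
                return {}

    print(parse_header_json("eyJhIjogMX0="))  # base64 of '{"a": 1}' -> {'a': 1}
    print(parse_header_json('{"b": 2}'))      # plain JSON text -> {'b': 2}
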
@@ -2230,16 +2230,39 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         if(fjsonsize > 0):
             try:
-                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent).decode("UTF-8"))
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                 try:
-                    fjsoncontent = json.loads(fprejsoncontent.decode("UTF-8"))
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                     fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
                     fjsoncontent = {}
         else:
             fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
             fjsoncontent = {}
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = BytesIO()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(outfjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                outfjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fp.seek(len(delimiter), 1)
     fcs = HeaderOut[-2].lower()
     fccs = HeaderOut[-1].lower()
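
The new elif branch handles a fjsontype of "list": the payload is read as delimiter-separated fields via the module's ReadFileHeaderData(), the undecoded result is kept in fjsonrawcontent, and a single-field list is additionally tried as base64 or plain JSON. A rough standalone sketch of that shape, with a simplified stand-in for ReadFileHeaderData (the real function in pyarchivefile.py may behave differently), and a made-up delimiter value:

    import base64
    import binascii
    import json
    from io import BytesIO

    def read_delimited_fields(fp, count, delimiter):
        # Simplified stand-in: split the buffered text on the delimiter
        # and keep at most `count` fields.
        return fp.read().decode("UTF-8").split(delimiter)[:count]

    def parse_header_list(prejson_text, field_count, delimiter):
        buf = BytesIO(prejson_text.encode("UTF-8"))
        content = read_delimited_fields(buf, field_count, delimiter)
        buf.close()
        raw = content
        if field_count == 1:
            # A one-element list may itself carry base64-encoded (or plain) JSON.
            try:
                raw = base64.b64decode(content[0]).decode("UTF-8")
                content = json.loads(raw)
            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                try:
                    raw = content[0]
                    content = json.loads(content[0])
                except (json.decoder.JSONDecodeError, UnicodeDecodeError):
                    pass
        return raw, content

    # Delimiter chosen only for this example; the archive format defines its own.
    print(parse_header_list("eyJhIjogMX0=", 1, "\x00"))  # ('{"a": 1}', {'a': 1})
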
@@ -2310,7 +2333,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     if(not contentasfile):
         fcontents = fcontents.read()
     outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
-               'fdev': fdev, 'fminor': fdev_minor, 'fmajor': fdev_major, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsondata': fjsoncontent, 'fheaderchecksum': fcs, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
+               'fdev': fdev, 'fminor': fdev_minor, 'fmajor': fdev_major, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fheaderchecksum': fcs, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
     return outlist
 
 
@@ -2383,16 +2406,39 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         if(fjsonsize > 0):
             try:
-                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent).decode("UTF-8"))
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                 try:
-                    fjsoncontent = json.loads(fprejsoncontent.decode("UTF-8"))
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                     fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
                     fjsoncontent = {}
         else:
             fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
             fjsoncontent = {}
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = BytesIO()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(outfjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                outfjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fp.seek(len(delimiter), 1)
     fcs = HeaderOut[-2].lower()
     fccs = HeaderOut[-1].lower()
@@ -7287,17 +7333,9 @@ def ArchiveFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_
         outfjsoncontent = {}
         outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
         if(outfjsonsize > 0):
-            try:
-                outfjsoncontent = json.loads(base64.b64decode(outfprejsoncontent.encode("UTF-8")).decode("UTF-8"))
-            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-                try:
-                    outfjsoncontent = json.loads(outfprejsoncontent)
-                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-                    outfprejsoncontent = ""
-                    outfjsoncontent = {}
+            pass
         else:
             outfprejsoncontent = ""
-            outfjsoncontent = {}
         fp.seek(len(formatspecs['format_delimiter']), 1)
         outfextrasize = int(inheaderdata[30], 16)
         outfextrafields = int(inheaderdata[31], 16)
@@ -7716,20 +7754,44 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
         outfjsontype = inheaderdata[27]
         outfjsonlen = int(inheaderdata[28], 16)
         outfjsonsize = int(inheaderdata[29], 16)
-        outfjsoncontent = {}
-        outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
-        if(outfjsonsize > 0):
-            try:
-                outfjsoncontent = json.loads(base64.b64decode(outfprejsoncontent.encode("UTF-8")).decode("UTF-8"))
-            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+        if(outfjsontype=="json"):
+            outfjsoncontent = {}
+            outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
+            if(outfjsonsize > 0):
                 try:
-                    outfjsoncontent = json.loads(outfprejsoncontent)
+                    outfjsonrawcontent = base64.b64decode(outfprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                    outfjsoncontent = json.loads(base64.b64decode(outfprejsoncontent.encode("UTF-8")).decode("UTF-8"))
                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-                    outfprejsoncontent = ""
-                    outfjsoncontent = {}
-        else:
-            outfprejsoncontent = ""
-            outfjsoncontent = {}
+                    try:
+                        outfjsonrawcontent = outfprejsoncontent
+                        outfjsoncontent = json.loads(outfprejsoncontent)
+                    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                        outfprejsoncontent = ""
+                        outfjsonrawcontent = outfprejsoncontent
+                        outfjsoncontent = {}
+            else:
+                outfprejsoncontent = ""
+                outfjsonrawcontent = outfprejsoncontent
+                outfjsoncontent = {}
+        elif(outfjsontype=="list"):
+            outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
+            flisttmp = BytesIO()
+            flisttmp.write(outfprejsoncontent.encode())
+            flisttmp.seek(0)
+            outfjsoncontent = ReadFileHeaderData(flisttmp, outfjsonlen, formatspecs['format_delimiter'])
+            flisttmp.close()
+            outfjsonrawcontent = outfjsoncontent
+            if(fextrafields==1):
+                try:
+                    outfjsonrawcontent = base64.b64decode(outfjsoncontent[0]).decode("UTF-8")
+                    outfjsoncontent = json.loads(base64.b64decode(outfjsoncontent[0]).decode("UTF-8"))
+                    fextrafields = len(outfjsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    try:
+                        outfjsonrawcontent = outfjsoncontent[0]
+                        outfjsoncontent = json.loads(outfjsoncontent[0])
+                    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                        pass
         fp.seek(len(formatspecs['format_delimiter']), 1)
         outfextrasize = int(inheaderdata[30], 16)
         outfextrafields = int(inheaderdata[31], 16)
@@ -7818,7 +7880,7 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
         outfcontents.seek(0, 0)
         if(not contentasfile):
             outfcontents = outfcontents.read()
-        outlist['ffilelist'].append({'fid': realidnum, 'fidalt': fileidnum, 'fheadersize': outfheadsize, 'fhstart': outfhstart, 'fhend': outfhend, 'ftype': outftype, 'fencoding': outfencoding, 'fcencoding': outfcencoding, 'fname': outfname, 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize, 'fatime': outfatime, 'fmtime': outfmtime, 'fctime': outfctime, 'fbtime': outfbtime, 'fmode': outfmode, 'fchmode': outfchmode, 'ftypemod': outftypemod, 'fwinattributes': outfwinattributes, 'fcompression': outfcompression, 'fcsize': outfcsize, 'fuid': outfuid, 'funame': outfuname, 'fgid': outfgid, 'fgname': outfgname, 'finode': outfinode, 'flinkcount': outflinkcount, 'fdev': outfdev, 'fminor': outfdev_minor, 'fmajor': outfdev_major, 'fseeknextfile': outfseeknextfile, 'fheaderchecksumtype': inheaderdata[-4], 'fcontentchecksumtype': inheaderdata[-3], 'fnumfields': outfnumfields + 2, 'frawheader': inheaderdata, 'fextrafields': outfextrafields, 'fextrafieldsize': outfextrasize, 'fextradata': extrafieldslist, 'fjsontype': outfjsontype, 'fjsonlen': outfjsonlen, 'fjsonsize': outfjsonsize, 'fjsondata': outfjsoncontent, 'fheaderchecksum': outfcs, 'fcontentchecksum': outfccs, 'fhascontents': pyhascontents, 'fcontentstart': outfcontentstart, 'fcontentend': outfcontentend, 'fcontentasfile': contentasfile, 'fcontents': outfcontents})
+        outlist['ffilelist'].append({'fid': realidnum, 'fidalt': fileidnum, 'fheadersize': outfheadsize, 'fhstart': outfhstart, 'fhend': outfhend, 'ftype': outftype, 'fencoding': outfencoding, 'fcencoding': outfcencoding, 'fname': outfname, 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize, 'fatime': outfatime, 'fmtime': outfmtime, 'fctime': outfctime, 'fbtime': outfbtime, 'fmode': outfmode, 'fchmode': outfchmode, 'ftypemod': outftypemod, 'fwinattributes': outfwinattributes, 'fcompression': outfcompression, 'fcsize': outfcsize, 'fuid': outfuid, 'funame': outfuname, 'fgid': outfgid, 'fgname': outfgname, 'finode': outfinode, 'flinkcount': outflinkcount, 'fdev': outfdev, 'fminor': outfdev_minor, 'fmajor': outfdev_major, 'fseeknextfile': outfseeknextfile, 'fheaderchecksumtype': inheaderdata[-4], 'fcontentchecksumtype': inheaderdata[-3], 'fnumfields': outfnumfields + 2, 'frawheader': inheaderdata, 'fextrafields': outfextrafields, 'fextrafieldsize': outfextrasize, 'fextradata': extrafieldslist, 'fjsontype': outfjsontype, 'fjsonlen': outfjsonlen, 'fjsonsize': outfjsonsize, 'fjsonrawdata': outfjsonrawcontent, 'fjsondata': outfjsoncontent, 'fheaderchecksum': outfcs, 'fcontentchecksum': outfccs, 'fhascontents': pyhascontents, 'fcontentstart': outfcontentstart, 'fcontentend': outfcontentend, 'fcontentasfile': contentasfile, 'fcontents': outfcontents})
         fileidnum = fileidnum + 1
         realidnum = realidnum + 1
         if(returnfp):
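
With the change above, each entry that ArchiveFileToArray() appends to outlist['ffilelist'] carries both the parsed JSON ('fjsondata') and the undecoded text ('fjsonrawdata'). A hypothetical caller could read both, assuming an archive file name made up for this example:

    entries = ArchiveFileToArray("example.archive")  # hypothetical file name
    for entry in entries['ffilelist']:
        print(entry['fname'], entry['fjsontype'])
        print("parsed:", entry['fjsondata'])      # dict (or list) from json.loads()
        print("raw:   ", entry['fjsonrawdata'])   # undecoded JSON text as stored
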
@@ -8122,7 +8184,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         if(not followlink and len(extradata) < 0):
             extradata = listarchivefiles['ffilelist'][reallcfi]['fextralist']
         if(not followlink and len(jsondata) <= 0):
-            jsondata = listarchivefiles['ffilelist'][reallcfi]['jsondata']
+            jsondata = listarchivefiles['ffilelist'][reallcfi]['fjsondata']
         fcontents = listarchivefiles['ffilelist'][reallcfi]['fcontents']
         if(not listarchivefiles['ffilelist'][reallcfi]['fcontentasfile']):
             fcontents = BytesIO(fcontents)
@@ -8204,7 +8266,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             if(len(extradata) < 0):
                 extradata = flinkinfo['fextralist']
             if(len(jsondata) < 0):
-                extradata = flinkinfo['jsondata']
+                extradata = flinkinfo['fjsondata']
             fcontents = flinkinfo['fcontents']
             if(not flinkinfo['fcontentasfile']):
                 fcontents = BytesIO(fcontents)
