Fixed Parsing Error in scrapper.py
This commit is contained in:
parent
79889722ac
commit
50541eadb1
1 changed file with 135 additions and 138 deletions
273
scrapper.py
273
scrapper.py
|
@ -1,138 +1,135 @@
|
||||||
import argparse
import glob
import os
import shutil
from collections import OrderedDict

from construct import *
from tqdm import tqdm
|
||||||
setglobalstringencoding(None)
|
setglobalstringencoding(None)
|
||||||
|
|
||||||
ScrapFile = Struct(
|
ScrapFile = Struct(
|
||||||
'path'/PascalString(Int32ul),
|
'path'/PascalString(Int32ul),
|
||||||
'size'/Int32ul,
|
'size'/Int32ul,
|
||||||
'offset'/Int32ul,
|
'offset'/Int32ul,
|
||||||
'data'/OnDemandPointer(this.offset,Bytes(this.size)),
|
'data'/OnDemandPointer(this.offset,Bytes(this.size)),
|
||||||
)
|
)
|
||||||
DummyFile = Struct(
|
DummyFile = Struct(
|
||||||
'path'/PascalString(Int32ul),
|
'path'/PascalString(Int32ul),
|
||||||
'size'/Int32ul,
|
'size'/Int32ul,
|
||||||
'offset'/Int32ul,
|
'offset'/Int32ul,
|
||||||
)
|
)
|
||||||
|
|
||||||
PackedHeader = Struct(
|
PackedHeader = Struct(
|
||||||
Const(b'BFPK'),
|
Const(b'BFPK'),
|
||||||
Const(b'\0\0\0\0'),
|
Const(b'\0\0\0\0'),
|
||||||
'files'/PrefixedArray(Int32ul,ScrapFile),
|
'files'/PrefixedArray(Int32ul,ScrapFile),
|
||||||
'offset'/Tell,
|
)
|
||||||
)
|
DummyHeader = Struct(
|
||||||
DummyHeader = Struct(
|
Const(b'BFPK'),
|
||||||
Const(b'BFPK'),
|
Const(b'\0\0\0\0'),
|
||||||
Const(b'\0\0\0\0'),
|
'files'/PrefixedArray(Int32ul,DummyFile),
|
||||||
'files_cnt'/Rebuild(Int32ul,len_(this.files)),
|
)
|
||||||
'files'/PrefixedArray(Int32ul,DummyFile),
|
parser = argparse.ArgumentParser(description='Unpack and Repack .packed files')
|
||||||
'offset'/Tell,
|
parser.add_argument('-u', '--unpack', action='store_true',
|
||||||
)
|
help='unpack file to \'extracted\' directory')
|
||||||
parser = argparse.ArgumentParser(description='Unpack and Repack .packed files')
|
parser.add_argument('-r', '--repack', action='store_true',
|
||||||
parser.add_argument('-u', '--unpack', action='store_true',
|
help='repack file from \'extracted\' directory')
|
||||||
help='unpack file to \'extracted\' directory')
|
|
||||||
parser.add_argument('-r', '--repack', action='store_true',
|
parser.add_argument(
|
||||||
help='repack file from \'extracted\' directory')
|
'--reset',
|
||||||
|
action='store_true',
|
||||||
parser.add_argument(
|
default=False,
|
||||||
'--reset',
|
help='restore backup')
|
||||||
action='store_true',
|
|
||||||
default=False,
|
parser.add_argument(
|
||||||
help='restore backup')
|
'scrap_dir',
|
||||||
|
metavar='Scrapland Directory',
|
||||||
parser.add_argument(
|
type=str,
|
||||||
'scrap_dir',
|
default=".",
|
||||||
metavar='Scrapland Directory',
|
help='Scrapland installation directory')
|
||||||
type=str,
|
options = parser.parse_args()
|
||||||
default=".",
|
scrap_dir = os.path.abspath(options.scrap_dir)
|
||||||
help='Scrapland installation directory')
|
|
||||||
options = parser.parse_args()
|
if options.reset:
|
||||||
scrap_dir = os.path.abspath(options.scrap_dir)
|
print('Restoring Backups and removing extracted folder...')
|
||||||
|
for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed.bak')):
|
||||||
if options.reset:
|
outfile = os.path.basename(packed_file)
|
||||||
print('Restoring Backups and removing extracted folder...')
|
orig_filename = outfile[:-4]
|
||||||
for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed.bak')):
|
if os.path.isfile(outfile):
|
||||||
outfile = os.path.basename(packed_file)
|
print('deleting', orig_filename)
|
||||||
orig_filename = outfile[:-4]
|
os.remove(orig_filename)
|
||||||
if os.path.isfile(outfile):
|
print('moving', outfile, '->', orig_filename)
|
||||||
print('deleting', orig_filename)
|
shutil.move(outfile, orig_filename)
|
||||||
os.remove(orig_filename)
|
target_folder = os.path.join(
|
||||||
print('moving', outfile, '->', orig_filename)
|
'extracted', os.path.basename(orig_filename))
|
||||||
shutil.move(outfile, orig_filename)
|
print('deleting', target_folder)
|
||||||
target_folder = os.path.join(
|
shutil.rmtree(target_folder)
|
||||||
'extracted', os.path.basename(orig_filename))
|
if os.path.isdir('extracted'):
|
||||||
print('deleting', target_folder)
|
input('Press enter to remove rest of extracted folder')
|
||||||
shutil.rmtree(target_folder)
|
shutil.rmtree('extracted')
|
||||||
if os.path.isdir('extracted'):
|
exit('Done!')
|
||||||
input('Press enter to remove rest of extracted folder')
|
|
||||||
shutil.rmtree('extracted')
|
if not (options.unpack or options.repack):
|
||||||
exit('Done!')
|
parser.print_help()
|
||||||
|
exit()
|
||||||
if not (options.unpack or options.repack):
|
pstatus = ''
|
||||||
parser.print_help()
|
if options.unpack:
|
||||||
exit()
|
if os.path.isdir('extracted'):
|
||||||
pstatus = ''
|
print("Removing extracted folder")
|
||||||
if options.unpack:
|
shutil.rmtree('extracted')
|
||||||
if os.path.isdir('extracted'):
|
for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed')):
|
||||||
print("Removing extracted folder")
|
os.chdir(scrap_dir)
|
||||||
shutil.rmtree('extracted')
|
BN=os.path.basename(packed_file)
|
||||||
for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed')):
|
target_folder = os.path.join(
|
||||||
os.chdir(scrap_dir)
|
'extracted', os.path.basename(packed_file))
|
||||||
BN=os.path.basename(packed_file)
|
os.makedirs(target_folder, exist_ok=True)
|
||||||
target_folder = os.path.join(
|
os.chdir(target_folder)
|
||||||
'extracted', os.path.basename(packed_file))
|
print('Unpacking {}'.format(os.path.basename(packed_file)))
|
||||||
os.makedirs(target_folder, exist_ok=True)
|
with open(packed_file, 'rb') as pkfile:
|
||||||
os.chdir(target_folder)
|
data = PackedHeader.parse_stream(pkfile)
|
||||||
print('Unpacking {}'.format(os.path.basename(packed_file)))
|
print("Offset:",hex(pkfile.tell()))
|
||||||
with open(packed_file, 'rb') as pkfile:
|
for file in tqdm(data.files,ascii=True):
|
||||||
data = PackedHeader.parse_stream(pkfile)
|
folder, filename = os.path.split(file.path)
|
||||||
print("Offset:",hex(data.offset))
|
if folder:
|
||||||
for file in tqdm(data.files,ascii=True):
|
os.makedirs(folder, exist_ok=True)
|
||||||
folder, filename = os.path.split(file.path)
|
with open(file.path, 'wb') as outfile:
|
||||||
if folder:
|
outfile.write(file.data())
|
||||||
os.makedirs(folder, exist_ok=True)
|
print('\r' + ' ' * len(pstatus) + '\r', end='', flush=True)
|
||||||
with open(file.path, 'wb') as outfile:
|
os.chdir(scrap_dir)
|
||||||
outfile.write(file.data())
|
|
||||||
print('\r' + ' ' * len(pstatus) + '\r', end='', flush=True)
|
if (options.unpack and options.repack):
|
||||||
os.chdir(scrap_dir)
|
input('Press enter to rebuild *.packed files from folders in \'extracted\' dir...') # noqa
|
||||||
|
pass
|
||||||
if (options.unpack and options.repack):
|
|
||||||
input('Press enter to rebuild *.packed files from folders in \'extracted\' dir...') # noqa
|
def file_gen(files,offset=0):
|
||||||
pass
|
for real_path,size,path in files:
|
||||||
|
file=dict(
|
||||||
def file_gen(files,offset=0):
|
path=path,
|
||||||
for real_path,size,path in files:
|
offset=offset,
|
||||||
file=dict(
|
size=size)
|
||||||
path=path,
|
yield file
|
||||||
offset=offset,
|
offset+=file['size']
|
||||||
size=size)
|
|
||||||
yield file
|
def make_header(files,offset=0):
|
||||||
offset+=file['size']
|
files_list=list(file_gen(files,offset))
|
||||||
|
return DummyHeader.build(dict(files=files_list))
|
||||||
def make_header(files,offset=0):
|
|
||||||
files_list=list(file_gen(files,offset))
|
if options.repack:
|
||||||
return DummyHeader.build(dict(files=files_list))
|
for folder in glob.glob(os.path.join(scrap_dir, 'extracted', '*.packed')):
|
||||||
|
data=[]
|
||||||
if options.repack:
|
filename=os.path.join(scrap_dir,os.path.basename(folder))
|
||||||
for folder in glob.glob(os.path.join(scrap_dir, 'extracted', '*.packed')):
|
for root,folders,files in os.walk(folder):
|
||||||
data=[]
|
for file in sorted(files):
|
||||||
filename=os.path.join(scrap_dir,os.path.basename(folder))
|
file=os.path.join(root,file)
|
||||||
for root,folders,files in os.walk(folder):
|
rel_path=bytes(file.replace(folder, '').replace('\\', '/').lstrip('/'), 'windows-1252')
|
||||||
for file in sorted(files):
|
size=os.stat(file).st_size
|
||||||
file=os.path.join(root,file)
|
data.append((file,size,rel_path))
|
||||||
rel_path=bytes(file.replace(folder, '').replace('\\', '/').lstrip('/'), 'windows-1252')
|
print("Found {} files for {}".format(len(data),filename))
|
||||||
size=os.stat(file).st_size
|
offset=len(make_header(data))
|
||||||
data.append((file,size,rel_path))
|
print("Writing",filename)
|
||||||
print("Found {} files for {}".format(len(data),filename))
|
header=make_header(data,offset)
|
||||||
offset=len(make_header(data))
|
with open(filename,"wb") as outfile:
|
||||||
print("Writing",filename)
|
outfile.write(header)
|
||||||
header=make_header(data,offset)
|
for file,size,rel_path in tqdm(data,ascii=True):
|
||||||
with open(filename,"wb") as outfile:
|
outfile.write(open(file,"rb").read())
|
||||||
outfile.write(header)
|
print('Done!')
|
||||||
for file,size,rel_path in tqdm(data,ascii=True):
|
|
||||||
outfile.write(open(file,"rb").read())
|
|
||||||
print('Done!')
|
|
||||||
|
|
Loading…
Reference in a new issue