Update scrapper.py

Cleaned up code a bit
This commit is contained in:
Earthnuker 2017-10-08 00:04:37 +00:00
parent cef9944fc6
commit 9a30112866
1 changed file with 138 additions and 213 deletions

@@ -1,213 +1,138 @@
-import argparse
-from collections import OrderedDict
-import configparser
-import glob
-import os
-import shutil
-from construct import *
-from tqdm import tqdm
-setglobalstringencoding(None)
-
-
-def find_file(name):
-    global scrap_dir
-    for folder in glob.glob(os.path.join(scrap_dir, 'extracted', '*.packed')):
-        for root, folders, files in os.walk(folder):
-            for filename in files:
-                path = os.path.join(root, filename)
-                if filename == name:
-                    yield path
-
-
-def get_config(conf_parse, section, var, default=None):
-    return conf_parse[section].get(var, default)
-
-
-def patch_config(path, section, var, value):
-    config = configparser.ConfigParser()
-    config.read(path)
-    if get_config(config, section, var) == value:
-        return
-    config[section][var] = value
-    with open(path, 'w') as conf:
-        config.write(conf)
-    return True
-
-
-def enable_debug_console_gui():
-    '''enable debug console (GUI)'''
-    for path in find_file('m3d.ini'):
-        print('Found', path)
-        return patch_config(path, 'video', 'ConsolaWnd', 'SI')
-
-
-def enable_debug_console_txt():
-    '''enable debug console (Text Mode)'''
-    path = "Test"
-    for path in find_file('m3d.ini'):
-        print('Found', path)
-        return patch_config(path, 'video', 'ConsolaTxt', 'SI')
-    print(path)
-
-
-patches = [
-    enable_debug_console_gui,
-    enable_debug_console_txt,
-]
-
-
-def yn(prompt, default='n'):
-    c = ['y', 'n']
-    default = default.lower()
-    assert default in c
-    c[c.index(default)] = c[c.index(default)].upper()
-    prompt += ' ({}) '.format('/'.join(c))
-    return (input(prompt) or default).lower()[0] == 'y'
-
-
-ScrapFile = Struct(
-    'path'/PascalString(Int32ul),
-    'size'/Int32ul,
-    'offset'/Int32ul,
-    'data'/OnDemandPointer(this.offset,Bytes(this.size)),
-)
-
-DummyFile = Struct(
-    'path'/PascalString(Int32ul),
-    'size'/Int32ul,
-    'offset'/Int32ul,
-)
-
-PackedHeader = Struct(
-    Const(b'BFPK'),
-    Const(b'\0\0\0\0'),
-    'files_cnt'/Rebuild(Int32ul,len_(this.files)),
-    'files'/ScrapFile[this.files_cnt],
-    'offset'/Tell,
-)
-
-DummyHeader = Struct(
-    Const(b'BFPK'),
-    Const(b'\0\0\0\0'),
-    'files_cnt'/Rebuild(Int32ul,len_(this.files)),
-    'files'/DummyFile[this.files_cnt],
-    'offset'/Tell,
-)
-
-parser = argparse.ArgumentParser(description='Unpack and Repack .packed files')
-parser.add_argument('-u', '--unpack', action='store_true',
-                    help='unpack file to \'extracted\' directory')
-parser.add_argument('-r', '--repack', action='store_true',
-                    help='repack file from \'extracted\' directory')
-parser.add_argument('-p', '--patch', action='store_true',
-                    help='apply a premade patch')
-
-parser.add_argument(
-    '--reset',
-    action='store_true',
-    default=False,
-    help='restore backup')
-
-parser.add_argument(
-    'scrap_dir',
-    metavar='Scrapland Directory',
-    type=str,
-    help='Scrapland installation directory')
-options = parser.parse_args()
-scrap_dir = os.path.abspath(options.scrap_dir)
-
-if options.reset:
-    print('Restoring Backups and removing extracted folder...')
-    for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed.bak')):
-        outfile = os.path.basename(packed_file)
-        orig_filename = outfile[:-4]
-        if os.path.isfile(outfile):
-            print('deleting', orig_filename)
-            os.remove(orig_filename)
-        print('moving', outfile, '->', orig_filename)
-        shutil.move(outfile, orig_filename)
-        target_folder = os.path.join(
-            'extracted', os.path.basename(orig_filename))
-        print('deleting', target_folder)
-        shutil.rmtree(target_folder)
-    if os.path.isdir('extracted'):
-        input('Press enter to remove rest of extracted folder')
-        shutil.rmtree('extracted')
-    exit('Done!')
-
-if not (options.unpack or options.repack or options.patch):
-    parser.print_help()
-    exit()
-pstatus = ''
-if options.unpack:
-    if os.path.isdir('extracted'):
-        print("Removing extracted folder")
-        shutil.rmtree('extracted')
-    for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed')):
-        os.chdir(scrap_dir)
-        BN=os.path.basename(packed_file)
-        target_folder = os.path.join(
-            'extracted', os.path.basename(packed_file))
-        os.makedirs(target_folder, exist_ok=True)
-        os.chdir(target_folder)
-        print('Unpacking {}'.format(os.path.basename(packed_file)))
-        with open(packed_file, 'rb') as pkfile:
-            data = PackedHeader.parse_stream(pkfile)
-            print("Offset:",hex(data.offset))
-            for file in tqdm(data.files,ascii=True):
-                folder, filename = os.path.split(file.path)
-                if folder:
-                    os.makedirs(folder, exist_ok=True)
-                with open(file.path, 'wb') as outfile:
-                    outfile.write(file.data())
-        print('\r' + ' ' * len(pstatus) + '\r', end='', flush=True)
-        os.chdir(scrap_dir)
-
-if (options.unpack and options.repack) and not options.patch:
-    #input('Press enter to rebuild *.packed files from folders in \'extracted\' dir...') # noqa
-    pass
-if options.patch:
-    print()
-    print("Enter Nothing to continue")
-    for n, patch in enumerate(patches, 1):
-        print('{}. {}'.format(n, patch.__doc__.strip()))
-    while 1:
-        n = input('Patch to apply: ')
-        if not n:
-            break
-        n = int(n) - 1
-        if 0 <= n < len(patches):
-            res = patches[n]()
-            if res is True:
-                print('Applied Succesfully!')
-            elif res is None:
-                print('Already applied.')
-            elif res is False:
-                print('Error')
-    print()
-
-
-def file_gen(files,offset=0):
-    for real_path,size,path in files:
-        file=dict(
-            path=path,
-            offset=offset,
-            size=size)
-        yield file
-        offset+=file['size']
-
-
-def make_header(files,offset=0):
-    files_list=list(file_gen(files,offset))
-    return DummyHeader.build(dict(files=files_list))
-
-
-if options.repack:
-    for folder in glob.glob(os.path.join(scrap_dir, 'extracted', '*.packed')):
-        data=[]
-        filename=os.path.join(scrap_dir,os.path.basename(folder))
-        for root,folders,files in os.walk(folder):
-            for file in sorted(files):
-                file=os.path.join(root,file)
-                rel_path=bytes(file.replace(folder, '').replace('\\', '/').lstrip('/'), 'windows-1252')
-                size=os.stat(file).st_size
-                data.append((file,size,rel_path))
-        print("Found {} files for {}".format(len(data),filename))
-        offset=len(make_header(data))
-        print("Writing",filename)
-        header=make_header(data,offset)
-        with open(filename,"wb") as outfile:
-            outfile.write(header)
-            for file,size,rel_path in tqdm(data,ascii=True):
-                outfile.write(open(file,"rb").read())
-print('Done!')
+import argparse
+from collections import OrderedDict
+import glob
+import os
+import shutil
+from construct import *
+from tqdm import tqdm
+setglobalstringencoding(None)
+
+ScrapFile = Struct(
+    'path'/PascalString(Int32ul),
+    'size'/Int32ul,
+    'offset'/Int32ul,
+    'data'/OnDemandPointer(this.offset,Bytes(this.size)),
+)
+
+DummyFile = Struct(
+    'path'/PascalString(Int32ul),
+    'size'/Int32ul,
+    'offset'/Int32ul,
+)
+
+PackedHeader = Struct(
+    Const(b'BFPK'),
+    Const(b'\0\0\0\0'),
+    'files'/PrefixedArray(Int32ul,ScrapFile),
+    'offset'/Tell,
+)
+
+DummyHeader = Struct(
+    Const(b'BFPK'),
+    Const(b'\0\0\0\0'),
+    'files_cnt'/Rebuild(Int32ul,len_(this.files)),
+    'files'/PrefixedArray(Int32ul,DummyFile),
+    'offset'/Tell,
+)
+
+parser = argparse.ArgumentParser(description='Unpack and Repack .packed files')
+parser.add_argument('-u', '--unpack', action='store_true',
+                    help='unpack file to \'extracted\' directory')
+parser.add_argument('-r', '--repack', action='store_true',
+                    help='repack file from \'extracted\' directory')
+
+parser.add_argument(
+    '--reset',
+    action='store_true',
+    default=False,
+    help='restore backup')
+
+parser.add_argument(
+    'scrap_dir',
+    metavar='Scrapland Directory',
+    type=str,
+    default=".",
+    help='Scrapland installation directory')
+options = parser.parse_args()
+scrap_dir = os.path.abspath(options.scrap_dir)
+
+if options.reset:
+    print('Restoring Backups and removing extracted folder...')
+    for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed.bak')):
+        outfile = os.path.basename(packed_file)
+        orig_filename = outfile[:-4]
+        if os.path.isfile(outfile):
+            print('deleting', orig_filename)
+            os.remove(orig_filename)
+        print('moving', outfile, '->', orig_filename)
+        shutil.move(outfile, orig_filename)
+        target_folder = os.path.join(
+            'extracted', os.path.basename(orig_filename))
+        print('deleting', target_folder)
+        shutil.rmtree(target_folder)
+    if os.path.isdir('extracted'):
+        input('Press enter to remove rest of extracted folder')
+        shutil.rmtree('extracted')
+    exit('Done!')
+
+if not (options.unpack or options.repack):
+    parser.print_help()
+    exit()
+pstatus = ''
+if options.unpack:
+    if os.path.isdir('extracted'):
+        print("Removing extracted folder")
+        shutil.rmtree('extracted')
+    for packed_file in glob.glob(os.path.join(scrap_dir, '*.packed')):
+        os.chdir(scrap_dir)
+        BN=os.path.basename(packed_file)
+        target_folder = os.path.join(
+            'extracted', os.path.basename(packed_file))
+        os.makedirs(target_folder, exist_ok=True)
+        os.chdir(target_folder)
+        print('Unpacking {}'.format(os.path.basename(packed_file)))
+        with open(packed_file, 'rb') as pkfile:
+            data = PackedHeader.parse_stream(pkfile)
+            print("Offset:",hex(data.offset))
+            for file in tqdm(data.files,ascii=True):
+                folder, filename = os.path.split(file.path)
+                if folder:
+                    os.makedirs(folder, exist_ok=True)
+                with open(file.path, 'wb') as outfile:
+                    outfile.write(file.data())
+        print('\r' + ' ' * len(pstatus) + '\r', end='', flush=True)
+        os.chdir(scrap_dir)
+
+if (options.unpack and options.repack):
+    input('Press enter to rebuild *.packed files from folders in \'extracted\' dir...')  # noqa
+    pass
+
+
+def file_gen(files,offset=0):
+    for real_path,size,path in files:
+        file=dict(
+            path=path,
+            offset=offset,
+            size=size)
+        yield file
+        offset+=file['size']
+
+
+def make_header(files,offset=0):
+    files_list=list(file_gen(files,offset))
+    return DummyHeader.build(dict(files=files_list))
+
+
+if options.repack:
+    for folder in glob.glob(os.path.join(scrap_dir, 'extracted', '*.packed')):
+        data=[]
+        filename=os.path.join(scrap_dir,os.path.basename(folder))
+        for root,folders,files in os.walk(folder):
+            for file in sorted(files):
+                file=os.path.join(root,file)
+                rel_path=bytes(file.replace(folder, '').replace('\\', '/').lstrip('/'), 'windows-1252')
+                size=os.stat(file).st_size
+                data.append((file,size,rel_path))
+        print("Found {} files for {}".format(len(data),filename))
+        offset=len(make_header(data))
+        print("Writing",filename)
+        header=make_header(data,offset)
+        with open(filename,"wb") as outfile:
+            outfile.write(header)
+            for file,size,rel_path in tqdm(data,ascii=True):
+                outfile.write(open(file,"rb").read())
+print('Done!')
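
For reference, the container layout that the structs in the new version describe is: the magic bytes 'BFPK', four zero bytes, a little-endian 32-bit entry count, then one directory entry per file (length-prefixed path, payload size, absolute payload offset), followed by the raw file payloads. Below is a minimal, hypothetical sketch (not part of this commit) that lists an archive's contents with the same declarations; it assumes the construct 2.8-era API the script relies on (setglobalstringencoding, OnDemandPointer), and 'Data.packed' is a made-up file name.

# Hypothetical sketch: list the contents of a BFPK archive with the structs above.
# Assumes construct 2.8.x; 'Data.packed' is a placeholder file name.
from construct import (Struct, Const, PascalString, PrefixedArray, Int32ul,
                       Bytes, OnDemandPointer, Tell, this,
                       setglobalstringencoding)

setglobalstringencoding(None)  # keep paths as raw bytes, as scrapper.py does

ScrapFile = Struct(
    'path' / PascalString(Int32ul),   # length-prefixed file path
    'size' / Int32ul,                 # payload size in bytes
    'offset' / Int32ul,               # absolute offset of the payload
    'data' / OnDemandPointer(this.offset, Bytes(this.size)),  # lazy payload read
)

PackedHeader = Struct(
    Const(b'BFPK'),                                # magic
    Const(b'\0\0\0\0'),                            # reserved/version field
    'files' / PrefixedArray(Int32ul, ScrapFile),   # entry count + directory entries
    'offset' / Tell,                               # stream position after the directory
)

with open('Data.packed', 'rb') as fh:
    archive = PackedHeader.parse_stream(fh)
    print('directory ends at', hex(archive.offset))
    for entry in archive.files:
        print(entry.path, entry.size, entry.offset)
        # entry.data() would read the payload on demand while fh is still open

On the repack side, the new code builds the directory twice: make_header(data) with the default offset of 0 is used only to measure the header's length, and make_header(data, offset) then rebuilds it with every payload offset shifted past the header before the file bodies are appended.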