Add files via upload
[PyArchiveFile.git] / pyarchivefile.py
index deb6aad..dc84f61 100755 (executable)
@@ -14,7 +14,7 @@
     Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
     Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-    $FileInfo: pyarchivefile.py - Last Update: 10/1/2025 Ver. 0.23.0 RC 1 - Author: cooldude2k $
+    $FileInfo: pyarchivefile.py - Last Update: 10/31/2025 Ver. 0.24.2 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -413,7 +413,12 @@ def is_only_nonprintable(var):
 __file_format_multi_dict__ = {}
 __file_format_default__ = "ArchiveFile"
 __include_defaults__ = True
-__use_inmemfile__ = False
+__use_inmemfile__ = True
+__use_spoolfile__ = False
+__use_spooldir__ = tempfile.gettempdir()
+BYTES_PER_MiB = 1024 * 1024
+DEFAULT_SPOOL_MAX = 8 * BYTES_PER_MiB
+__spoolfile_size__ = DEFAULT_SPOOL_MAX
 __program_name__ = "Py"+__file_format_default__
 __use_env_file__ = True
 __use_ini_file__ = True
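The new module defaults above size the in-memory/spool behaviour that the rest of this change wires through MkTempFile and the compressed-file readers. A quick standalone check of the constants (values copied from the hunk above):

    BYTES_PER_MiB = 1024 * 1024
    DEFAULT_SPOOL_MAX = 8 * BYTES_PER_MiB   # == 8388608 bytes
    assert DEFAULT_SPOOL_MAX == 8 << 20     # same value the old hard-coded (8 << 20) literals encoded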
@@ -450,6 +455,8 @@ if __use_ini_file__ and os.path.exists(__config_file__):
     __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
     __include_defaults__ = config.getboolean('config', 'includedef')
     __use_inmemfile__ = config.getboolean('config', 'inmemfile')
+    __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
+    __spoolfile_size__ = config.getint('config', 'spoolfilesize')
     # Loop through all sections
     for section in config.sections():
         if section == "config":
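The INI loader above now expects two extra keys in the [config] section. An illustrative fragment (values are placeholders, not shipped defaults):

    [config]
    proname = PyArchiveFile
    includedef = true
    inmemfile = true
    usespoolfile = false
    spoolfilesize = 8388608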
@@ -543,6 +550,8 @@ elif __use_json_file__ and os.path.exists(__config_file__):
     __program_name__        = decode_unicode_escape(_get(cfg_config, 'proname', ''))
     __include_defaults__    = _to_bool(_get(cfg_config, 'includedef', False))
     __use_inmemfile__       = _to_bool(_get(cfg_config, 'inmemfile', False))
+    __use_spoolfile__       = _to_bool(_get(cfg_config, 'usespoolfile', False))
+    __spoolfile_size__      = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
 
     # --- iterate format sections (everything except "config") ---
     required_keys = [
@@ -623,12 +632,12 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
-__version_info__ = (0, 23, 0, "RC 1", 1)
-__version_date_info__ = (2025, 10, 1, "RC 1", 1)
+__version_info__ = (0, 24, 2, "RC 1", 1)
+__version_date_info__ = (2025, 10, 31, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id$"
+__revision_id__ = "$Id: 62e42aa44d2836eb3c32bec6badb12ea21c1ebc3 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -1956,7 +1965,7 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):
 
 
 def MkTempFile(data=None,
-               inmem=True,
+               inmem=__use_inmemfile__,
                isbytes=True,
                prefix="",
                delete=True,
@@ -1964,9 +1973,9 @@ def MkTempFile(data=None,
                newline=None,      # text mode only; in-memory objects ignore newline semantics
                dir=None,
                suffix="",
-               use_spool=False,
-               spool_max=8 * 1024 * 1024,
-               spool_dir=None):
+               use_spool=__use_spoolfile__,
+               spool_max=__spoolfile_size__,
+               spool_dir=__use_spooldir__):
     """
     Return a file-like handle with consistent behavior on Py2.7 and Py3.x.
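    A hedged usage sketch of the new defaults-driven signature; whether the handle is left positioned at the end after the initial data write is an assumption, so the seek is explicit:

        buf = MkTempFile(data=b"header", isbytes=True, inmem=False,
                         use_spool=True, spool_max=__spoolfile_size__,
                         spool_dir=__use_spooldir__)
        buf.seek(0)
        assert buf.read() == b"header"
        buf.close()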
 
@@ -2409,7 +2418,7 @@ class ZlibFile(object):
 
     def __init__(self, file_path=None, fileobj=None, mode='rb', level=6, wbits=15,
                  encoding=None, errors=None, newline=None,
-                 tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=(8 << 20)):
+                 tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=__spoolfile_size__):
 
         if file_path is None and fileobj is None:
             raise ValueError("Either file_path or fileobj must be provided")
@@ -2896,7 +2905,7 @@ class GzipFile(object):
 
     def __init__(self, file_path=None, fileobj=None, mode='rb',
                  level=6, encoding=None, errors=None, newline=None,
-                 tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=(8 << 20)):
+                 tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=__spoolfile_size__):
 
         if file_path is None and fileobj is None:
             raise ValueError("Either file_path or fileobj must be provided")
@@ -3342,7 +3351,7 @@ class LzopFile(object):
                  level=9, encoding=None, errors=None, newline=None,
                  write_header=True,
                  tolerant_read=False, scan_bytes=(64 << 10),
-                 spool_threshold=(8 << 20)):
+                 spool_threshold=__spoolfile_size__):
         """
         Custom LZO file (NOT the lzop(1) format).
         - streaming write/read, supports concatenated members
@@ -3817,7 +3826,7 @@ def lzop_compress_bytes(payload, level=9, text=False, **kw):
 
 
 def lzop_decompress_bytes(blob, mode='rb', tolerant_read=False, scan_bytes=(64 << 10),
-                          spool_threshold=(8 << 20), **kw):
+                          spool_threshold=__spoolfile_size__, **kw):
     """
     Decompress bytes produced by this custom container.
     - mode='rb' -> returns bytes; mode='rt' -> returns text (set encoding/errors/newline in kw)
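The spool_threshold defaults in ZlibFile, GzipFile, LzopFile and this helper now track __spoolfile_size__ instead of a hard-coded 8 << 20. Assuming these readers buffer through the standard tempfile.SpooledTemporaryFile (the buffering code is outside this hunk), the threshold is the point where data migrates from RAM to a real temp file:

    import tempfile
    buf = tempfile.SpooledTemporaryFile(max_size=8 * 1024 * 1024)
    buf.write(b"x" * 1024)                  # below max_size: held in memory
    buf.write(b"\0" * (9 * 1024 * 1024))    # crossing max_size rolls over to a disk-backed temp file
    buf.seek(0)
    buf.close()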
@@ -6463,8 +6472,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         pass
     return fp
 
-
-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     advancedlist = formatspecs['use_advanced_list']
@@ -6514,6 +6522,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
     AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except io.UnsupportedOperation:
+        pass
+    except AttributeError:
+        pass
+    except OSError:
+        pass
     FullSizeFilesAlt = 0
     for curfname in GetDirList:
         fencoding = "UTF-8"
@@ -6765,6 +6783,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except io.UnsupportedOperation:
+            pass
+        except AttributeError:
+            pass
+        except OSError:
+            pass
     return fp
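The flush-and-fsync guard added after AppendFileHeader and after each AppendFileHeaderWithContent call is repeated verbatim throughout this change; a small helper could hold it in one place (a sketch only; _flush_and_fsync does not exist in the module):

    def _flush_and_fsync(fp):
        # Best-effort durability: flush Python-level buffers, then fsync the OS
        # descriptor when the handle is backed by a real file.
        try:
            fp.flush()
            if(hasattr(os, "fsync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass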
 
 def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
@@ -6773,8 +6801,6 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     if(verbose):
         logging.basicConfig(format="%(message)s",
                             stream=sys.stdout, level=logging.DEBUG)
-    formver = formatspecs['format_ver']
-    fileheaderver = str(int(formver.replace(".", "")))
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6842,6 +6868,16 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         return False
     numfiles = int(len(tarfp.getmembers()))
     AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except io.UnsupportedOperation:
+        pass
+    except AttributeError:
+        pass
+    except OSError:
+        pass
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.name)):
@@ -6980,6 +7016,16 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except io.UnsupportedOperation:
+            pass
+        except AttributeError:
+            pass
+        except OSError:
+            pass
         fcontents.close()
     return fp
 
@@ -6989,8 +7035,6 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     if(verbose):
         logging.basicConfig(format="%(message)s",
                             stream=sys.stdout, level=logging.DEBUG)
-    formver = formatspecs['format_ver']
-    fileheaderver = str(int(formver.replace(".", "")))
     curinode = 0
     curfid = 0
     inodelist = []
@@ -7028,6 +7072,16 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         VerbosePrintOut("Bad file found!")
     numfiles = int(len(zipfp.infolist()))
     AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except io.UnsupportedOperation:
+        pass
+    except AttributeError:
+        pass
+    except OSError:
+        pass
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.filename)):
@@ -7190,6 +7244,16 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except io.UnsupportedOperation:
+            pass
+        except AttributeError:
+            pass
+        except OSError:
+            pass
         fcontents.close()
     return fp
 
@@ -7204,8 +7268,6 @@ if(rarfile_support):
         if(verbose):
             logging.basicConfig(format="%(message)s",
                                 stream=sys.stdout, level=logging.DEBUG)
-        formver = formatspecs['format_ver']
-        fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
         curfid = 0
         inodelist = []
@@ -7232,6 +7294,16 @@ if(rarfile_support):
             pass
         except OSError:
             pass
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except io.UnsupportedOperation:
+            pass
+        except AttributeError:
+            pass
+        except OSError:
+            pass
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
             is_windows = False
@@ -7426,6 +7498,16 @@ if(rarfile_support):
                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
             AppendFileHeaderWithContent(
                 fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+            try:
+                fp.flush()
+                if(hasattr(os, "sync")):
+                    os.fsync(fp.fileno())
+            except io.UnsupportedOperation:
+                pass
+            except AttributeError:
+                pass
+            except OSError:
+                pass
             fcontents.close()
         return fp
 
@@ -7458,6 +7540,16 @@ if(py7zr_support):
             VerbosePrintOut("Bad file found!")
         numfiles = int(len(szpfp.list()))
         AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except io.UnsupportedOperation:
+            pass
+        except AttributeError:
+            pass
+        except OSError:
+            pass
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
             if(re.findall("^[.|/]", member.filename)):
@@ -7596,10 +7688,20 @@ if(py7zr_support):
                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
             AppendFileHeaderWithContent(
                 fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+            try:
+                fp.flush()
+                if(hasattr(os, "sync")):
+                    os.fsync(fp.fileno())
+            except io.UnsupportedOperation:
+                pass
+            except AttributeError:
+                pass
+            except OSError:
+                pass
             fcontents.close()
         return fp
 
-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
@@ -7661,12 +7763,12 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
     return fp
 
 
-def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
-    return AppendListsWithContent(inlist, fp, dirlistfromtxt, filevalues, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
+    return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, "auto", True, None, followlink, checksumtype, formatspecs, verbose)
 
 
-def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and 
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7710,7 +7812,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
         except PermissionError:
             return False
-    AppendFilesWithContent(infiles, fp, dirlistfromtxt, filevalues, extradata, jsondata, compression,
+    AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
                                    compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
@@ -7747,12 +7849,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         fp.close()
         return True
 
-def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, filevalues, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
         if(not returnout):
             break
         else:
@@ -7762,7 +7864,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
         return True
     return returnout
 
-def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and 
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7803,7 +7905,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
         except PermissionError:
             return False
-    AppendListsWithContent(inlist, fp, dirlistfromtxt, filevalues, extradata, jsondata, compression,
+    AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
                                    compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
@@ -7921,6 +8023,21 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
         fp.close()
         return True
 
+def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    if not isinstance(infiles, list):
+        infiles = [infiles]
+    returnout = False
+    for infileslist in infiles:
+        returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        if(not returnout):
+            break
+        else:
+            outfile = returnout
+    if(not returnfp and returnout):
+        returnout.close()
+        return True
+    return returnout
+
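Like the existing AppendFilesWithContentToStackedOutFile, the new wrapper feeds the handle returned by each call back in as outfile so several tar archives land in a single output. A hedged usage sketch (paths are illustrative):

    ok = AppendFilesWithContentFromTarFileToStackedOutFile(
        ["./part1.tar.gz", "./part2.tar"], "./combined.archive", verbose=True)
    # True on success while returnfp stays False; the open handle when returnfp=True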
 def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and 
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
@@ -8001,6 +8118,21 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
         fp.close()
         return True
 
+def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    if not isinstance(infiles, list):
+        infiles = [infiles]
+    returnout = False
+    for infileslist in infiles:
+        returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        if(not returnout):
+            break
+        else:
+            outfile = returnout
+    if(not returnfp and returnout):
+        returnout.close()
+        return True
+    return returnout
+
 if(not rarfile_support):
     def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
@@ -8086,6 +8218,21 @@ if(rarfile_support):
             fp.close()
             return True
 
+def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    if not isinstance(infiles, list):
+        infiles = [infiles]
+    returnout = False
+    for infileslist in infiles:
+        returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        if(not returnout):
+            break
+        else:
+            outfile = returnout
+    if(not returnfp and returnout):
+        returnout.close()
+        return True
+    return returnout
+
 if(not py7zr_support):
     def AppendFilesWithContentFromSevenZipToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
@@ -8171,9 +8318,24 @@ if(py7zr_support):
             fp.close()
             return True
 
-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromSevenZipToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    if not isinstance(infiles, list):
+        infiles = [infiles]
+    returnout = False
+    for infileslist in infiles:
+        returnout = AppendFilesWithContentFromSevenZipToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        if(not returnout):
+            break
+        else:
+            outfile = returnout
+    if(not returnfp and returnout):
+        returnout.close()
+        return True
+    return returnout
+
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
-    return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, filevalues, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+    return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
 
 def PrintPermissionString(fchmode, ftype):
@@ -9631,7 +9793,7 @@ def fast_copy(infp, outfp, bufsize=1 << 20):
             outfp.write(data)
 
 
-def copy_file_to_mmap_dest(src_path, outfp, chunk_size=8 << 20):
+def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
     """
     Copy a disk file into an mmap-backed destination (FileLikeAdapter).
     Falls back to buffered copy if the source cannot be mmapped.
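    The chunk size here now follows __spoolfile_size__ as well. The function body is outside this hunk; purely as an illustration of the chunked-mmap copy the docstring describes (helper name and details are hypothetical, not the module's implementation):

        import mmap, os

        def _mmap_chunked_copy(src_path, outfp, chunk_size=8 * 1024 * 1024):
            with open(src_path, "rb") as src:
                size = os.fstat(src.fileno()).st_size
                if size == 0:
                    return                      # empty files cannot be mmapped
                mm = mmap.mmap(src.fileno(), 0, access=mmap.ACCESS_READ)
                try:
                    for off in range(0, size, chunk_size):
                        outfp.write(mm[off:off + chunk_size])
                finally:
                    mm.close()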
@@ -9949,10 +10111,10 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
 
 
 def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-        return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, [], extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+        return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
 def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-        return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, [], extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+        return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
 def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, {}, formatspecs, verbose, returnfp)
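End to end, the public packers now forward extradata/jsondata without the removed filevalues placeholder. A minimal usage sketch; the paths and the nested-list shape for stacking are assumptions drawn from the loop in AppendFilesWithContentToStackedOutFile:

    PackArchiveFile(["./docs", "./notes.txt"], "./bundle.archive", verbose=True)
    PackStackedArchiveFile([["./docs"], ["./notes.txt"]], "./stacked.archive", verbose=True)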