"EOFError: Ran out of input" when packaging a Python script with PyInstaller

169 views

I'm developing an application for Windows operating systems, written in Python 3.8, that uses the nnunet library (https://pypi.org/project/nnunet/), which in turn relies on multiprocessing. I have tested the script and it works correctly.

Now I'm trying to package everything with pyinstaller v5.7.0. The creation of the .exe is successful but when I run it I get the following error:

Traceback (most recent call last):
  File "main.py", line 344, in <module>
  File "nnunet\inference\predict.py", line 694, in predict_from_folder
  File "nnunet\inference\predict.py", line 496, in predict_cases_fastest
  File "nnunet\inference\predict.py", line 123, in preprocess_multithreaded
  File "multiprocess\process.py", line 121, in start
  File "multiprocess\context.py", line 224, in _Popen
  File "multiprocess\context.py", line 327, in _Popen
  File "multiprocess\popen_spawn_win32.py", line 93, in __init__
  File "multiprocess\reduction.py", line 70, in dump
  File "dill\_dill.py", line 394, in dump
  File "pickle.py", line 487, in dump
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 603, in save
  File "pickle.py", line 717, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "dill\_dill.py", line 1186, in save_module_dict
  File "pickle.py", line 971, in save_dict
Traceback (most recent call last):
  File "main.py", line 341, in <module>
  File "pickle.py", line 997, in _batch_setitems
  File "D:\MyProject\venv\Lib\site-packages\PyInstaller\hooks\rthooks\pyi_rth_multiprocessing.py", line 49, in _freeze_support
  File "dill\_dill.py", line 388, in save
    spawn.spawn_main(**kwds)
  File "pickle.py", line 560, in save
  File "pickle.py", line 901, in save_tuple
  File "dill\_dill.py", line 388, in save
  File "multiprocessing\spawn.py", line 116, in spawn_main
  File "pickle.py", line 560, in save
  File "multiprocessing\spawn.py", line 126, in _main
  File "dill\_dill.py", line 1427, in save_instancemethod0
EOFError: Ran out of input
  File "pickle.py", line 692, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
[588] Failed to execute script 'main' due to unhandled exception!
  File "pickle.py", line 886, in save_tuple
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 603, in save
  File "pickle.py", line 717, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "dill\_dill.py", line 1186, in save_module_dict
  File "pickle.py", line 971, in save_dict
  File "pickle.py", line 997, in _batch_setitems
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 603, in save
  File "pickle.py", line 687, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "dill\_dill.py", line 1698, in save_type
  File "dill\_dill.py", line 1070, in _save_with_postproc
  File "pickle.py", line 692, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "pickle.py", line 901, in save_tuple
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "pickle.py", line 886, in save_tuple
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "dill\_dill.py", line 1698, in save_type
  File "dill\_dill.py", line 1084, in _save_with_postproc
  File "pickle.py", line 997, in _batch_setitems
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 603, in save
  File "pickle.py", line 717, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "dill\_dill.py", line 1186, in save_module_dict
  File "pickle.py", line 971, in save_dict
  File "pickle.py", line 997, in _batch_setitems
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 603, in save
  File "pickle.py", line 717, in save_reduce
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 560, in save
  File "dill\_dill.py", line 1186, in save_module_dict
  File "pickle.py", line 971, in save_dict
  File "pickle.py", line 997, in _batch_setitems
  File "dill\_dill.py", line 388, in save
  File "pickle.py", line 578, in save
  File "PyInstaller\loader\pyimod01_archive.py", line 76, in __getattr__
AssertionError
[4392] Failed to execute script 'main' due to unhandled exception!

Below is the code of my python script:

#==============================
# main.py
#==============================

from multiprocessing import freeze_support
from nnunet.inference.predict import predict_from_folder

if __name__ == "__main__":
    # Must run first in a frozen (PyInstaller) Windows build: on the
    # spawn start method, child processes re-import this module, and
    # freeze_support() lets them bootstrap instead of re-running main.
    freeze_support()
    ...  # (abridged by the question's author)
    predict_from_folder(...)  # entry point into nnunet inference
    ...

Below is the code of the nnunet library that triggers the error:

#==============================
# nnunet\inference\predict.py
#==============================

def preprocess_multithreaded(trainer, list_of_lists, output_files, num_processes=2, segs_from_prev_stage=None):
    """Generator: preprocess cases in worker processes and yield results.

    Spawns up to ``num_processes`` workers running
    ``preprocess_save_to_queue``; each worker pushes preprocessed items
    onto a shared queue and finally pushes the sentinel string ``"end"``.
    This generator yields queued items until one sentinel per worker has
    been seen, then cleans the workers up.

    NOTE(review): ``trainer.preprocess_patient`` (a bound method) is passed
    in ``args``, so the trainer object gets pickled when each Process is
    started — on Windows spawn this is exactly where the question's
    traceback originates.
    """
    if segs_from_prev_stage is None:
        # No previous-stage segmentations: pad with one None per case.
        segs_from_prev_stage = [None] * len(list_of_lists)

    # Never start more workers than there are cases to process.
    num_processes = min(len(list_of_lists), num_processes)

    classes = list(range(1, trainer.num_classes))
    assert isinstance(trainer, nnUNetTrainer)
    # maxsize=1: workers block after one pending item, bounding memory use.
    q = Queue(1)
    processes = []
    for i in range(num_processes):
        # Each worker gets an interleaved slice (i, i+num_processes, ...)
        # of the cases so the load is spread evenly.
        pr = Process(
            target=preprocess_save_to_queue,
            args=(
                trainer.preprocess_patient,
                q,
                list_of_lists[i::num_processes],
                output_files[i::num_processes],
                segs_from_prev_stage[i::num_processes],
                classes,
                trainer.plans['transpose_forward']
            )
        )
        pr.start() ## <------------ The error is generated here!!!!!!!!!!!!!
        processes.append(pr)

    try:
        # Consume until every worker has reported completion via "end".
        end_ctr = 0
        while end_ctr != num_processes:
            item = q.get()
            if item == "end":
                end_ctr += 1
                continue
            else:
                yield item

    finally:
        # Runs even if the consumer abandons the generator early:
        # terminate stragglers, reap all workers, release the queue.
        for p in processes:
            if p.is_alive():
                p.terminate()
            p.join()

        q.close()


def predict_cases_fastest(...):  # NOTE: abridged quote — real signature omitted by the question's author
    ...
    # Worker pool used to write result NIfTI files asynchronously.
    pool = Pool(num_threads_nifti_save)
    ...
    # Generator defined above; starts the preprocessing worker processes
    # lazily when first iterated.
    preprocessing = preprocess_multithreaded(
        trainer,
        list_of_lists,
        cleaned_output_files,
        num_threads_preprocessing,
        segs_from_prev_stage
    )
    ...
    pool.starmap_async(...)
    ...
    # Wait for all asynchronous save jobs to finish before returning.
    pool.close()
    pool.join()


def predict_from_folder(...):  # NOTE: abridged quote — thin wrapper, details omitted
    ...
    # Delegates the actual work to predict_cases_fastest (line 344 of the
    # question's main.py reaches here, per the traceback).
    return predict_cases_fastest(...)


if __name__ == "__main__":
    ...  # (abridged) module is also runnable as a script

Edit 03-02-2023

I have created a public project with which it is possible to reproduce the reported problem: https://gitlab.com/carlopoletto/nnunet_pyinstaller_problem

In the ./scripts folder there are some scripts to install everything and run the tests:

  • ./scripts/install: dependency installation
  • ./scripts/dist: creating the executable with pyinstaller
  • ./scripts/run_py: runs the python script (NB: this script automatically deletes the ./temp folder and recreates it by copying the contents of ./data)
  • ./scripts/run_exe: runs the executable created with ./scripts/dist (NB: this script automatically deletes the ./temp folder and recreates it by copying the contents of ./data)

The problem appears to be internal to the nnunet library. I don't know if this problem can be solved by properly configuring pyinstaller.

0

There are 0 answers