xargs is quite troublesome: it only stops early when a command exits with status 255. On top of that, set -eo pipefail is completely void inside a function exported with export -f, unless you encapsulate the function body in a child shell, catch the failure there, and propagate it as exit 255.

Here is a working example:

#!/bin/bash
set -x
set -eo pipefail

parallel_uploads="4"
s3_bucket_name="backup"

all_files=(
"/12.pbd"
"/13.pbd"
"/14.pbd"
"/15.pbd"
"/16.pbd"
"/17.pbd"
"/18.pbd"
"/19.pbd"
"/20.pbd"
"/21.pbd"
"/22.pbd"
"/23.pbd"
"/24.pbd"
"/25.pbd"
"/26.pbd"
"/27.pbd"
)


# Workaround for the POSIX shell bug (they call it a feature) where set -e
# stops working inside a subshell that is followed by ||
# https://unix.stackexchange.com/questions/65532/why-does-set-e-not-work-inside-subshells-with-parenthesis-followed-by-an-or
function acually_upload_to_s3()
{
    set -x;
    set -eu -o pipefail;

    printf 'Doing some\n';
    sleeptime="$(( RANDOM % 50 + 1 ))"
    sleep "$sleeptime";

    erroring_some;  # placeholder for a command that fails
    printf 'Doing some more\n';
}

function upload_to_s3()
{
    set -x;
    set -eu -o pipefail;
    # https://superuser.com/questions/403263/how-to-pass-bash-script-arguments-to-a-subshell
    /bin/bash -c "acually_upload_to_s3 $(printf "${1+ %q}" "$@")" || exit 255
}
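
# Note on the re-quoting trick above: printf "${1+ %q}" "$@" prints every
# argument %q-escaped and prefixed with a space, so 'backup' and
# '/my file.pbd' become the string " backup /my\ file.pbd", which the child
# /bin/bash -c splits back into the original two arguments. The "${1+ ...}"
# guard makes printf print nothing at all when no arguments were passed.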

function upload_all()
{
    export s3_bucket_name;
    export -f upload_to_s3;
    export -f acually_upload_to_s3;

    # https://unix.stackexchange.com/questions/566834/xargs-does-not-quit-on-error
    # https://stackoverflow.com/questions/11003418/calling-shell-functions-with-xargs
    # https://stackoverflow.com/questions/6441509/how-to-write-a-process-pool-bash-shell
    # https://stackoverflow.com/questions/356100/how-to-wait-in-bash-for-several-subprocesses-to-finish-and-return-exit-code-0
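    # The printf "'%s'\n" below wraps each file name in single quotes:
    # xargs splits its input on blanks and newlines but honors quotes, so
    # the wrapping keeps file names that contain spaces in one piece.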
    printf "'%s'\n" "${all_files[@]}" | xargs \
            --max-procs="$parallel_uploads" \
            --max-args=1 \
            --replace={} \
            /bin/bash -c 'time upload_to_s3 "$s3_bucket_name" "{}"';
}

time upload_all \
    && printf '%s Successfully uploaded all files\n' "$(date)" \
    || printf '%s Error: Could not upload some files\n'  "$(date)";
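
In real use, the printf and erroring_some placeholders would be replaced by the actual upload command. Here is a minimal sketch of such a body, assuming the AWS CLI is installed and configured; DEEP_ARCHIVE matches the script's file name. The example output further below is still from the placeholder version.

function acually_upload_to_s3()
{
    set -x;
    set -eu -o pipefail;

    s3_bucket_name="$1";
    file_path="$2";

    # Upload directly into the Glacier Deep Archive storage class
    aws s3 cp "$file_path" "s3://$s3_bucket_name$file_path" --storage-class DEEP_ARCHIVE;
}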

Example output:

$ bash upload_to_s3_glacier_deep.sh
+ set -eo pipefail
+ parallel_uploads=4
+ s3_bucket_name=backup
+ all_files=("/12.pbd" "/13.pbd" "/14.pbd" "/15.pbd" "/16.pbd" "/17.pbd" "/18.pbd" "/19.pbd" "/20.pbd" "/21.pbd" "/22.pbd" "/23.pbd" "/24.pbd" "/25.pbd" "/26.pbd" "/27.pbd")
+ upload_all
+ export s3_bucket_name
+ export -f upload_to_s3
+ export -f acually_upload_to_s3
+ printf ''\''%s'\''\n' /12.pbd /13.pbd /14.pbd /15.pbd /16.pbd /17.pbd /18.pbd /19.pbd /20.pbd /21.pbd /22.pbd /23.pbd /24.pbd /25.pbd /26.pbd /27.pbd
+ xargs --max-procs=4 --max-args=1 '--replace={}' /bin/bash -c 'time upload_to_s3 "$s3_bucket_name" "{}"'
+ set -eu -o pipefail
++ printf ' %q' backup /12.pbd
+ /bin/bash -c 'acually_upload_to_s3  backup /12.pbd'
+ set -eu -o pipefail
++ printf ' %q' backup /13.pbd
+ /bin/bash -c 'acually_upload_to_s3  backup /13.pbd'
+ set -eu -o pipefail
++ printf ' %q' backup /14.pbd
+ /bin/bash -c 'acually_upload_to_s3  backup /14.pbd'
+ set -eu -o pipefail
+ printf 'Doing some\n'
Doing some
+ sleeptime=3
+ sleep 3
+ set -eu -o pipefail
++ printf ' %q' backup /15.pbd
+ /bin/bash -c 'acually_upload_to_s3  backup /15.pbd'
+ set -eu -o pipefail
+ printf 'Doing some\n'
Doing some
+ sleeptime=49
+ sleep 49
+ set -eu -o pipefail
+ printf 'Doing some\n'
Doing some
+ sleeptime=13
+ sleep 13
+ set -eu -o pipefail
+ printf 'Doing some\n'
Doing some
+ sleeptime=30
+ sleep 30
+ erroring_some
environment: line 5: erroring_some: command not found
+ exit 255

real    0m3.146s
user    0m0.045s
sys 0m0.123s
xargs: /bin/bash: exited with status 255; aborting
+ erroring_some
environment: line 5: erroring_some: command not found
+ exit 255

real    0m13.149s
user    0m0.015s
sys 0m0.123s
xargs: /bin/bash: exited with status 255; aborting

real    0m13.271s
user    0m0.075s
sys 0m0.337s
++ date
+ printf '%s Error: Could not upload some files\n' 'Fri, Nov 19, 2021 22:00:30'
Fri, Nov 19, 2021 22:00:30 Error: Could not upload some files
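
To see why only exit status 255 matters, here is a minimal standalone sketch of the xargs behavior (GNU xargs; the echo is just a marker):

# exit status 1: xargs runs every task and only reports the failure at the end
printf '%s\n' a b c | xargs --max-args=1 /bin/bash -c 'echo "task $0"; exit 1'
printf 'xargs exited with: %s\n' "$?"   # prints 123

# exit status 255: xargs aborts immediately and launches nothing further
printf '%s\n' a b c | xargs --max-args=1 /bin/bash -c 'echo "task $0"; exit 255'
printf 'xargs exited with: %s\n' "$?"   # prints 124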

For comparison, here is the simpler first version, which only uses a plain subshell with || exit 255 inside the exported function. Comment out the errornow line to see the difference:

#!/bin/bash
set -x
set -eo pipefail

parallel_uploads="4"
s3_bucket_name="backup"

all_files=(
"/12.pbd"
"/13.pbd"
"/14.pbd"
"/15.pbd"
"/16.pbd"
"/17.pbd"
"/18.pbd"
"/19.pbd"
"/20.pbd"
"/21.pbd"
"/22.pbd"
"/23.pbd"
"/24.pbd"
"/25.pbd"
"/26.pbd"
"/27.pbd"
)


function upload_to_s3()
{
    (
        set -x;
        set -eo pipefail;

        # add your code here
        sleep 1
        errornow

    ) || exit 255
}

function upload_all()
{
    export s3_bucket_name;
    export -f upload_to_s3;

    printf "'%s'\n" "${all_files[@]}" | xargs \
            --max-procs="$parallel_uploads" \
            --max-args=1 \
            --replace={} \
            bash -c 'time upload_to_s3 "$s3_bucket_name" "{}"';
}

time upload_all;

printf 'Successfully uploaded all files\n'

With errornow uncommented:

$ bash upload_to_s3_glacier_deep.sh
+ set -eo pipefail
+ parallel_uploads=4
+ s3_bucket_name=backup
+ all_files=("/12.pbd" "/13.pbd" "/14.pbd" "/15.pbd" "/16.pbd" "/17.pbd" "/18.pbd" "/19.pbd" "/20.pbd" "/21.pbd" "/22.pbd" "/23.pbd" "/24.pbd" "/25.pbd" "/26.pbd" "/27.pbd")
+ upload_all
+ export s3_bucket_name
+ export -f upload_to_s3
+ printf ''\''%s'\''\n' /12.pbd /13.pbd /14.pbd /15.pbd /16.pbd /17.pbd /18.pbd /19.pbd /20.pbd /21.pbd /22.pbd /23.pbd /24.pbd /25.pbd /26.pbd /27.pbd
+ xargs --max-procs=4 --max-args=1 '--replace={}' bash -c 'time upload_to_s3 "$s3_bucket_name" "{}"'
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ errornow
+ errornow
+ errornow
+ errornow
environment: line 3: errornow: command not found
environment: line 3: errornow: command not found
environment: line 3: errornow: command not found
environment: line 3: errornow: command not found

real    0m1.178s
user    0m0.000s
sys 0m0.061s

real    0m1.133s
user    0m0.015s
sys 0m0.045s

real    0m1.212s
user    0m0.046s
sys 0m0.030s

real    0m1.141s
user    0m0.030s
sys 0m0.030s
xargs: bash: exited with status 255; aborting
xargs: bash: exited with status 255; aborting

real    0m1.309s
user    0m0.046s
sys 0m0.197s

With errornow commented out:

$ bash upload_to_s3_glacier_deep.sh
+ set -eo pipefail
+ parallel_uploads=4
+ s3_bucket_name=backup
+ all_files=("/12.pbd" "/13.pbd" "/14.pbd" "/15.pbd" "/16.pbd" "/17.pbd" "/18.pbd" "/19.pbd" "/20.pbd" "/21.pbd" "/22.pbd" "/23.pbd" "/24.pbd" "/25.pbd" "/26.pbd" "/27.pbd")
+ upload_all
+ export s3_bucket_name
+ export -f upload_to_s3
+ printf ''\''%s'\''\n' /12.pbd /13.pbd /14.pbd /15.pbd /16.pbd /17.pbd /18.pbd /19.pbd /20.pbd /21.pbd /22.pbd /23.pbd /24.pbd /25.pbd /26.pbd /27.pbd
+ xargs --max-procs=4 --max-args=1 '--replace={}' bash -c 'time upload_to_s3 "$s3_bucket_name" "{}"'
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1

real    0m1.045s
user    0m0.000s
sys 0m0.030s

real    0m1.043s
user    0m0.015s
sys 0m0.030s

real    0m1.044s
user    0m0.030s
sys 0m0.015s
+ set -eo pipefail
+ sleep 1

real    0m1.072s
user    0m0.045s
sys 0m0.015s
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1

real    0m1.049s
user    0m0.015s
sys 0m0.015s

real    0m1.052s
user    0m0.015s
sys 0m0.015s

real    0m1.054s
user    0m0.000s
sys 0m0.031s
+ set -eo pipefail
+ sleep 1

real    0m1.048s
user    0m0.000s
sys 0m0.015s
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1

real    0m1.046s
user    0m0.000s
sys 0m0.045s
+ set -eo pipefail
+ sleep 1

real    0m1.050s
user    0m0.000s
sys 0m0.030s

real    0m1.050s
user    0m0.000s
sys 0m0.015s

real    0m1.066s
user    0m0.030s
sys 0m0.015s
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1
+ set -eo pipefail
+ sleep 1

real    0m1.061s
user    0m0.000s
sys 0m0.015s

real    0m1.048s
user    0m0.000s
sys 0m0.015s

real    0m1.061s
user    0m0.000s
sys 0m0.015s

real    0m1.055s
user    0m0.015s
sys 0m0.030s

real    0m4.588s
user    0m0.406s
sys 0m0.757s
+ printf 'Successfully uploaded all files\n'
Successfully uploaded all files
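
Note that this simpler version only works because errornow happens to be the last command in the subshell: POSIX ignores set -e inside a compound command that is followed by ||, so inside ( ... ) || exit 255 only the failure of the last command propagates. A minimal sketch of the pitfall:

( set -e; false; echo 'still runs' ) || echo 'not reached, the subshell exited 0'
( set -e; false ) || echo 'reached, the last command failed'

This is the POSIX bug/feature that the /bin/bash -c workaround in the first script avoids.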

References:

  1. https://stackoverflow.com/questions/11003418/calling-shell-functions-with-xargs
  2. https://stackoverflow.com/questions/6441509/how-to-write-a-process-pool-bash-shell