How to transfer data from one function to another depends on your use case.
I could not reproduce your error; maybe it has something to do with aws or s3cmd. Using backticks for command substitution is deprecated; use $() instead.
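For example, with the listing command from your script:
# deprecated backtick form:
# output=`aws s3 ls "s3://path1/path2/"`
# preferred form, which also nests cleanly:
output=$(aws s3 ls "s3://path1/path2/")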
If you just want to pass data along and are not interested in writing it to disk, you can use global arrays (every variable you don't declare otherwise is global):
#!/usr/bin/env bash

command_to_get_files() {
    local ifs
    # save the internal field separator so we can restore it after the for loop
    ifs=$IFS
    # split only on newlines, not on spaces (to support filenames with spaces in them)
    IFS=$'\n'
    # I don't know the exact output of this command, but it should work with minor modifications
    # used for tests:
    # for file in *; do
    for file in $(aws s3 ls "s3://path1/path2/" | awk '{print $2}'); do
        # append $file as a new element to the end of the array
        files+=("${file}")
    done
    # restore IFS for the rest of the script to prevent issues at a later point in time
    IFS=${ifs}
}
# needs a non-empty files array
command_to_get_filesizes() {
    # check whether the number of elements in the files array is 0
    if (( ${#files[@]} == 0 )); then
        return 1
    fi
    local index
    # iterate over the indices of the files array
    for index in "${!files[@]}"; do
        # $(( )) converts the expression to an integer, so files that are not found get size 0
        filesizes[${index}]=$(( $(s3cmd du -r "s3://path1/path2/${files[${index}]}" | awk '{print $1}') ))
        # used for testing:
        # filesizes[${index}]=$(( $(stat -c %s "${files[${index}]}") ))
    done
}
command_to_get_files
command_to_get_filesizes

# loop over the indices of the array (in our case 0, 1, 2, ...)
for index in "${!files[@]}"; do
    echo "${files[${index}]}: ${filesizes[${index}]}"
done
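As a side note: on bash 4 or newer you could replace the manual IFS juggling in command_to_get_files with the mapfile builtin, which reads one line per array element (a sketch, not part of your original script):
# read the listing into the files array, one element per line (bash 4+)
mapfile -t files < <(aws s3 ls "s3://path1/path2/" | awk '{print $2}')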
Notes about bash arrays:
- get the size of the array: ${#array[@]}
- get the length of the first element: ${#array[0]}
- get the indices of the array: ${!array[@]}
- get the first element of the array: ${array[0]}
For more information about arrays, have a look here.
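A quick demo of those expressions with a throwaway array:
array=("first file" "second" "third")
echo "${#array[@]}"  # 3
echo "${#array[0]}"  # 10 (length of "first file")
echo "${!array[@]}"  # 0 1 2
echo "${array[0]}"   # first file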
Another method would be to just echo the names in one function and pass them as parameters to the other (this is tricky with filenames containing spaces), as sketched below.
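A minimal sketch of that approach, reusing the aws/s3cmd commands from above (the function names get_files and get_filesizes are just for illustration):
#!/usr/bin/env bash

get_files() {
    aws s3 ls "s3://path1/path2/" | awk '{print $2}'
}

get_filesizes() {
    # "$@" holds all parameters; quoting it preserves spaces within each one
    local file
    for file in "$@"; do
        s3cmd du -r "s3://path1/path2/${file}" | awk '{print $1}'
    done
}

# word splitting on the unquoted command substitution is exactly what
# breaks filenames with spaces here
get_filesizes $(get_files)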
Using temporary files would result in something like this:
#!/usr/bin/env bash

readonly FILES=$(mktemp)
readonly FILESIZES=$(mktemp)

cleanup() {
    rm -f "$FILES" "$FILESIZES"
}
# remove the temporary files at script exit
trap cleanup EXIT
command_to_get_files() {
    aws s3 ls "s3://path1/path2/" | awk '{print $2}' >> "$FILES"
}

command_to_get_filesizes() {
    while read -r file; do
        s3cmd du -r "s3://path1/path2/${file}" | awk '{print $1}' >> "$FILESIZES"
    done < "$FILES"
}
command_to_get_files
command_to_get_filesizes
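To print the same "name: size" pairs as in the array version, you could then read both files in parallel (one possible way, using two extra file descriptors):
while read -r file <&3 && read -r size <&4; do
    echo "${file}: ${size}"
done 3< "$FILES" 4< "$FILESIZES"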