If you just want to get rid of many files as quickly as possible, ls -f1 /path/to/folder/with/many/files/ | xargs rm might work okay (note that ls prints bare file names, so this only works if you run it from inside that directory), but don't run it on production systems unless you can live with the service impact: the mass delete can cause I/O issues, and applications might get stuck while it runs.
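If you do need a one-shot bulk delete on a live box, a slightly gentler variant is to let find do the unlinking at idle I/O priority instead of piping through xargs. This is just a sketch, assuming the directory contains only regular files you actually want gone:

# Idle I/O class (-c3) so other workloads keep priority;
# -delete unlinks each match without spawning rm processes
ionice -c3 find /path/to/folder/with/many/files/ -maxdepth 1 -type f -delete

Keep in mind the idle class only takes effect with I/O schedulers that honor it (e.g. CFQ/BFQ), so it is not a guarantee against stalls.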
The following script copes nicely with very large numbers of files and should barely affect the I/O load of the system, because it deletes in small batches and pauses in between.
#!/bin/bash
# Path to folder with many files
FOLDER="/path/to/folder/with/many/files"
# Temporary file to store file names
FILE_FILENAMES="/tmp/filenames"
if [ -z "$FOLDER" ]; then
    echo "Prevented you from deleting everything! Correct your FOLDER variable!"
    exit 1
fi
while true; do
    # Count directory entries; -f skips sorting, which is much faster on huge directories
    FILES=$(ls -f1 "$FOLDER" | wc -l)
    if [ "$FILES" -gt 10000 ]; then
        printf "[%s] %s files found, going on with removing\n" "$(date)" "$FILES"
        # Grab the next batch of 5000 names; the head/tail offsets skip the
        # "." and ".." entries that ls -f includes at the top of its output
        ls -f1 "$FOLDER" | head -n 5002 | tail -n 5000 > "$FILE_FILENAMES"
        if [ -s "$FILE_FILENAMES" ]; then
            while IFS= read -r FILE; do
                rm "$FOLDER/$FILE"
                # Tiny pause between deletes to keep the I/O load down
                sleep 0.005
            done < "$FILE_FILENAMES"
        fi
    else
        printf "[%s] script has finished, almost all files have been deleted\n" "$(date)"
        break
    fi
    # Let the system breathe between batches
    sleep 5
done
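To run it, save it somewhere, adjust FOLDER, make it executable, and launch it detached so it survives your SSH session ending. The file names here (cleanup.sh, cleanup.log) are just placeholders:

chmod +x cleanup.sh
nohup ./cleanup.sh > cleanup.log 2>&1 &
tail -f cleanup.log    # watch progress

Since the script sleeps between batches, it can run for quite a while on a directory with millions of files; that is the trade-off for keeping the system responsive.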