If you just want to get rid of many files as soon as possible, `ls -f1 /path/to/folder/with/many/files/ | xargs rm` might work okay — but it is better not to run it on production systems unless you can tolerate the service impact.
The following script works nicely for many files and should not noticeably affect the I/O load of the system.
#!/bin/bash
# Slowly delete the contents of a very large directory in small batches,
# sleeping between deletions so the I/O load on the system stays low.
set -u

# Directory to clean (no trailing slash) and a scratch file holding the
# current batch of filenames.
FOLDER="/path/to/folder/with/many/files"
FILE_FILENAMES="/tmp/filenames"

while true; do
  # -f skips sorting, so ls stays fast even with millions of entries.
  # NOTE: the count includes the "." and ".." entries.
  FILES=$(ls -f1 "$FOLDER" | wc -l)
  if [ "$FILES" -gt 10000 ]; then
    printf "[%s] %s files found. going on with removing\n" "$(date)" "$FILES"
    # Create new list of files: take 5000 names, skipping the first two
    # unsorted entries ("." and "..") via head 5002 | tail 5000.
    ls -f1 "$FOLDER" | head -n 5002 | tail -n 5000 > "$FILE_FILENAMES"
    if [ -s "$FILE_FILENAMES" ]; then
      # IFS= read -r preserves leading/trailing whitespace and backslashes
      # in filenames (plain 'read' would mangle them).
      while IFS= read -r FILE; do
        # BUG FIX: the original used "$FOLDER$FILE" — with no trailing slash
        # on FOLDER that built the wrong path and deleted nothing. Also pass
        # "--" so filenames beginning with "-" are not parsed as options.
        rm -- "$FOLDER/$FILE"
        # Tiny pause per file keeps the disk responsive for other processes.
        sleep 0.005
      done < "$FILE_FILENAMES"
    fi
  else
    printf "[%s] script has finished, almost all files have been deleted\n" "$(date)"
    break
  fi
  # Let the system breathe before counting again.
  sleep 5
done