Works great! I amended it to include my own very simple additions.
#!/bin/bash
# Simple HTTP load generator: spawns $users background "clients", each of
# which repeatedly fetches a random URL from url_list and logs one CSV line
# (date,time,clientN,url,elapsed) per fetch.
url_list=( http://www.bbc.co.uk http://www.cnn.com http://www.msn.com )
# Number of users to mimic simultaneously
users=20
#######################################
# Simulate one user: forever fetch a random URL, log a CSV line, then
# sleep a random 30-149 seconds before the next fetch.
# Globals:   url_list (read)
# Arguments: $1 - client number, used only in the log line
# Outputs:   one line per fetch: date,time,clientN,url,elapsed
#######################################
function one_user () {
  local user=$1
  local wait_secs idx url time date elapsed
  while true; do
    wait_secs=$(( RANDOM % 120 + 30 ))
    # Pick an index from the whole array, however many URLs it holds
    # (the original hard-coded "% 3" broke if url_list changed size).
    idx=$(( RANDOM % ${#url_list[@]} ))
    url=${url_list[$idx]}
    time=$(date +"%T")
    date=$(date +"%m-%d-%y")
    # -p pulls page requisites to mimic a real browser; --delete-after and
    # --no-cache keep this a pure load test. Field 6 of wget's "Downloaded:"
    # summary line is the elapsed wall-clock time (e.g. "1.8s").
    elapsed=$(wget -E -H -T 30 -k -K -p --delete-after --no-cache \
      -e robots=off "$url" 2>&1 | grep Downloaded | awk '{print $6}')
    echo "$date,$time,client$user,$url,$elapsed"
    sleep "$wait_secs"
  done
}
# Launch one background worker per simulated user, then wait so the script
# stays alive while the workers run (without wait the script would exit
# immediately and orphan the background loops; Ctrl-C now stops them all).
for (( user = 1; user <= users; user++ )); do
  one_user "$user" &
done
wait
Sample output:
04-21-13,00:34:01,client1,http://www.msn.com,1.8s
04-21-13,00:34:01,client14,http://www.bbc.co.uk,3.6s
04-21-13,00:34:34,client2,http://www.msn.com,1.5s
04-21-13,00:34:39,client19,http://www.msn.com,1.7s
04-21-13,00:34:34,client12,http://www.bbc.co.uk,3.6s
04-21-13,00:34:34,client4,http://www.cnn.com,4.9s
04-21-13,00:34:40,client20,http://www.bbc.co.uk,3.4s
04-21-13,00:34:49,client11,http://www.msn.com,1.9s
04-21-13,00:34:58,client14,http://www.bbc.co.uk,0.9s
04-21-13,00:34:50,client8,http://www.bbc.co.uk,3.8s
04-21-13,00:34:58,client5,http://www.bbc.co.uk,3.6s
04-21-13,00:35:19,client12,http://www.msn.com,1.4s
04-21-13,00:35:25,client10,http://www.msn.com,1.5s
04-21-13,00:35:20,client13,http://www.bbc.co.uk,3.3s
04-21-13,00:35:29,client3,http://www.bbc.co.uk,3.1s
04-21-13,00:35:35,client8,http://www.bbc.co.uk,3.1s
04-21-13,00:35:46,client9,http://www.msn.com,1.4s
04-21-13,00:35:55,client17,http://www.msn.com,2.1s
04-21-13,00:35:58,client7,http://www.msn.com,1.4s
04-21-13,00:35:50,client18,http://www.cnn.com,4.4s
Much appreciated, hanson44!