I want to create a config file that contains all of the paths.
I then want to read that config file line by line and pass each path as an argument to the corresponding function below.
In other words, I want to replace every hard-coded path by reading the config file line by line and passing each entry to its respective function.
How can I achieve that?
Any guidance is appreciated.
#!/bin/bash
# Stream-audit monitor: one background function per stream appends timing
# rows to HDFS /AUDIT/FINAL.csv. Runs under bash (the original #!/bin/sh
# shebang was wrong: the functions below rely on bash features).
# NOTE(review): DATE is captured once at startup, so every audit row written
# during the whole run carries this same timestamp — confirm that is intended.
DATE=$(date +"%Y-%m-%d %H:%M:%S")
S1()
{
        # Poll the S1 stream directory for up to 100 cycles (5s apart).
        # For each of the first 3 *.RTM files found, compute timing/lag
        # metrics and append one audit row to HDFS /AUDIT/FINAL.csv.
        # Globals written: stream, path, counter, FILENAME and the metric vars.
        stream=S1
        path=/sadique/S1
        counter=0
        while [ "$counter" -le 100 ]
        do
                for files in $(printf '%s\n' "$path"/*.RTM | head -3)
                do
                        # Reset so a name left over from a previous iteration is
                        # never re-reported when the current entry is not a file
                        # (the original re-appended stale data in that case).
                        FILENAME=
                        if [ -f "$files" ]
                        then
                                # First and last record transaction times (CSV column 1,
                                # fractional epoch seconds).
                                TT_FIRST=$(awk -F ',' 'NR==1{print $1}' "$files")
                                TT_LAST=$(awk -F ',' 'END{print $1}' "$files")
                                # Basename of the file. The original used `ls | cut -d/ -f6`,
                                # which silently breaks when the directory depth changes.
                                FILENAME=${files##*/}
                                # File modification time, epoch and human-readable.
                                TIMESTAMP=$(date -r "$files" +'%s')
                                FILE_CR_TIME=$(date +'%Y-%m-%d %H:%M:%S' -r "$files")
                                # Round the fractional record times to whole seconds.
                                TRANS_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +'%s')
                                TRANS_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +'%s')
                                RECORD_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +"%Y-%m-%d %H:%M:%S")
                                RECORD_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +"%Y-%m-%d %H:%M:%S")
                                # Lag between the file mtime and the record times,
                                # plus total processing delay relative to "now" (UTC).
                                TIME_LAG_FIRST=$((TIMESTAMP - TRANS_TIME_FIRST))
                                TIME_LAG_LAST=$((TIMESTAMP - TRANS_TIME_LAST))
                                DELAY_PROCESS=$(($(date -u +'%s') - TIMESTAMP))
                                TRANS_DIFFTIME=$((TRANS_TIME_LAST - TRANS_TIME_FIRST))
                                # A file was processed: make this the final poll cycle.
                                counter=100
                        fi
                        if [ -n "$FILENAME" ]
                        then
                                echo "${DATE} ${stream} ${FILENAME} ${FILE_CR_TIME} ${RECORD_TIME_FIRST} ${TIME_LAG_FIRST} ${RECORD_TIME_LAST} ${TIME_LAG_LAST} ${DELAY_PROCESS} ${TRANS_DIFFTIME}" | hadoop dfs -appendToFile - /AUDIT/FINAL.csv
                        fi
                done
                counter=$((counter + 1))
                sleep 5
        done
}
IUPS()
{
        # Poll the IUPS stream directory for up to 100 cycles (5s apart).
        # For each of the first 3 *.RTM files found, compute timing/lag
        # metrics and append one audit row to HDFS /AUDIT/FINAL.csv.
        # Globals written: stream, path, counter, FILENAME and the metric vars.
        stream=IUPS
        path=/sadique/IUPS
        counter=0
        while [ "$counter" -le 100 ]
        do
                for files in $(printf '%s\n' "$path"/*.RTM | head -3)
                do
                        # Reset so a name left over from a previous iteration is
                        # never re-reported when the current entry is not a file.
                        FILENAME=
                        if [ -f "$files" ]
                        then
                                # First and last record transaction times (CSV column 1,
                                # fractional epoch seconds).
                                TT_FIRST=$(awk -F ',' 'NR==1{print $1}' "$files")
                                TT_LAST=$(awk -F ',' 'END{print $1}' "$files")
                                # Basename of the file (replaces the fragile
                                # `ls | cut -d/ -f6` field index).
                                FILENAME=${files##*/}
                                # File modification time, epoch and human-readable.
                                TIMESTAMP=$(date -r "$files" +'%s')
                                FILE_CR_TIME=$(date +'%Y-%m-%d %H:%M:%S' -r "$files")
                                # Round the fractional record times to whole seconds.
                                TRANS_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +'%s')
                                TRANS_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +'%s')
                                RECORD_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +"%Y-%m-%d %H:%M:%S")
                                RECORD_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +"%Y-%m-%d %H:%M:%S")
                                # Lag between the file mtime and the record times,
                                # plus total processing delay relative to "now" (UTC).
                                TIME_LAG_FIRST=$((TIMESTAMP - TRANS_TIME_FIRST))
                                TIME_LAG_LAST=$((TIMESTAMP - TRANS_TIME_LAST))
                                DELAY_PROCESS=$(($(date -u +'%s') - TIMESTAMP))
                                TRANS_DIFFTIME=$((TRANS_TIME_LAST - TRANS_TIME_FIRST))
                                # A file was processed: make this the final poll cycle.
                                counter=100
                        fi
                        if [ -n "$FILENAME" ]
                        then
                                echo "${DATE} ${stream} ${FILENAME} ${FILE_CR_TIME} ${RECORD_TIME_FIRST} ${TIME_LAG_FIRST} ${RECORD_TIME_LAST} ${TIME_LAG_LAST} ${DELAY_PROCESS} ${TRANS_DIFFTIME}" | hadoop dfs -appendToFile - /AUDIT/FINAL.csv
                        fi
                done
                counter=$((counter + 1))
                sleep 5
        done
}
IUCS()
{
        # Poll the IUCS stream directory for up to 100 cycles (5s apart).
        # Unlike S1/IUPS, the record time here is a hex millisecond value in
        # CSV column 4, so it is converted to decimal and divided by 1000.
        # Globals written: stream, path, counter, FILENAME and the metric vars.
        stream=IUCS
        path=/sadique/IUCS
        counter=0
        while [ "$counter" -le 100 ]
        do
                for files in $(printf '%s\n' "$path"/* | head -3)
                do
                        # Reset so a name left over from a previous iteration is
                        # never re-reported when the current entry is not a file.
                        FILENAME=
                        if [ -f "$files" ] && [ -s "$files" ]
                        then
                                # First/last record times: hex milliseconds in column 4.
                                TTHEX_FIRST=$(awk -F ',' 'NR==1{print $4}' "$files")
                                TTHEX_LAST=$(awk -F ',' 'END{print $4}' "$files")
                                # Hex ms -> decimal epoch seconds (hoisted so each value
                                # is converted exactly once).
                                TT_FIRST=$(($(printf '%d' "0x$TTHEX_FIRST") / 1000))
                                TT_LAST=$(($(printf '%d' "0x$TTHEX_LAST") / 1000))
                                TIMESTAMP=$(date -r "$files" +'%s')
                                # Basename of the file (replaces the fragile
                                # `ls | cut -d/ -f5` field index).
                                FILENAME=${files##*/}
                                FILE_CR_TIME=$(date +'%Y-%m-%d %H:%M:%S' -r "$files")
                                TRANS_TIME_FIRST=$(date -d @"$TT_FIRST" +'%s')
                                TRANS_TIME_LAST=$(date -d @"$TT_LAST" +'%s')
                                RECORD_TIME_FIRST=$(date -d @"$TT_FIRST" +"%Y-%m-%d %H:%M:%S")
                                # BUG FIX: the original formatted RECORD_TIME_LAST from
                                # TTHEX_FIRST, so first and last record times were identical.
                                RECORD_TIME_LAST=$(date -d @"$TT_LAST" +"%Y-%m-%d %H:%M:%S")
                                TIME_LAG_FIRST=$((TIMESTAMP - TRANS_TIME_FIRST))
                                TIME_LAG_LAST=$((TIMESTAMP - TRANS_TIME_LAST))
                                DELAY_PROCESS=$(($(date -u +'%s') - TIMESTAMP))
                                TRANS_DIFFTIME=$((TRANS_TIME_LAST - TRANS_TIME_FIRST))
                                # A file was processed: make this the final poll cycle.
                                counter=100
                        fi
                        # Skip empty names and in-flight .temp files. The original
                        # test [ "$FILENAME" != "*.temp" ] compared against the
                        # literal string "*.temp" and never actually excluded them.
                        case $FILENAME in
                                ''|*.temp)
                                        : ;;
                                *)
                                        echo "${DATE} ${stream} ${FILENAME} ${FILE_CR_TIME} ${RECORD_TIME_FIRST} ${TIME_LAG_FIRST} ${RECORD_TIME_LAST} ${TIME_LAG_LAST} ${DELAY_PROCESS} ${TRANS_DIFFTIME}" | hadoop dfs -appendToFile - /AUDIT/FINAL.csv
                                        ;;
                        esac
                done
                counter=$((counter + 1))
                sleep 5
        done
}
# Run one monitor per stream concurrently, then wait for all of them so the
# script's exit does not orphan the background jobs mid-append.
S1 &
IUPS &
IUCS &
wait
---------- Post updated at 08:22 AM ---------- Previous update was at 05:45 AM ----------
I created a config file named `path` and placed the following in it:
export input1=/sadique/S1
export input2=/sadique/IUPS
export input3=/sadique/IUCS
then:
#!/bin/bash
# Stream-audit monitor, config-driven variant: the stream input directories
# come from /DPI_IN/path, which exports input1..input3. Runs under bash
# (the original #!/bin/sh shebang was wrong: 'source' and other bashisms).
# NOTE(review): DATE is captured once at startup, so every audit row written
# during the whole run carries this same timestamp — confirm that is intended.
DATE=$(date +"%Y-%m-%d %H:%M:%S")
# Fail fast if the config file is missing so the monitors don't glob an
# empty/unset path. '.' is the portable spelling of bash's 'source'.
[ -r /DPI_IN/path ] || { echo "config file /DPI_IN/path not readable" >&2; exit 1; }
. /DPI_IN/path
S1()
{
        # Poll the S1 stream directory (from config var input1) for up to 100
        # cycles (5s apart). For each of the first 3 *.RTM files found, compute
        # timing/lag metrics and append one audit row to HDFS /AUDIT/FINAL.csv.
        # Globals read: input1, DATE. Globals written: stream, counter,
        # FILENAME and the metric vars.
        stream=S1
        # Abort loudly instead of globbing an empty path if the config file
        # did not define input1.
        : "${input1:?input1 not set - check /DPI_IN/path}"
        counter=0
        while [ "$counter" -le 100 ]
        do
                for files in $(printf '%s\n' "${input1}"/*.RTM | head -3)
                do
                        # Reset so a name left over from a previous iteration is
                        # never re-reported when the current entry is not a file.
                        FILENAME=
                        if [ -f "$files" ]
                        then
                                # First and last record transaction times (CSV column 1,
                                # fractional epoch seconds).
                                TT_FIRST=$(awk -F ',' 'NR==1{print $1}' "$files")
                                TT_LAST=$(awk -F ',' 'END{print $1}' "$files")
                                # Basename of the file (replaces the fragile
                                # `ls | cut -d/ -f6` field index).
                                FILENAME=${files##*/}
                                # File modification time, epoch and human-readable.
                                TIMESTAMP=$(date -r "$files" +'%s')
                                FILE_CR_TIME=$(date +'%Y-%m-%d %H:%M:%S' -r "$files")
                                # Round the fractional record times to whole seconds.
                                TRANS_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +'%s')
                                TRANS_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +'%s')
                                RECORD_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +"%Y-%m-%d %H:%M:%S")
                                RECORD_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +"%Y-%m-%d %H:%M:%S")
                                # Lag between the file mtime and the record times,
                                # plus total processing delay relative to "now" (UTC).
                                TIME_LAG_FIRST=$((TIMESTAMP - TRANS_TIME_FIRST))
                                TIME_LAG_LAST=$((TIMESTAMP - TRANS_TIME_LAST))
                                DELAY_PROCESS=$(($(date -u +'%s') - TIMESTAMP))
                                TRANS_DIFFTIME=$((TRANS_TIME_LAST - TRANS_TIME_FIRST))
                                # A file was processed: make this the final poll cycle.
                                counter=100
                        fi
                        if [ -n "$FILENAME" ]
                        then
                                echo "${DATE} ${stream} ${FILENAME} ${FILE_CR_TIME} ${RECORD_TIME_FIRST} ${TIME_LAG_FIRST} ${RECORD_TIME_LAST} ${TIME_LAG_LAST} ${DELAY_PROCESS} ${TRANS_DIFFTIME}" | hadoop dfs -appendToFile - /AUDIT/FINAL.csv
                        fi
                done
                counter=$((counter + 1))
                sleep 5
        done
}
IUPS()
{
        # Poll the IUPS stream directory (from config var input2) for up to 100
        # cycles (5s apart). For each of the first 3 *.RTM files found, compute
        # timing/lag metrics and append one audit row to HDFS /AUDIT/FINAL.csv.
        # Globals read: input2, DATE. Globals written: stream, counter,
        # FILENAME and the metric vars.
        # (The stale hard-coded 'path=/sadique/IUPS' from the pre-config
        # version has been removed: the loop below only uses ${input2}.)
        stream=IUPS
        # Abort loudly instead of globbing an empty path if the config file
        # did not define input2.
        : "${input2:?input2 not set - check /DPI_IN/path}"
        counter=0
        while [ "$counter" -le 100 ]
        do
                for files in $(printf '%s\n' "${input2}"/*.RTM | head -3)
                do
                        # Reset so a name left over from a previous iteration is
                        # never re-reported when the current entry is not a file.
                        FILENAME=
                        if [ -f "$files" ]
                        then
                                # First and last record transaction times (CSV column 1,
                                # fractional epoch seconds).
                                TT_FIRST=$(awk -F ',' 'NR==1{print $1}' "$files")
                                TT_LAST=$(awk -F ',' 'END{print $1}' "$files")
                                # Basename of the file (replaces the fragile
                                # `ls | cut -d/ -f6` field index).
                                FILENAME=${files##*/}
                                # File modification time, epoch and human-readable.
                                TIMESTAMP=$(date -r "$files" +'%s')
                                FILE_CR_TIME=$(date +'%Y-%m-%d %H:%M:%S' -r "$files")
                                # Round the fractional record times to whole seconds.
                                TRANS_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +'%s')
                                TRANS_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +'%s')
                                RECORD_TIME_FIRST=$(date -d @"$(printf '%.0f' "$TT_FIRST")" +"%Y-%m-%d %H:%M:%S")
                                RECORD_TIME_LAST=$(date -d @"$(printf '%.0f' "$TT_LAST")" +"%Y-%m-%d %H:%M:%S")
                                # Lag between the file mtime and the record times,
                                # plus total processing delay relative to "now" (UTC).
                                TIME_LAG_FIRST=$((TIMESTAMP - TRANS_TIME_FIRST))
                                TIME_LAG_LAST=$((TIMESTAMP - TRANS_TIME_LAST))
                                DELAY_PROCESS=$(($(date -u +'%s') - TIMESTAMP))
                                TRANS_DIFFTIME=$((TRANS_TIME_LAST - TRANS_TIME_FIRST))
                                # A file was processed: make this the final poll cycle.
                                counter=100
                        fi
                        if [ -n "$FILENAME" ]
                        then
                                echo "${DATE} ${stream} ${FILENAME} ${FILE_CR_TIME} ${RECORD_TIME_FIRST} ${TIME_LAG_FIRST} ${RECORD_TIME_LAST} ${TIME_LAG_LAST} ${DELAY_PROCESS} ${TRANS_DIFFTIME}" | hadoop dfs -appendToFile - /AUDIT/FINAL.csv
                        fi
                done
                counter=$((counter + 1))
                sleep 5
        done
}
IUCS()
{
        # Poll the IUCS stream directory (from config var input3) for up to 100
        # cycles (5s apart). Unlike S1/IUPS, the record time here is a hex
        # millisecond value in CSV column 4, so it is converted to decimal and
        # divided by 1000. Globals read: input3, DATE. Globals written: stream,
        # counter, FILENAME and the metric vars.
        # (The stale hard-coded 'path=/sadique/IUCS' from the pre-config
        # version has been removed: the loop below only uses ${input3}.)
        stream=IUCS
        # Abort loudly instead of globbing an empty path if the config file
        # did not define input3.
        : "${input3:?input3 not set - check /DPI_IN/path}"
        counter=0
        while [ "$counter" -le 100 ]
        do
                for files in $(printf '%s\n' "${input3}"/* | head -3)
                do
                        # Reset so a name left over from a previous iteration is
                        # never re-reported when the current entry is not a file.
                        FILENAME=
                        if [ -f "$files" ] && [ -s "$files" ]
                        then
                                # First/last record times: hex milliseconds in column 4.
                                TTHEX_FIRST=$(awk -F ',' 'NR==1{print $4}' "$files")
                                TTHEX_LAST=$(awk -F ',' 'END{print $4}' "$files")
                                # Hex ms -> decimal epoch seconds (hoisted so each value
                                # is converted exactly once).
                                TT_FIRST=$(($(printf '%d' "0x$TTHEX_FIRST") / 1000))
                                TT_LAST=$(($(printf '%d' "0x$TTHEX_LAST") / 1000))
                                TIMESTAMP=$(date -r "$files" +'%s')
                                # Basename of the file (replaces the fragile
                                # `ls | cut -d/ -f5` field index).
                                FILENAME=${files##*/}
                                FILE_CR_TIME=$(date +'%Y-%m-%d %H:%M:%S' -r "$files")
                                TRANS_TIME_FIRST=$(date -d @"$TT_FIRST" +'%s')
                                TRANS_TIME_LAST=$(date -d @"$TT_LAST" +'%s')
                                RECORD_TIME_FIRST=$(date -d @"$TT_FIRST" +"%Y-%m-%d %H:%M:%S")
                                # BUG FIX: the original formatted RECORD_TIME_LAST from
                                # TTHEX_FIRST, so first and last record times were identical.
                                RECORD_TIME_LAST=$(date -d @"$TT_LAST" +"%Y-%m-%d %H:%M:%S")
                                TIME_LAG_FIRST=$((TIMESTAMP - TRANS_TIME_FIRST))
                                TIME_LAG_LAST=$((TIMESTAMP - TRANS_TIME_LAST))
                                DELAY_PROCESS=$(($(date -u +'%s') - TIMESTAMP))
                                TRANS_DIFFTIME=$((TRANS_TIME_LAST - TRANS_TIME_FIRST))
                                # A file was processed: make this the final poll cycle.
                                counter=100
                        fi
                        # Skip empty names and in-flight .temp files. The original
                        # test [ "$FILENAME" != "*.temp" ] compared against the
                        # literal string "*.temp" and never actually excluded them.
                        case $FILENAME in
                                ''|*.temp)
                                        : ;;
                                *)
                                        echo "${DATE} ${stream} ${FILENAME} ${FILE_CR_TIME} ${RECORD_TIME_FIRST} ${TIME_LAG_FIRST} ${RECORD_TIME_LAST} ${TIME_LAG_LAST} ${DELAY_PROCESS} ${TRANS_DIFFTIME}" | hadoop dfs -appendToFile - /AUDIT/FINAL.csv
                                        ;;
                        esac
                done
                counter=$((counter + 1))
                sleep 5
        done
}
# Run one monitor per stream concurrently, then wait for all of them so the
# script's exit does not orphan the background jobs mid-append.
S1 &
IUPS &
IUCS &
wait