#!/bin/bash
# Fetches the daily LiveApps pricing data files from an Object Store
# (Swift) container, copies them to the local file system, and stages
# them under /data/landing/LIVEAPPS for loading into a Hive table.
#
# Must be run by a user with sudo access to the 'hdfs' account: the
# 'hadoop fs' commands fail when executed as an ordinary user.

CONTAINER="sc-oc-vm-dev-ace-01"   # Object Storage container name
DIRECTORY=""                      # reserved; currently unused
PRICINGREPORT="PriceReport_$(date +%Y-%m-%d).csv"
LIVEAPPSDATA="RMSLiveAppsData_$(date +%Y-%m-%d).csv"
FILELINECOUNT=0

echo "Object Storage Container Name : $CONTAINER"
echo "LiveApps Pricing Data File : $PRICINGREPORT"
echo "LiveApps Data File : $LIVEAPPSDATA"
echo "-----------------------------------------------------------------"
echo ""
echo ""
echo "----Checking if files exist on local file system ----------------"
if [ -f "$PRICINGREPORT" ] && [ -f "$LIVEAPPSDATA" ]; then
  echo "$PRICINGREPORT Found...."
  echo "$LIVEAPPSDATA Found...."
  echo " Loading into BigCompute...."
else
  echo ""
  echo "-------------Aborted Data Files not Found----------------"
  echo "$PRICINGREPORT ..File Not Found"
  echo "$LIVEAPPSDATA ..File Not Found"
  echo "---------------------------------------------------------"
  # NOTE(review): despite the "Aborted" banner this intentionally falls
  # through — the container check below is the remedy that copies the
  # missing files down to the local file system.
fi

echo ""
echo "------ Checking if files exist within Container -----------------"
echo "------ must su to hdfs account, otherwise this fails ------------"

# WARNING: test overrides — these clobber today's date-based filenames
# computed above. Comment these out for production runs.
PRICINGREPORT="PriceReport_2018-04-23.csv"
LIVEAPPSDATA="RMSLiveAppsData_2018-04-23.csv"

# 'su - hdfs -c' runs the command as hdfs with a login environment; the
# redirection happens in this (calling) shell, so files.txt is owned by
# the invoking user. Zero lines in files.txt => neither file listed.
sudo su - hdfs -c "hadoop fs -ls swift://$CONTAINER.default/$PRICINGREPORT" > files.txt
sudo su - hdfs -c "hadoop fs -ls swift://$CONTAINER.default/$LIVEAPPSDATA" >> files.txt

FILELINECOUNT=$(wc -l < files.txt)
echo "----- FileLineCount is : $FILELINECOUNT"

if [ "$FILELINECOUNT" -eq 0 ]; then
  echo "Container is empty"
  echo "$PRICINGREPORT Not Found...."
  echo "$LIVEAPPSDATA Not Found...."
else
  echo "Files Exist Load into Hive"
  echo "Copy File from Container to Local File system"
  sudo su - hdfs -c "hadoop fs -copyToLocal -crc swift://$CONTAINER.default/$PRICINGREPORT /data/landing/LIVEAPPS/PriceReport.csv"
  sudo su - hdfs -c "hadoop fs -copyToLocal -crc swift://$CONTAINER.default/$LIVEAPPSDATA /data/landing/LIVEAPPS/RMSLiveAppsData.csv"
fi