Downgrading Apache Hadoop YARN to MapReduce v1
# remove YARN configuration
sudo yum remove hadoop-conf-pseudo
# stop YARN
sudo service hadoop-yarn-resourcemanager stop
sudo service hadoop-yarn-nodemanager stop
sudo service hadoop-mapreduce-historyserver stop
# stop HDFS
# (the loop itself can't run under sudo — "for" is shell syntax; each service call is sudo'ed instead)
for x in `cd /etc/init.d ; ls hadoop-hdfs-*` ; do sudo service $x stop ; done
# Install MRv1
sudo yum install hadoop-0.20-conf-pseudo
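On CDH this package switches /etc/hadoop/conf over to the MRv1 pseudo-distributed configuration via the alternatives system; a quick way to verify (the hadoop-conf alternative name is what CDH typically uses):
sudo alternatives --display hadoop-conf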
# remove the old HDFS cache dir (this wipes any existing HDFS data, hence the reformat below)
sudo rm -rf /var/lib/hadoop-hdfs/cache/
# format namenode
sudo -u hdfs hdfs namenode -format
# start HDFS
for x in `cd /etc/init.d ; ls hadoop-hdfs-*` ; do sudo service $x start ; done
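Optionally, confirm the HDFS daemons actually came up before moving on (same loop, using the init scripts' status action):
for x in `cd /etc/init.d ; ls hadoop-hdfs-*` ; do sudo service $x status ; done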
# make the HDFS /tmp directory and open its permissions (1777 = world-writable with sticky bit)
sudo -u hdfs hadoop fs -mkdir /tmp
sudo -u hdfs hadoop fs -chmod -R 1777 /tmp
# create the MapReduce staging directory in HDFS
sudo -u hdfs hadoop fs -mkdir -p /var/lib/hadoop-hdfs/cache/mapred/mapred/staging
sudo -u hdfs hadoop fs -chmod 1777 /var/lib/hadoop-hdfs/cache/mapred/mapred/staging
sudo -u hdfs hadoop fs -chown -R mapred /var/lib/hadoop-hdfs/cache/mapred
# create the matching local directory for MapReduce (plain filesystem commands this time, not hadoop fs)
sudo -u hdfs mkdir -p /var/lib/hadoop-hdfs/cache/mapred/mapred/local/
sudo chown -R mapred /var/lib/hadoop-hdfs/cache/mapred
# check dir structure
sudo -u hdfs hadoop fs -ls -R /
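The listing should look roughly like this (timestamps and sizes will differ); what matters is the mapred-owned cache tree and the sticky bit (t) on /tmp and the staging directory:
drwxrwxrwt   - hdfs   supergroup  0 ... /tmp
drwxr-xr-x   - hdfs   supergroup  0 ... /var
drwxr-xr-x   - hdfs   supergroup  0 ... /var/lib
drwxr-xr-x   - hdfs   supergroup  0 ... /var/lib/hadoop-hdfs
drwxr-xr-x   - hdfs   supergroup  0 ... /var/lib/hadoop-hdfs/cache
drwxr-xr-x   - mapred supergroup  0 ... /var/lib/hadoop-hdfs/cache/mapred
drwxr-xr-x   - mapred supergroup  0 ... /var/lib/hadoop-hdfs/cache/mapred/mapred
drwxrwxrwt   - mapred supergroup  0 ... /var/lib/hadoop-hdfs/cache/mapred/mapred/staging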
# start MRv1
for x in `cd /etc/init.d ; ls hadoop-0.20-mapreduce-*` ; do sudo service $x start ; done
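On a pseudo-distributed install this loop typically expands to just two services:
sudo service hadoop-0.20-mapreduce-jobtracker start
sudo service hadoop-0.20-mapreduce-tasktracker start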
# make an HDFS home directory for your user (cloudera here; substitute your own)
sudo -u hdfs hadoop fs -mkdir /user/cloudera
sudo -u hdfs hadoop fs -chown cloudera /user/cloudera
# test
hadoop fs -mkdir input
hadoop fs -put /etc/hadoop/conf/*.xml input
hadoop jar /usr/lib/hadoop-0.20-mapreduce/hadoop-examples.jar grep input output 'dfs+'
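Once the job finishes, the results can be inspected straight from HDFS (the grep example writes part-* files under the output directory):
hadoop fs -ls output
hadoop fs -cat 'output/part-*'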
If everything worked, the JobTracker web UI at http://localhost:50030 and the NameNode web UI at http://localhost:50070 should both be up.
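A quick check from the shell (assumes curl is installed; jobtracker.jsp and dfshealth.jsp are the usual landing pages for these UIs):
curl -sf http://localhost:50030/jobtracker.jsp > /dev/null && echo "JobTracker UI up"
curl -sf http://localhost:50070/dfshealth.jsp > /dev/null && echo "NameNode UI up"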