1. 启动 Hadoop：运行 start-all.sh
2. Copy the local example data to HDFS
# Copy the local directory /opt/tmp into HDFS as /user/hduser/gutenberg.
# NOTE(review): 'hadoop dfs' is the Hadoop 1.x form; on newer releases the
# equivalent is 'hdfs dfs' — confirm against the cluster's Hadoop version.
bin/hadoop dfs -copyFromLocal /opt/tmp /user/hduser/gutenberg
3. 运行 Hadoop Streaming 作业
# Run the streaming job: -file ships mapper.py/reducer.py to every task node,
# -mapper/-reducer name the commands each task executes.
# The -input glob is single-quoted so the LOCAL shell cannot expand it against
# the local filesystem (the original relied on the glob failing to match
# locally and being passed through literally); Hadoop expands the quoted
# pattern against HDFS itself.
bin/hadoop jar contrib/streaming/hadoop-*streaming*.jar \
  -file /home/hduser/mapper.py -mapper /home/hduser/mapper.py \
  -file /home/hduser/reducer.py -reducer /home/hduser/reducer.py \
  -input '/user/hduser/gutenberg/*' \
  -output /user/hduser/gutenberg-output
4. 查看输出内容
# Print the job result: part-00000 is the output of the first (here the only)
# reducer; jobs with multiple reducers produce part-00001, part-00002, ...
bin/hadoop dfs -cat /user/hduser/gutenberg-output/part-00000
# Alternative run using $HADOOP_HOME and paths under the user's home directory.
# NOTE(review): '~' is expanded by the LOCAL shell (to e.g. /home/hduser/tmp)
# before Hadoop sees it, so HDFS receives an absolute path like /home/hduser/tmp
# rather than the conventional /user/<name>/... layout — it is consistent
# between the two commands here, but confirm it is the intended HDFS location.
hadoop dfs -copyFromLocal tmp ~/tmp
hadoop jar $HADOOP_HOME/contrib/streaming/hadoop-*streaming*.jar -file ~/work/hadoop/mapper.py -mapper ~/work/hadoop/mapper.py -file ~/work/hadoop/reducer.py -reducer ~/work/hadoop/reducer.py -input ~/tmp -output ~/tmp-output
阅读(1777) | 评论(0) | 转发(0) |