赞
踩
hadoop dfs -mkdir /hadoop/path/
hdfs dfs -copyFromLocal /local/data/file.txt /hadoop/data :将本地文件上传到hdfs上(原路径只能是一个文件)
hadoop dfs -put /local/*.txt /hadoop/path/ :put和 copyFromLocal 区别是,put 原路径可以是文件夹等
hadoop dfs -put -f /local/*.txt /hadoop/path/ :-f 含义是表示强制覆盖
hdfs dfs -put /local/data/ /hadoop/data/
hadoop dfs -rm -r /hadoop/data
##删除目录不放回收站-skipTrash
hdfs dfs -rm -r -skipTrash /hadoop/data/two_lm_txt_all_gbk
hadoop dfs -ls /tmp/data :查看/tmp/data 目录
hadoop fs -cat /tmp/a.txt :查看/tmp/a.txt文件内容
hadoop fs -get /path/to/hadoop/file.txt /path/on/local/machine
hdfs dfs -count /hadoop/data
hdfs dfs -mv /hadoop/data/* /hadoop/data1/
hdfs dfs -du -h -s /hadoop/data/
hdfs dfs -du -h /workdir/nlm
hadoop dfs -du -h /workdir/nlm
yarn application -list :查看 yarn 任务列表(子命令需要加 - 前缀)
yarn application -list
yarn application -kill id
hadoop dfs -cat /hadoop/data.txt | md5sum
import subprocess
def command_shell(command):
    """Run *command* through the shell and return the first
    space-delimited token of its decoded stdout.

    For ``md5sum``-style output ("<hash>  <file>") this yields the hash.
    Raises ``subprocess.CalledProcessError`` on a non-zero exit status.
    """
    raw_bytes = subprocess.check_output(command, shell=True)
    # partition(" ")[0] is equivalent to split(" ")[0]: everything
    # before the first space (the whole string if no space occurs).
    first_token, _, _ = raw_bytes.decode("utf-8").partition(" ")
    return first_token
# Verify an HDFS upload by comparing md5 checksums: hash the local file,
# then stream the HDFS copy through md5sum, and compare the two digests.
command1 = "md5sum /local/data.txt"
command2 = "hadoop dfs -cat /hadoop/data.txt | md5sum"
local_md5, hdfs_md5 = command_shell(command1), command_shell(command2)
if local_md5 == hdfs_md5:
    print("md5相同,文件一致")
hadoop dfs -getmerge /hadoop/ /local/merged.txt :getmerge 需要指定本地目标文件,将 HDFS 目录下的文件合并后下载到本地
Copyright © 2003-2013 www.wpsshop.cn 版权所有,并保留所有权利。