```python
# Import the Python subprocess module.
import subprocess


def run_cmd(args_list):
    """Run a Linux command and return (return_code, stdout, stderr)."""
    print('Running system command: {0}'.format(' '.join(args_list)))
    proc = subprocess.Popen(args_list,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    s_output, s_err = proc.communicate()
    s_return = proc.returncode
    return s_return, s_output, s_err
```
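A quick sanity check before moving on to HDFS: a minimal usage sketch (not from the original article) that runs a harmless local command and unpacks the three return values.

```python
# Minimal usage sketch: run a local command and inspect the results.
ret, out, err = run_cmd(['echo', 'hello'])
print(ret)           # 0 on success
print(out.decode())  # communicate() returns bytes on Python 3
```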
The `ls` command behaves as follows:

If the path is a file, it returns the file's information in this format: filename <number of replicas> filesize modification_date modification_time permissions userid groupid

If the path is a directory, it returns a list of the directory's direct children, as in Unix. A directory is listed as: dirname <dir> modification_date modification_time permissions userid groupid

Example: hadoop fs -ls /user/hadoop/file1 /user/hadoop/file2 hdfs://host:port/user/hadoop/dir1 /nonexistentfile

Return value: returns 0 on success and -1 on failure.
```python
# Run the Hadoop ls command in Python.
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-ls', 'hdfs_file_path'])
lines = out.decode('utf-8').split('\n')  # communicate() returns bytes on Python 3
```
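A common next step is to pull the paths out of the listing. The sketch below assumes the standard `hdfs dfs -ls` column layout (permissions, replicas, owner, group, size, date, time, path) and skips the "Found N items" header line.

```python
# Keep only entries (they start with 'd' for directories or '-' for files)
# and take the path, which is the last whitespace-separated column.
paths = [line.split()[-1]
         for line in lines
         if line.startswith(('d', '-'))]
```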
```python
# Run the Hadoop get command in Python: copy a file from HDFS to the local filesystem.
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-get', 'hdfs_file_path', 'local_path'])
```

```python
# Run the Hadoop put command in Python: copy a local file to HDFS.
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-put', 'local_file', 'hdfs_file_path'])
```

```python
# Run the Hadoop copyFromLocal command in Python (like put, but the source must be local).
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-copyFromLocal', 'local_file', 'hdfs_file_path'])
```

```python
# Run the Hadoop copyToLocal command in Python (like get, but the destination must be local).
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-copyToLocal', 'hdfs_file_path', 'local_file'])
```
```python
# Common HDFS command:
#   hdfs dfs -rm -skipTrash /path/to/file/you/want/to/remove/permanently
# Run the Hadoop remove-file command in Python.
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-rm', 'hdfs_file_path'])
# -skipTrash deletes the file immediately instead of moving it to the trash.
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-rm', '-skipTrash', 'hdfs_file_path'])
```
```python
# rm -r: HDFS command to remove an entire directory and all of its content.
# Usage: hdfs dfs -rm -r <path>
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-rm', '-r', 'hdfs_file_path'])
(ret, out, err) = run_cmd(['hdfs', 'dfs', '-rm', '-r', '-skipTrash', 'hdfs_file_path'])
```
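Since the remove variants above differ only in their flags, they fold naturally into one helper. This is a sketch of ours, not from the original article; `hdfs_rm` is a hypothetical name.

```python
# Hypothetical convenience wrapper: build the -rm argument list from
# keyword options, then delegate to run_cmd defined above.
def hdfs_rm(path, recursive=False, skip_trash=False):
    cmd = ['hdfs', 'dfs', '-rm']
    if recursive:
        cmd.append('-r')
    if skip_trash:
        cmd.append('-skipTrash')
    cmd.append(path)
    return run_cmd(cmd)
```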
Check if a file exists in HDFS.

Usage: hadoop fs -test -[defsz] URI

Options:

- -d: if the path is a directory, return 0.
- -e: if the path exists, return 0.
- -f: if the path is a file, return 0.
- -s: if the path is not empty, return 0.
- -z: if the file is zero length, return 0.

Example: hadoop fs -test -e filename

```python
hdfs_file_path = '/tmpo'
cmd = ['hdfs', 'dfs', '-test', '-e', hdfs_file_path]
ret, out, err = run_cmd(cmd)
print(ret, out, err)
if ret:
    print('file does not exist')
```
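Because `-test` reports its answer only through the exit code and prints nothing, the check wraps neatly into a reusable predicate. A minimal sketch; `hdfs_exists` is our hypothetical name.

```python
# Hypothetical helper: True if the HDFS path exists. `-test -e` exits
# with 0 when the path exists and non-zero otherwise.
def hdfs_exists(path):
    ret, out, err = run_cmd(['hdfs', 'dfs', '-test', '-e', path])
    return ret == 0
```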
This article is translated from Interacting-with-Hadoop-HDFS-using-Python-codes.

Other references:

- Hadoop shell guide
- HDFS tool: snakebite