import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.*;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Date;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
/**
 * Prints the attribute information (block size, file length, owner,
 * access time, modification time) of the HDFS file /user/hadoop/file.txt.
 *
 * @author
 */
public class FileDetail {
    /**
     * Connects to HDFS at hdfs://192.168.1.100:9000 as user "hadoop" and prints
     * the attributes of /user/hadoop/file.txt: block size, file length, owner,
     * access time, and modification time.
     *
     * @param args unused
     * @throws URISyntaxException   if the hard-coded HDFS URI is malformed
     * @throws IOException          on any filesystem or RPC error
     * @throws InterruptedException if connecting as the given user is interrupted
     */
    public static void main(String[] args) throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.1.100:9000");
        // Thread-safe java.time formatter. Replaces the original
        // SimpleDateFormat("yyyy-mm-dd hh:mm:ss"), whose pattern was wrong:
        // "mm" means minutes (month is "MM") and "hh" is a 12-hour clock ("HH").
        DateTimeFormatter formatter =
                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
        // try-with-resources guarantees the FileSystem handle is released.
        try (FileSystem fs = FileSystem.get(uri, conf, "hadoop")) {
            // locate file
            Path fpath = new Path("/user/hadoop/file.txt");
            // getFileStatus resolves symlinks to the target file; the original
            // getFileLinkStatus would describe a symlink itself instead.
            FileStatus filestatus = fs.getFileStatus(fpath);
            // get block size
            long blocksize = filestatus.getBlockSize();
            System.out.println("blocksize:" + blocksize);
            // get file size
            long filesize = filestatus.getLen();
            System.out.println("filesize:" + filesize);
            // get file owner
            String fileowner = filestatus.getOwner();
            System.out.println("fileowner:" + fileowner);
            // get file access time (epoch millis)
            long accessTime = filestatus.getAccessTime();
            System.out.println("access time:" + formatter.format(Instant.ofEpochMilli(accessTime)));
            // get file modification time (epoch millis)
            long modifyTime = filestatus.getModificationTime();
            System.out.println("modify time:" + formatter.format(Instant.ofEpochMilli(modifyTime)));
        }
    }
}