pom.xml
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>3.1.0</version>
</dependency>
HdfsOperate.java
package com.shaoyan.hdfs;

import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class HdfsOperate {

    private FileSystem fs;

    public HdfsOperate(FileSystem fs) {
        this.fs = fs;
    }

    public void closeFS() throws IOException {
        fs.close();
    }
    // Create a directory (permissions 775: owner/group rwx, others r-x)
    public void makeDir(String dirName) throws IOException {
        Path path = new Path(dirName);
        FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.READ_EXECUTE);
        fs.mkdirs(path, fsPermission);
    }
    // Delete a directory
    public void delDir(String dirName) throws IOException {
        Path path = new Path(dirName);
        fs.delete(path, true); // true: delete recursively
    }
    // Write a string to a new file
    public void writeFile(String fileName, String content) throws IOException {
        Path path = new Path(fileName);
        // Write plain UTF-8 bytes (writeUTF would prepend a 2-byte length that
        // readFile would then print); try-with-resources closes the stream.
        try (FSDataOutputStream out = fs.create(path)) {
            out.write(content.getBytes(StandardCharsets.UTF_8));
        }
    }
    // Read a file and print its contents
    public void readFile(String fileName) throws IOException {
        Path path = new Path(fileName);
        if (fs.exists(path)) {
            FileStatus status = fs.getFileStatus(path);
            byte[] buffer = new byte[(int) status.getLen()];
            try (FSDataInputStream is = fs.open(path)) {
                is.readFully(0, buffer);
            }
            // Decode the bytes; buffer.toString() would only print the array reference
            System.out.println(new String(buffer, StandardCharsets.UTF_8));
        }
    }
    // Upload a local file to HDFS
    public void uploadFile(String fileName, String targetDir) throws IOException {
        Path src = new Path(fileName);
        Path dst = new Path(targetDir);
        fs.copyFromLocalFile(src, dst);
    }
    // Delete a file
    public void delFile(String fileName) throws IOException {
        Path path = new Path(fileName);
        fs.delete(path, true);
    }
    // List all files under a directory, recursively
    public void listAllFiles(String dirName) throws IOException {
        Path path = new Path(dirName);
        getFile(path, fs);
    }

    public void getFile(Path path, FileSystem fs) throws IOException {
        FileStatus[] fileStatus = fs.listStatus(path);
        for (FileStatus status : fileStatus) {
            if (status.isDirectory()) {
                // Recurse into subdirectories
                getFile(status.getPath(), fs);
            } else {
                System.out.println(status.getPath().toString());
            }
        }
    }
}
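For comparison, Hadoop's FileSystem can also do the recursive walk itself via listFiles(path, true), which returns a RemoteIterator over the files only (directories are skipped). Below is a minimal sketch of a hypothetical helper; if added to HdfsOperate, the existing org.apache.hadoop.fs.* import already covers RemoteIterator and LocatedFileStatus.

    // Sketch only: recursive listing via the built-in FileSystem.listFiles API.
    // listAllFilesFlat is a hypothetical helper, not part of the class above.
    public void listAllFilesFlat(String dirName) throws IOException {
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(dirName), true); // true = recursive
        while (it.hasNext()) {
            System.out.println(it.next().getPath().toString());
        }
    }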
HdfsTest.java
package com.shaoyan.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsTest {

    public static void main(String[] args) throws URISyntaxException, IOException {
        Configuration configuration = new Configuration();
        // This needs to be set explicitly, otherwise an error is thrown
        // (typically "No FileSystem for scheme: hdfs")
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI("hdfs://hadoop-master:9000");
        FileSystem fs = FileSystem.get(uri, configuration);
        HdfsOperate hdfsOperate = new HdfsOperate(fs);

        // start test...
        hdfsOperate.makeDir("/lucy/java_test1");
        hdfsOperate.writeFile("/lucy/java_test1/hello.txt", "hello, hdfs...");
        hdfsOperate.readFile("/lucy/java_test1/hello.txt");
        hdfsOperate.uploadFile("/home/alice/alice.txt", "/lucy/java_test1");
        hdfsOperate.listAllFiles("/lucy");
        hdfsOperate.closeFS();
    }
}
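If the local OS user does not match an HDFS user with write access to /lucy, the calls above can fail with permission errors. One common workaround is the FileSystem.get overload that takes a user name; a minimal sketch follows, assuming an HDFS account named lucy exists (note this overload also throws InterruptedException).

        // Sketch only: connect as a specific HDFS user ("lucy" is an assumed example account)
        Configuration configuration = new Configuration();
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop-master:9000"), configuration, "lucy");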