import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
/**
 * Small interactive HDFS client driven from stdin.
 *
 * Menu:
 *   1 - create an empty file under {@link #REMOTE_DIR}
 *   2 - print a file's contents, then optionally append "hahah" to it
 *
 * Connects to hdfs://localhost:9000.
 */
public class test {

    /** HDFS directory under which all files are created/opened. */
    private static final String REMOTE_DIR = "/usr/local/hadoop/";

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        // try-with-resources: FileSystem and the single Scanner are closed
        // even if an IOException propagates (original leaked both).
        try (FileSystem fs = FileSystem.get(conf);
             Scanner sc = new Scanner(System.in)) {
            System.out.print("请输入功能选项:1.新建文件2.打开文件");
            String choice = sc.nextLine();
            switch (choice) {
                case "1":
                    createFile(fs, sc);
                    break; // BUG FIX: original fell through into case "2"
                case "2":
                    openFile(fs, sc);
                    break;
                default:
                    // Unknown option: do nothing (original silently ignored it too).
                    break;
            }
        }
    }

    /**
     * Prompts for a file name and creates an empty file under REMOTE_DIR.
     *
     * @param fs the HDFS handle
     * @param sc scanner for user input (shared; do not close here)
     * @throws IOException if the file cannot be created
     */
    private static void createFile(FileSystem fs, Scanner sc) throws IOException {
        System.out.println("请输入文件名:");
        String fileName = sc.nextLine();
        Path remotePath = new Path(REMOTE_DIR + fileName);
        // Opening and immediately closing the output stream leaves an empty file.
        try (FSDataOutputStream out = fs.create(remotePath)) {
            // intentionally empty
        }
        System.out.println("创建成功");
    }

    /**
     * Prompts for a file name, prints the file's contents, then offers a
     * sub-menu: 1 appends the bytes "hahah" to the file, 2 exits.
     *
     * @param fs the HDFS handle
     * @param sc scanner for user input (shared; do not close here)
     * @throws IOException if the file cannot be read or appended to
     */
    private static void openFile(FileSystem fs, Scanner sc) throws IOException {
        System.out.println("请输入要打开的文件名");
        String fileName = sc.nextLine();
        Path filePath = new Path(REMOTE_DIR + fileName);
        // UTF-8 pinned explicitly; original used the platform default charset.
        try (FSDataInputStream in = fs.open(filePath);
             BufferedReader reader =
                     new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            System.out.println("文件中的内容如下:");
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        System.out.println("请输入要选择的功能:1.添加2.保存并退出");
        // BUG FIX: original created a Scanner sc2 but read from sc1 anyway.
        String choose = sc.nextLine();
        switch (choose) {
            case "1":
                // Append to the end of the existing file; stream is closed
                // even if write() throws (original only closed on success).
                try (FSDataOutputStream out = fs.append(filePath)) {
                    out.write("hahah".getBytes(StandardCharsets.UTF_8));
                }
                break; // BUG FIX: original fell through into case "2" (empty, but fragile)
            case "2":
            default:
                break;
        }
    }
}