diff --git a/bin/conf/server.properties b/bin/conf/server.properties
index 2fd69d0cfa45b50d7aef9d3671e34c833ea9e81c..0d009c147748e9100cafebc053c844e2e7aa65e4 100644
--- a/bin/conf/server.properties
+++ b/bin/conf/server.properties
@@ -6,3 +6,6 @@ hdfsUri = hdfs://10.1.65.19:9000/
#hdfs default user
hdfsUser = admin
+
+#hdfs-site.xml path
+hdfsSiteFilePath = /data/unidata/hadoop/etc/hadoop/hdfs-site.xml
diff --git a/bin/hdfs-ftp-server-0.2.0.jar b/bin/hdfs-ftp-server-0.2.0.jar
index 2087741c138c7a62ad015f3cf0073024074ffece..be702a70a5297be7eed81cb46ad750d391a93a37 100644
Binary files a/bin/hdfs-ftp-server-0.2.0.jar and b/bin/hdfs-ftp-server-0.2.0.jar differ
diff --git a/conf/server.properties b/conf/server.properties
index 0aa5f24a96076ade9bac586b66c9cfc008e11b87..d87083439f8ed43716a65a56c74b8cc8481cc23e 100644
--- a/conf/server.properties
+++ b/conf/server.properties
@@ -4,3 +4,6 @@ port = 2222
# hdfs uri
hdfsUri = hdfs://10.1.65.19:9000/
hdfsUser = xtjc
+
+#hdfs-site.xml path
+hdfsSiteFilePath = /data/unidata/hadoop/etc/hadoop/hdfs-site.xml
diff --git a/pom.xml b/pom.xml
index 37c935109837342db956886c3115bbb8bef2cac0..ba5bd039f493e29622975c95ab0ec71a442733f3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,7 +37,24 @@
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.8.0</version>
+				<configuration>
+					<source>1.8</source>
+					<target>1.8</target>
+				</configuration>
+			</plugin>
+		</plugins>
+		<resources>
+			<resource>
+				<directory>src</directory>
+				<includes>
+					<include>**/*.properties</include>
+				</includes>
+			</resource>
diff --git a/readme.txt b/readme.txt
index f09ca9292a1cbe04357efef11c5e8fbfe6eebf21..b3010053caecb730cb51c1ae7297ab03dcf08427 100644
--- a/readme.txt
+++ b/readme.txt
@@ -17,6 +17,7 @@ HdfsFtpServer
*当前支持的HDFS版本为2.7.2.其他版本请修改pom.xml相关版本并重新打包.
*目前不支持HDFS的权限管理.请在Hadoop的配置中关闭HDFS的权限管理.
*默认的账号密码是admin/admin 可以在user.properties中修改
+*需要在server.properties中修改hdfsSiteFilePath为hdfs-site.xml的地址
*目前客户端使用测试了FileZilla.
diff --git a/src/com/gen/HdfsFtpServer/HdfsFtpServer.java b/src/com/gen/HdfsFtpServer/HdfsFtpServer.java
index 1976bb6b31267ba475d48a849b5d7d120698aa0a..656c5c39bcfbf8baaae80ad7a25537ace4731514 100644
--- a/src/com/gen/HdfsFtpServer/HdfsFtpServer.java
+++ b/src/com/gen/HdfsFtpServer/HdfsFtpServer.java
@@ -68,6 +68,7 @@ public class HdfsFtpServer
int port = Integer.parseInt(properties.getProperty("port"));
HdfsFtpFileSystemView.hdfsUri = properties.getProperty("hdfsUri");
HdfsFtpFileSystemView.hdfsUser = properties.getProperty("hdfsUser");
+ HdfsFtpFileSystemView.hdfsSiteFilePath = properties.getProperty("hdfsSiteFilePath");
ListenerFactory listenerFactory = new ListenerFactory();
listenerFactory.setPort(port);
diff --git a/src/com/gen/HdfsFtpServer/filesystem/HdfsFtpFileSystemView.java b/src/com/gen/HdfsFtpServer/filesystem/HdfsFtpFileSystemView.java
index 249b563376ef7cdf05f0b763ff53052d54465cf5..0331687edf39aacd84539f70b7627a708e94db90 100644
--- a/src/com/gen/HdfsFtpServer/filesystem/HdfsFtpFileSystemView.java
+++ b/src/com/gen/HdfsFtpServer/filesystem/HdfsFtpFileSystemView.java
@@ -9,12 +9,14 @@ import org.apache.ftpserver.ftplet.FtpException;
import org.apache.ftpserver.ftplet.FtpFile;
import org.apache.ftpserver.ftplet.User;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
public class HdfsFtpFileSystemView implements FileSystemView
{
public static String hdfsUri = "";
public static String hdfsUser = "";
+ public static String hdfsSiteFilePath = "";
//current user work directory
public String current = "/";
@@ -30,6 +32,7 @@ public class HdfsFtpFileSystemView implements FileSystemView
DistributedFileSystem temp = new DistributedFileSystem();
Configuration conf = new Configuration();
conf.set("HADOOP_USER_NAME", hdfsUser);
+ conf.addResource(new Path(hdfsSiteFilePath));
try
{
temp.initialize(new URI(hdfsUri), conf);