MapReduce development in IDEA: unable to read input from and write output to HDFS?
Source: 4-7 Word frequency count: implementing a custom Driver class

begin_0002
2019-07-31
[DEBUG] method:org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.<init>(JniBasedUnixGroupsMappingWithFallback.java:45)
Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
[DEBUG] method:org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:372)
Failed to detect a valid hadoop home directory
java.io.IOException: HADOOP_HOME or hadoop.home.dir are not set.
at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:354)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:379)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
at org.apache.hadoop.security.Groups.parseStaticMapping(Groups.java:168)
at org.apache.hadoop.security.Groups.<init>(Groups.java:132)
at org.apache.hadoop.security.Groups.<init>(Groups.java:100)
at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:435)
at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:341)
at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:308)
at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:895)
at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:861)
at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:728)
at org.apache.hadoop.mapreduce.task.JobContextImpl.<init>(JobContextImpl.java:72)
at org.apache.hadoop.mapreduce.Job.<init>(Job.java:145)
at org.apache.hadoop.mapreduce.Job.getInstance(Job.java:188)
at com.mstf.bigdata.hadoop.WordsCountAPP.main(WordsCountAPP.java:32)
[ERROR] method:org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:425)
Failed to locate the winutils binary in the hadoop binary path
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
at org.apache.hadoop.util.Shell.getQualifiedBinPath(Shell.java:407)
at org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:422)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:415)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
at org.apache.hadoop.security.Groups.parseStaticMapping(Groups.java:168)
at org.apache.hadoop.security.Groups.<init>(Groups.java:132)
at org.apache.hadoop.security.Groups.<init>(Groups.java:100)
at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:435)
at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:341)
at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:308)
at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:895)
at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:861)
at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:728)
at org.apache.hadoop.mapreduce.task.JobContextImpl.<init>(JobContextImpl.java:72)
at org.apache.hadoop.mapreduce.Job.<init>(Job.java:145)
at org.apache.hadoop.mapreduce.Job.getInstance(Job.java:188)
at com.mstf.bigdata.hadoop.WordsCountAPP.main(WordsCountAPP.java:32)
[DEBUG] method:org.apache.hadoop.security.Groups.<init>(Groups.java:150)
Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000
[DEBUG] method:org.apache.hadoop.security.UserGroupInformation$HadoopLoginModule.login(UserGroupInformation.java:244)
hadoop login
[DEBUG] method:org.apache.hadoop.security.UserGroupInformation$HadoopLoginModule.commit(UserGroupInformation.java:179)
hadoop login commit
[DEBUG] method:org.apache.hadoop.security.UserGroupInformation$HadoopLoginModule.commit(UserGroupInformation.java:215)
Using user: "root" with name root
[DEBUG] method:org.apache.hadoop.security.UserGroupInformation$HadoopLoginModule.commit(UserGroupInformation.java:225)
User entry: "root"
[DEBUG] method:org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:940)
UGI loginUser:root (auth:SIMPLE)
[DEBUG] method:org.apache.htrace.core.Tracer$Builder.loadSamplers(Tracer.java:106)
sampler.classes = ; loaded no samplers
[DEBUG] method:org.apache.htrace.core.Tracer$Builder.loadSpanReceivers(Tracer.java:128)
span.receiver.classes = ; loaded no span receivers
[DEBUG] method:org.apache.hadoop.hdfs.DFSClient$Conf.<init>(DFSClient.java:479)
dfs.client.use.legacy.blockreader.local = false
[DEBUG] method:org.apache.hadoop.hdfs.DFSClient$Conf.<init>(DFSClient.java:482)
dfs.client.read.shortcircuit = false
[DEBUG] method:org.apache.hadoop.hdfs.DFSClient$Conf.<init>(DFSClient.java:485)
dfs.client.domain.socket.data.traffic = false
[DEBUG] method:org.apache.hadoop.hdfs.DFSClient$Conf.<init>(DFSClient.java:488)
dfs.domain.socket.path =
[DEBUG] method:org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:726)
Sets dfs.client.block.write.replace-datanode-on-failure.min-replication to 0
[DEBUG] method:org.apache.hadoop.io.retry.RetryUtils.getDefaultRetryPolicy(Retr
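For context, the stack traces above fail inside Job.getInstance at WordsCountAPP.java:32, i.e. in the word-count Driver built in this chapter. Below is a minimal sketch of such a Driver that reads its input from and writes its output to HDFS; the class name, the NameNode address hdfs://hadoop000:8020, and the /wordcount/input and /wordcount/output paths are assumptions for illustration, not taken from the course code.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountDriverSketch {

    // Splits each input line into tokens and emits (word, 1)
    public static class TokenMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }
    }

    // Sums the counts for each word
    public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Point the client at the HDFS NameNode; host and port are placeholders
        conf.set("fs.defaultFS", "hdfs://hadoop000:8020");

        Job job = Job.getInstance(conf, "wordcount");
        job.setJarByClass(WordCountDriverSketch.class);
        job.setMapperClass(TokenMapper.class);
        job.setReducerClass(SumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Input and output paths resolve against HDFS, not the local file system
        FileInputFormat.setInputPaths(job, new Path("/wordcount/input"));
        FileOutputFormat.setOutputPath(job, new Path("/wordcount/output"));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With fs.defaultFS pointing at the NameNode, the paths are read from and written to HDFS; the Windows-side errors shown in the log happen before any of this wiring matters and are addressed in the answer below.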
2 Answers
Refer to this post and first fix the java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries problem.
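On Windows, the Hadoop client initializes org.apache.hadoop.util.Shell at class-load time and looks for winutils.exe under %HADOOP_HOME%\bin, which is why the log shows both "HADOOP_HOME or hadoop.home.dir are not set" and the winutils error. A common workaround (a sketch, not necessarily the exact fix in the referenced post) is to download a winutils.exe build matching the Hadoop version, place it in a local directory such as C:\hadoop\bin (this path is an assumption), and either set the HADOOP_HOME environment variable to that directory or set the hadoop.home.dir system property before any Hadoop class is touched:

public class WordsCountLauncher {
    public static void main(String[] args) throws Exception {
        // Assumed local directory that contains bin\winutils.exe for the Hadoop version in use.
        // Must run before Shell/StringUtils are class-loaded, i.e. before Job.getInstance is called.
        System.setProperty("hadoop.home.dir", "C:\\hadoop");

        // ... then build and submit the job as before, e.g.:
        // Job job = Job.getInstance(new Configuration(), "wordcount");
    }
}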
begin_0002
Original poster
2019-08-02
Solved.