
Connecting to a Hadoop 2.6.0 cluster with Maven on Windows 10 and running the WordCount example

After downloading the Hadoop package, set up the local environment first and configure the environment variables:

Extract hadoop-2.6.0.tar.gz.

Create the variable HADOOP_HOME with the value E:\spark\anzhuangsoft\hadoop-2.6.0

Append to Path: %HADOOP_HOME%\lib and %HADOOP_HOME%\bin

Verification (original screenshots omitted):
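A quick command-line check (a minimal sketch; the exact banner depends on your build): open a new Command Prompt window so the updated environment variables are picked up, then run:

    hadoop version

If HADOOP_HOME and Path are configured correctly, this prints the Hadoop version banner instead of "'hadoop' is not recognized as an internal or external command". Note that hadoop.cmd also requires JAVA_HOME to be set.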

 

 

Running the WordCount program in IDEA:

Create a Maven project. Directory structure: (screenshot omitted)

The code:

RunJob, the driver class:

package main.java;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class RunJob {

    // static {
    //     try {
    //         // Set the HADOOP_HOME directory
    //         System.setProperty("hadoop.home.dir", "E:\\spark\\anzhuangsoft\\hadoop-2.7.5");
    //         // Load the native library
    //         System.load("E:\\spark\\anzhuangsoft\\hadoop-2.7.5\\bin\\hadoop.dll");
    //     } catch (UnsatisfiedLinkError e) {
    //         System.err.println("Native code library failed to load.\n" + e);
    //         System.exit(1);
    //     }
    // }

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);

        Job job = Job.getInstance(configuration);
        job.setJarByClass(RunJob.class);
        job.setJobName("wordCount");
        job.setMapperClass(WordCount.WordCountMapper.class);
        job.setReducerClass(WordCount.WordCountReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Input directory; every file inside it is read
        FileInputFormat.addInputPath(job, new Path("input"));

        // Delete the output directory if it already exists, otherwise the job fails
        Path outPath = new Path("output");
        if (fs.exists(outPath)) {
            fs.delete(outPath, true);
        }
        FileOutputFormat.setOutputPath(job, outPath);

        boolean completion = job.waitForCompletion(true);
        if (completion) {
            System.out.println("Job completed");
        }
    }
}
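Note that both "input" and "output" are relative paths, resolved against the run configuration's working directory (the project root by default in IDEA). As a hypothetical example, the input directory could hold a single file input/words.txt containing:

    hello world
    hello hadoop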

 

WordCount (containing the Mapper and Reducer):

package main.java;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {

    public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Emit (word, 1) for every space-separated token in the line
            String[] words = value.toString().split(" ");
            for (String word : words) {
                context.write(new Text(word), new IntWritable(1));
            }
        }
    }

    public static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Sum the counts for each word
            int sum = 0;
            for (IntWritable intWritable : values) {
                sum += intWritable.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    // static {
    //     try {
    //         // Set the HADOOP_HOME directory
    //         System.setProperty("hadoop.home.dir", "E:\\spark\\anzhuangsoft\\hadoop-2.7.5");
    //         // Load the native library
    //         System.load("E:\\spark\\anzhuangsoft\\hadoop-2.7.5\\bin\\hadoop.dll");
    //     } catch (UnsatisfiedLinkError e) {
    //         System.err.println("Native code library failed to load.\n" + e);
    //         System.exit(1);
    //     }
    // }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // Run with the local job runner instead of submitting to a cluster
        conf.set("mapreduce.framework.name", "local");
        Job job = Job.getInstance(conf);
        job.setJarByClass(WordCount.class);
        // Set the Mapper and Reducer classes
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        // Set the map-side output key and value classes
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Set the reduce-side output key and value classes
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path("input\\words.txt"));
        FileOutputFormat.setOutputPath(job, new Path("output"));
        // Run the job
        boolean status = job.waitForCompletion(true);
        System.out.println(status);
    }
}
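One caveat if you run WordCount.main directly: unlike RunJob, it never clears the output directory, so a second run fails with a FileAlreadyExistsException ("Output directory ... already exists"). A minimal guard, reusing the conf and job already in main (plus an import of org.apache.hadoop.fs.FileSystem), would be:

    // Delete a leftover output directory before submitting the job
    FileSystem fs = FileSystem.get(conf);
    Path outPath = new Path("output");
    if (fs.exists(outPath)) {
        fs.delete(outPath, true);
    }
    FileOutputFormat.setOutputPath(job, outPath);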

pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.tzb</groupId>
    <artifactId>hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <!--<hadoop.version>2.7.5</hadoop.version>-->
        <hadoop.version>2.6.0</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!-- hadoop-client already pulls in hadoop-common, hadoop-hdfs and the
             MapReduce client modules transitively, so the explicit entries
             below are redundant, but they do no harm. -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!--<dependency>-->
        <!--<groupId>jdk.tools</groupId>-->
        <!--<artifactId>jdk.tools</artifactId>-->
        <!--<version>1.8</version>-->
        <!--<scope>system</scope>-->
        <!--<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>-->
        <!--</dependency>-->
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <!-- NOTE: this mainClass appears to be left over from another
                                 tutorial project; point it at your own driver class
                                 (e.g. main.java.RunJob) if you build a runnable jar. -->
                            <mainClass>cn.itcast.hadoop.mr.WordCountDriver</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <!--<version>3.0</version>-->
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

Execution result (screenshots omitted):

Commenting out the Reducer registration and running only the Mapper shows the output before any aggregation, as illustrated below (screenshots omitted):
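To make the difference concrete, assume the hypothetical input/words.txt from above; TextOutputFormat writes each key and value separated by a tab into output/part-r-00000. With the Reducer commented out, the job falls back to the default identity Reducer, so every occurrence is emitted separately, merely sorted by key:

    hadoop	1
    hello	1
    hello	1
    world	1

With WordCountReducer enabled, the counts are summed per word:

    hadoop	1
    hello	2
    world	1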

 

 

 

Errors encountered while running the code

Error 1:

F:\MyDevelopKit\1.software\jdk\1.8\jdk1.8.0_131\bin\java "-javaagent:H:\Program Files\JetBrains\IntelliJ IDEA 2017.2.5\lib\idea_rt.jar=52764:H:\Program Files\JetBrains\IntelliJ IDEA 2017.2.5\bin" -Dfile.encoding=GBK -classpath <JRE and Maven-repository jars omitted for brevity> main.java.RunJob
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Exception in thread "main" java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z
	at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)
	at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:557)
	at org.apache.hadoop.fs.FileUtil.canRead(FileUtil.java:977)
	at org.apache.hadoop.util.DiskChecker.checkAccessByFileMethods(DiskChecker.java:187)
	at org.apache.hadoop.util.DiskChecker.checkDirAccess(DiskChecker.java:174)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:108)
	at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.confChanged(LocalDirAllocator.java:285)
	at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.getLocalPathForWrite(LocalDirAllocator.java:344)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:150)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:131)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:115)
	at org.apache.hadoop.mapred.LocalDistributedCacheManager.setup(LocalDistributedCacheManager.java:131)
	at org.apache.hadoop.mapred.LocalJobRunner$Job.<init>(LocalJobRunner.java:163)
	at org.apache.hadoop.mapred.LocalJobRunner.submitJob(LocalJobRunner.java:731)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:536)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at main.java.RunJob.main(RunJob.java:49)

Process finished with exit code 1

After a lot of trial and error, I finally traced the cause of this error to the directory C:\Windows\SysWOW64:

The WordCount program only runs successfully when hadoop.dll is present in that directory.

That said, it is best to put both hadoop.dll and winutils.exe into both of the following directories:

C:\Windows\System32 and C:\Windows\SysWOW64
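For example, from an Administrator command prompt (copying into the Windows system directories requires elevated rights; the source paths assume the files are already in your Hadoop bin directory):

    copy %HADOOP_HOME%\bin\hadoop.dll C:\Windows\System32\
    copy %HADOOP_HOME%\bin\hadoop.dll C:\Windows\SysWOW64\
    copy %HADOOP_HOME%\bin\winutils.exe C:\Windows\System32\
    copy %HADOOP_HOME%\bin\winutils.exe C:\Windows\SysWOW64\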

If the error still occurs, try adding the following static block to RunJob, outside the main method, and run again; point the paths at your own Hadoop directory. Setting hadoop.home.dir and calling System.load explicitly loads hadoop.dll from your Hadoop bin directory, instead of relying on Windows finding the DLL via the PATH or the system directories.

static {
    try {
        // Set the HADOOP_HOME directory
        System.setProperty("hadoop.home.dir", "E:\\spark\\anzhuangsoft\\hadoop-2.7.5");
        // Load the native library
        System.load("E:\\spark\\anzhuangsoft\\hadoop-2.7.5\\bin\\hadoop.dll");
    } catch (UnsatisfiedLinkError e) {
        System.err.println("Native code library failed to load.\n" + e);
        System.exit(1);
    }
}
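As a side note, the log4j:WARN "No appenders could be found" lines at the top of both logs are unrelated to the failures. If they bother you, a minimal log4j.properties on the classpath (for example in src/main/resources; the pattern below is just a common choice) silences them:

    log4j.rootLogger=INFO, console
    log4j.appender.console=org.apache.log4j.ConsoleAppender
    log4j.appender.console.layout=org.apache.log4j.PatternLayout
    log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n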

Error 2:

F:\MyDevelopKit\1.software\jdk\1.8\jdk1.8.0_131\bin\java "-javaagent:H:\Program Files\JetBrains\IntelliJ IDEA 2017.2.5\lib\idea_rt.jar=53113:H:\Program Files\JetBrains\IntelliJ IDEA 2017.2.5\bin" -Dfile.encoding=GBK -classpath <JRE and Maven-repository jars omitted for brevity> main.java.RunJob
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Exception in thread "main" java.lang.NullPointerException
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
	at org.apache.hadoop.util.Shell.run(Shell.java:455)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
	at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:582)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:557)
	at org.apache.hadoop.fs.LocatedFileStatus.<init>(LocatedFileStatus.java:42)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1699)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1681)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:303)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:264)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:385)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:597)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:614)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:492)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at main.java.RunJob.main(RunJob.java:49)

Process finished with exit code 1

This error is caused by winutils.exe missing from Hadoop's bin directory. Copy it in.
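A sketch of the fix, assuming you have downloaded a winutils.exe build matching your Hadoop version (see the download link at the end of this post):

    copy winutils.exe %HADOOP_HOME%\bin\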

 

Please credit this source when reposting. Thanks!

References:

https://www.cnblogs.com/kevin-lee123/p/11020570.html

https://blog.csdn.net/a2099948768/article/details/79577246

https://www.cnblogs.com/jhxxb/p/10723369.html

Downloads for winutils.exe and hadoop.dll:

https://github.com/steveloughran/winutils
