intellij windows7 下運(yùn)行hadoop

前提:已安裝jdk intellij, hadoop安裝成功,可正常讀寫文件

一、新建項(xiàng)目配置

新建java項(xiàng)目

二、導(dǎo)入依賴的jar

Paste_Image.png
Paste_Image.png
Paste_Image.png
Paste_Image.png

最終導(dǎo)入如下


Paste_Image.png

三、創(chuàng)建java實(shí)例

/**
 * Created by Administrator on 2017/2/9.
 */

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.StringTokenizer;

public class WordCount {

    public static class TokenizerMapper
            extends Mapper<Object, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(Object key, Text value, Context context
        ) throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString());
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                context.write(word, one);
            }
        }
    }

    public static class IntSumReducer
            extends Reducer<Text, IntWritable, Text, IntWritable> {
        private IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values,
                           Context context
        ) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

    private static void deleteDir(Configuration conf, String dirPath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path targetPath = new Path(dirPath);
        if (fs.exists(targetPath)) {
            boolean delResult = fs.delete(targetPath, true);
            if (delResult) {
                System.out.println(targetPath + " has been deleted sucessfullly.");
            } else {
                System.out.println(targetPath + " deletion failed.");
            }
        }

    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
      /*  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.err.println("Usage: wordcount <in> [<in>...] <out>");
            System.exit(2);
        }
        //先刪除output目錄
        deleteDir(conf, otherArgs[otherArgs.length - 1]);*/
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

本程序統(tǒng)計(jì)第一個(gè)參數(shù)(args[0])對(duì)應(yīng)目錄下所有文件中每個(gè)單詞出現(xiàn)的次數(shù)。
輸出結(jié)果寫入第二個(gè)參數(shù)(args[1])對(duì)應(yīng)的目錄,該目錄由框架自動(dòng)創(chuàng)建;運(yùn)行前要保證該目錄不存在,否則作業(yè)會(huì)報(bào)錯(cuò)。

四、運(yùn)行配置

Paste_Image.png

Paste_Image.png

hdfs://localhost:9000/user/wcinput hdfs://localhost:9000/user/wcoutput

五、 運(yùn)行Log

D:\Android\AS\jre\bin\java -Didea.launcher.port=7536 "-Didea.launcher.bin.path=C:\Program Files (x86)\JetBrains\IntelliJ IDEA Community Edition 2016.3.4\bin" -Dfile.encoding=UTF-8 -classpath "D:\Android\AS\jre\jre\lib\charsets.jar;D:\Android\AS\jre\jre\lib\ext\access-bridge-64.jar;D:\Android\AS\jre\jre\lib\ext\cldrdata.jar;D:\Android\AS\jre\jre\lib\ext\dnsns.jar;D:\Android\AS\jre\jre\lib\ext\jaccess.jar;D:\Android\AS\jre\jre\lib\ext\localedata.jar;D:\Android\AS\jre\jre\lib\ext\nashorn.jar;D:\Android\AS\jre\jre\lib\ext\sunec.jar;D:\Android\AS\jre\jre\lib\ext\sunjce_provider.jar;D:\Android\AS\jre\jre\lib\ext\sunmscapi.jar;D:\Android\AS\jre\jre\lib\ext\sunpkcs11.jar;D:\Android\AS\jre\jre\lib\ext\zipfs.jar;D:\Android\AS\jre\jre\lib\jce.jar;D:\Android\AS\jre\jre\lib\jsse.jar;D:\Android\AS\jre\jre\lib\management-agent.jar;D:\Android\AS\jre\jre\lib\resources.jar;D:\Android\AS\jre\jre\lib\rt.jar;C:\hadoop\project\Deme2\out\production\Deme2;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\xz-1.0.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\asm-3.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\avro-1.7.4.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\junit-4.11.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jsch-0.1.42.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jsp-api-2.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\xmlenc-0.52.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\guava-11.0.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jets3t-0.9.0.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jettison-1.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jetty-6.1.26.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jsr305-1.3.9.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\log4j-1.2.17.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\paranamer-2.3.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\activation-1.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-el-1.0.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\
lib\commons-io-2.4.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\httpcore-4.2.5.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jaxb-api-2.2.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\stax-api-1.0-2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-cli-1.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-net-3.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jersey-core-1.9.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jersey-json-1.9.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\servlet-api-2.5.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\slf4j-api-1.7.5.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\zookeeper-3.4.6.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-lang-2.6.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\httpclient-4.2.5.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-codec-1.4.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\hadoop-auth-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\hamcrest-core-1.3.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jackson-xc-1.9.13.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jaxb-impl-2.2.3-1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jersey-server-1.9.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jetty-util-6.1.26.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\mockito-all-1.8.5.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\netty-3.6.2.Final.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\api-util-1.0.0-M20.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-math3-3.1.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\java-xmlbuilder-0.4.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\protobuf-java-2.5.0.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\slf4j-log4j12-1.7.5.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\snappy-java-1.0.4.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-digester-1.8.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jackson-j
axrs-1.9.13.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-logging-1.1.3.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jasper-runtime-5.5.23.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\api-asn1-api-1.0.0-M20.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-compress-1.4.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-httpclient-3.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jasper-compiler-5.5.23.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\apacheds-i18n-2.0.0-M15.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-beanutils-1.7.0.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jackson-core-asl-1.9.13.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\hadoop-annotations-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-collections-3.2.1.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-configuration-1.6.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\jackson-mapper-asl-1.9.13.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\commons-beanutils-core-1.8.0.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\lib\apacheds-kerberos-codec-2.0.0-M15.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\hadoop-nfs-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\hadoop-common-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\common\hadoop-common-2.5.2-tests.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\hdfs\hadoop-hdfs-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\hdfs\hadoop-hdfs-nfs-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\hdfs\hadoop-hdfs-2.5.2-tests.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\httpfs;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-examples-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-app-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-core-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-cl
ient-common-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-shuffle-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-plugins-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.5.2-tests.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-api-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-client-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-common-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-server-tests-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-server-common-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-server-web-proxy-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-server-nodemanager-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-server-resourcemanager-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-applications-distributedshell-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-server-applicationhistoryservice-2.5.2.jar;C:\hadoop\hadoop-2.5.2\share\hadoop\yarn\hadoop-yarn-applications-unmanaged-am-launcher-2.5.2.jar;C:\Program Files (x86)\JetBrains\IntelliJ IDEA Community Edition 2016.3.4\lib\idea_rt.jar" com.intellij.rt.execution.application.AppMain WordCount hdfs://localhost:9000/user/wcinput hdfs://localhost:9000/user/wcoutput
2017-02-09 11:32:43,647 INFO  [main] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1019)) - session.id is deprecated. Instead, use dfs.metrics.session-id
2017-02-09 11:32:43,655 INFO  [main] jvm.JvmMetrics (JvmMetrics.java:init(76)) - Initializing JVM Metrics with processName=JobTracker, sessionId=
2017-02-09 11:32:44,139 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(150)) - Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
2017-02-09 11:32:44,147 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(259)) - No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
2017-02-09 11:32:44,317 INFO  [main] input.FileInputFormat (FileInputFormat.java:listStatus(281)) - Total input paths to process : 1
2017-02-09 11:32:44,429 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:submitJobInternal(396)) - number of splits:1
2017-02-09 11:32:44,646 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:printTokens(479)) - Submitting tokens for job: job_local1873026139_0001
2017-02-09 11:32:44,722 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-Administrator/mapred/staging/Administrator1873026139/.staging/job_local1873026139_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval;  Ignoring.
2017-02-09 11:32:44,727 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-Administrator/mapred/staging/Administrator1873026139/.staging/job_local1873026139_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts;  Ignoring.
2017-02-09 11:32:44,953 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-Administrator/mapred/local/localRunner/Administrator/job_local1873026139_0001/job_local1873026139_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval;  Ignoring.
2017-02-09 11:32:44,961 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-Administrator/mapred/local/localRunner/Administrator/job_local1873026139_0001/job_local1873026139_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts;  Ignoring.
2017-02-09 11:32:44,971 INFO  [main] mapreduce.Job (Job.java:submit(1289)) - The url to track the job: http://localhost:8080/
2017-02-09 11:32:44,973 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1334)) - Running job: job_local1873026139_0001
2017-02-09 11:32:45,002 INFO  [Thread-3] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(471)) - OutputCommitter set in config null
2017-02-09 11:32:45,023 INFO  [Thread-3] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(489)) - OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
2017-02-09 11:32:45,253 INFO  [Thread-3] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(448)) - Waiting for map tasks
2017-02-09 11:32:45,260 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(224)) - Starting task: attempt_local1873026139_0001_m_000000_0
2017-02-09 11:32:45,352 INFO  [LocalJobRunner Map Task Executor #0] util.ProcfsBasedProcessTree (ProcfsBasedProcessTree.java:isAvailable(181)) - ProcfsBasedProcessTree currently is supported only on Linux.
2017-02-09 11:32:45,431 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : org.apache.hadoop.yarn.util.WindowsBasedProcessTree@2597b289
2017-02-09 11:32:45,443 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:runNewMapper(733)) - Processing split: hdfs://localhost:9000/user/wcinput/file1.txt:0+4
2017-02-09 11:32:45,477 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:createSortingCollector(388)) - Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
2017-02-09 11:32:45,567 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:setEquator(1182)) - (EQUATOR) 0 kvi 26214396(104857584)
2017-02-09 11:32:45,571 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(975)) - mapreduce.task.io.sort.mb: 100
2017-02-09 11:32:45,571 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(976)) - soft limit at 83886080
2017-02-09 11:32:45,572 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(977)) - bufstart = 0; bufvoid = 104857600
2017-02-09 11:32:45,572 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(978)) - kvstart = 26214396; length = 6553600
2017-02-09 11:32:45,970 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 
2017-02-09 11:32:45,977 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1437)) - Starting flush of map output
2017-02-09 11:32:45,977 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1455)) - Spilling map output
2017-02-09 11:32:45,977 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1456)) - bufstart = 0; bufend = 9; bufvoid = 104857600
2017-02-09 11:32:45,977 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1458)) - kvstart = 26214396(104857584); kvend = 26214396(104857584); length = 1/6553600
2017-02-09 11:32:45,989 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1355)) - Job job_local1873026139_0001 running in uber mode : false
2017-02-09 11:32:45,993 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1362)) -  map 0% reduce 0%
2017-02-09 11:32:46,040 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:sortAndSpill(1641)) - Finished spill 0
2017-02-09 11:32:46,212 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:done(1001)) - Task:attempt_local1873026139_0001_m_000000_0 is done. And is in the process of committing
2017-02-09 11:32:46,329 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - map
2017-02-09 11:32:46,330 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local1873026139_0001_m_000000_0' done.
2017-02-09 11:32:46,330 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(249)) - Finishing task: attempt_local1873026139_0001_m_000000_0
2017-02-09 11:32:46,332 INFO  [Thread-3] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(456)) - map task executor complete.
2017-02-09 11:32:46,336 INFO  [Thread-3] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(448)) - Waiting for reduce tasks
2017-02-09 11:32:46,367 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:run(302)) - Starting task: attempt_local1873026139_0001_r_000000_0
2017-02-09 11:32:46,396 INFO  [pool-6-thread-1] util.ProcfsBasedProcessTree (ProcfsBasedProcessTree.java:isAvailable(181)) - ProcfsBasedProcessTree currently is supported only on Linux.
2017-02-09 11:32:46,590 INFO  [pool-6-thread-1] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : org.apache.hadoop.yarn.util.WindowsBasedProcessTree@457f9561
2017-02-09 11:32:46,598 INFO  [pool-6-thread-1] mapred.ReduceTask (ReduceTask.java:run(362)) - Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@18c457f7
2017-02-09 11:32:46,628 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:<init>(193)) - MergerManager: memoryLimit=835295616, maxSingleShuffleLimit=208823904, mergeThreshold=551295104, ioSortFactor=10, memToMemMergeOutputsThreshold=10
2017-02-09 11:32:46,645 INFO  [EventFetcher for fetching Map Completion Events] reduce.EventFetcher (EventFetcher.java:run(61)) - attempt_local1873026139_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
2017-02-09 11:32:46,769 INFO  [localfetcher#1] reduce.LocalFetcher (LocalFetcher.java:copyMapOutput(140)) - localfetcher#1 about to shuffle output of map attempt_local1873026139_0001_m_000000_0 decomp: 13 len: 17 to MEMORY
2017-02-09 11:32:46,783 INFO  [localfetcher#1] reduce.InMemoryMapOutput (InMemoryMapOutput.java:shuffle(100)) - Read 13 bytes from map-output for attempt_local1873026139_0001_m_000000_0
2017-02-09 11:32:46,794 INFO  [localfetcher#1] reduce.MergeManagerImpl (MergeManagerImpl.java:closeInMemoryFile(307)) - closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13
2017-02-09 11:32:46,797 INFO  [EventFetcher for fetching Map Completion Events] reduce.EventFetcher (EventFetcher.java:run(76)) - EventFetcher is interrupted.. Returning
2017-02-09 11:32:46,805 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 1 / 1 copied.
2017-02-09 11:32:46,805 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(667)) - finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
2017-02-09 11:32:46,832 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(591)) - Merging 1 sorted segments
2017-02-09 11:32:46,833 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(690)) - Down to the last merge-pass, with 1 segments left of total size: 6 bytes
2017-02-09 11:32:46,841 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(742)) - Merged 1 segments, 13 bytes to disk to satisfy reduce memory limit
2017-02-09 11:32:46,843 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(772)) - Merging 1 files, 17 bytes from disk
2017-02-09 11:32:46,845 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(787)) - Merging 0 segments, 0 bytes from memory into reduce
2017-02-09 11:32:46,845 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(591)) - Merging 1 sorted segments
2017-02-09 11:32:46,847 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(690)) - Down to the last merge-pass, with 1 segments left of total size: 6 bytes
2017-02-09 11:32:46,849 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 1 / 1 copied.
2017-02-09 11:32:46,925 INFO  [pool-6-thread-1] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1019)) - mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
2017-02-09 11:32:47,080 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1362)) -  map 100% reduce 0%
2017-02-09 11:32:47,770 INFO  [pool-6-thread-1] mapred.Task (Task.java:done(1001)) - Task:attempt_local1873026139_0001_r_000000_0 is done. And is in the process of committing
2017-02-09 11:32:47,775 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 1 / 1 copied.
2017-02-09 11:32:47,775 INFO  [pool-6-thread-1] mapred.Task (Task.java:commit(1162)) - Task attempt_local1873026139_0001_r_000000_0 is allowed to commit now
2017-02-09 11:32:47,822 INFO  [pool-6-thread-1] output.FileOutputCommitter (FileOutputCommitter.java:commitTask(439)) - Saved output of task 'attempt_local1873026139_0001_r_000000_0' to hdfs://localhost:9000/user/wcoutput/_temporary/0/task_local1873026139_0001_r_000000
2017-02-09 11:32:47,824 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - reduce > reduce
2017-02-09 11:32:47,824 INFO  [pool-6-thread-1] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local1873026139_0001_r_000000_0' done.
2017-02-09 11:32:47,824 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:run(325)) - Finishing task: attempt_local1873026139_0001_r_000000_0
2017-02-09 11:32:47,825 INFO  [Thread-3] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(456)) - reduce task executor complete.
2017-02-09 11:32:48,141 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1362)) -  map 100% reduce 100%
2017-02-09 11:32:48,141 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1373)) - Job job_local1873026139_0001 completed successfully
2017-02-09 11:32:48,195 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1380)) - Counters: 38
    File System Counters
        FILE: Number of bytes read=404
        FILE: Number of bytes written=483189
        FILE: Number of read operations=0
        FILE: Number of large read operations=0
        FILE: Number of write operations=0
        HDFS: Number of bytes read=8
        HDFS: Number of bytes written=7
        HDFS: Number of read operations=15
        HDFS: Number of large read operations=0
        HDFS: Number of write operations=4
    Map-Reduce Framework
        Map input records=1
        Map output records=1
        Map output bytes=9
        Map output materialized bytes=17
        Input split bytes=109
        Combine input records=1
        Combine output records=1
        Reduce input groups=1
        Reduce shuffle bytes=17
        Reduce input records=1
        Reduce output records=1
        Spilled Records=2
        Shuffled Maps =1
        Failed Shuffles=0
        Merged Map outputs=1
        GC time elapsed (ms)=171
        CPU time spent (ms)=0
        Physical memory (bytes) snapshot=0
        Virtual memory (bytes) snapshot=0
        Total committed heap usage (bytes)=482344960
    Shuffle Errors
        BAD_ID=0
        CONNECTION=0
        IO_ERROR=0
        WRONG_LENGTH=0
        WRONG_MAP=0
        WRONG_REDUCE=0
    File Input Format Counters 
        Bytes Read=4
    File Output Format Counters 
        Bytes Written=7

Process finished with exit code 0

六、查看結(jié)果

Paste_Image.png

hadoop fs -rm -r hdfs://localhost:9000/user/wcoutput 刪除輸出目錄
參考
HOW-TO: COMPILE AND DEBUG HADOOP APPLICATIONS WITH INTELLIJ IDEA IN WINDOWS OS (64BIT)

eclipse/intellij idea 遠(yuǎn)程調(diào)試hadoop 2.6.0

最后編輯于
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請(qǐng)聯(lián)系作者
  • 序言:七十年代末饭庞,一起剝皮案震驚了整個(gè)濱河市,隨后出現(xiàn)的幾起案子熬荆,更是在濱河造成了極大的恐慌舟山,老刑警劉巖,帶你破解...
    沈念sama閱讀 216,651評(píng)論 6 501
  • 序言:濱河連續(xù)發(fā)生了三起死亡事件,死亡現(xiàn)場(chǎng)離奇詭異累盗,居然都是意外死亡寒矿,警方通過查閱死者的電腦和手機(jī),發(fā)現(xiàn)死者居然都...
    沈念sama閱讀 92,468評(píng)論 3 392
  • 文/潘曉璐 我一進(jìn)店門若债,熙熙樓的掌柜王于貴愁眉苦臉地迎上來符相,“玉大人,你說我怎么就攤上這事蠢琳“≈眨” “怎么了?”我有些...
    開封第一講書人閱讀 162,931評(píng)論 0 353
  • 文/不壞的土叔 我叫張陵傲须,是天一觀的道長(zhǎng)蓝牲。 經(jīng)常有香客問我,道長(zhǎng)躏碳,這世上最難降的妖魔是什么搞旭? 我笑而不...
    開封第一講書人閱讀 58,218評(píng)論 1 292
  • 正文 為了忘掉前任,我火速辦了婚禮菇绵,結(jié)果婚禮上肄渗,老公的妹妹穿的比我還像新娘。我一直安慰自己咬最,他們只是感情好翎嫡,可當(dāng)我...
    茶點(diǎn)故事閱讀 67,234評(píng)論 6 388
  • 文/花漫 我一把揭開白布。 她就那樣靜靜地躺著永乌,像睡著了一般惑申。 火紅的嫁衣襯著肌膚如雪。 梳的紋絲不亂的頭發(fā)上翅雏,一...
    開封第一講書人閱讀 51,198評(píng)論 1 299
  • 那天圈驼,我揣著相機(jī)與錄音,去河邊找鬼望几。 笑死绩脆,一個(gè)胖子當(dāng)著我的面吹牛,可吹牛的內(nèi)容都是我干的橄抹。 我是一名探鬼主播靴迫,決...
    沈念sama閱讀 40,084評(píng)論 3 418
  • 文/蒼蘭香墨 我猛地睜開眼,長(zhǎng)吁一口氣:“原來是場(chǎng)噩夢(mèng)啊……” “哼楼誓!你這毒婦竟也來了玉锌?” 一聲冷哼從身側(cè)響起,我...
    開封第一講書人閱讀 38,926評(píng)論 0 274
  • 序言:老撾萬榮一對(duì)情侶失蹤疟羹,失蹤者是張志新(化名)和其女友劉穎主守,沒想到半個(gè)月后禀倔,有當(dāng)?shù)厝嗽跇淞掷锇l(fā)現(xiàn)了一具尸體,經(jīng)...
    沈念sama閱讀 45,341評(píng)論 1 311
  • 正文 獨(dú)居荒郊野嶺守林人離奇死亡丸逸,尸身上長(zhǎng)有42處帶血的膿包…… 初始之章·張勛 以下內(nèi)容為張勛視角 年9月15日...
    茶點(diǎn)故事閱讀 37,563評(píng)論 2 333
  • 正文 我和宋清朗相戀三年蹋艺,在試婚紗的時(shí)候發(fā)現(xiàn)自己被綠了剃袍。 大學(xué)時(shí)的朋友給我發(fā)了我未婚夫和他白月光在一起吃飯的照片黄刚。...
    茶點(diǎn)故事閱讀 39,731評(píng)論 1 348
  • 序言:一個(gè)原本活蹦亂跳的男人離奇死亡,死狀恐怖民效,靈堂內(nèi)的尸體忽然破棺而出憔维,到底是詐尸還是另有隱情,我是刑警寧澤畏邢,帶...
    沈念sama閱讀 35,430評(píng)論 5 343
  • 正文 年R本政府宣布业扒,位于F島的核電站,受9級(jí)特大地震影響舒萎,放射性物質(zhì)發(fā)生泄漏程储。R本人自食惡果不足惜,卻給世界環(huán)境...
    茶點(diǎn)故事閱讀 41,036評(píng)論 3 326
  • 文/蒙蒙 一臂寝、第九天 我趴在偏房一處隱蔽的房頂上張望章鲤。 院中可真熱鬧,春花似錦咆贬、人聲如沸败徊。這莊子的主人今日做“春日...
    開封第一講書人閱讀 31,676評(píng)論 0 22
  • 文/蒼蘭香墨 我抬頭看了看天上的太陽(yáng)皱蹦。三九已至,卻和暖如春眷蜈,著一層夾襖步出監(jiān)牢的瞬間沪哺,已是汗流浹背。 一陣腳步聲響...
    開封第一講書人閱讀 32,829評(píng)論 1 269
  • 我被黑心中介騙來泰國(guó)打工酌儒, 沒想到剛下飛機(jī)就差點(diǎn)兒被人妖公主榨干…… 1. 我叫王不留辜妓,地道東北人。 一個(gè)月前我還...
    沈念sama閱讀 47,743評(píng)論 2 368
  • 正文 我出身青樓今豆,卻偏偏與公主長(zhǎng)得像嫌拣,于是被迫代替她去往敵國(guó)和親。 傳聞我的和親對(duì)象是個(gè)殘疾皇子呆躲,可洞房花燭夜當(dāng)晚...
    茶點(diǎn)故事閱讀 44,629評(píng)論 2 354

推薦閱讀更多精彩內(nèi)容

  • 首先异逐,我們?cè)谑褂们跋瓤纯碒DFS是什麼?這將有助于我們是以后的運(yùn)維使用和故障排除思路的獲得插掂。 HDFS采用mast...
    W_Bousquet閱讀 4,194評(píng)論 0 2
  • 當(dāng)數(shù)據(jù)量增大到超出了單個(gè)物理計(jì)算機(jī)存儲(chǔ)容量時(shí)灰瞻,有必要把它分開存儲(chǔ)在多個(gè)不同的計(jì)算機(jī)中腥例。那些管理存儲(chǔ)在多個(gè)網(wǎng)絡(luò)互連的...
    單行線的旋律閱讀 1,920評(píng)論 0 7
  • Spring Cloud為開發(fā)人員提供了快速構(gòu)建分布式系統(tǒng)中一些常見模式的工具(例如配置管理,服務(wù)發(fā)現(xiàn)酝润,斷路器燎竖,智...
    卡卡羅2017閱讀 134,652評(píng)論 18 139
  • Spring Boot 參考指南 介紹 轉(zhuǎn)載自:https://www.gitbook.com/book/qbgb...
    毛宇鵬閱讀 46,806評(píng)論 6 342
  • 看到題目有沒有一種高大上的感覺?毛線要销,當(dāng)前是個(gè)人构回、是個(gè)公司都在說自己搞大數(shù)據(jù),每天沒有幾個(gè)PB的數(shù)據(jù)入庫(kù)疏咐,每天沒有...
    丁小晶的晶小丁閱讀 4,474評(píng)論 0 50