java - Setting Up a Single Node Hadoop Cluster on Ubuntu 14.04


I am new to Hadoop and am trying to set up a single node cluster on an Ubuntu 14.04 machine. I have followed Michael Noll's tutorial. There is a problem with the cluster: it runs and stops without any apparent reason, and even when the cluster is running, it won't let me run my Java mapper and reducer.

I can't figure out the problem. Can someone help me with step-by-step installation and usage? Here is my code and the errors:

package org.myorg;

import java.io.IOException;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

public class WordCount {

  public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
      String line = value.toString();
      StringTokenizer tokenizer = new StringTokenizer(line);
      while (tokenizer.hasMoreTokens()) {
        word.set(tokenizer.nextToken());
        output.collect(word, one);
      }
    }
  }

  public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
      int sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      output.collect(key, new IntWritable(sum));
    }
  }

  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(WordCount.class);
    conf.setJobName("wordcount");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    JobClient.runJob(conf);
  }
}

The errors are:

Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/commons/logging/LogFactory
    at org.apache.hadoop.conf.Configuration.<clinit>(Configuration.java:139)
    at WordCount.main(WordCount.java:36)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.logging.LogFactory
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 2 more

You are getting an exception caused by: java.lang.ClassNotFoundException: org.apache.commons.logging.LogFactory

This means the class LogFactory is not present on your classpath at the path org/apache/commons/logging/. You have to make sure the commons-logging jar that provides it is actually there when you compile and run the job.
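The easiest way to get commons-logging (and every other jar Hadoop depends on) onto the classpath is to compile and launch the job through the hadoop command rather than plain java. Below is a minimal sketch; it assumes the code above is saved as WordCount.java, that the hadoop command from your installation is on the PATH, and that /user/hduser/input and /user/hduser/output are placeholder HDFS paths you would replace with your own:

# compile against the same jars Hadoop itself uses
mkdir -p wordcount_classes
javac -classpath "$(hadoop classpath)" -d wordcount_classes WordCount.java

# package the classes and submit the job
jar -cvf wordcount.jar -C wordcount_classes/ .
hadoop jar wordcount.jar org.myorg.WordCount /user/hduser/input /user/hduser/output

If you prefer to run with plain java, you would have to add the commons-logging jar and the rest of Hadoop's library jars to -classpath yourself, which is exactly what the hadoop launcher script does for you.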

