hbase ClassNotFoundException

Problem Description

I want to run a map reduce example:

    package my.test;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.MultiTableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;




    /**
     * This class demonstrates the use of the MultiTableOutputFormat class.
     * Using this class we can write the output of a Hadoop map reduce program
     * into different HBase tables.
     *
     * @version 1.0 19 Jul 2011
     * @author  Wildnove
     */
    public class TestMultiTable extends Configured implements Tool {

        private static final Logger LOG = Logger.getLogger(TestMultiTable.class);
        private static final String CMDLINE = "com.wildnove.tutorial.TestMultiTable <inputFile> [-n name] [-s]";

        public static void main(String[] args) throws Exception {
            int res = ToolRunner.run(new TestMultiTable(), args);
            System.exit(res);
        }

        @Override
        public int run(String[] args) throws Exception {
            HelpFormatter help = new HelpFormatter();
            Options options = new Options();
            options.addOption("h", "help", false, "print program usage");
            options.addOption("n", "name", true, "sets job name");
            CommandLineParser parser = new BasicParser();
            CommandLine cline;
            try {
                cline = parser.parse(options, args);
                args = cline.getArgs();
                if (args.length < 1) {
                    help.printHelp(CMDLINE, options);
                    return -1;
                }
            } catch (ParseException e) {
                System.out.println(e);
                e.printStackTrace();
                help.printHelp(CMDLINE, options);
                return -1;
            }

            String name = null;
            try {
                if (cline.hasOption('n'))
                    name = cline.getOptionValue('n');
                else
                    name = "wildnove.com - Tutorial MultiTableOutputFormat ";
                Configuration conf = getConf();
                FileSystem fs = FileSystem.get(conf);
                Path inputFile = new Path(fs.makeQualified(new Path(args[0])).toUri().getPath());
                if (!getMultiTableOutputJob(name, inputFile).waitForCompletion(true))
                    return -1;
            } catch (Exception e) {
                System.out.println(e);
                e.printStackTrace();
                help.printHelp(CMDLINE, options);
                return -1;
            }
            return 0;
        }

        /**
         * Here we configure our job to use MultiTableOutputFormat class as map reduce output.
         * Note that we use 1 reduce only for debugging purposes, but you can use more than 1 reduce.
         */
        private Job getMultiTableOutputJob(String name, Path inputFile) throws IOException {
            if (LOG.isInfoEnabled()) {
                LOG.info(name + " starting...");
                LOG.info("computing file: " + inputFile);
            }
            Job job = new Job(getConf(), name);
            job.setJarByClass(TestMultiTable.class);
            job.setMapperClass(Mapper.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class);
            FileInputFormat.addInputPath(job, inputFile);
            job.setOutputFormatClass(MultiTableOutputFormat.class);
            job.setNumReduceTasks(1);
            job.setReducerClass(Reducer.class);

            return job;
        }

        private static class Mapper extends org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, Text> {

            private Text outKey = new Text();
            private Text outValue = new Text();

            /**
             * The map method splits the csv file according to this structure
             * brand,model,size (e.g. Cadillac,Seville,Midsize) and outputs all data using
             * brand as the key and the pair model,size as the value.
             */
            @Override
            public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
                String[] valueSplitted = value.toString().split(",");
                if (valueSplitted.length == 3) {
                    String brand = valueSplitted[0];
                    String model = valueSplitted[1];
                    String size = valueSplitted[2];

                    outKey.set(brand);
                    outValue.set(model + "," + size);
                    context.write(outKey, outValue);
                }
            }
        }

        private static class Reducer extends org.apache.hadoop.mapreduce.Reducer<Text, Text, ImmutableBytesWritable, Writable> {

            /**
             * The reduce method fills the TestCars table with all csv data,
             * computes some counters and saves those counters into the TestBrandsSizes table.
             * So we use two different HBase tables as output for the reduce method.
             */
            @Override
            protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
                Map<String, Integer> statsSizeCounters = new HashMap<String, Integer>();
                String brand = key.toString();
                // We receive all model,size pairs grouped by brand.
                for (Text value : values) {
                    String[] valueSplitted = value.toString().split(",");
                    if (valueSplitted.length == 2) {
                        String model = valueSplitted[0];
                        String size = valueSplitted[1];

                        // Fill the TestCars table
                        ImmutableBytesWritable putTable = new ImmutableBytesWritable(Bytes.toBytes("TestCars"));
                        byte[] putKey = Bytes.toBytes(brand + "," + model);
                        byte[] putFamily = Bytes.toBytes("Car");
                        Put put = new Put(putKey);
                        // qualifier brand
                        byte[] putQualifier = Bytes.toBytes("brand");
                        byte[] putValue = Bytes.toBytes(brand);
                        put.add(putFamily, putQualifier, putValue);
                        // qualifier model
                        putQualifier = Bytes.toBytes("model");
                        putValue = Bytes.toBytes(model);
                        put.add(putFamily, putQualifier, putValue);
                        // qualifier size
                        putQualifier = Bytes.toBytes("size");
                        putValue = Bytes.toBytes(size);
                        put.add(putFamily, putQualifier, putValue);
                        context.write(putTable, put);

                        // Compute some counters: number of different sizes for a brand
                        if (!statsSizeCounters.containsKey(size))
                            statsSizeCounters.put(size, 1);
                        else
                            statsSizeCounters.put(size, statsSizeCounters.get(size) + 1);
                    }
                }

                for (Entry<String, Integer> entry : statsSizeCounters.entrySet()) {
                    // Fill the TestBrandsSizes table
                    ImmutableBytesWritable putTable = new ImmutableBytesWritable(Bytes.toBytes("TestBrandsSizes"));
                    byte[] putKey = Bytes.toBytes(brand);
                    byte[] putFamily = Bytes.toBytes("BrandSizes");
                    Put put = new Put(putKey);
                    // We can use as qualifier the sizes
                    byte[] putQualifier = Bytes.toBytes(entry.getKey());
                    byte[] putValue = Bytes.toBytes(entry.getValue());
                    put.add(putFamily, putQualifier, putValue);
                    context.write(putTable, put);
                }
            }
        }
    }

I built mt.jar with Eclipse (Export → JAR file).
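
For reference, a roughly equivalent command-line build (a sketch only: the source path src/my/test/TestMultiTable.java and the classes output directory are assumptions about the project layout):

    # Compile against the Hadoop and HBase jars, then package the
    # compiled classes into mt.jar.
    mkdir -p classes
    javac -cp "$(${HADOOP_HOME}/bin/hadoop classpath):${HBASE_HOME}/hbase-0.94.0.jar" \
          -d classes src/my/test/TestMultiTable.java
    jar cf mt.jar -C classes .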

Run the MapReduce job:

[zhouhh@Hadoop48 ~]$ HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath`:`${HADOOP_HOME}/bin/hadoop classpath` ${HADOOP_HOME}/bin/hadoop jar mt.jar cars.csv
12/06/11 20:14:33 INFO test.TestMultiTable: wildnove.com - Tutorial MultiTableOutputFormat starting...
12/06/11 20:14:33 INFO test.TestMultiTable: computing file: /user/zhouhh/cars.csv
12/06/11 20:14:34 INFO input.FileInputFormat: Total input paths to process : 1
12/06/11 20:14:34 INFO util.NativeCodeLoader: Loaded the native-hadoop library
12/06/11 20:14:34 WARN snappy.LoadSnappy: Snappy native library not loaded
12/06/11 20:14:35 INFO mapred.JobClient: Running job: job_201206111811_0012
12/06/11 20:14:36 INFO mapred.JobClient:  map 0% reduce 0%
12/06/11 20:14:42 INFO mapred.JobClient: Task Id : attempt_201206111811_0012_m_000002_0, Status : FAILED
java.lang.RuntimeException: java.lang.ClassNotFoundException: org.apache.hadoop.hbase.mapreduce.MultiTableOutputFormat
    at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:867)
    at org.apache.hadoop.mapreduce.JobContext.getOutputFormatClass(JobContext.java:235)
    at org.apache.hadoop.mapred.Task.initialize(Task.java:513)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:353)
    at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
    at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hbase.mapreduce.MultiTableOutputFormat
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:264)
    at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:820)
    at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:865)

cars.csv:

[zhouhh@Hadoop48 ~]$ cat cars.csv
Acura,Integra,Small
Acura,Legend,Midsize
Audi,90,Compact
Audi,100,Midsize
BMW,535i,Midsize
Buick,Century,Midsize
Buick,LeSabre,Large
Buick,Roadmaster,Large
Buick,Riviera,Midsize
Cadillac,DeVille,Large
Cadillac,Seville,Midsize

MultiTableOutputFormat.class is in hbase-0.94.0.jar.
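
A quick way to confirm that (a sketch using standard unzip; the jar path matches the classpath listing below):

    # List the jar's entries and search for the missing class.
    unzip -l /home/zhouhh/hbase-0.94.0/hbase-0.94.0.jar | grep MultiTableOutputFormat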

[zhouhh@Hadoop48 ~]$ echo $HADOOP_CLASSPATH | tr ':' '\n' | grep hbase
/home/zhouhh/hbase-0.94.0/conf
/home/zhouhh/hbase-0.94.0
/home/zhouhh/hbase-0.94.0/hbase-0.94.0.jar
/home/zhouhh/hbase-0.94.0/hbase-0.94.0-tests.jar
/home/zhouhh/hbase-0.94.0/lib/activation-1.1.jar
/home/zhouhh/hbase-0.94.0/lib/asm-3.1.jar
/home/zhouhh/hbase-0.94.0/lib/avro-1.5.3.jar
/home/zhouhh/hbase-0.94.0/lib/avro-ipc-1.5.3.jar
/home/zhouhh/hbase-0.94.0/lib/commons-beanutils-1.7.0.jar
/home/zhouhh/hbase-0.94.0/lib/commons-beanutils-core-1.8.0.jar
/home/zhouhh/hbase-0.94.0/lib/commons-cli-1.2.jar
/home/zhouhh/hbase-0.94.0/lib/commons-codec-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/commons-collections-3.2.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-configuration-1.6.jar
/home/zhouhh/hbase-0.94.0/lib/commons-digester-1.8.jar
/home/zhouhh/hbase-0.94.0/lib/commons-el-1.0.jar
/home/zhouhh/hbase-0.94.0/lib/commons-httpclient-3.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-io-2.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-lang-2.5.jar
/home/zhouhh/hbase-0.94.0/lib/commons-logging-1.1.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-math-2.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-net-1.4.1.jar
/home/zhouhh/hbase-0.94.0/lib/core-3.1.1.jar
/home/zhouhh/hbase-0.94.0/lib/guava-r09.jar
/home/zhouhh/hbase-0.94.0/lib/hadoop-core-1.0.2.jar
/home/zhouhh/hbase-0.94.0/lib/high-scale-lib-1.1.1.jar
/home/zhouhh/hbase-0.94.0/lib/httpclient-4.1.2.jar
/home/zhouhh/hbase-0.94.0/lib/httpcore-4.1.3.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-core-asl-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-jaxrs-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-mapper-asl-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-xc-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jamon-runtime-2.3.1.jar
/home/zhouhh/hbase-0.94.0/lib/jasper-compiler-5.5.23.jar
/home/zhouhh/hbase-0.94.0/lib/jasper-runtime-5.5.23.jar
/home/zhouhh/hbase-0.94.0/lib/jaxb-api-2.1.jar
/home/zhouhh/hbase-0.94.0/lib/jaxb-impl-2.1.12.jar
/home/zhouhh/hbase-0.94.0/lib/jersey-core-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/jersey-json-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/jersey-server-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/jettison-1.1.jar
/home/zhouhh/hbase-0.94.0/lib/jetty-6.1.26.jar
/home/zhouhh/hbase-0.94.0/lib/jetty-util-6.1.26.jar
/home/zhouhh/hbase-0.94.0/lib/jruby-complete-1.6.5.jar
/home/zhouhh/hbase-0.94.0/lib/jsp-2.1-6.1.14.jar
/home/zhouhh/hbase-0.94.0/lib/jsp-api-2.1-6.1.14.jar
/home/zhouhh/hbase-0.94.0/lib/libthrift-0.8.0.jar
/home/zhouhh/hbase-0.94.0/lib/log4j-1.2.16.jar
/home/zhouhh/hbase-0.94.0/lib/netty-3.2.4.Final.jar
/home/zhouhh/hbase-0.94.0/lib/protobuf-java-2.4.0a.jar
/home/zhouhh/hbase-0.94.0/lib/servlet-api-2.5-6.1.14.jar
/home/zhouhh/hbase-0.94.0/lib/slf4j-api-1.5.8.jar
/home/zhouhh/hbase-0.94.0/lib/snappy-java-1.0.3.2.jar
/home/zhouhh/hbase-0.94.0/lib/stax-api-1.0.1.jar
/home/zhouhh/hbase-0.94.0/lib/velocity-1.7.jar
/home/zhouhh/hbase-0.94.0/lib/xmlenc-0.52.jar
/home/zhouhh/hbase-0.94.0/lib/zookeeper-3.4.3.jar
/home/zhouhh/hbase-0.94.0/conf
/home/zhouhh/hbase-0.94.0
/home/zhouhh/hbase-0.94.0/hbase-0.94.0.jar
/home/zhouhh/hbase-0.94.0/hbase-0.94.0-tests.jar
/home/zhouhh/hbase-0.94.0/lib/activation-1.1.jar
/home/zhouhh/hbase-0.94.0/lib/asm-3.1.jar
/home/zhouhh/hbase-0.94.0/lib/avro-1.5.3.jar
/home/zhouhh/hbase-0.94.0/lib/avro-ipc-1.5.3.jar
/home/zhouhh/hbase-0.94.0/lib/commons-beanutils-1.7.0.jar
/home/zhouhh/hbase-0.94.0/lib/commons-beanutils-core-1.8.0.jar
/home/zhouhh/hbase-0.94.0/lib/commons-cli-1.2.jar
/home/zhouhh/hbase-0.94.0/lib/commons-codec-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/commons-collections-3.2.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-configuration-1.6.jar
/home/zhouhh/hbase-0.94.0/lib/commons-digester-1.8.jar
/home/zhouhh/hbase-0.94.0/lib/commons-el-1.0.jar
/home/zhouhh/hbase-0.94.0/lib/commons-httpclient-3.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-io-2.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-lang-2.5.jar
/home/zhouhh/hbase-0.94.0/lib/commons-logging-1.1.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-math-2.1.jar
/home/zhouhh/hbase-0.94.0/lib/commons-net-1.4.1.jar
/home/zhouhh/hbase-0.94.0/lib/core-3.1.1.jar
/home/zhouhh/hbase-0.94.0/lib/guava-r09.jar
/home/zhouhh/hbase-0.94.0/lib/hadoop-core-1.0.2.jar
/home/zhouhh/hbase-0.94.0/lib/high-scale-lib-1.1.1.jar
/home/zhouhh/hbase-0.94.0/lib/httpclient-4.1.2.jar
/home/zhouhh/hbase-0.94.0/lib/httpcore-4.1.3.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-core-asl-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-jaxrs-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-mapper-asl-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jackson-xc-1.5.5.jar
/home/zhouhh/hbase-0.94.0/lib/jamon-runtime-2.3.1.jar
/home/zhouhh/hbase-0.94.0/lib/jasper-compiler-5.5.23.jar
/home/zhouhh/hbase-0.94.0/lib/jasper-runtime-5.5.23.jar
/home/zhouhh/hbase-0.94.0/lib/jaxb-api-2.1.jar
/home/zhouhh/hbase-0.94.0/lib/jaxb-impl-2.1.12.jar
/home/zhouhh/hbase-0.94.0/lib/jersey-core-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/jersey-json-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/jersey-server-1.4.jar
/home/zhouhh/hbase-0.94.0/lib/jettison-1.1.jar
/home/zhouhh/hbase-0.94.0/lib/jetty-6.1.26.jar
/home/zhouhh/hbase-0.94.0/lib/jetty-util-6.1.26.jar
/home/zhouhh/hbase-0.94.0/lib/jruby-complete-1.6.5.jar
/home/zhouhh/hbase-0.94.0/lib/jsp-2.1-6.1.14.jar
/home/zhouhh/hbase-0.94.0/lib/jsp-api-2.1-6.1.14.jar
/home/zhouhh/hbase-0.94.0/lib/libthrift-0.8.0.jar
/home/zhouhh/hbase-0.94.0/lib/log4j-1.2.16.jar
/home/zhouhh/hbase-0.94.0/lib/netty-3.2.4.Final.jar
/home/zhouhh/hbase-0.94.0/lib/protobuf-java-2.4.0a.jar
/home/zhouhh/hbase-0.94.0/lib/servlet-api-2.5-6.1.14.jar
/home/zhouhh/hbase-0.94.0/lib/slf4j-api-1.5.8.jar
/home/zhouhh/hbase-0.94.0/lib/snappy-java-1.0.3.2.jar
/home/zhouhh/hbase-0.94.0/lib/stax-api-1.0.1.jar
/home/zhouhh/hbase-0.94.0/lib/velocity-1.7.jar
/home/zhouhh/hbase-0.94.0/lib/xmlenc-0.52.jar
/home/zhouhh/hbase-0.94.0/lib/zookeeper-3.4.3.jar

I have tried many methods, but the same error is still there.

Can anyone help me? Thanks.

Solution

You have two easy options:

1) Build a fat jar, where your mt.jar file includes hbase-0.94.0.jar (this can be done with mvn package -Dfatjar).
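
If you are not using Maven, here is a minimal sketch of the same idea; it relies on Hadoop picking up jars from a lib/ directory inside the job jar, and assumes HBASE_HOME points at your HBase install:

    # Bundle the HBase jar inside mt.jar under lib/, where the MapReduce
    # task runtime should find it and add it to the task classpath.
    mkdir -p lib
    cp ${HBASE_HOME}/hbase-0.94.0.jar lib/
    jar uf mt.jar lib/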

2) Use the GenericOptionsParser (which I think you are already trying to do, by implementing Tool) and then specify the -libjars parameter on the command line.
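
For example (a sketch: since your main goes through ToolRunner, GenericOptionsParser will strip -libjars and ship the listed jars to the tasks via the distributed cache; HADOOP_CLASSPATH alone only affects the client JVM, not the task JVMs where the ClassNotFoundException is actually thrown):

    # Generic options like -libjars must come before the job's own arguments.
    HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` \
    ${HADOOP_HOME}/bin/hadoop jar mt.jar \
        -libjars /home/zhouhh/hbase-0.94.0/hbase-0.94.0.jar cars.csv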
