1 year ago
#297387
Reza Namvar
Java compiler cannot find hadoop.hbase.mapreduce packages
I'm trying to run a MapReduce app that uses an HBase table. The code is provided by my university professor and we are supposed to run it. Here is the code:
package hbase;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
public class HBaseMapReduce {
public static class hbaseMapper extends TableMapper<Text, IntWritable> {
public void map(ImmutableBytesWritable rowKey, Result columns, Context context)
throws IOException, InterruptedException {
try {
String inKey = new String(rowKey.get());
String oKey = inKey.split("#")[0];
byte[] bSales = columns.getValue(Bytes.toBytes("cf"), Bytes.toBytes("sales"));
String sSales = new String(bSales);
Integer sales = new Integer(sSales);
context.write(new Text(oKey), new IntWritable(sales));
} catch (RuntimeException e) {
e.printStackTrace();
}
}
}
public static class hbaseReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
try {
int sum = 0;
for (IntWritable sales : values) {
Integer intSales = new Integer(sales.toString());
sum += intSales;
}
Put insHBase = new Put(key.getBytes());
insHBase.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("sum"), Bytes.toBytes(sum));
context.write(null, insHBase);
} catch (Exception e) {
e.printStackTrace();
}
}
}
public static void main(String[] args) throws Exception {
Configuration conf = HBaseConfiguration.create();
// define scan and define column families to scan
Scan scan = new Scan();
scan.addFamily(Bytes.toBytes("cf"));
Job job = Job.getInstance(conf);
job.setJarByClass(HBaseMapReduce.class);
// define input hbase table
TableMapReduceUtil.initTableMapperJob("test1", scan, hbaseMapper.class, Text.class, IntWritable.class, job);
// define output table
TableMapReduceUtil.initTableReducerJob("test2", hbaseReducer.class, job);
job.waitForCompletion(true);
}
}
The problem is, when I try to compile the code using this command:
javac -cp $HADOOP_CLASSPATH -d hbase_classes hbase/HBaseMapReduce.java
I get the following error:
hbase/HBaseMapReduce.java:13: error: cannot find symbol
import org.apache.hadoop.hbase.mapreduce.TableMapper;
^
symbol: class TableMapper
location: package org.apache.hadoop.hbase.mapreduce
hbase/HBaseMapReduce.java:14: error: cannot find symbol
import org.apache.hadoop.hbase.mapreduce.TableReducer;
^
symbol: class TableReducer
location: package org.apache.hadoop.hbase.mapreduce
hbase/HBaseMapReduce.java:15: error: cannot find symbol
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
^
symbol: class TableMapReduceUtil
location: package org.apache.hadoop.hbase.mapreduce
hbase/HBaseMapReduce.java:21: error: cannot find symbol
public static class hbaseMapper extends TableMapper<Text, IntWritable> {
^
symbol: class TableMapper
location: class HBaseMapReduce
hbase/HBaseMapReduce.java:23: error: cannot find symbol
public void map(ImmutableBytesWritable rowKey, Result columns, Context context)
^
symbol: class Context
location: class hbaseMapper
hbase/HBaseMapReduce.java:38: error: cannot find symbol
public static class hbaseReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
^
symbol: class TableReducer
location: class HBaseMapReduce
hbase/HBaseMapReduce.java:40: error: cannot find symbol
public void reduce(Text key, Iterable<IntWritable> values, Context context)
^
symbol: class Context
location: class hbaseReducer
hbase/HBaseMapReduce.java:65: error: cannot find symbol
TableMapReduceUtil.initTableMapperJob("test1", scan, hbaseMapper.class, Text.class, IntWritable.class, job);
^
symbol: variable TableMapReduceUtil
location: class HBaseMapReduce
hbase/HBaseMapReduce.java:67: error: cannot find symbol
TableMapReduceUtil.initTableReducerJob("test2", hbaseReducer.class, job);
^
symbol: variable TableMapReduceUtil
location: class HBaseMapReduce
9 errors
I can successfully compile and run ordinary MapReduce code. The problem only appears when I use the MapReduce classes that live inside the HBase package, i.e.
org.apache.hadoop.hbase.mapreduce
My environment variables:
#Hadoop Related Options
export HADOOP_HOME=/home/ubuntu/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/nativ"
#HBase related options
export HBASE_HOME="/home/ubuntu/hadoop/hbase"
export HBASE_CONF="$HBASE_HOME/conf"
export PATH=$HBASE_HOME/bin:$PATH
#MapReduce Options
export HADOOP_CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath)
export HBASE_CLASSPATH=$($HBASE_HOME/bin/hbase classpath)
export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$HBASE_CLASSPATH
java
hadoop
mapreduce
hbase
0 Answers
Your Answer