MapReduce cannot find symbol [duplicate]

Problem description · Votes: -1 · Answers: 2

I am once again asking for your technical support.

I am getting multiple "cannot find symbol" errors in my code. How can I fix them? I also have trouble converting between variable types.

For some of those variables I tried converting with, for example, new Text(some_string_variable). Is the "new" part necessary? Can I just call Text(some_string_variable)?

import java.io.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;




public class mapreduce{

  public static class XMapper extends Mapper<Object, Text, IntWritable, Text>{
 //@Override

    //private IntWritable keys = new IntWritable();
    private Text nums = new Text();

    private final static IntWritable one = new IntWritable(1);

    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {

    String[] Columns = value.toString().split(",");
    String col0 = Columns[0];
    String col1 = Columns[1];
    String col2 = Columns[2];
    String col3 = Columns[3];

    int colInt0 = Integer.parseInt(col0);
    int colInt2 = Integer.parseInt(col2);
    double colInt3 = Double.parseDouble(col3);

    if(colInt0 != 0 && colInt2 !=0 && colInt3 !=0){

        nums = String.valueOf(one)+"\t"+String.valueOf(colInt3);

        context.write(new IntWritable(colInt0), new Text(nums));


        }


    }

  }

  public static class XReducer extends Reducer<IntWritable,Text,IntWritable,Text>{
    public Text tupleResult = new Text();

    private IntWritable result = new IntWritable();

    public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        //int colKey = 0;

        //int colVal = 0;
        int countAll = 0;
        float sumAll = 0;

        for(Text val: values){

            StringTokenizer itr = new StringTokenizer(val.toString());

            int count = Integer.parseInt(itr.nextToken());
            double sum = Double.parseDouble(itr.nextToken());



            }
        TupleResult.set(Integer.toString(count)+','+Double.toString(sum));
        context.write(new IntWritable(key),new Text(TupleResult));

        }


    }




  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "mapreduce");

    /* TODO: Needs to be implemented */
    job.setJarByClass(mapreduce.class);
    job.setMapperClass(XMapper.class);
    job.setReducerClass(XReducer.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);

    //

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

[ERROR]  mapreduce.java:[38,48] incompatible types: java.lang.String cannot be converted to org.apache.hadoop.io.Text
[ERROR] mapreduce.java:[64,25] cannot find symbol
[ERROR]   symbol:   class StringTokenizer
[ERROR]   location: class mapreduce.XReducer
[ERROR] mapreduce.java:[64,51] cannot find symbol
[ERROR]   symbol:   class StringTokenizer
[ERROR]   location: class mapreduce.XReducer
[ERROR] mapreduce.java:[72,50] cannot find symbol
[ERROR]   symbol:   variable count
[ERROR]   location: class mapreduce.XReducer
[ERROR] mapreduce.java:[72,77] cannot find symbol
[ERROR]   symbol:   variable sum
[ERROR]   location: class mapreduce.XReducer
[ERROR] mapreduce.java:[72,17] cannot find symbol
[ERROR]   symbol:   variable TupleResult
[ERROR]   location: class mapreduce.XReducer
[ERROR] mapreduce.java:[73,47] incompatible types: org.apache.hadoop.io.IntWritable cannot be converted to int
[ERROR] mapreduce.java:[73,61] cannot find symbol
[ERROR]   symbol:   variable TupleResult
[ERROR]   location: class mapreduce.XReducer
java hadoop mapreduce
2 Answers
0 votes

The "cannot find symbol" errors are likely caused by not importing all of the classes you use, e.g. StringTokenizer: https://docs.oracle.com/javase/7/docs/api/java/util/StringTokenizer.html
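For instance, a minimal standalone sketch of tokenizing a value once the import is in place (the "1\t2.5" sample mirrors the "<count>\t<sum>" format your mapper emits):

import java.util.StringTokenizer;

public class TokenizerDemo {
    public static void main(String[] args) {
        // StringTokenizer splits on whitespace (including \t) by default
        StringTokenizer itr = new StringTokenizer("1\t2.5");
        int count = Integer.parseInt(itr.nextToken());    // first token:  "1"
        double sum = Double.parseDouble(itr.nextToken()); // second token: "2.5"
        System.out.println(count + " / " + sum);          // prints: 1 / 2.5
    }
}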

The new part is required because you are creating a Hadoop Text object.
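To illustrate with a small hypothetical snippet (not taken from your code): Text is a class, so its constructor can only be invoked with new. A bare Text(...) call would itself produce a "cannot find symbol" error, because no method of that name exists.

import org.apache.hadoop.io.Text;

public class TextDemo {
    public static void main(String[] args) {
        Text nums = new Text("1\t2.5"); // constructor call requires "new"

        // Hadoop Writables are mutable: set(...) reuses the same instance,
        // avoiding a fresh Text allocation for every record
        nums.set("2\t5.0");
        System.out.println(nums); // prints: 2	5.0
    }
}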

Best regards


0 votes

There are quite a few problems in your code.

  1. You are missing the import for the class StringTokenizer.
  2. You use the variables sum and count outside of their scope (see the sketch after this list).
  3. You should pay attention to the Java naming conventions. Class names should start with an uppercase letter.
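To make point 2 concrete, here is a minimal standalone sketch (with hypothetical data, not from your job): a variable declared inside a for loop only exists until the loop's closing brace, so values that must survive the loop are accumulated into variables declared before it.

public class ScopeDemo {
    public static void main(String[] args) {
        int countAll = 0;   // declared before the loop: still visible after it
        double sumAll = 0;

        for (String val : new String[]{"1\t2.5", "1\t4.0"}) {
            String[] parts = val.split("\t");
            int count = Integer.parseInt(parts[0]);    // scope ends with the loop body
            double sum = Double.parseDouble(parts[1]); // scope ends with the loop body
            countAll += count;
            sumAll += sum;
        }

        // count and sum no longer exist here; countAll and sumAll do
        System.out.println(countAll + "," + sumAll); // prints: 2,6.5
    }
}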

Also, you should format your code properly:

import java.io.*;
import java.util.StringTokenizer;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/* TODO: Update variable below with your gtid */
public class Mapreduce {

  public static class XMapper extends Mapper<Object, Text, IntWritable, Text> {

    private Text nums = new Text();
    private final static IntWritable one = new IntWritable(1);

    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
      String[] columns = value.toString().split(",");

      int colInt0 = Integer.parseInt(columns[0]);
      int colInt2 = Integer.parseInt(columns[2]);
      double colInt3 = Double.parseDouble(columns[3]);

      if (colInt0 != 0 && colInt2 != 0 && colInt3 != 0) {
        // nums is a Text, so assign with set(...); assigning a String to it
        // caused the "incompatible types" error
        nums.set(String.valueOf(one) + "\t" + String.valueOf(colInt3));
        context.write(new IntWritable(colInt0), nums);
      }
    }
  }

  public static class XReducer extends Reducer<IntWritable, Text, IntWritable, Text> {

    private Text tupleResult = new Text();

    public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
      int countAll = 0;
      double sumAll = 0;

      for (Text val : values) {
        StringTokenizer itr = new StringTokenizer(val.toString());

        // count and sum exist only inside this loop body, so accumulate
        // them into countAll and sumAll, which are declared above
        int count = Integer.parseInt(itr.nextToken());
        double sum = Double.parseDouble(itr.nextToken());
        countAll += count;
        sumAll += sum;
      }

      // the field is tupleResult, not TupleResult (Java is case sensitive)
      tupleResult.set(Integer.toString(countAll) + "," + Double.toString(sumAll));
      // key is already an IntWritable, so it can be written directly
      context.write(key, tupleResult);
    }
  }




  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "mapreduce");

    job.setJarByClass(Mapreduce.class); // must match the renamed class
    job.setMapperClass(XMapper.class);
    job.setReducerClass(XReducer.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
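Once compiled and packaged into a jar (the jar name below is hypothetical), the job can be launched with the standard hadoop jar command, which passes the input and output paths through as args[0] and args[1]:

hadoop jar mapreduce.jar Mapreduce /path/to/input /path/to/output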