一步一步学习hadoop(八)

Map任务执行类的实现

     Hadoop MapReduce框架将要处理的数据切分成片,将每个分片交给一个map任务去完成。每个map任务都将分片数据解析为键值对数据,调用Mapper类的map函数,将输入键值对转化为输出键值对;输出键值对的类型和输入键值对的类型可以没有任何关系。一个输入键值对可以得到0个或者多个输出键值对。
    Hadoop中的MapReduce作业调用Mapper类来完成map任务。Mapper有两个版本,一个是老版的在org.apache.hadoop.mapred里,新版的在org.apache.hadoop.mapreduce里。新版的将原来的OutputCollector和Reporter整合为一个新的Context对象,使用起来更加简单,功能也更加强大。下面的例子实现新版的Mapper。一般来讲只需对Mapper类的map函数进行重写(覆盖,override)就可以满足大部分的需求了;有时需要在执行map任务前获取一些自定义的数据,比如传入的参数(如例子中的fieldSeparator),或从DistributedCache中获取共享数据等特殊要求,这时需要实现setup函数,该函数在每个map任务开始前调用一次;同理,若需要在map任务结束后做清理工作,则需要实现cleanup函数。
    下面以一个类似linux的cut工具的map来讲解mapper类的构造过程。
    该mapper将输入的键值对看成以分隔符分离的字段集,分隔符通过设置mapreduce.fieldsel.data.field.separator指定,默认为\t。输出的key/value对,以mapreduce.fieldsel.map.output.key.value.fields.spec指定,格式为2,3,5:6-9,12-表示输出键为字段2,3,5,输出值为字段6,7,8,9,12和大于12的所有字段。

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
public class FieldSelectionMapper<K, V>
    extends Mapper<K, V, Text, Text> {

  private String mapOutputKeyValueSpec;

  private boolean ignoreInputKey;

  private String fieldSeparator = "\t";

  private List<Integer> mapOutputKeyFieldList = new ArrayList<Integer>();

  private List<Integer> mapOutputValueFieldList = new ArrayList<Integer>();

  private int allMapValueFieldsFrom = -1;

  public void setup(Context context)
      throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    this.fieldSeparator =
      conf.get(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "\t");
    this.mapOutputKeyValueSpec =
      conf.get(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "0-:");
    try {
      this.ignoreInputKey = TextInputFormat.class.getCanonicalName().equals(
        context.getInputFormatClass().getCanonicalName());
    } catch (ClassNotFoundException e) {
      throw new IOException("Input format class not found", e);
    }
    allMapValueFieldsFrom = FieldSelectionHelper.parseOutputKeyValueSpec(
      mapOutputKeyValueSpec, mapOutputKeyFieldList, mapOutputValueFieldList);
  }

  public void map(K key, V val, Context context)
      throws IOException, InterruptedException {
    FieldSelectionHelper helper = new FieldSelectionHelper(
      FieldSelectionHelper.emptyText, FieldSelectionHelper.emptyText);
    helper.extractOutputKeyValue(key.toString(), val.toString(),
      fieldSeparator, mapOutputKeyFieldList, mapOutputValueFieldList,
      allMapValueFieldsFrom, ignoreInputKey, true);
    context.write(helper.getKey(), helper.getValue());
  }
}

其中引用的FieldSelectionHelper类的实现如下:
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
/**
 * Helper shared by the field-selection mapper/reducer: parses selection
 * specs of the form "keyFields:valueFields" (e.g. "2,3,5:6-9,12-") and
 * extracts the selected fields from a record.
 */
public class FieldSelectionHelper {

  // Shared placeholder used when a key/value has not been produced yet.
  // NOTE(review): public and non-final, so callers could reassign or
  // mutate it; left as-is for source compatibility.
  public static Text emptyText = new Text("");

  /** Config key for the data field separator (default "\t"). */
  public static final String DATA_FIELD_SEPERATOR =
    "mapreduce.fieldsel.data.field.separator";
  /** Config key for the map output key/value field spec. */
  public static final String MAP_OUTPUT_KEY_VALUE_SPEC =
    "mapreduce.fieldsel.map.output.key.value.fields.spec";
  /** Config key for the reduce output key/value field spec. */
  public static final String REDUCE_OUTPUT_KEY_VALUE_SPEC =
    "mapreduce.fieldsel.reduce.output.key.value.fields.spec";

  /**
   * Parses one comma-split field list (e.g. {"2","3","6-9","12-"}) into
   * explicit indices appended to {@code fieldList}.
   *
   * @return the start index of an open-ended range "N-" (all fields from
   *         N onward), or -1 if the spec contains no such range
   */
  private static int extractFields(String[] fieldListSpec,
      List<Integer> fieldList) {
    int allFieldsFrom = -1;
    for (String fieldSpec : fieldListSpec) {
      if (fieldSpec.length() == 0) {
        continue; // tolerate empty entries such as "2,,3"
      }
      int pos = fieldSpec.indexOf('-');
      if (pos < 0) {
        // Single index. Integer.valueOf replaces the deprecated
        // new Integer(String) constructor; same value semantics.
        fieldList.add(Integer.valueOf(fieldSpec));
      } else {
        String start = fieldSpec.substring(0, pos);
        String end = fieldSpec.substring(pos + 1);
        if (start.length() == 0) {
          start = "0"; // "-N" is shorthand for "0-N"
        }
        if (end.length() == 0) {
          // "N-": remember the open-ended start; the trailing fields are
          // appended later by selectFields, not expanded here.
          allFieldsFrom = Integer.parseInt(start);
          continue;
        }
        int startPos = Integer.parseInt(start);
        int endPos = Integer.parseInt(end);
        for (int j = startPos; j <= endPos; j++) {
          fieldList.add(j);
        }
      }
    }
    return allFieldsFrom;
  }

  /**
   * Joins the selected fields of {@code fields} with {@code separator}.
   * An index beyond fields.length contributes an empty field (the
   * separator is still appended) so field positions stay aligned.
   *
   * @return the joined string, or null when nothing was selected
   */
  private static String selectFields(String[] fields, List<Integer> fieldList,
      int allFieldsFrom, String separator) {
    // Lazily created so "nothing selected" yields null, not "".
    // StringBuilder replaces StringBuffer: purely local, no sync needed.
    StringBuilder sb = null;
    if (fieldList != null && fieldList.size() > 0) {
      sb = new StringBuilder();
      for (Integer index : fieldList) {
        if (index < fields.length) {
          sb.append(fields[index]);
        }
        sb.append(separator);
      }
    }
    if (allFieldsFrom >= 0) {
      if (sb == null) {
        sb = new StringBuilder();
      }
      for (int i = allFieldsFrom; i < fields.length; i++) {
        sb.append(fields[i]).append(separator);
      }
    }
    if (sb == null) {
      return null;
    }
    String retv = sb.toString();
    // NOTE(review): strips exactly ONE trailing char; a multi-character
    // separator would leave a partial tail. Matches upstream Hadoop —
    // confirm before changing.
    if (retv.length() > 0) {
      retv = retv.substring(0, retv.length() - 1);
    }
    return retv;
  }

  /**
   * Parses a "keyFields:valueFields" spec into the two index lists.
   *
   * @return the open-ended start index ("N-") of the VALUE spec, or -1
   */
  public static int parseOutputKeyValueSpec(String keyValueSpec,
      List<Integer> keyFieldList, List<Integer> valueFieldList) {
    // limit -1 keeps a trailing empty value spec, e.g. "0-:"
    String[] keyValSpecs = keyValueSpec.split(":", -1);

    String[] keySpec = keyValSpecs[0].split(",");

    String[] valSpec = new String[0];
    if (keyValSpecs.length > 1) {
      valSpec = keyValSpecs[1].split(",");
    }

    // NOTE(review): an "N-" open range in the KEY spec is silently
    // discarded here (return value ignored); only the value spec's open
    // range is propagated.
    FieldSelectionHelper.extractFields(keySpec, keyFieldList);
    return FieldSelectionHelper.extractFields(valSpec, valueFieldList);
  }

  /**
   * Renders the parsed spec in a human-readable form, mainly for
   * diagnostics/logging.
   */
  public static String specToString(String fieldSeparator, String keyValueSpec,
      int allValueFieldsFrom, List<Integer> keyFieldList,
      List<Integer> valueFieldList) {
    StringBuilder sb = new StringBuilder();
    sb.append("fieldSeparator: ").append(fieldSeparator).append("\n");

    sb.append("keyValueSpec: ").append(keyValueSpec).append("\n");
    sb.append("allValueFieldsFrom: ").append(allValueFieldsFrom);
    sb.append("\n");
    sb.append("keyFieldList.length: ").append(keyFieldList.size());
    sb.append("\n");
    for (Integer field : keyFieldList) {
      sb.append("\t").append(field).append("\n");
    }
    sb.append("valueFieldList.length: ").append(valueFieldList.size());
    sb.append("\n");
    for (Integer field : valueFieldList) {
      sb.append("\t").append(field).append("\n");
    }
    return sb.toString();
  }

  /** Extracted output key; null until extractOutputKeyValue produces one. */
  private Text key = null;
  /** Extracted output value; null until extractOutputKeyValue produces one. */
  private Text value = null;

  public FieldSelectionHelper() {
  }

  /** Creates a helper pre-seeded with the given key and value. */
  public FieldSelectionHelper(Text key, Text val) {
    this.key = key;
    this.value = val;
  }

  /** @return the extracted key (possibly the seed passed at construction) */
  public Text getKey() {
    return key;
  }

  /** @return the extracted value (possibly the seed passed at construction) */
  public Text getValue() {
    return value;
  }

  /**
   * Splits the record into fields and stores the selected key/value.
   * In map mode ({@code isMap}), if the key spec selected nothing the
   * value is promoted to the key.
   */
  public void extractOutputKeyValue(String key, String val,
      String fieldSep, List<Integer> keyFieldList, List<Integer> valFieldList,
      int allValueFieldsFrom, boolean ignoreKey, boolean isMap) {
    if (!ignoreKey) {
      // NOTE(review): key and val are concatenated WITHOUT a separator,
      // so the last key field merges with the first value field. Matches
      // upstream Hadoop behavior — confirm before changing.
      val = key + val;
    }
    // NOTE(review): String.split treats fieldSep as a regex; fine for the
    // default "\t", but metacharacters such as "|" would need
    // Pattern.quote. Kept as-is to preserve behavior.
    String[] fields = val.split(fieldSep);

    String newKey = selectFields(fields, keyFieldList, -1, fieldSep);
    String newVal = selectFields(fields, valFieldList, allValueFieldsFrom,
      fieldSep);
    if (isMap && newKey == null) {
      newKey = newVal;
      newVal = null;
    }

    if (newKey != null) {
      this.key = new Text(newKey);
    }
    if (newVal != null) {
      this.value = new Text(newVal);
    }
  }
}






posted @ 2012-11-13 12:55  飞天的白猪  阅读(201)  评论(0编辑  收藏  举报