Implementing batch import into HBase with the Java API

 

Steps to batch-import a mobile web access log file into HBase:

1. Upload the log file (download the attachment) to HDFS with the hadoop command-line tool: hadoop fs -put input /
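If you prefer to do the upload from Java rather than the shell, a minimal sketch using the HDFS FileSystem API is shown below. It assumes the namenode address hdfs://hadoop1:9000 used by the code later in this article and a local file named input; adjust both to your environment.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUpload {

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Namenode address assumed from the configuration used later in this article
		FileSystem fs = FileSystem.get(URI.create("hdfs://hadoop1:9000"), conf);
		// Equivalent to: hadoop fs -put input /
		fs.copyFromLocalFile(new Path("input"), new Path("/"));
		fs.close();
	}
}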


 

2. Create the HBase table through the Java API:

 

package com.jiewen.hbase;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class HbaseDemo {

	public static void main(String[] args) throws IOException {
		String tableName = "wlan_log";
		String columnFamily = "cf";

		HbaseDemo.create(tableName, columnFamily);

		// HbaseDemo.put(tableName, "row1", columnFamily, "cl1", "data");
		// HbaseDemo.get(tableName, "row1");
		// HbaseDemo.scan(tableName);
		// HbaseDemo.delete(tableName);
	}

	// Configuration required by every HBase operation
	private static Configuration getConfiguration() {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.rootdir", "hdfs://hadoop1:9000/hbase");
		// Required when running from Eclipse; without it the client cannot locate the cluster
		conf.set("hbase.zookeeper.quorum", "hadoop1");
		return conf;
	}

	// Create a table
	public static void create(String tableName, String columnFamily)
			throws IOException {
		HBaseAdmin admin = new HBaseAdmin(getConfiguration());
		if (admin.tableExists(tableName)) {
			System.out.println("table exists!");
		} else {
			HTableDescriptor tableDesc = new HTableDescriptor(tableName);
			tableDesc.addFamily(new HColumnDescriptor(columnFamily));
			admin.createTable(tableDesc);
			System.out.println("create table success!");
		}
	}

	// Insert a single record
	public static void put(String tableName, String row, String columnFamily,
			String column, String data) throws IOException {
		HTable table = new HTable(getConfiguration(), tableName);
		Put p1 = new Put(Bytes.toBytes(row));
		p1.add(Bytes.toBytes(columnFamily), Bytes.toBytes(column), Bytes
				.toBytes(data));
		table.put(p1);
		System.out.println("put'" + row + "'," + columnFamily + ":" + column
				+ "','" + data + "'");
	}

	// Read a single record
	public static void get(String tableName, String row) throws IOException {
		HTable table = new HTable(getConfiguration(), tableName);
		Get get = new Get(Bytes.toBytes(row));
		Result result = table.get(get);
		System.out.println("Get: " + result);
	}

	// Scan and print all rows
	public static void scan(String tableName) throws IOException {
		HTable table = new HTable(getConfiguration(), tableName);
		Scan scan = new Scan();
		ResultScanner scanner = table.getScanner(scan);
		for (Result result : scanner) {
			System.out.println("Scan: " + result);
		}
	}

	// Drop the table
	public static void delete(String tableName) throws IOException {
		HBaseAdmin admin = new HBaseAdmin(getConfiguration());
		if (admin.tableExists(tableName)) {
			try {
				admin.disableTable(tableName);
				admin.deleteTable(tableName);
				System.out.println("Delete " + tableName + " succeeded");
			} catch (IOException e) {
				e.printStackTrace();
				System.out.println("Delete " + tableName + " failed");
			}
		}
	}

}
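Note that HBaseAdmin and HTable are the client classes of the HBase 0.9x line this article targets. If you run a newer client (HBase 1.0+), table creation goes through ConnectionFactory instead; a minimal sketch, assuming the same quorum host hadoop1 and the same table and column-family names as above, might look like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CreateTableNewApi {

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		// Same ZooKeeper quorum as the original code (assumption)
		conf.set("hbase.zookeeper.quorum", "hadoop1");
		try (Connection connection = ConnectionFactory.createConnection(conf);
				Admin admin = connection.getAdmin()) {
			TableName name = TableName.valueOf("wlan_log");
			if (!admin.tableExists(name)) {
				HTableDescriptor desc = new HTableDescriptor(name);
				desc.addFamily(new HColumnDescriptor("cf"));
				admin.createTable(desc);
			}
		}
	}
}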

 

3. Import the log file into the HBase table wlan_log with a MapReduce job. The mapper builds a row key of the form <second field>:<yyyyMMddHHmmss timestamp> from each tab-separated line, and the reducer writes Put objects to the table through TableOutputFormat:

 

import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class HbaseBatchImport {

	public static void main(String[] args) throws Exception {
		final Configuration configuration = new Configuration();
		// Set the ZooKeeper quorum
		configuration.set("hbase.zookeeper.quorum", "hadoop1");

		// Set the target HBase table name
		configuration.set(TableOutputFormat.OUTPUT_TABLE, "wlan_log");

		// Increase this value to keep HBase from timing out during the import
		configuration.set("dfs.socket.timeout", "180000");

		final Job job = new Job(configuration, "HBaseBatchImport");

		job.setMapperClass(BatchImportMapper.class);
		job.setReducerClass(BatchImportReducer.class);
		// Set the map output types; the reduce output is handled by TableOutputFormat
		job.setMapOutputKeyClass(LongWritable.class);
		job.setMapOutputValueClass(Text.class);

		job.setInputFormatClass(TextInputFormat.class);
		// No output path is set; instead, the output format class writes directly to HBase
		job.setOutputFormatClass(TableOutputFormat.class);

		FileInputFormat.setInputPaths(job, "hdfs://hadoop1:9000/input");

		job.waitForCompletion(true);
	}

	static class BatchImportMapper extends
			Mapper<LongWritable, Text, LongWritable, Text> {
		SimpleDateFormat dateformat1 = new SimpleDateFormat("yyyyMMddHHmmss");
		Text v2 = new Text();

		protected void map(LongWritable key, Text value, Context context)
				throws java.io.IOException, InterruptedException {
			final String[] splited = value.toString().split("\t");
			try {
				final Date date = new Date(Long.parseLong(splited[0].trim()));
				final String dateFormat = dateformat1.format(date);
				String rowKey = splited[1] + ":" + dateFormat;
				v2.set(rowKey + "\t" + value.toString());
				context.write(key, v2);
			} catch (NumberFormatException e) {
				final Counter counter = context.getCounter("BatchImport",
						"ErrorFormat");
				counter.increment(1L);
				System.out.println("出错了" + splited[0] + " " + e.getMessage());
			}
		};
	}

	static class BatchImportReducer extends
			TableReducer<LongWritable, Text, NullWritable> {
		protected void reduce(LongWritable key,
				java.lang.Iterable<Text> values, Context context)
				throws java.io.IOException, InterruptedException {
			for (Text text : values) {
				final String[] splited = text.toString().split("\t");

				final Put put = new Put(Bytes.toBytes(splited[0]));
				put.add(Bytes.toBytes("cf"), Bytes.toBytes("date"), Bytes
						.toBytes(splited[1]));
				// Remaining fields omitted; add them with further put.add(...) calls
				context.write(NullWritable.get(), put);
			}
		};
	}

}
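A note on the input format the mapper expects: each line must be tab-separated ("\t"), the first field must be an epoch timestamp in milliseconds (it is passed to Long.parseLong and then formatted as yyyyMMddHHmmss), and the second field is the identifier, typically the phone number in this mobile access log, that becomes the row-key prefix. A purely illustrative line of that shape (the values are made up) would be:

1363157985066	13726230503	...remaining tab-separated fields of the record...

Lines that do not match this layout, such as a bare list of URLs, fail Long.parseLong on the first field and are only counted under the BatchImport/ErrorFormat counter rather than imported; that is what happens in the comment log further down.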

4. Verify the import results, for example by calling HbaseDemo.scan("wlan_log") from step 2, or by running scan 'wlan_log' in the hbase shell:



 

Comments
#3 jinyike 2015-10-14
#2 aqi915 2015-08-21
aqi915 wrote:
Hi, expert:
    I'd like to ask: what does your "\t" mean?

I just tried your code. The file data in HDFS is:
http://news.163.com/
http://www.gov.cn/
http://www.sbsm.gov.cn/
http://news.stnn.cc/china/
http://www.zaobao.com/wencui/social
http://www.xinhuanet.com/politics/1.htm
http://news.china.com.cn/shehui/node_7185045.htm

It threw errors. Please help take a look:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/D:/Software/hadoop-2.7.1/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/D:/Software/hbase-0.98.13/lib/slf4j-log4j12-1.6.4.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
15/08/21 17:34:30 INFO Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
15/08/21 17:34:30 INFO jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
15/08/21 17:34:30 INFO Configuration.deprecation: dfs.socket.timeout is deprecated. Instead, use dfs.client.socket-timeout
15/08/21 17:34:30 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0xb48183 connecting to ZooKeeper ensemble=192.168.100.142:2181,192.168.100.141:2181,192.168.100.143:2181
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:host.name=hyq-PC
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.version=1.7.0_79
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.home=C:\Program Files (x86)\Java\jdk1.7.0_79\jre
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.class.path=E:\java\workspace\01_hbase_upload_file\bin;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\zookeeper-3.4.6.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\zookeeper-3.4.6-tests.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\xz-1.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\stax-api-1.0-2.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\servlet-api-2.5.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\protobuf-java-2.5.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\netty-3.6.2.Final.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\log4j-1.2.17.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\leveldbjni-all-1.8.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jsr305-3.0.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jetty-util-6.1.26.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jetty-6.1.26.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jettison-1.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jersey-server-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jersey-json-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jersey-guice-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jersey-core-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jersey-client-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jaxb-impl-2.2.3-1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jaxb-api-2.2.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\javax.inject-1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jackson-xc-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jackson-mapper-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jackson-jaxrs-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\jackson-core-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\guice-servlet-3.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\guice-3.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\guava-11.0.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-logging-1.1.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-lang-2.6.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-io-2.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-compress-1.4.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-collections-3.2.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-codec-1.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\commons-cli-1.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\asm-3.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\aopalliance-1.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\lib\activation-1.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-server-web-proxy-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-server-sharedcachemanager-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-server-resourcemanager-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-server-nodemanager-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-server-common-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-server-applicationhistoryservice-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-registry-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-common-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-client-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-applications-unmanaged-am-launcher-2.7.1.jar;D:\Software\h
adoop-2.7.1\share\hadoop\yarn\hadoop-yarn-applications-distributedshell-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\yarn\hadoop-yarn-api-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\xmlenc-0.52.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\xml-apis-1.3.04.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\xercesImpl-2.9.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\servlet-api-2.5.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\protobuf-java-2.5.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\netty-all-4.0.23.Final.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\netty-3.6.2.Final.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\log4j-1.2.17.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\leveldbjni-all-1.8.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jsr305-3.0.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jetty-util-6.1.26.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jetty-6.1.26.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jersey-server-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jersey-core-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jackson-mapper-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\jackson-core-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\htrace-core-3.1.0-incubating.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\guava-11.0.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\commons-logging-1.1.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\commons-lang-2.6.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\commons-io-2.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\commons-daemon-1.0.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\commons-codec-1.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\commons-cli-1.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\lib\asm-3.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\hadoop-hdfs-nfs-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\hdfs\hadoop-hdfs-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\xz-1.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\snappy-java-1.0.4.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\protobuf-java-2.5.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\paranamer-2.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\netty-3.6.2.Final.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\log4j-1.2.17.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\leveldbjni-all-1.8.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\junit-4.11.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\jersey-server-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\jersey-guice-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\jersey-core-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\javax.inject-1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\jackson-mapper-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\jackson-core-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\hamcrest-core-1.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\hadoop-annotations-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\guice-servlet-3.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\guice-3.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\commons-io-2.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\commons-compress-1.4.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\avro-1.7.4.jar;D:\Software\hadoop-
2.7.1\share\hadoop\mapreduce\lib\asm-3.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\lib\aopalliance-1.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-shuffle-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-plugins-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-core-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-common-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\mapreduce\hadoop-mapreduce-client-app-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\zookeeper-3.4.6.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\xz-1.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\xmlenc-0.52.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\stax-api-1.0-2.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\snappy-java-1.0.4.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\slf4j-log4j12-1.7.10.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\slf4j-api-1.7.10.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\servlet-api-2.5.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\protobuf-java-2.5.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\paranamer-2.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\netty-3.6.2.Final.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\mockito-all-1.8.5.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\log4j-1.2.17.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\junit-4.11.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jsr305-3.0.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jsp-api-2.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jsch-0.1.42.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jetty-util-6.1.26.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jetty-6.1.26.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jettison-1.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jets3t-0.9.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jersey-server-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jersey-json-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jersey-core-1.9.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jaxb-impl-2.2.3-1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jaxb-api-2.2.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\java-xmlbuilder-0.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jackson-xc-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jackson-mapper-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jackson-jaxrs-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\jackson-core-asl-1.9.13.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\httpcore-4.2.5.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\httpclient-4.2.5.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\htrace-core-3.1.0-incubating.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\hamcrest-core-1.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\hadoop-auth-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\hadoop-annotations-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\guava-11.0.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\gson-2.2.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\curator-recipes-2.7.1.jar;D:\Software\hadoop-2.7.1
\share\hadoop\common\lib\curator-framework-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\curator-client-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-net-3.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-math3-3.1.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-logging-1.1.3.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-lang-2.6.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-io-2.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-httpclient-3.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-digester-1.8.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-configuration-1.6.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-compress-1.4.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-collections-3.2.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-codec-1.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-cli-1.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-beanutils-core-1.8.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\commons-beanutils-1.7.0.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\avro-1.7.4.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\asm-3.2.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\api-util-1.0.0-M20.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\api-asn1-api-1.0.0-M20.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\apacheds-kerberos-codec-2.0.0-M15.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\apacheds-i18n-2.0.0-M15.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\lib\activation-1.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\hadoop-nfs-2.7.1.jar;D:\Software\hadoop-2.7.1\share\hadoop\common\hadoop-common-2.7.1.jar;D:\Software\hbase-0.98.13\lib\activation-1.1.jar;D:\Software\hbase-0.98.13\lib\aopalliance-1.0.jar;D:\Software\hbase-0.98.13\lib\asm-3.1.jar;D:\Software\hbase-0.98.13\lib\avro-1.7.4.jar;D:\Software\hbase-0.98.13\lib\commons-beanutils-1.7.0.jar;D:\Software\hbase-0.98.13\lib\commons-beanutils-core-1.8.0.jar;D:\Software\hbase-0.98.13\lib\commons-cli-1.2.jar;D:\Software\hbase-0.98.13\lib\commons-codec-1.7.jar;D:\Software\hbase-0.98.13\lib\commons-collections-3.2.1.jar;D:\Software\hbase-0.98.13\lib\commons-compress-1.4.1.jar;D:\Software\hbase-0.98.13\lib\commons-configuration-1.6.jar;D:\Software\hbase-0.98.13\lib\commons-daemon-1.0.13.jar;D:\Software\hbase-0.98.13\lib\commons-digester-1.8.jar;D:\Software\hbase-0.98.13\lib\commons-el-1.0.jar;D:\Software\hbase-0.98.13\lib\commons-httpclient-3.1.jar;D:\Software\hbase-0.98.13\lib\commons-io-2.4.jar;D:\Software\hbase-0.98.13\lib\commons-lang-2.6.jar;D:\Software\hbase-0.98.13\lib\commons-logging-1.1.1.jar;D:\Software\hbase-0.98.13\lib\commons-math-2.1.jar;D:\Software\hbase-0.98.13\lib\commons-net-3.1.jar;D:\Software\hbase-0.98.13\lib\findbugs-annotations-1.3.9-1.jar;D:\Software\hbase-0.98.13\lib\gmbal-api-only-3.0.0-b023.jar;D:\Software\hbase-0.98.13\lib\grizzly-framework-2.1.2.jar;D:\Software\hbase-0.98.13\lib\grizzly-http-2.1.2.jar;D:\Software\hbase-0.98.13\lib\grizzly-http-server-2.1.2.jar;D:\Software\hbase-0.98.13\lib\grizzly-http-servlet-2.1.2.jar;D:\Software\hbase-0.98.13\lib\grizzly-rcm-2.1.2.jar;D:\Software\hbase-0.98.13\lib\guava-12.0.1.jar;D:\Software\hbase-0.98.13\lib\guice-3.0.jar;D:\Software\hbase-0.98.13\lib\guice-servlet-3.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-annotations-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-auth-2.2.0.jar;D:\Software\hbase-0.98.13\lib
\hadoop-client-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-common-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-hdfs-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-mapreduce-client-app-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-mapreduce-client-common-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-mapreduce-client-core-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-mapreduce-client-jobclient-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-mapreduce-client-shuffle-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-yarn-api-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-yarn-client-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-yarn-common-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-yarn-server-common-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hadoop-yarn-server-nodemanager-2.2.0.jar;D:\Software\hbase-0.98.13\lib\hamcrest-core-1.3.jar;D:\Software\hbase-0.98.13\lib\hbase-annotations-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-checkstyle-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-client-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-common-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-common-0.98.13-hadoop2-tests.jar;D:\Software\hbase-0.98.13\lib\hbase-examples-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-hadoop2-compat-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-hadoop-compat-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-it-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-it-0.98.13-hadoop2-tests.jar;D:\Software\hbase-0.98.13\lib\hbase-prefix-tree-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-protocol-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-rest-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-server-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-server-0.98.13-hadoop2-tests.jar;D:\Software\hbase-0.98.13\lib\hbase-shell-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-testing-util-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\hbase-thrift-0.98.13-hadoop2.jar;D:\Software\hbase-0.98.13\lib\high-scale-lib-1.1.1.jar;D:\Software\hbase-0.98.13\lib\htrace-core-2.04.jar;D:\Software\hbase-0.98.13\lib\httpclient-4.1.3.jar;D:\Software\hbase-0.98.13\lib\httpcore-4.1.3.jar;D:\Software\hbase-0.98.13\lib\jackson-core-asl-1.8.8.jar;D:\Software\hbase-0.98.13\lib\jackson-jaxrs-1.8.8.jar;D:\Software\hbase-0.98.13\lib\jackson-mapper-asl-1.8.8.jar;D:\Software\hbase-0.98.13\lib\jackson-xc-1.8.8.jar;D:\Software\hbase-0.98.13\lib\jamon-runtime-2.3.1.jar;D:\Software\hbase-0.98.13\lib\jasper-compiler-5.5.23.jar;D:\Software\hbase-0.98.13\lib\jasper-runtime-5.5.23.jar;D:\Software\hbase-0.98.13\lib\javax.inject-1.jar;D:\Software\hbase-0.98.13\lib\javax.servlet-3.1.jar;D:\Software\hbase-0.98.13\lib\javax.servlet-api-3.0.1.jar;D:\Software\hbase-0.98.13\lib\jaxb-api-2.2.2.jar;D:\Software\hbase-0.98.13\lib\jaxb-impl-2.2.3-1.jar;D:\Software\hbase-0.98.13\lib\jcodings-1.0.8.jar;D:\Software\hbase-0.98.13\lib\jersey-client-1.8.jar;D:\Software\hbase-0.98.13\lib\jersey-core-1.8.jar;D:\Software\hbase-0.98.13\lib\jersey-grizzly2-1.9.jar;D:\Software\hbase-0.98.13\lib\jersey-guice-1.9.jar;D:\Software\hbase-0.98.13\lib\jersey-json-1.8.jar;D:\Software\hbase-0.98.13\lib\jersey-server-1.8.jar;D:\Software\hbase-0.98.13\lib\jersey-test-framework-core-1.9.jar;D:\Software\hbase-0.98.13\lib\jersey-test-framework-grizzly2-1.9.jar;D:\Software\hbase-0.98.13\lib\jets3t-0.6.1.jar;D:\Software\hbase-0.98.13\lib\jettison-1.3.1.jar;D:\Software\hbase-0.98.13\lib\jetty-6.1.26.jar;D:\Software\hbase-0.98.13\lib\jetty-ss
lengine-6.1.26.jar;D:\Software\hbase-0.98.13\lib\jetty-util-6.1.26.jar;D:\Software\hbase-0.98.13\lib\joni-2.1.2.jar;D:\Software\hbase-0.98.13\lib\jruby-complete-1.6.8.jar;D:\Software\hbase-0.98.13\lib\jsch-0.1.42.jar;D:\Software\hbase-0.98.13\lib\jsp-2.1-6.1.14.jar;D:\Software\hbase-0.98.13\lib\jsp-api-2.1-6.1.14.jar;D:\Software\hbase-0.98.13\lib\jsr305-1.3.9.jar;D:\Software\hbase-0.98.13\lib\junit-4.11.jar;D:\Software\hbase-0.98.13\lib\libthrift-0.9.0.jar;D:\Software\hbase-0.98.13\lib\log4j-1.2.17.jar;D:\Software\hbase-0.98.13\lib\management-api-3.0.0-b012.jar;D:\Software\hbase-0.98.13\lib\metrics-core-2.2.0.jar;D:\Software\hbase-0.98.13\lib\netty-3.6.6.Final.jar;D:\Software\hbase-0.98.13\lib\paranamer-2.3.jar;D:\Software\hbase-0.98.13\lib\protobuf-java-2.5.0.jar;D:\Software\hbase-0.98.13\lib\servlet-api-2.5-6.1.14.jar;D:\Software\hbase-0.98.13\lib\slf4j-api-1.6.4.jar;D:\Software\hbase-0.98.13\lib\slf4j-log4j12-1.6.4.jar;D:\Software\hbase-0.98.13\lib\snappy-java-1.0.4.1.jar;D:\Software\hbase-0.98.13\lib\xmlenc-0.52.jar;D:\Software\hbase-0.98.13\lib\xz-1.0.jar;D:\Software\hbase-0.98.13\lib\zookeeper-3.4.6.jar
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.library.path=C:\Program Files (x86)\Java\jdk1.7.0_79\bin;C:\Windows\Sun\Java\bin;C:\Windows\system32;C:\Windows;C:/Program Files (x86)/Java/jdk1.7.0_79/bin/../jre/bin/client;C:/Program Files (x86)/Java/jdk1.7.0_79/bin/../jre/bin;C:/Program Files (x86)/Java/jdk1.7.0_79/bin/../jre/lib/i386;D:\Software\hadoop-2.7.1\bin;D:\Software\hbase-0.98.13\bin;C:\Program Files (x86)\Java\jdk1.7.0_79\bin;C:\Program Files (x86)\Java\jdk1.7.0_79\jre\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;D:\Software\TortoiseSVN\bin;D:\Software\eclipse;;.
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.io.tmpdir=C:\Users\hyq\AppData\Local\Temp\
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:os.name=Windows 7
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:os.arch=x86
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:os.version=6.1
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:user.name=hyq
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:user.home=C:\Users\hyq
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Client environment:user.dir=E:\java\workspace\01_hbase_upload_file
15/08/21 17:34:30 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=192.168.100.142:2181,192.168.100.141:2181,192.168.100.143:2181 sessionTimeout=90000 watcher=hconnection-0xb481830x0, quorum=192.168.100.142:2181,192.168.100.141:2181,192.168.100.143:2181, baseZNode=/hbase
15/08/21 17:34:30 INFO zookeeper.ClientCnxn: Opening socket connection to server 192.168.100.143/192.168.100.143:2181. Will not attempt to authenticate using SASL (unknown error)
15/08/21 17:34:31 INFO zookeeper.ClientCnxn: Socket connection established to 192.168.100.143/192.168.100.143:2181, initiating session
15/08/21 17:34:31 INFO zookeeper.ClientCnxn: Session establishment complete on server 192.168.100.143/192.168.100.143:2181, sessionid = 0x24f4e12a0bf0017, negotiated timeout = 90000
15/08/21 17:34:31 INFO mapreduce.TableOutputFormat: Created table instance for wlan_log
15/08/21 17:34:31 WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
15/08/21 17:34:31 WARN mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
15/08/21 17:34:31 INFO input.FileInputFormat: Total input paths to process : 1
15/08/21 17:34:31 INFO mapreduce.JobSubmitter: number of splits:1
15/08/21 17:34:31 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_local1666334522_0001
15/08/21 17:34:32 INFO mapreduce.Job: The url to track the job: http://localhost:8080/
15/08/21 17:34:32 INFO mapreduce.Job: Running job: job_local1666334522_0001
15/08/21 17:34:32 INFO mapred.LocalJobRunner: OutputCommitter set in config null
15/08/21 17:34:32 INFO Configuration.deprecation: dfs.socket.timeout is deprecated. Instead, use dfs.client.socket-timeout
15/08/21 17:34:32 INFO mapreduce.TableOutputFormat: Created table instance for wlan_log
15/08/21 17:34:32 INFO mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.hbase.mapreduce.TableOutputCommitter
15/08/21 17:34:32 INFO mapred.LocalJobRunner: Waiting for map tasks
15/08/21 17:34:32 INFO mapred.LocalJobRunner: Starting task: attempt_local1666334522_0001_m_000000_0
15/08/21 17:34:32 INFO mapreduce.TableOutputFormat: Created table instance for wlan_log
15/08/21 17:34:32 INFO util.ProcfsBasedProcessTree: ProcfsBasedProcessTree currently is supported only on Linux.
15/08/21 17:34:32 INFO mapred.Task:  Using ResourceCalculatorProcessTree : org.apache.hadoop.yarn.util.WindowsBasedProcessTree@14d1552
15/08/21 17:34:32 INFO mapred.MapTask: Processing split: hdfs://192.168.100.141:9000/urls/seed.txt:0+216
15/08/21 17:34:32 INFO mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
15/08/21 17:34:32 INFO mapred.MapTask: mapreduce.task.io.sort.mb: 100
15/08/21 17:34:32 INFO mapred.MapTask: soft limit at 83886080
15/08/21 17:34:32 INFO mapred.MapTask: bufstart = 0; bufvoid = 104857600
15/08/21 17:34:32 INFO mapred.MapTask: kvstart = 26214396; length = 6553600
15/08/21 17:34:32 INFO mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
Error parsing http://news.163.com/ For input string: "http://news.163.com/"
Error parsing http://www.gov.cn/ For input string: "http://www.gov.cn/"
Error parsing http://www.sbsm.gov.cn/ For input string: "http://www.sbsm.gov.cn/"
Error parsing http://news.stnn.cc/china/ For input string: "http://news.stnn.cc/china/"
Error parsing http://www.zaobao.com/wencui/social For input string: "http://www.zaobao.com/wencui/social"
Error parsing http://www.xinhuanet.com/politics/1.htm For input string: "http://www.xinhuanet.com/politics/1.htm"
Error parsing http://news.china.com.cn/shehui/node_7185045.htm For input string: "http://news.china.com.cn/shehui/node_7185045.htm"
15/08/21 17:34:32 INFO mapred.LocalJobRunner:
15/08/21 17:34:32 INFO mapred.MapTask: Starting flush of map output
15/08/21 17:34:32 INFO mapred.Task: Task:attempt_local1666334522_0001_m_000000_0 is done. And is in the process of committing
15/08/21 17:34:32 INFO mapred.LocalJobRunner: map
15/08/21 17:34:32 INFO mapred.Task: Task 'attempt_local1666334522_0001_m_000000_0' done.
15/08/21 17:34:32 INFO mapred.LocalJobRunner: Finishing task: attempt_local1666334522_0001_m_000000_0
15/08/21 17:34:32 INFO mapred.LocalJobRunner: map task executor complete.
15/08/21 17:34:32 INFO mapred.LocalJobRunner: Waiting for reduce tasks
15/08/21 17:34:32 INFO mapred.LocalJobRunner: Starting task: attempt_local1666334522_0001_r_000000_0
15/08/21 17:34:32 INFO mapreduce.TableOutputFormat: Created table instance for wlan_log
15/08/21 17:34:32 INFO util.ProcfsBasedProcessTree: ProcfsBasedProcessTree currently is supported only on Linux.
15/08/21 17:34:32 INFO mapred.Task:  Using ResourceCalculatorProcessTree : org.apache.hadoop.yarn.util.WindowsBasedProcessTree@1f4f32e
15/08/21 17:34:32 INFO mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8c8e1c
15/08/21 17:34:32 INFO reduce.MergeManagerImpl: MergerManager: memoryLimit=181665792, maxSingleShuffleLimit=45416448, mergeThreshold=119899424, ioSortFactor=10, memToMemMergeOutputsThreshold=10
15/08/21 17:34:32 INFO reduce.EventFetcher: attempt_local1666334522_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
15/08/21 17:34:32 INFO reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1666334522_0001_m_000000_0 decomp: 2 len: 6 to MEMORY
15/08/21 17:34:32 INFO reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1666334522_0001_m_000000_0
15/08/21 17:34:32 INFO reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
15/08/21 17:34:32 INFO reduce.EventFetcher: EventFetcher is interrupted.. Returning
15/08/21 17:34:32 INFO mapred.LocalJobRunner: 1 / 1 copied.
15/08/21 17:34:32 INFO reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
15/08/21 17:34:32 INFO mapred.Merger: Merging 1 sorted segments
15/08/21 17:34:32 INFO mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
15/08/21 17:34:32 INFO reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
15/08/21 17:34:32 INFO reduce.MergeManagerImpl: Merging 1 files, 6 bytes from disk
15/08/21 17:34:32 INFO reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
15/08/21 17:34:32 INFO mapred.Merger: Merging 1 sorted segments
15/08/21 17:34:32 INFO mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
15/08/21 17:34:32 INFO mapred.LocalJobRunner: 1 / 1 copied.
15/08/21 17:34:32 INFO Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
15/08/21 17:34:32 INFO mapred.Task: Task:attempt_local1666334522_0001_r_000000_0 is done. And is in the process of committing
15/08/21 17:34:32 INFO mapred.LocalJobRunner: reduce > reduce
15/08/21 17:34:32 INFO mapred.Task: Task 'attempt_local1666334522_0001_r_000000_0' done.
15/08/21 17:34:32 INFO mapred.LocalJobRunner: Finishing task: attempt_local1666334522_0001_r_000000_0
15/08/21 17:34:32 INFO mapred.LocalJobRunner: reduce task executor complete.
15/08/21 17:34:33 INFO mapreduce.Job: Job job_local1666334522_0001 running in uber mode : false
15/08/21 17:34:33 INFO mapreduce.Job:  map 100% reduce 100%
15/08/21 17:34:33 INFO mapreduce.Job: Job job_local1666334522_0001 completed successfully
15/08/21 17:34:33 INFO mapreduce.Job: Counters: 36
File System Counters
FILE: Number of bytes read=368
FILE: Number of bytes written=551508
FILE: Number of read operations=0
FILE: Number of large read operations=0
FILE: Number of write operations=0
HDFS: Number of bytes read=432
HDFS: Number of bytes written=0
HDFS: Number of read operations=6
HDFS: Number of large read operations=0
HDFS: Number of write operations=0
Map-Reduce Framework
Map input records=7
Map output records=0
Map output bytes=0
Map output materialized bytes=6
Input split bytes=106
Combine input records=0
Combine output records=0
Reduce input groups=0
Reduce shuffle bytes=6
Reduce input records=0
Reduce output records=0
Spilled Records=0
Shuffled Maps =1
Failed Shuffles=0
Merged Map outputs=1
GC time elapsed (ms)=38
Total committed heap usage (bytes)=242360320
BatchImport
ErrorFormat=7
Shuffle Errors
BAD_ID=0
CONNECTION=0
IO_ERROR=0
WRONG_LENGTH=0
WRONG_MAP=0
WRONG_REDUCE=0
File Input Format Counters
Bytes Read=216
File Output Format Counters
Bytes Written=0
#1 aqi915 2015-08-21
Hi, expert:
    I'd like to ask: what does your "\t" mean?

