头歌: Tourism Website Data Analysis


Level 1: Compute the Average Hotel Price for Each City

package com.processdata;

import java.io.IOException;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.util.HBaseUtil;

/**
 * Uses a MapReduce job to process the data in HBase and writes
 * the final result into another table.
 */
public class HBaseMapReduce extends Configured implements Tool {

    public static class MyMapper extends TableMapper<Text, DoubleWritable> {
        public static final byte[] column = "price".getBytes();
        public static final byte[] family = "hotel_info".getBytes();

        @Override
        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Read the city id from the cityIdInfo family
            String cityId = Bytes.toString(result.getValue("cityIdInfo".getBytes(), "cityId".getBytes()));
            // Read the hotel price from hotel_info:price
            byte[] value = result.getValue(family, column);
            // Parse the price into a double
            double price = Double.parseDouble(Bytes.toString(value));
            // Emit (city id, hotel price)
            context.write(new Text(cityId), new DoubleWritable(price));
            /********** End *********/
        }
    }

    public static class MyTableReducer extends TableReducer<Text, DoubleWritable, ImmutableBytesWritable> {
        @Override
        public void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
            /********** Begin *********/
            // Sum all hotel prices for this city and count them
            double sum = 0;
            int count = 0;
            for (DoubleWritable num : values) {
                count++;
                sum += num.get();
            }
            double avePrice = sum / count;
            // Row key: city id; average_infos:price stores the average as a string
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn("average_infos".getBytes(), "price".getBytes(), Bytes.toBytes(String.valueOf(avePrice)));
            // initTableReducerJob already set the output table, so no table name is needed here
            context.write(null, put);
            /********** End *********/
        }

    }

    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseConfiguration.create(getConf());
        conf.set("hbase.zookeeper.quorum", "127.0.0.1");          // HBase ZooKeeper quorum address
        conf.set("hbase.zookeeper.property.clientPort", "2181");  // ZooKeeper client port
        // Read the source and target table names from stdin,
        // e.g. "t_city_hotels_info" and "average_table"
        Scanner sc = new Scanner(System.in);
        String arg1 = sc.next();
        String arg2 = sc.next();
        try {
            HBaseUtil.createTable("average_table", new String[] {"average_infos"});
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf,new String[]{arg1,arg2});
        return job.waitForCompletion(true) ? 0 : 1;
    }

    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false);  // never enable block caching for MapReduce scans
        // Initialize the mapper over the source table
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, DoubleWritable.class, job);
        // Initialize the reducer; the output table name is set here
        TableMapReduceUtil.initTableReducerJob(
                targetTable,             // output table
                MyTableReducer.class,    // reducer class
                job);
        job.setNumReduceTasks(1);
        return job;
    }
}
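
Neither solution class defines a main method; on the 头歌 platform the job is launched by the platform's own driver. If you want to run it by hand, a minimal driver sketch could look like the following (the class name Driver is hypothetical; it assumes the HBase client jars and HBaseUtil are on the classpath). Note that run() reads the two table names from standard input, so after starting it you would type, e.g., t_city_hotels_info and average_table.

package com.processdata;

import org.apache.hadoop.util.ToolRunner;

public class Driver {
    public static void main(String[] args) throws Exception {
        // ToolRunner parses generic Hadoop options, injects the Configuration
        // via setConf(), and then invokes HBaseMapReduce.run()
        int exitCode = ToolRunner.run(new HBaseMapReduce(), args);
        System.exit(exitCode);
    }
}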

Level 2: Count the Most Frequent Words in Hotel Reviews

package com.processdata;
import java.io.IOException;
import java.util.List;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apdplat.word.WordSegmenter;
import org.apdplat.word.segmentation.Word;
import com.util.HBaseUtil;
import com.vdurmont.emoji.EmojiParser;

/**
 * Word frequency count over hotel review comments.
 */
public class WorldCountMapReduce extends Configured implements Tool {
    

    public static class MyMapper extends TableMapper<Text, IntWritable> {
        private static byte[] family = "comment_info".getBytes();
    	private static byte[] column = "content".getBytes();
        
        @Override
        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Read the raw comment text from comment_info:content
            byte[] value = result.getValue(family, column);
            if (value == null) {
                return;
            }
            String content = new String(value, "utf-8");
            if (!content.isEmpty()) {
                // Strip emojis, then segment the comment into words
                String filtered = EmojiParser.removeAllEmojis(content);
                List<Word> segs = WordSegmenter.seg(filtered);
                // Emit (word, 1) for every segmented word
                for (Word cont : segs) {
                    context.write(new Text(cont.getText()), new IntWritable(1));
                }
            }
            /********** End *********/
    	}
    }

    public static class MyReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        private static byte[] family =  "word_info".getBytes();
        private static byte[] column = "count".getBytes();
        
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            /********** Begin *********/
            // Sum the 1s emitted by the mapper for this word
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            // Row key: the word; word_info:count stores the total as a 4-byte int
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(family, column, Bytes.toBytes(sum));
            context.write(null, put);
            /********** End *********/
        }

    }

    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseConfiguration.create(getConf());
        conf.set("hbase.zookeeper.quorum", "127.0.0.1");          // HBase ZooKeeper quorum address
        conf.set("hbase.zookeeper.property.clientPort", "2181");  // ZooKeeper client port
        // Read the source and target table names from stdin
        Scanner sc = new Scanner(System.in);
        String arg1 = sc.next();
        String arg2 = sc.next();
        try {
            HBaseUtil.createTable("comment_word_count", new String[] {"word_info"});
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf,new String[]{arg1,arg2});
        return job.waitForCompletion(true) ? 0 : 1;
    }

    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false);  // never enable block caching for MapReduce scans
        // Initialize the mapper and reducer
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, IntWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(targetTable, MyReducer.class, job);
        job.setNumReduceTasks(1);
        return job;
    }

}
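
One subtle point: the Level 1 reducer stores the average price as a string (Bytes.toBytes(String.valueOf(avePrice))), while the Level 2 reducer stores the count as a raw 4-byte int (Bytes.toBytes(sum)), so the two tables must be decoded differently when read back. A minimal verification sketch, assuming the job has finished and the comment_word_count table is populated (the class name WordCountReader is hypothetical):

package com.processdata;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class WordCountReader {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "127.0.0.1");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("comment_word_count"));
             ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result r : scanner) {
                // Row key is the word itself
                String word = Bytes.toString(r.getRow());
                // The count was written with Bytes.toBytes(int),
                // so decode it with Bytes.toInt, not Bytes.toString
                int count = Bytes.toInt(r.getValue("word_info".getBytes(), "count".getBytes()));
                System.out.println(word + "\t" + count);
            }
        }
    }
}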

