1. Install the dependency packages
yum install rsync gcc openldap-devel python-ldap mysql-devel python-devel python-setuptools python-simplejson sqlite-devel libxml2-devel libxslt-devel cyrus-sasl-devel
2. Download the CDH build of Hue
wget http://archive-primary.cloudera.com/cdh5/cdh/5/hue-3.7.0-cdh5.4.2.tar.gz
3. Extract and install
tar zxvf hue-3.7.0-cdh5.4.2.tar.gz
cd hue-3.7.0-cdh5.4.2
make install PREFIX=/hue HADOOP_HOME=/home/hadoop/hadoop
ln -s /hue/hue/desktop/libs/hadoop/java-lib/hue-plugins-3.7.0-cdh5.4.2.jar /home/hadoop/hadoop/lib
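If the build succeeded, the wrapped Hue entry point used in the later steps should exist under the install prefix; a quick sanity check:
ls -l /hue/hue/build/env/bin/hue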
4. Create the MySQL database for Hue
mysql -u root -p
create database hue;
grant all on hue.* to 'hue'@'localhost' identified by 'Ab1234567890';
grant all on hue.* to 'hue'@'hd1' identified by 'Ab1234567890';
grant all on hue.* to 'hue'@'%' identified by 'Ab1234567890';
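Before running the sync in the next step, Hue must be pointed at this MySQL database, otherwise it falls back to a local SQLite file. A minimal sketch of the [[database]] block in hue.ini (the same file edited in step 6), using the credentials created above; adjust host and port to match your MySQL server:
[desktop]
[[database]]
engine=mysql
host=localhost
port=3306
user=hue
password=Ab1234567890
name=hue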
5. Sync the initial Hue data
/hue/hue/build/env/bin/hue migrate
/hue/hue/build/env/bin/hue syncdb
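As a quick check that the initialization reached MySQL (assuming the [[database]] settings sketched above), list the tables it created:
mysql -u hue -pAb1234567890 hue -e "show tables;"
The output should include Django tables such as auth_user.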
6. Configuration file
vim /hue/hue/desktop/conf/hue.ini
[desktop]
secret_key=XXXX11112222
http_host=hd1    # an IP address also works
http_port=7777
time_zone=Asia/Shanghai
server_user=hadoop
server_group=hadoop
default_user=hadoop
default_hdfs_superuser=hadoop
default_site_encoding=utf-8
[hadoop]
fs_defaultfs=hdfs://hd1:9000
webhdfs_url=http://hd1:14000/webhdfs/v1
hadoop_conf_dir=/home/hadoop/hadoop/etc/hadoop
[beeswax]
hive_server_host=hd1
hive_server_port=10000
hive_conf_dir=/usr/local/spark/spark-1.3.0-bin-hadoop2.3/conf
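Note that port 14000 in webhdfs_url above is the HttpFS service, not the NameNode's embedded WebHDFS, so HttpFS must be running and allowed to impersonate users. A sketch of the relevant httpfs-site.xml properties, assuming Hue runs as the hadoop user configured above:
<property>
<name>httpfs.proxyuser.hadoop.hosts</name>
<value>*</value>
</property>
<property>
<name>httpfs.proxyuser.hadoop.groups</name>
<value>*</value>
</property>
Start HttpFS with /home/hadoop/hadoop/sbin/httpfs.sh start, then start Hue itself; with the PREFIX=/hue layout from step 3, the standard entry point is the supervisor script:
/hue/hue/build/env/bin/supervisor
Hue should then be reachable at http://hd1:7777.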
Features you don't use can be left unconfigured.
Beeswax is the Hive component. If you want to use Spark SQL, first make sure Spark's Thrift Server is working properly (see the sketch below); Hue will then send the HiveQL through Beeswax straight to the Spark SQL Thrift Server.
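A minimal sketch of bringing up the Spark Thrift Server so it listens where the [beeswax] section above expects it (host hd1, port 10000); the master URL and other spark-submit options are omitted and depend on your cluster:
/usr/local/spark/spark-1.3.0-bin-hadoop2.3/sbin/start-thriftserver.sh \
  --hiveconf hive.server2.thrift.bind.host=hd1 \
  --hiveconf hive.server2.thrift.port=10000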
Original post: http://7737197.blog.51cto.com/7727197/1664721