spark2

tar xzf hadoop-x.x.x.tar.gz          # extract the Hadoop tarball
mv hadoop-x.x.x /usr/local/hadoop    # install path matches HADOOP_HOME below
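The same unpack-and-move pattern applies to the other components. A minimal sketch, with placeholder tarball names (substitute the real versions) and the install paths that the .bashrc section below expects:

tar xzf jdk-x.x.x.tar.gz    && mv jdk-x.x.x    /usr/lib/java
tar xzf scala-x.x.x.tgz     && mv scala-x.x.x  /usr/lib/scala
tar xzf spark-x.x.x.tgz     && mv spark-x.x.x  /usr/local/spark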

----------------------
sudo -s            # switch to a root shell
passwd root        # set the root password
apt-get update
apt-get install vim vim-gtk

vim /usr/share/lightdm/lightdm.conf.d/50-ubuntu.conf
# allow manual (root) login from the LightDM greeter:
user-session=ubuntu
greeter-session=unity-greeter
greeter-show-manual-login=true
allow-guest=false

vim /root/.profile
# change "mesg n" to "tty -s && mesg n"
# (silences the "mesg: ttyname failed" error when logging in as root)
--------------------------
apt-get install ssh
vim /etc/ssh/sshd_config
# change "PermitRootLogin prohibit-password" to "PermitRootLogin yes"
service ssh restart

ssh-keygen -t rsa                  # accept the defaults, empty passphrase
cat /root/.ssh/id_rsa.pub >> /root/.ssh/authorized_keys
ssh localhost                      # should log in without a password
exit

/etc/init.d/ssh {start|stop|restart}
ps -e | grep ssh
----------------------
vim /etc/hostname        # set to master (or slaveN) on each node
reboot

vim /etc/hosts
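Every node needs identical hostname-to-IP entries. A sketch, assuming two slaves on the same subnet as the master address used elsewhere in these notes (the slave IPs are placeholders):

192.168.199.191 master
192.168.199.192 slave1
192.168.199.193 slave2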

# on each slave, copy its public key to master:
scp /root/.ssh/id_rsa.pub root@master:/root/.ssh/id_rsa.pub.slaveN
# on master, merge all public keys into authorized_keys:
cat id_rsa.pub >> authorized_keys
cat id_rsa.pub.slaveN >> authorized_keys
# push the merged authorized_keys from master back to every slave:
scp authorized_keys root@slaveN:/root/.ssh/authorized_keys
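Once the merged authorized_keys is on every node, master should reach each slave without a password prompt (slave1/slave2 are the placeholder hostnames from above):

ssh root@slave1 hostname    # prints slave1, no password asked
ssh root@slave2 hostname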

----------------------------------
vim ~/.bashrc
export SCALA_HOME=/usr/lib/scala

export HADOOP_HOME=/usr/local/hadoop
#export YARN_HOME=/usr/local/hadoop
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
#export YARN_CONF_DIR=${HADOOP_HOME}/etc/hadoop
#export HADOOP_PREFIX=${HADOOP_HOME}
#export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
#export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_PREFIX}/lib/native
#export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
#export HADOOP_MAPRED_HOME=${HADOOP_PREFIX}
#export LD_LIBRARY_PATH=${HADOOP_PREFIX}/lib/native

export SPARK_HOME=/usr/local/spark
export SPARK_MASTER_IP=192.168.199.191
export SPARK_WORKER_MEMORY=2g

export JAVA_HOME=/usr/lib/java
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:${JRE_HOME}/bin:${SCALA_HOME}/bin:${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:${SPARK_HOME}/bin:${SPARK_HOME}/sbin:$PATH
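Reload the shell environment and sanity-check that each tool resolves:

source ~/.bashrc
java -version
scala -version
hadoop version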


--------------------------
vim slaves    # in ${HADOOP_HOME}/etc/hadoop; Spark reads its own conf/slaves
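One worker hostname per line; with the two placeholder slaves from /etc/hosts the file would read:

slave1
slave2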
--------------------------
vim core-site.xml
<configuration>
        <property>
                <name>fs.defaultFS</name>
                <value>hdfs://master:9000</value>
        </property>
        <property>
                <name>hadoop.tmp.dir</name>
                <value>file:/usr/local/hadoop/tmp</value>
        </property>
        <property>
                <name>io.file.buffer.size</name>
                <value>131072</value>
        </property>
        <property>
                <name>hadoop.proxyuser.hadoop.hosts</name>
                <value>*</value>
        </property>
        <property>
                <name>hadoop.proxyuser.hadoop.groups</name>
                <value>*</value>
        </property>
</configuration>
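hadoop.tmp.dir points at a local directory; creating it up front on every node avoids permission surprises on first start:

mkdir -p /usr/local/hadoop/tmp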
------------------------------
vim hdfs-site.xml

<configuration>
        <property>
                <name>dfs.replication</name>
                <value>2</value>
        </property>
        <property>
                <name>dfs.namenode.secondary.http-address</name>
                <value>master:9001</value>
        </property>
        <property>
                <name>dfs.http.address</name>
                <value>master:50070</value>
        </property>

        <property>
                <name>dfs.namenode.name.dir</name>
                <value>file:/usr/local/hadoop/dfs/name</value>
        </property>
        <property>
                <name>dfs.datanode.data.dir</name>
                <value>file:/usr/local/hadoop/dfs/data</value>
        </property>
        <property>
                <name>dfs.namenode.checkpoint.dir</name>
                <value>file:/usr/local/hadoop/dfs/namesecondary</value>
        </property>
        <property>
                <name>dfs.webhdfs.enabled</name>
                <value>true</value>
        </property>
     <!--   <property>
                <name>dfs.namenode.datanode.registration.ip-hostname-check</name>
                <value>false</value>
        </property>-->
</configuration>
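Create the name, data, and checkpoint directories declared above on the relevant nodes, then format HDFS once (on master only, before the first start):

mkdir -p /usr/local/hadoop/dfs/name /usr/local/hadoop/dfs/data /usr/local/hadoop/dfs/namesecondary
hdfs namenode -format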

---------------------------
vim mapred-site.xml
<configuration>
        <property>
                <name>mapreduce.framework.name</name>
                <value>yarn</value>
        </property>
        <property>
                <name>mapreduce.jobhistory.address</name>
                <value>master:10020</value>
        </property>
        <property>
                <name>mapreduce.jobhistory.webapp.address</name>
                <value>master:19888</value>
        </property>
</configuration>
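Note that a stock Hadoop 2.x tarball ships only a template for this file; if mapred-site.xml did not exist before the edit above, create it from the template first:

cd ${HADOOP_HOME}/etc/hadoop
cp mapred-site.xml.template mapred-site.xml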
--------------------------
vim yarn-site.xml
<configuration>

<!-- Site specific YARN configuration properties -->
        <property>
                <name>yarn.resourcemanager.hostname</name>
                <value>master</value>
        </property>
        <property>
                <name>yarn.nodemanager.aux-services</name>
                <value>mapreduce_shuffle</value>
        </property>

        <property>
                <name>yarn.resourcemanager.address</name>
                <value>master:8032</value>
        </property>
        <property>
                <name>yarn.resourcemanager.scheduler.address</name>
                <value>master:8030</value>
        </property>
        <property>
                <name>yarn.resourcemanager.resource-tracker.address</name>
                <value>master:8031</value>
        </property>
        <property>
                <name>yarn.resourcemanager.admin.address</name>
                <value>master:8033</value>
        </property>
        <property>
                <name>yarn.resourcemanager.webapp.address</name>
                <value>master:8088</value>
        </property>
</configuration>
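With the Hadoop configs finished and copied to every node, start HDFS and YARN from master and check the daemons:

start-dfs.sh
start-yarn.sh
jps    # master: NameNode, SecondaryNameNode, ResourceManager
       # slaves: DataNode, NodeManager
# web UIs: http://master:50070 (HDFS), http://master:8088 (YARN)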

-------------------------------------
vim hadoop-env.sh
# JAVA_HOME must be set explicitly here; daemons launched over ssh do not inherit the shell's value
export JAVA_HOME=/usr/lib/java
--------------------------------------
vim spark-env.sh
export SCALA_HOME=/usr/lib/scala
export JAVA_HOME=/usr/lib/java
export HADOOP_HOME=/usr/local/hadoop

export SPARK_MASTER_IP=master
export SPARK_WORKER_MEMORY=2g
export MASTER=spark://master:7077
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
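All nodes must share identical Hadoop and Spark trees; a sketch pushing them to one slave (repeat per node; slave1 is a placeholder hostname):

scp -r /usr/local/hadoop root@slave1:/usr/local/
scp -r /usr/local/spark  root@slave1:/usr/local/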
---------------------------------------------------------
vim spark-defaults.conf
spark.master                     spark://master:7077
spark.eventLog.enabled           true
# the event-log URIs must match the NameNode address from core-site.xml (hdfs://master:9000)
spark.eventLog.dir               hdfs://master:9000/directory
#spark.serializer                 org.apache.spark.serializer.KryoSerializer
#spark.driver.memory              5g
spark.executor.extraJavaOptions  -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
spark.history.fs.logDirectory    hdfs://master:9000/directory
spark.yarn.historyServer.address master:18080
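The event-log directory must already exist in HDFS, and the history server is started separately. Spark's start-all.sh shares its name with Hadoop's, so call it by full path:

hdfs dfs -mkdir -p /directory
${SPARK_HOME}/sbin/start-all.sh              # Spark master + workers
${SPARK_HOME}/sbin/start-history-server.sh
jps    # master adds Master, HistoryServer; slaves add Worker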


Original post: http://www.cnblogs.com/1380shadow/p/7931785.html