### Linkis deployment configuration
### The user that performs the deployment
deployUser=hadoop
### The install home path of Linkis
LINKIS_INSTALL_HOME=/home/hadoop/Install/Linkis #Must be provided
### Specifies the user workspace, used to store the user's script files and log files
WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ ##file:// required
### User's root HDFS path
HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
### Path to store job ResultSets: a file:// or hdfs:// path
RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis
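A minimal sketch of preparing these paths ahead of time, assuming deployUser=hadoop and a working hdfs CLI (the paths come from the values above):

# Local workspace for user scripts and logs
mkdir -p /tmp/linkis
chown -R hadoop:hadoop /tmp/linkis
# HDFS user root, also used here for job ResultSets
hdfs dfs -mkdir -p /tmp/linkis
hdfs dfs -chown -R hadoop:hadoop /tmp/linkis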
### Provide the DB information of Hive metadata database.
HIVE_META_URL=jdbc:mysql://127.0.0.1:3306/dss?useUnicode=true #Hive metadata, stored in MySQL under the dss database
HIVE_META_USER=root
HIVE_META_PASSWORD=root
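It is worth verifying that the Hive metastore connection above actually works before installing. A quick check, assuming the mysql client is available; Hive metastore tables such as DBS and TBLS should show up:

# Confirm the Hive metadata database is reachable with the credentials above
mysql -h 127.0.0.1 -P 3306 -u root -proot -e 'USE dss; SHOW TABLES;'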
### HADOOP CONF DIR
HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
### HIVE CONF DIR
HIVE_CONF_DIR=/usr/local/hive/conf
### SPARK CONF DIR
SPARK_CONF_DIR=/usr/local/spark/conf
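Each of these must point at real client configuration on the deploy machine. A quick sanity check, assuming the usual file names (which may differ in your environment):

# Each conf dir should contain the corresponding client config
ls /usr/local/hadoop/etc/hadoop/core-site.xml
ls /usr/local/hive/conf/hive-site.xml
ls /usr/local/spark/conf/spark-env.sh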
### Used to store users' custom variables, user configuration, UDFs and functions, and to provide the JobHistory service
MYSQL_HOST=127.0.0.1
MYSQL_PORT=3306
MYSQL_DB=dss #here we reuse the Hive metadata database configured above
MYSQL_USER=root
MYSQL_PASSWORD=root
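Since this points at the same dss database that holds the Hive metadata, nothing new needs to be created; the install script typically imports Linkis's own tables into whatever database is configured here. If you would rather keep Linkis in a separate database, a hedged sketch (the name linkis is a hypothetical choice):

# Optional: create a dedicated database for Linkis instead of reusing dss
mysql -h 127.0.0.1 -P 3306 -u root -proot -e 'CREATE DATABASE IF NOT EXISTS linkis DEFAULT CHARACTER SET utf8;'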
### DSS deployment configuration
### The user that performs the deployment
deployUser=hadoop
### The install home path of DSS. Must be provided
DSS_INSTALL_HOME=/home/hadoop/Install/DSS #Set the DSS install path; after configuring it, create this directory and fix its ownership
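As the comment above says, create the install home ahead of time and hand it to the deploy user. A minimal sketch, assuming deployUser=hadoop; the same preparation applies to the LINKIS_INSTALL_HOME configured earlier:

# Pre-create the DSS install home and give it to the deploy user
mkdir -p /home/hadoop/Install/DSS
chown -R hadoop:hadoop /home/hadoop/Install/DSS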
### Specifies the user workspace, which is used to store the user's script files and log files.
### Generally a local directory
WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/
### Path to store job ResultSets: a file:// or hdfs:// path
RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis
### 1. DataCheck APPJOINT. This service provides the DataCheck capability.
HIVE_META_URL=jdbc:mysql://127.0.0.1:3306/dss?characterEncoding=UTF-8
HIVE_META_USER=root
HIVE_META_PASSWORD=root
#Used to store the Azkaban projects converted by DSS
WDS_SCHEDULER_PATH=file:///tmp/wds/scheduler #create this directory in advance
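As the comment notes, this directory has to exist before installation. A minimal sketch, again assuming deployUser=hadoop:

# Pre-create the directory that will hold DSS-converted Azkaban projects
mkdir -p /tmp/wds/scheduler
chown -R hadoop:hadoop /tmp/wds/scheduler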
Source: https://www.cnblogs.com/w-honey/p/12900833.html