# --- Host side: build the image and bring up the cluster containers ---
# Build the Hadoop/Hive image from the Dockerfile in the current directory.
docker build -t hd-container:1.0 .
# Windows hosts
docker-compose -f win-docker-compose-hive.yml up -d
# macOS hosts
docker-compose -f mac-docker-compose-hive.yml up -d
# --- Inside the container: manual daemon startup (alternative to the
#     scripted path further below) ---
# Format the NameNode metadata directory.
# NOTE(review): formatting erases any existing HDFS metadata — intended for
# first-time setup only; confirm before re-running on a live cluster.
hdfs namenode -format
# Start the HDFS daemons (NameNode and DataNode on this node).
hadoop-daemon.sh start namenode
hadoop-daemon.sh start datanode
# Start the YARN daemons (ResourceManager and NodeManager on this node).
yarn-daemon.sh start resourcemanager
yarn-daemon.sh start nodemanager
# Create the default Hive warehouse directory in HDFS.
hadoop fs -mkdir -p /user/hive/warehouse
# Launch HiveServer2 in the background.
# NOTE(review): './hiveserver2' assumes the current working directory is
# Hive's bin/ — verify the cwd (or use the PATH-resolved 'hiveserver2').
./hiveserver2 &
# --- Scripted startup path ---
# 1. Initialize the environment (source cluster env vars into this shell).
source /opt/script/bigdata_env.sh
# 2. Start the master components.
# NOTE(review): this reformats the NameNode and destroys existing HDFS
# metadata — skip on an already-initialized cluster.
hdfs namenode -format
sh /opt/script/start-components.sh master
# 3. Start the slave (worker) components.
sh /opt/script/start-components.sh slave
# 4. Initialize Hive: create the warehouse directory in HDFS, hand ownership
#    to the 'hive' user, and initialize the embedded Derby metastore schema.
hadoop fs -mkdir -p /user/hive/warehouse
hadoop fs -chown hive:hive /user/hive
hadoop fs -chown hive:hive /user/hive/warehouse
schematool -dbType derby -initSchema
# 5. Start HiveServer2 in the background (nohup keeps it alive after logout;
#    output goes to ./nohup.out by default).
nohup hiveserver2 &
# --- Smoke test: connect to HiveServer2 over JDBC as user 'hive' ---
beeline -u jdbc:hive2://localhost:10000/default -n hive
# The next line is HiveQL — type it at the beeline prompt, not in the shell.
create table test(id string, name string, age int);