master
huanghai 5 years ago
parent 61627ca2f7
commit f6001854f0

@@ -0,0 +1,4 @@
10 ACCOUNTING 1700
20 RESEARCH 1800
30 SALES 1900
40 OPERATIONS 1700

@@ -0,0 +1,14 @@
7369 SMITH CLERK 7902 1980-12-17 800.00 20
7499 ALLEN SALESMAN 7698 1981-2-20 1600.00 300.00 30
7521 WARD SALESMAN 7698 1981-2-22 1250.00 500.00 30
7566 JONES MANAGER 7839 1981-4-2 2975.00 20
7654 MARTIN SALESMAN 7698 1981-9-28 1250.00 1400.00 30
7698 BLAKE MANAGER 7839 1981-5-1 2850.00 30
7782 CLARK MANAGER 7839 1981-6-9 2450.00 10
7788 SCOTT ANALYST 7566 1987-4-19 3000.00 20
7839 KING PRESIDENT 1981-11-17 5000.00 10
7844 TURNER SALESMAN 7698 1981-9-8 1500.00 0.00 30
7876 ADAMS CLERK 7788 1987-5-23 1100.00 20
7900 JAMES CLERK 7698 1981-12-3 950.00 30
7902 FORD ANALYST 7566 1981-12-3 3000.00 20
7934 MILLER CLERK 7782 1982-1-23 1300.00 10

@@ -0,0 +1,53 @@
# Start the Docker container
docker run -d -m 8G -p 7070:7070 -p 8088:8088 -p 50070:50070 -p 8032:8032 -p 8042:8042 -p 16010:16010 apachekylin/apache-kylin-standalone:3.1.0
docker exec -it <container_id> bash
# View the full container_id
docker ps --no-trunc
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
[root@gp-segment2 ~]# docker ps --no-trunc
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
81afbbbdffb180de4d215e6e3e9cbf4314e21b7f9e46d42ba5c30bf2530178a0 apachekylin/apache-kylin-standalone:3.1.0 "/home/admin/entrypoint.sh" 17 hours ago Up 17 hours 0.0.0.0:7070->7070/tcp, 0.0.0.0:8032->8032/tcp, 0.0.0.0:8042->8042/tcp, 0.0.0.0:8088->8088/tcp, 0.0.0.0:16010->16010/tcp, 0.0.0.0:50070->50070/tcp nervous_cerf
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
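# Optional check (a sketch, assuming the Kylin web UI listens on the mapped port 7070):
# the services inside the container can take several minutes to start, so repeat this until Kylin responds with an HTTP status.
curl -s -o /dev/null -w "%{http_code}\n" http://localhost:7070/kylin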
# Enter the Docker container (use the full container_id shown by `docker ps --no-trunc` above)
[root@gp-segment2 ~]# docker exec -it 81afbbbdffb180de4d215e6e3e9cbf4314e21b7f9e46d42ba5c30bf2530178a0 bash
[root@81afbbbdffb1 admin]#
# Prepare to create the tables
[root@81afbbbdffb1 admin]# hive
ls: cannot access /home/admin/spark-2.3.1-bin-hadoop2.6/lib/spark-assembly-*.jar: No such file or directory
Logging initialized using configuration in jar:file:/home/admin/apache-hive-1.2.1-bin/lib/hive-common-1.2.1.jar!/hive-log4j.properties
hive>
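-- Optional (assumption, not part of the original notes): print column headers with query results,
-- which makes the SELECTs below easier to read.
set hive.cli.print.header=true;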
-- Create the department table
create external table if not exists default.dept(
    deptno int,
    dname string,
    loc int)
row format delimited fields terminated by '\t';
-- Create the employee table
create external table if not exists default.emp(
    empno int,
    ename string,
    job string,
    mgr int,
    hiredate string,
    sal double,
    comm double,
    deptno int)
row format delimited fields terminated by '\t';
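-- Optional sanity check (sketch): confirm both external tables were created with the expected columns.
desc formatted default.dept;
desc formatted default.emp;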
-- Load data into the external tables
show tables;
load data local inpath '/opt/module/datas/dept.txt' into table default.dept;
load data local inpath '/opt/module/datas/emp.txt' into table default.emp;
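-- Optional verification (the data files above contain 4 dept rows and 14 emp rows).
select count(*) from default.dept;   -- expect 4
select count(*) from default.emp;    -- expect 14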
-- Query the results
select * from emp;
select * from dept;
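-- Illustrative star-schema query (a sketch, not in the original notes): join the emp fact table
-- to the dept dimension table, the kind of query a Kylin cube over these tables would serve.
select e.empno, e.ename, d.dname, e.sal
from default.emp e
join default.dept d on e.deptno = d.deptno;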