The steps below unpack Hive 0.13.1 onto an existing Hadoop 2.5.0 cluster, prepare the HDFS directories Hive needs, try out the Hive CLI, and then install MySQL 5.6 and switch the Hive metastore to MySQL.

1. Unpack Hive and start HDFS and YARN:

[huiyunltd@hadoop-senior ~]$ tar xf /opt/softwares/apache-hive-0.13.1-bin.tar.gz -C /opt/app/
[huiyunltd@hadoop-senior ~]$ /opt/app/hadoop-2.5.0/sbin/start-dfs.sh
NameNode web UI: hadoop-senior.huiyunltd.com:50070
[huiyunltd@hadoop-senior02 ~]$ /opt/app/hadoop-2.5.0/sbin/start-yarn.sh
ResourceManager web UI: http://hadoop-senior02.huiyunltd.com:8088

2. Create the HDFS directories Hive uses (group-writable) and start the JobHistory server:

[huiyunltd@hadoop-senior03 ~]$ /opt/app/hadoop-2.5.0/bin/hdfs dfs -mkdir /tmp
[huiyunltd@hadoop-senior03 ~]$ /opt/app/hadoop-2.5.0/bin/hdfs dfs -chmod g+w /tmp
[huiyunltd@hadoop-senior03 ~]$ /opt/app/hadoop-2.5.0/bin/hdfs dfs -mkdir -p /user/hive/warehouse
[huiyunltd@hadoop-senior03 ~]$ /opt/app/hadoop-2.5.0/bin/hdfs dfs -chmod g+w /user/hive/warehouse
[huiyunltd@hadoop-senior ~]$ /opt/app/hadoop-2.5.0/sbin/mr-jobhistory-daemon.sh start historyserver

3. Point Hive at the Hadoop installation in hive-env.sh:

/opt/app/apache-hive-0.13.1-bin/conf/hive-env.sh
---------------
HADOOP_HOME=/opt/app/hadoop-2.5.0
export HIVE_CONF_DIR=/opt/app/apache-hive-0.13.1-bin/conf

4. Try out the Hive CLI:

[huiyunltd@hadoop-senior ~]$ /opt/app/apache-hive-0.13.1-bin/bin/hive
hive> show databases;
hive> use default;
hive> create table kms_log(ip string, user string, requesturl string);
hive> show tables;
hive> desc kms_log;
hive> select count(*) from kms_log;
hive> create table student(id int, name string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';

5. Prepare a tab-delimited data file and load it into the student table:

[huiyunltd@hadoop-senior ~]$ touch /opt/datas/student.txt

student.txt
---------------
1001	jack
1002	mary
1003	thomas
1004	tom

hive> load data local inpath '/opt/datas/student.txt' into table student;
hive> select * from student;
hive> select id from student;

6. Install MySQL 5.6 (remove the bundled mysql-libs package first):

[huiyunltd@hadoop-senior ~]$ unzip -o -d /opt/softwares/ /opt/softwares/mysql-libs.zip
[huiyunltd@hadoop-senior ~]$ rpm -qa|grep mysql
[huiyunltd@hadoop-senior ~]$ sudo rpm -e --nodeps mysql-libs-5.1.66-2.el6_3.x86_64
[huiyunltd@hadoop-senior ~]$ rpm -qa|grep mysql
[huiyunltd@hadoop-senior ~]$ sudo rpm -ivh /opt/softwares/mysql-libs/MySQL-server-5.6.24-1.el6.x86_64.rpm
[huiyunltd@hadoop-senior ~]$ sudo rpm -ivh /opt/softwares/mysql-libs/MySQL-client-5.6.24-1.el6.x86_64.rpm
[huiyunltd@hadoop-senior ~]$ sudo cat /root/.mysql_secret
[huiyunltd@hadoop-senior ~]$ service mysql status
[huiyunltd@hadoop-senior ~]$ rpm -qa|grep MySQL
[huiyunltd@hadoop-senior ~]$ sudo service mysql start

7. Log in with the random password from /root/.mysql_secret, set a new root password, and allow root to connect from any host:

[huiyunltd@hadoop-senior ~]$ mysql -uroot -p
mysql> SET PASSWORD=PASSWORD('123456');
mysql> use mysql;
mysql> select User,Host,Password from user;
mysql> update user set Host='%' where User='root' and Host='localhost';
mysql> delete from user where User='root' and Host='hadoop-senior.huiyunltd.com';
mysql> delete from user where User='root' and Host='127.0.0.1';
mysql> delete from user where User='root' and Host='::1';
[huiyunltd@hadoop-senior ~]$ sudo service mysql restart

8. Create hive-site.xml from the template and put the MySQL JDBC driver on Hive's classpath:

[huiyunltd@hadoop-senior ~]$ cp /opt/app/apache-hive-0.13.1-bin/conf/hive-default.xml.template /opt/app/apache-hive-0.13.1-bin/conf/hive-site.xml
[huiyunltd@hadoop-senior ~]$ tar xf /opt/softwares/mysql-libs/mysql-connector-java-5.1.27.tar.gz -C /opt/softwares/mysql-libs/
[huiyunltd@hadoop-senior ~]$ cp /opt/softwares/mysql-libs/mysql-connector-java-5.1.27/mysql-connector-java-5.1.27-bin.jar /opt/app/apache-hive-0.13.1-bin/lib/
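Before editing hive-site.xml to point at MySQL, it may help to confirm that the root account really does accept connections from other hosts. A minimal check, assuming the hostname and password used above (this step is not part of the original walkthrough):

[huiyunltd@hadoop-senior ~]$ mysql -h hadoop-senior.huiyunltd.com -uroot -p123456
mysql> use mysql;
mysql> select User,Host from user;    -- expect a single root row with Host '%'
mysql> exit;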
--------------hive-site.xml-------------------
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://hadoop-senior.huiyunltd.com:3306/metastore?createDatabaseIfNotExist=true</value>
    <description>JDBC connect string for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
    <description>Driver class name for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
    <description>username to use against metastore database</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>123456</value>
    <description>password to use against metastore database</description>
  </property>
</configuration>
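With hive-site.xml in place, one way to confirm that Hive is now using the MySQL metastore is to create a table from the Hive CLI and then look for it in the metastore database. A rough sketch, assuming the settings above; the table name metastore_check is made up for the test, and the exact set of metastore tables (DBS, TBLS, COLUMNS_V2, SDS, ...) can vary by Hive version:

[huiyunltd@hadoop-senior ~]$ /opt/app/apache-hive-0.13.1-bin/bin/hive
hive> create table metastore_check(id int);    -- any throwaway table will do
hive> quit;
[huiyunltd@hadoop-senior ~]$ mysql -uroot -p123456
mysql> use metastore;
mysql> show tables;                            -- Hive should have created its schema here
mysql> select TBL_NAME from TBLS;              -- should list metastore_check

If the metastore database never appears in MySQL, Hive is most likely still falling back to the embedded Derby metastore, which usually points to a problem in hive-site.xml or a missing JDBC driver jar in Hive's lib directory.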