0. 环境
1 台 CentOS 7 主机部署 master KDC
2 台 CentOS 7 主机部署 Kerberos Client
1. Master主机安装Kerberos
yum install krb5-server krb5-libs krb5-workstation -y
1.1 配置kdc.conf
vim /var/kerberos/krb5kdc/kdc.conf
[kdcdefaults]
kdc_ports = 88
kdc_tcp_ports = 88
[realms]
HADOOP.COM = {
#master_key_type = aes256-cts
acl_file = /var/kerberos/krb5kdc/kadm5.acl
dict_file = /usr/share/dict/words
admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab
max_renewable_life = 7d
supported_enctypes = aes128-cts:normal des3-hmac-sha1:normal arcfour-hmac:normal camellia256-cts:normal camellia128-cts:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal
}
说明:
- HADOOP.COM:是设定的realms。名字随意。Kerberos可以支持多个realms,一般全用大写
- master_key_type、supported_enctypes 默认使用 aes256-cts。由于 JAVA 使用 aes256-cts 验证方式需要额外安装 JCE 无限强度策略的 jar 包,这里暂不使用
- acl_file:标注了admin的用户权限。文件格式是
Kerberos_principal permissions [target_principal] [restrictions],支持通配符等
- admin_keytab:KDC进行校验的keytab
- supported_enctypes:支持的校验方式。注意把aes256-cts去掉
1.2 配置krb5.conf
vim /etc/krb5.conf
# Configuration snippets may be placed in this directory as well
includedir /etc/krb5.conf.d/
[logging]
default = FILE:/var/log/krb5libs.log
kdc = FILE:/var/log/krb5kdc.log
admin_server = FILE:/var/log/kadmind.log
[libdefaults]
default_realm = HADOOP.COM
dns_lookup_realm = false
dns_lookup_kdc = false
ticket_lifetime = 24h
renew_lifetime = 7d
forwardable = true
clockskew = 120
udp_preference_limit = 1
[realms]
HADOOP.COM = {
kdc = node-1
admin_server = node-1
}
[domain_realm]
.hadoop.com = HADOOP.COM
hadoop.com = HADOOP.COM
说明:
- [logging]:表示server端的日志的打印位置
- udp_preference_limit = 1 表示优先使用 TCP(即禁止使用 UDP),可以避免 Kerberos 票据过大时 UDP 报文被截断而导致的 Hadoop 认证错误
- ticket_lifetime: 表明凭证生效的时限,一般为24小时。
- renew_lifetime: 表明凭证最长可以被延期的时限,一般为一个礼拜。当凭证过期之后,对安全认证的服务的后续访问则会失败。
- clockskew:时钟偏差是不完全符合主机系统时钟的票据时戳的容差,超过此容差将不接受此票据,单位是秒
1.3 初始化kerberos database
kdb5_util create -s -r HADOOP.COM
其中,[-s]表示生成stash file,并在其中存储master server key(krb5kdc);还可以用[-r]来指定一个realm name —— 当krb5.conf中定义了多个realm时才是必要的。
1.4 修改database administrator的ACL权限
vim /var/kerberos/krb5kdc/kadm5.acl
#修改如下
*/admin@HADOOP.COM *
kadm5.acl 文件的更多配置说明可参考 MIT Kerberos 官方文档中关于 kadm5.acl 的章节
想要管理 KDC 的资料库有两种方式,一种直接在 KDC 本机上面直接执行,可以不需要密码就登入资料库管理;一种则是需要输入账号密码才能管理。这两种方式分别是:
- kadmin.local:需要在 KDC server 上面操作,无需密码即可管理资料库
- kadmin:可以在任何一台 KDC 领域的系统上面操作,但是需要输入管理员密码
1.5 启动kerberos daemons
systemctl start kadmin krb5kdc
systemctl enable kadmin krb5kdc
2. 在另外两台主机部署Kerberos Client
yum install krb5-workstation krb5-libs -y
#从master主机复制krb5.conf到这两台主机
scp /etc/krb5.conf node-2:/etc/krb5.conf
scp /etc/krb5.conf node-3:/etc/krb5.conf
3. kerberos的日常操作
先配置下root/admin密码
[root@node-1 ~]# kadmin.local
Authenticating as principal root/admin@HADOOP.COM with password.
kadmin.local: addprinc root/admin
WARNING: no policy specified for root/admin@HADOOP.COM; defaulting to no policy
Enter password for principal "root/admin@HADOOP.COM":
Re-enter password for principal "root/admin@HADOOP.COM":
Principal "root/admin@HADOOP.COM" created.
kadmin.local: listprincs
K/M@HADOOP.COM
kadmin/admin@HADOOP.COM
kadmin/changepw@HADOOP.COM
kadmin/instance-ay8h77o0-1@HADOOP.COM
kiprop/instance-ay8h77o0-1@HADOOP.COM
krbtgt/HADOOP.COM@HADOOP.COM
root/admin@HADOOP.COM
kadmin.local: exit
新加用户hd1:
[root@node-3 ~]# kadmin
Authenticating as principal root/admin@HADOOP.COM with password.
Password for root/admin@HADOOP.COM:
kadmin: addprinc hd1
WARNING: no policy specified for hd1@HADOOP.COM; defaulting to no policy
Enter password for principal "hd1@HADOOP.COM":
Re-enter password for principal "hd1@HADOOP.COM":
Principal "hd1@HADOOP.COM" created.
kadmin: exit
注:输入?,可以查看所有命令的用法
kadmin.local: ?
Available kadmin.local requests:
add_principal, addprinc, ank
Add principal
delete_principal, delprinc
Delete principal
modify_principal, modprinc
Modify principal
rename_principal, renprinc
Rename principal
change_password, cpw Change password
get_principal, getprinc Get principal
list_principals, listprincs, get_principals, getprincs
List principals
add_policy, addpol Add policy
modify_policy, modpol Modify policy
用hd1登录:
[root@node-3 ~]# kinit hd1
Password for hd1@HADOOP.COM:
[root@node-3 ~]# klist
# 查看已授权列表
Ticket cache: FILE:/tmp/krb5cc_0
Default principal: hd1@HADOOP.COM
Valid starting Expires Service principal
07/12/2018 17:27:32 07/13/2018 17:27:32 krbtgt/HADOOP.COM@HADOOP.COM
renew until 07/19/2018 17:27:32
删除当前的认证的缓存
[root@node-3 ~]# kdestroy
续期(renew)当前 ticket,需在 renew_lifetime 之内、且票据本身可续期时执行
kinit -R
使用keytab
生成keytab
kadmin.local -q "xst -k keytab/hd1.keytab hd1@HADOOP.COM"
or
kadmin -q "xst -k keytab/hd1.keytab hd1@HADOOP.COM"
注意:生成keytab后,密码被修改了,无法再用之前输入密码登录了
如果想要密码不变,需要按照以下方式做,必须是kadmin.local
[root@node-1 ~]# kadmin.local -q "xst -k keytab/hd6.keytab -norandkey hd6@HADOOP.COM"
否则
[root@node-3 ~]# kadmin -q "xst -k keytab/hd6.keytab -norandkey hd6@HADOOP.COM"
Authenticating as principal root/admin@HADOOP.COM with password.
Password for root/admin@HADOOP.COM:
kadmin: Operation requires ``extract-keys'' privilege while changing hd6@HADOOP.COM's key
[root@node-3 ~]# kinit hd1
Password for hd1@HADOOP.COM:
kinit: Password incorrect while getting initial credentials
登录
[root@node-3 ~]# kinit -kt keytab/hd3.keytab hd3
[root@node-3 ~]# klist
Ticket cache: FILE:/tmp/krb5cc_0
Default principal: hd3@HADOOP.COM
Valid starting Expires Service principal
07/12/2018 18:24:43 07/13/2018 18:24:43 krbtgt/HADOOP.COM@HADOOP.COM
renew until 07/19/2018 18:24:43
合并keytab文件
[root@node-3 ~]# ktutil
ktutil: ?
Available ktutil requests:
clear_list, clear Clear the current keylist.
read_kt, rkt Read a krb5 keytab into the current keylist.
read_st, rst Read a krb4 srvtab into the current keylist.
write_kt, wkt Write the current keylist to a krb5 keytab.
write_st, wst Write the current keylist to a krb4 srvtab.
add_entry, addent Add an entry to the current keylist.
delete_entry, delent Delete an entry from the current keylist.
list, l List the current keylist.
list_requests, lr, ? List available requests.
quit, exit, q Exit program.
ktutil: rkt keytab/hd2.keytab
ktutil: rkt keytab/hd3.keytab
ktutil: wkt keytab/hd23.keytab
ktutil: exit
#使用合并后的keytab登录
[root@node-3 ~]# kinit -kt keytab/hd23.keytab hd3
[root@node-3 ~]# klist
Ticket cache: FILE:/tmp/krb5cc_0
Default principal: hd3@HADOOP.COM
Valid starting Expires Service principal
07/12/2018 18:30:27 07/13/2018 18:30:27 krbtgt/HADOOP.COM@HADOOP.COM
renew until 07/19/2018 18:30:27
可以查看keytab文件内容
[root@node-3 ~]# klist -ket keytab/hd23.keytab
Keytab name: FILE:keytab/hd23.keytab
KVNO Timestamp Principal
---- ------------------- ------------------------------------------------------
2 07/12/2018 18:28:33 hd2@HADOOP.COM (aes128-cts-hmac-sha1-96)
2 07/12/2018 18:28:33 hd2@HADOOP.COM (des3-cbc-sha1)
2 07/12/2018 18:28:33 hd2@HADOOP.COM (arcfour-hmac)
2 07/12/2018 18:28:33 hd2@HADOOP.COM (camellia256-cts-cmac)
2 07/12/2018 18:28:33 hd2@HADOOP.COM (camellia128-cts-cmac)
2 07/12/2018 18:28:33 hd2@HADOOP.COM (des-hmac-sha1)
2 07/12/2018 18:28:33 hd2@HADOOP.COM (des-cbc-md5)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (aes128-cts-hmac-sha1-96)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (des3-cbc-sha1)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (arcfour-hmac)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (camellia256-cts-cmac)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (camellia128-cts-cmac)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (des-hmac-sha1)
2 07/12/2018 18:28:33 hd3@HADOOP.COM (des-cbc-md5)
网友评论