From 44debf4eaaa7df41d07387d5e98393b932be2b0c Mon Sep 17 00:00:00 2001 From: Salvatore LaMendola Date: Thu, 23 Mar 2017 12:23:01 -0400 Subject: [PATCH 1/4] Fix hive.metastore.uris list generation - Was previously only prepending thrift:// to a single host. --- cookbooks/bcpc-hadoop/recipes/hive_config.rb | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/cookbooks/bcpc-hadoop/recipes/hive_config.rb b/cookbooks/bcpc-hadoop/recipes/hive_config.rb index 55681abf3..c803696a4 100644 --- a/cookbooks/bcpc-hadoop/recipes/hive_config.rb +++ b/cookbooks/bcpc-hadoop/recipes/hive_config.rb @@ -4,12 +4,12 @@ # Description : To setup hive configuration only. No hive package will be installed through this Recipe # -#Create hive password +# Create hive password hive_password = make_config('mysql-hive-password', secure_password) # Hive table stats user stats_user = make_config('mysql-hive-table-stats-user', - node["bcpc"]["hadoop"]["hive"]["hive_table_stats_db_user"]) + node['bcpc']['hadoop']['hive']['hive_table_stats_db_user']) stats_password = make_config('mysql-hive-table-stats-password', secure_password) %w{hive webhcat hcat hive-hcatalog}.each do |w| @@ -83,10 +83,9 @@ hive_site_vars[:hive_sql_password], 'hive.metastore.uris' => - 'thrift://' + - hive_site_vars[:hive_hosts] - .map{ |s| float_host(s[:hostname]) + ":9083" } - .join(","), + hive_site_vars['hive_hosts'] + .map { |s| 'thrift://' + float_host(s[:hostname]) + ':9083' } + .join(','), 'hive.zookeeper.quorum' => hive_site_vars[:zk_hosts].map{ |s| float_host(s[:hostname]) }.join(","), From d6fd5ce4147fbc58252f59fe2f8b66672b941e14 Mon Sep 17 00:00:00 2001 From: Salvatore LaMendola Date: Thu, 23 Mar 2017 17:09:37 -0400 Subject: [PATCH 2/4] Cleanup of hive_config.rb to make Rubocop/Foodcritic happy --- cookbooks/bcpc-hadoop/recipes/hive_config.rb | 218 +++++++++---------- 1 file changed, 109 insertions(+), 109 deletions(-) diff --git a/cookbooks/bcpc-hadoop/recipes/hive_config.rb 
b/cookbooks/bcpc-hadoop/recipes/hive_config.rb index c803696a4..3c31d9ea6 100644 --- a/cookbooks/bcpc-hadoop/recipes/hive_config.rb +++ b/cookbooks/bcpc-hadoop/recipes/hive_config.rb @@ -1,7 +1,8 @@ # -# Cookbook Name : bcpc-hadoop -# Recipe Name : hive_config -# Description : To setup hive configuration only. No hive package will be installed through this Recipe +# Cookbook Name: bcpc-hadoop +# Recipe Name: hive_config +# Description: To setup hive configuration only. No hive +# package will be installed through this Recipe # # Create hive password @@ -12,109 +13,108 @@ node['bcpc']['hadoop']['hive']['hive_table_stats_db_user']) stats_password = make_config('mysql-hive-table-stats-password', secure_password) -%w{hive webhcat hcat hive-hcatalog}.each do |w| +%w(hive webhcat hcat hive-hcatalog).each do |w| directory "/etc/#{w}/conf.#{node.chef_environment}" do - owner "root" - group "root" - mode 00755 + owner 'root' + group 'root' + mode 0o0755 action :create recursive true end bash "update-#{w}-conf-alternatives" do - code(%Q{ + code(%Q( update-alternatives --install /etc/#{w}/conf #{w}-conf /etc/#{w}/conf.#{node.chef_environment} 50 update-alternatives --set #{w}-conf /etc/#{w}/conf.#{node.chef_environment} - }) + )) end end hive_site_vars = { - :is_hive_server => node.run_list.expand(node.chef_environment).recipes.include?("bcpc-hadoop::hive_hcatalog"), - :mysql_hosts => node[:bcpc][:hadoop][:mysql_hosts].map{ |m| m[:hostname] }, - :zk_hosts => node[:bcpc][:hadoop][:zookeeper][:servers], - :hive_hosts => node[:bcpc][:hadoop][:hive_hosts], - :stats_user => stats_user, - :warehouse => "#{node['bcpc']['hadoop']['hdfs_url']}/user/hive/warehouse", - :metastore_keytab => "#{node[:bcpc][:hadoop][:kerberos][:keytab][:dir]}/#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:keytab]}", - :server_keytab => "#{node[:bcpc][:hadoop][:kerberos][:keytab][:dir]}/#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:keytab]}", - :kerberos_enabled => 
node[:bcpc][:hadoop][:kerberos][:enable], - :hs2_auth => node["bcpc"]["hadoop"]["hive"]["server2"]["authentication"], - :hs2_ldap_url => node["bcpc"]["hadoop"]["hive"]["server2"]["ldap_url"], - :hs2_ldap_domain => node["bcpc"]["hadoop"]["hive"]["server2"]["ldap_domain"] + is_hive_server: node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog'), + mysql_hosts: node['bcpc']['hadoop']['mysql_hosts'].map { |m| m['hostname'] }, + zk_hosts: node['bcpc']['hadoop']['zookeeper']['servers'], + hive_hosts: node['bcpc']['hadoop']['hive_hosts'], + stats_user: stats_user, + warehouse: "#{node['bcpc']['hadoop']['hdfs_url']}/user/hive/warehouse", + metastore_keytab: "#{node['bcpc']['hadoop']['kerberos']['keytab']['dir']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['keytab']}", + server_keytab: "#{node['bcpc']['hadoop']['kerberos']['keytab']['dir']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['keytab']}", + kerberos_enabled: node['bcpc']['hadoop']['kerberos']['enable'], + hs2_auth: node['bcpc']['hadoop']['hive']['server2']['authentication'], + hs2_ldap_url: node['bcpc']['hadoop']['hive']['server2']['ldap_url'], + hs2_ldap_domain: node['bcpc']['hadoop']['hive']['server2']['ldap_domain'] } -hive_site_vars[:hive_sql_password] = \ -if node.run_list.expand(node.chef_environment).recipes.include?("bcpc-hadoop::hive_hcatalog") then - hive_password -else - "" -end +hive_site_vars['hive_sql_password'] = \ + if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') + hive_password + else + '' + end -hive_site_vars[:stats_sql_password] = \ -if node.run_list.expand(node.chef_environment).recipes.include?("bcpc-hadoop::hive_hcatalog") then - stats_password -else - "" -end +hive_site_vars['stats_sql_password'] = \ + if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') + stats_password + else + '' + end -hive_site_vars[:metastore_princ] = \ -if 
node.run_list.expand(node.chef_environment).recipes.include?("bcpc-hadoop::hive_hcatalog") then - "#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:principal]}/#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost] == '_HOST' ? float_host(node[:fqdn]) : node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost]}@#{node[:bcpc][:hadoop][:kerberos][:realm]}" -else - "#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:principal]}/#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost] == '_HOST' ? '_HOST' : node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost]}@#{node[:bcpc][:hadoop][:kerberos][:realm]}" -end +hive_site_vars['metastore_princ'] = \ + if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') + "#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? float_host(node['fqdn']) : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" + else + "#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? '_HOST' : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" + end -hive_site_vars[:server_princ] = \ -if node.run_list.expand(node.chef_environment).recipes.include?("bcpc-hadoop::hive_hcatalog") then - "#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:principal]}/#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost] == '_HOST' ? float_host(node[:fqdn]) : node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost]}@#{node[:bcpc][:hadoop][:kerberos][:realm]}" -else - "#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:principal]}/#{node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost] == '_HOST' ? 
'_HOST' : node[:bcpc][:hadoop][:kerberos][:data][:hive][:princhost]}@#{node[:bcpc][:hadoop][:kerberos][:realm]}" -end +hive_site_vars['server_princ'] = \ + if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') + "#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? float_host(node['fqdn']) : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" + else + "#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? '_HOST' : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" + end generated_values = -{ - 'javax.jdo.option.ConnectionURL' => - 'jdbc:mysql:loadbalance://' + - hive_site_vars[:mysql_hosts].join(',') + - ':3306/metastore?loadBalanceBlacklistTimeout=5000', - - 'javax.jdo.option.ConnectionPassword' => - hive_site_vars[:hive_sql_password], - - 'hive.metastore.uris' => - hive_site_vars['hive_hosts'] - .map { |s| 'thrift://' + float_host(s[:hostname]) + ':9083' } - .join(','), - - 'hive.zookeeper.quorum' => - hive_site_vars[:zk_hosts].map{ |s| float_host(s[:hostname]) }.join(","), - - 'hive.server2.support.dynamic.service.discovery' => 'true', - - 'hive.server2.zookeeper.namespace' => - "HS2-#{node.chef_environment}-#{hive_site_vars[:hs2_auth]}", - - 'hive.server2.thrift.bind.host' => "#{float_host(node[:fqdn])}", - - 'hive.server2.thrift.port' => - node["bcpc"]["hadoop"]["hive"]["server2"]["port"], - - 'hive.metastore.warehouse.dir' => - hive_site_vars[:warehouse], - - 'hive.stats.dbconnectionstring' => - 'jdbc:mysql:loadbalance://' + hive_site_vars[:mysql_hosts].join(',') + - ':3306/hive_table_stats?useUnicode=true' + - '&characterEncoding=UTF-8' + - '&user=' + hive_site_vars[:stats_user] + - '&password=' + 
hive_site_vars[:stats_sql_password], -} + { + 'javax.jdo.option.ConnectionURL' => + 'jdbc:mysql:loadbalance://' + + hive_site_vars['mysql_hosts'].join(',') + + ':3306/metastore?loadBalanceBlacklistTimeout=5000', -if hive_site_vars[:kerberos_enabled] && - hive_site_vars[:hs2_auth] == 'KERBEROS' + 'javax.jdo.option.ConnectionPassword' => + hive_site_vars['hive_sql_password'], + + 'hive.metastore.uris' => + hive_site_vars['hive_hosts'] + .map { |s| 'thrift://' + float_host(s['hostname']) + ':9083' } + .join(','), + + 'hive.zookeeper.quorum' => + hive_site_vars['zk_hosts'].map { |s| float_host(s['hostname']) }.join(','), + + 'hive.server2.support.dynamic.service.discovery' => 'true', + + 'hive.server2.zookeeper.namespace' => + "HS2-#{node.chef_environment}-#{hive_site_vars[:hs2_auth]}", + + 'hive.server2.thrift.bind.host' => float_host(node['fqdn']).to_s, + + 'hive.server2.thrift.port' => + node['bcpc']['hadoop']['hive']['server2']['port'], + + 'hive.metastore.warehouse.dir' => + hive_site_vars['warehouse'], + + 'hive.stats.dbconnectionstring' => + 'jdbc:mysql:loadbalance://' + hive_site_vars['mysql_hosts'].join(',') + + ':3306/hive_table_stats?useUnicode=true' \ + '&characterEncoding=UTF-8' \ + '&user=' + hive_site_vars['stats_user'] + + '&password=' + hive_site_vars['stats_sql_password'] + } + +if hive_site_vars['kerberos_enabled'] && hive_site_vars['hs2_auth'] == 'KERBEROS' hs2_auth_values = { - 'hive.server2.authentication' => + 'hive.server2.authentication' => hive_site_vars[:hs2_auth] } elsif hive_site_vars[:hs2_auth] == 'LDAP' @@ -137,28 +137,28 @@ if hive_site_vars[:kerberos_enabled] kerberos_values = { - 'hive.metastore.sasl.enabled' => true, - - 'hive.metastore.kerberos.keytab.file' => - hive_site_vars[:metastore_keytab], + 'hive.metastore.sasl.enabled' => 'true', + + 'hive.metastore.kerberos.keytab.file' => + hive_site_vars[:metastore_keytab], - 'hive.metastore.kerberos.principal' => - hive_site_vars[:metastore_princ], + 'hive.metastore.kerberos.principal' 
=> + hive_site_vars[:metastore_princ], - 'hive.server2.authentication.kerberos.keytab' => - hive_site_vars[:server_keytab], + 'hive.server2.authentication.kerberos.keytab' => + hive_site_vars[:server_keytab], - 'hive.server2.authentication.kerberos.principal' => - hive_site_vars[:server_princ] + 'hive.server2.authentication.kerberos.principal' => + hive_site_vars[:server_princ] } generated_values.merge!(kerberos_values) end -site_xml = node[:bcpc][:hadoop][:hive][:site_xml] +site_xml = node['bcpc']['hadoop']['hive']['site_xml'] # flatten_hash converts the tree of node object values to a hash with # dot-notation keys. -#environment_values = flatten_hash(site_xml) +# environment_values = flatten_hash(site_xml) # The complete hash for hive_site.xml is a merger of values # dynamically generated in this recipe, and hardcoded values from the @@ -167,28 +167,28 @@ template '/etc/hive/conf/hive-site.xml' do source 'generic_site.xml.erb' - mode 0644 - variables(:options => complete_hive_site_hash) + mode 0o0644 + variables(options: complete_hive_site_hash) end link "/etc/hive-hcatalog/conf.#{node.chef_environment}/hive-site.xml" do to "/etc/hive/conf.#{node.chef_environment}/hive-site.xml" end -template "/etc/hive/conf/hive-env.sh" do - source "generic_env.sh.erb" - mode 0644 - variables(:options => node[:bcpc][:hadoop][:hive][:env_sh]) +template '/etc/hive/conf/hive-env.sh' do + source 'generic_env.sh.erb' + mode 0o0644 + variables(options: node['bcpc']['hadoop']['hive']['env_sh']) end # This template contains no variables/substitutions. -template "/etc/hive/conf/hive-exec-log4j.properties" do - source "hv_hive-exec-log4j.properties.erb" - mode 0644 +template '/etc/hive/conf/hive-exec-log4j.properties' do + source 'hv_hive-exec-log4j.properties.erb' + mode 0o0644 end # This template contains no variables/substitutions. 
-template "/etc/hive/conf/hive-log4j.properties" do - source "hv_hive-log4j.properties.erb" - mode 0644 +template '/etc/hive/conf/hive-log4j.properties' do + source 'hv_hive-log4j.properties.erb' + mode 0o0644 end From de144eb077a17bba9fd2166b8b86331fa2bff926 Mon Sep 17 00:00:00 2001 From: Salvatore LaMendola Date: Thu, 23 Mar 2017 17:13:10 -0400 Subject: [PATCH 3/4] Append MySQL port to each hostname... ...per [the docs](https://dev.mysql.com/doc/connector-j/5.1/en/connector-j-usagenotes-j2ee-concepts-managing-load-balanced-connections.html). This was not causing any issues luckily, since 3306 is the default and used when no port is specified, but if we ever decide to use a non-standard port, we will want to be able to replace this string with an attribute and append it to each host accordingly. --- cookbooks/bcpc-hadoop/recipes/hive_config.rb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cookbooks/bcpc-hadoop/recipes/hive_config.rb b/cookbooks/bcpc-hadoop/recipes/hive_config.rb index 3c31d9ea6..5424f97da 100644 --- a/cookbooks/bcpc-hadoop/recipes/hive_config.rb +++ b/cookbooks/bcpc-hadoop/recipes/hive_config.rb @@ -32,7 +32,7 @@ hive_site_vars = { is_hive_server: node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog'), - mysql_hosts: node['bcpc']['hadoop']['mysql_hosts'].map { |m| m['hostname'] }, + mysql_hosts: node['bcpc']['hadoop']['mysql_hosts'].map { |m| m['hostname'] + ':3306' }, zk_hosts: node['bcpc']['hadoop']['zookeeper']['servers'], hive_hosts: node['bcpc']['hadoop']['hive_hosts'], stats_user: stats_user, @@ -78,7 +78,7 @@ 'javax.jdo.option.ConnectionURL' => 'jdbc:mysql:loadbalance://' + hive_site_vars['mysql_hosts'].join(',') + - ':3306/metastore?loadBalanceBlacklistTimeout=5000', + '/metastore?loadBalanceBlacklistTimeout=5000', 'javax.jdo.option.ConnectionPassword' => hive_site_vars['hive_sql_password'], @@ -106,7 +106,7 @@ 'hive.stats.dbconnectionstring' => 'jdbc:mysql:loadbalance://' + 
hive_site_vars['mysql_hosts'].join(',') + - ':3306/hive_table_stats?useUnicode=true' \ + '/hive_table_stats?useUnicode=true' \ '&characterEncoding=UTF-8' \ '&user=' + hive_site_vars['stats_user'] + '&password=' + hive_site_vars['stats_sql_password'] From 51cfd88a55b7eafa53940350f8a177d5b28fa3e7 Mon Sep 17 00:00:00 2001 From: Salvatore LaMendola Date: Thu, 23 Mar 2017 19:04:38 -0400 Subject: [PATCH 4/4] Fix objects that should not be strings --- cookbooks/bcpc-hadoop/recipes/hive_config.rb | 30 ++++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/cookbooks/bcpc-hadoop/recipes/hive_config.rb b/cookbooks/bcpc-hadoop/recipes/hive_config.rb index 5424f97da..d5b3dab3d 100644 --- a/cookbooks/bcpc-hadoop/recipes/hive_config.rb +++ b/cookbooks/bcpc-hadoop/recipes/hive_config.rb @@ -32,7 +32,7 @@ hive_site_vars = { is_hive_server: node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog'), - mysql_hosts: node['bcpc']['hadoop']['mysql_hosts'].map { |m| m['hostname'] + ':3306' }, + mysql_hosts: node['bcpc']['hadoop']['mysql_hosts'].map { |m| m[:hostname] + ':3306' }, zk_hosts: node['bcpc']['hadoop']['zookeeper']['servers'], hive_hosts: node['bcpc']['hadoop']['hive_hosts'], stats_user: stats_user, @@ -45,28 +45,28 @@ hs2_ldap_domain: node['bcpc']['hadoop']['hive']['server2']['ldap_domain'] } -hive_site_vars['hive_sql_password'] = \ +hive_site_vars[:hive_sql_password] = \ if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') hive_password else '' end -hive_site_vars['stats_sql_password'] = \ +hive_site_vars[:stats_sql_password] = \ if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') stats_password else '' end -hive_site_vars['metastore_princ'] = \ +hive_site_vars[:metastore_princ] = \ if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') 
"#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? float_host(node['fqdn']) : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" else "#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? '_HOST' : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" end -hive_site_vars['server_princ'] = \ +hive_site_vars[:server_princ] = \ if node.run_list.expand(node.chef_environment).recipes.include?('bcpc-hadoop::hive_hcatalog') "#{node['bcpc']['hadoop']['kerberos']['data']['hive']['principal']}/#{node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost'] == '_HOST' ? float_host(node['fqdn']) : node['bcpc']['hadoop']['kerberos']['data']['hive']['princhost']}@#{node['bcpc']['hadoop']['kerberos']['realm']}" else @@ -77,19 +77,19 @@ { 'javax.jdo.option.ConnectionURL' => 'jdbc:mysql:loadbalance://' + - hive_site_vars['mysql_hosts'].join(',') + + hive_site_vars[:mysql_hosts].join(',') + '/metastore?loadBalanceBlacklistTimeout=5000', 'javax.jdo.option.ConnectionPassword' => - hive_site_vars['hive_sql_password'], + hive_site_vars[:hive_sql_password], 'hive.metastore.uris' => - hive_site_vars['hive_hosts'] - .map { |s| 'thrift://' + float_host(s['hostname']) + ':9083' } + hive_site_vars[:hive_hosts] + .map { |s| 'thrift://' + float_host(s[:hostname]) + ':9083' } .join(','), 'hive.zookeeper.quorum' => - hive_site_vars['zk_hosts'].map { |s| float_host(s['hostname']) }.join(','), + hive_site_vars[:zk_hosts].map { |s| float_host(s[:hostname]) }.join(','), 'hive.server2.support.dynamic.service.discovery' => 'true', @@ -102,17 +102,17 @@ node['bcpc']['hadoop']['hive']['server2']['port'], 'hive.metastore.warehouse.dir' => - hive_site_vars['warehouse'], + hive_site_vars[:warehouse], 
'hive.stats.dbconnectionstring' => - 'jdbc:mysql:loadbalance://' + hive_site_vars['mysql_hosts'].join(',') + + 'jdbc:mysql:loadbalance://' + hive_site_vars[:mysql_hosts].join(',') + '/hive_table_stats?useUnicode=true' \ '&characterEncoding=UTF-8' \ - '&user=' + hive_site_vars['stats_user'] + - '&password=' + hive_site_vars['stats_sql_password'] + '&user=' + hive_site_vars[:stats_user] + + '&password=' + hive_site_vars[:stats_sql_password] } -if hive_site_vars['kerberos_enabled'] && hive_site_vars['hs2_auth'] == 'KERBEROS' +if hive_site_vars[:kerberos_enabled] && hive_site_vars[:hs2_auth] == 'KERBEROS' hs2_auth_values = { 'hive.server2.authentication' => hive_site_vars[:hs2_auth]