Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for log4j2.properties and unused parameters become deprecated (daily_rolling_date_pattern , default_logging_level, file_rolling_type, logging_file, logging_level, logging_template, deprecation_logging, deprecation_logging_level, rolling_file_max_backup_index, rolling_file_max_file_size, security_logging_content, security_logging_source) #1185

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 0 additions & 14 deletions data/common.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,17 +11,11 @@ elasticsearch::autoupgrade: false
elasticsearch::config: {}
elasticsearch::configdir: "/etc/elasticsearch"
elasticsearch::configdir_recurselimit: 2
elasticsearch::default_logging_level: INFO
elasticsearch::daily_rolling_date_pattern: |
"'.'yyyy-MM-dd"
elasticsearch::defaults_location:
elasticsearch::deprecation_logging: false
elasticsearch::deprecation_logging_level: DEBUG
elasticsearch::download_tool:
elasticsearch::download_tool_insecure:
elasticsearch::download_tool_verify_certificates: true
elasticsearch::ensure: present
elasticsearch::file_rolling_type: dailyRollingFile
elasticsearch::indices: {}
elasticsearch::init_defaults: {}
elasticsearch::init_defaults_file:
Expand All @@ -30,10 +24,6 @@ elasticsearch::instances: {}
elasticsearch::jvm_options: []
elasticsearch::license:
elasticsearch::logdir: "/var/log/elasticsearch"
elasticsearch::logging_config: {}
elasticsearch::logging_file:
elasticsearch::logging_level: INFO
elasticsearch::logging_template:
elasticsearch::manage_datadir: true
elasticsearch::manage_logdir: true
elasticsearch::manage_repo: true
Expand All @@ -53,12 +43,8 @@ elasticsearch::purge_secrets: false
elasticsearch::repo_stage: false
elasticsearch::restart_on_change: false
elasticsearch::roles: {}
elasticsearch::rolling_file_max_backup_index: 1
elasticsearch::rolling_file_max_file_size: 10MB
elasticsearch::scripts: {}
elasticsearch::secrets:
elasticsearch::security_logging_content:
elasticsearch::security_logging_source:
elasticsearch::service_name: elasticsearch
elasticsearch::service_provider: systemd
elasticsearch::snapshot_repositories: {}
Expand Down
70 changes: 70 additions & 0 deletions lib/puppet/parser/functions/es_hash2properties.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
# frozen_string_literal: true

# Top-level Puppet functions
module Puppet::Parser::Functions
  newfunction(
    :es_hash2properties,
    type: :rvalue,
    doc: <<-'ENDHEREDOC') do |args|
    Converts a puppet hash to Java properties file string

    For example:

        $hash = {'a' => 'value'}
        es_hash2properties($hash)

    @return String
    ENDHEREDOC

    # Two arguments are supported but only the first (the hash) is mandatory;
    # the optional second hash overrides the default rendering settings below.
    raise Puppet::ParseError, "es_hash2properties(): wrong number of arguments (#{args.length}; must be at least 1)" if args.empty?

    input = args[0]

    raise Puppet::ParseError, "es_hash2properties: expected first argument to be an Hash, got #{input.inspect}" unless input.is_a?(Hash)

    options = args[1] if args[1]

    raise Puppet::ParseError, "es_hash2properties: expected second argument to be a Hash, got #{options.inspect}" if options && !(options.is_a? Hash)

    # Rendering defaults; any key may be overridden via the options hash.
    settings = {
      'header' => '# THIS FILE IS MANAGED BY PUPPET',
      'key_val_separator' => ' = ',
      'quote_char' => '',
      'list_separator' => ',',
    }

    settings.merge!(options) if options

    # Flatten nested hashes into dotted property keys, e.g.
    # {'a' => {'b' => 1}} becomes {'a.b' => 1}. An explicit worklist is used
    # instead of recursion; `input.to_a` copies pairs so the caller's hash is
    # never mutated. Arrays are joined with the configured list separator.
    properties = {}
    list_separator = settings['list_separator']
    pending = input.to_a
    until pending.empty?
      key, value = pending.pop
      if value.is_a?(Hash)
        pending += value.to_a.map { |subkey, subvalue| ["#{key}.#{subkey}", subvalue] }
      else
        properties[key] = value.is_a?(Array) ? value.join(list_separator) : value
      end
    end

    key_val_separator = settings['key_val_separator']
    quote_char = settings['quote_char']

    # Render one "key = value" line per flattened property.
    result = properties.map do |property, value|
      "#{property}#{key_val_separator}#{quote_char}#{value}#{quote_char}"
    end

    # Every element is already a String (built by interpolation above), so a
    # plain sort gives a stable, diff-friendly output order.
    result.sort!
    result.insert(0, settings['header'])
    result << ''

    return result.join("\n")
  end
end
35 changes: 14 additions & 21 deletions manifests/config.pp
Original file line number Diff line number Diff line change
Expand Up @@ -141,27 +141,20 @@
$_tls_config = {}
}

# # Logging file or hash
# if ($elasticsearch::logging_file != undef) {
# $_log4j_content = undef
# } else {
# if ($elasticsearch::logging_template != undef ) {
# $_log4j_content = template($elasticsearch::logging_template)
# } else {
# $_log4j_content = template("${module_name}/etc/elasticsearch/log4j2.properties.erb")
# }
# $_logging_source = undef
# }
# file {
# "${elasticsearch::configdir}/log4j2.properties":
# ensure => file,
# content => $_log4j_content,
# source => $_logging_source,
# mode => '0644',
# notify => $elasticsearch::_notify_service,
# require => Class['elasticsearch::package'],
# before => Class['elasticsearch::service'],
# }
# Generate log4j2.properties file from the user-supplied logging_config
# hash; when logging_config is undef the file is left unmanaged so users
# can ship their own configuration out of band.
if ($elasticsearch::logging_config != undef) {
  file {
    "${elasticsearch::configdir}/log4j2.properties":
      ensure  => file,
      # Rendered by the es_hash2properties() function shipped with this module.
      content => es_hash2properties($elasticsearch::logging_config),
      group   => $elasticsearch::elasticsearch_group,
      owner   => $elasticsearch::elasticsearch_user,
      mode    => '0644',
      # Restart/notify semantics follow the module-wide service handling.
      notify  => $elasticsearch::_notify_service,
      require => Class['elasticsearch::package'],
      before  => Class['elasticsearch::service'],
  }
}

# Generate Elasticsearch config
$data = merge(
Expand Down
51 changes: 26 additions & 25 deletions manifests/init.pp
Original file line number Diff line number Diff line change
Expand Up @@ -77,23 +77,23 @@
# copying files from the `configdir` to instance `configdir`s.
#
# @param daily_rolling_date_pattern
# File pattern for the file appender log when file_rolling_type is 'dailyRollingFile'.
# DEPRECATED, File pattern for the file appender log when file_rolling_type is 'dailyRollingFile'.
#
# @param datadir
# Allows you to set the data directory of Elasticsearch.
#
# @param default_logging_level
# Default logging level for Elasticsearch.
# DEPRECATED, Default logging level for Elasticsearch.
#
# @param defaults_location
# Absolute path to directory containing init defaults file.
#
# @param deprecation_logging
# Whether to enable deprecation logging. If enabled, deprecation logs will be
# DEPRECATED, Whether to enable deprecation logging. If enabled, deprecation logs will be
# saved to ${cluster.name}_deprecation.log in the Elasticsearch log folder.
#
# @param deprecation_logging_level
# Default deprecation logging level for Elasticsearch.
# DEPRECATED, Default deprecation logging level for Elasticsearch.
#
# @param download_tool
# Command-line invocation with which to retrieve an optional package_url.
Expand Down Expand Up @@ -152,17 +152,17 @@
# Mode directory that will be used for Elasticsearch logging (default 2750).
#
# @param logging_config
# Representation of information to be included in the log4j.properties file.
# Representation of information to be included in the log4j2.properties file.
#
# @param logging_file
# Instead of a hash, you may supply a `puppet://` file source for the
# DEPRECATED, Instead of a hash, you may supply a `puppet://` file source for the
# log4j.properties file.
#
# @param logging_level
# Default logging level for Elasticsearch.
# DEPRECATED, Default logging level for Elasticsearch.
#
# @param logging_template
# Use a custom logging template - just supply the relative path, i.e.
# DEPRECATED, Use a custom logging template - just supply the relative path, i.e.
# `$module/elasticsearch/logging.yml.erb`
#
# @param manage_datadir
Expand Down Expand Up @@ -268,10 +268,10 @@
# Define roles via a hash. This is mainly used with Hiera's auto binding.
#
# @param rolling_file_max_backup_index
# Max number of logs to store whern file_rolling_type is 'rollingFile'
# DEPRECATED, Max number of logs to store when file_rolling_type is 'rollingFile'
#
# @param rolling_file_max_file_size
# Max log file size when file_rolling_type is 'rollingFile'
# DEPRECATED, Max log file size when file_rolling_type is 'rollingFile'
#
# @param scripts
# Define scripts via a hash. This is mainly used with Hiera's auto binding.
Expand All @@ -281,11 +281,11 @@
# Elasticsearch keystore file. If unset, the keystore is left unmanaged.
#
# @param security_logging_content
# File content for x-pack logging configuration file (will be placed
# DEPRECATED, File content for x-pack logging configuration file (will be placed
# into log4j2.properties file).
#
# @param security_logging_source
# File source for x-pack logging configuration file (will be placed
# DEPRECATED, File source for x-pack logging configuration file (will be placed
# into log4j2.properties).
#
# @param service_name
Expand Down Expand Up @@ -349,17 +349,13 @@
Hash $config,
Stdlib::Absolutepath $configdir,
Integer $configdir_recurselimit,
String $daily_rolling_date_pattern,
Elasticsearch::Multipath $datadir,
Optional[Stdlib::Absolutepath] $defaults_location,
Boolean $deprecation_logging,
String $deprecation_logging_level,
Optional[String] $download_tool,
Optional[String] $download_tool_insecure,
Boolean $download_tool_verify_certificates,
String $elasticsearch_group,
String $elasticsearch_user,
Enum['dailyRollingFile', 'rollingFile', 'file'] $file_rolling_type,
Stdlib::Absolutepath $homedir,
Hash $indices,
Hash $init_defaults,
Expand All @@ -368,10 +364,6 @@
Array[String] $jvm_options,
Optional[Variant[String, Hash]] $license,
Stdlib::Absolutepath $logdir,
Hash $logging_config,
Optional[String] $logging_file,
String $logging_level,
Optional[String] $logging_template,
Boolean $manage_datadir,
Boolean $manage_logdir,
Boolean $manage_repo,
Expand All @@ -392,12 +384,8 @@
Variant[Boolean, String] $repo_stage,
Boolean $restart_on_change,
Hash $roles,
Integer $rolling_file_max_backup_index,
String $rolling_file_max_file_size,
Hash $scripts,
Optional[Hash] $secrets,
Optional[String] $security_logging_content,
Optional[String] $security_logging_source,
String $service_name,
Enum['init', 'openbsd', 'openrc', 'systemd'] $service_provider,
Hash $snapshot_repositories,
Expand All @@ -411,15 +399,28 @@
Variant[String, Boolean] $version,
Optional[Stdlib::Absolutepath] $ca_certificate = undef,
Optional[Stdlib::Absolutepath] $certificate = undef,
String $default_logging_level = $logging_level,
Optional[String] $keystore_password = undef,
Optional[Stdlib::Absolutepath] $keystore_path = undef,
Optional[Hash] $logging_config = undef,
Optional[Stdlib::Absolutepath] $private_key = undef,
Enum['rsa','dsa','ec'] $private_key_type = 'rsa',
Boolean $restart_config_change = $restart_on_change,
Boolean $restart_package_change = $restart_on_change,
Boolean $restart_plugin_change = $restart_on_change,
Stdlib::Filemode $logdir_mode = '2750',
# Deprecated (not used)
Optional[String] $daily_rolling_date_pattern = undef,
Optional[String] $default_logging_level = undef,
Enum['dailyRollingFile', 'rollingFile', 'file'] $file_rolling_type = 'dailyRollingFile',
Optional[String] $logging_file = undef,
Optional[String] $logging_level = undef,
Optional[String] $logging_template = undef,
Optional[Boolean] $deprecation_logging = undef,
Optional[String] $deprecation_logging_level = undef,
Optional[Integer] $rolling_file_max_backup_index = undef,
Optional[String] $rolling_file_max_file_size = undef,
Optional[String] $security_logging_content = undef,
Optional[String] $security_logging_source = undef,
) {
#### Validate parameters

Expand Down
66 changes: 66 additions & 0 deletions spec/functions/es_hash2properties_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# frozen_string_literal: true

require 'spec_helper'

# Spec for the es_hash2properties() parser function: validates argument
# checking, hash-to-properties conversion (including nested keys and the
# custom header option), and that input hashes are not mutated.
describe 'es_hash2properties' do
  describe 'exception handling' do
    # No arguments at all must be rejected.
    it {
      expect(subject).to run.with_params.and_raise_error(
        Puppet::ParseError, %r{wrong number of arguments}i
      )
    }

    # First argument must be a Hash, not a String.
    it {
      expect(subject).to run.with_params('1').and_raise_error(
        Puppet::ParseError, %r{expected first argument}
      )
    }

    # Optional second argument (settings) must also be a Hash.
    it {
      expect(subject).to run.with_params({ 'a' => 1 }, '2').and_raise_error(
        Puppet::ParseError, %r{expected second argument}
      )
    }
  end

  describe 'conversion' do
    context 'simple keys' do
      # Scalar values (string, integer, boolean) are stringified; output is
      # sorted, prefixed with the default header, and ends with a newline.
      it {
        expect(subject).to run.with_params({
          'key1' => 'value1',
          'key2' => 0,
          'key3' => true,
        }).and_return(['# THIS FILE IS MANAGED BY PUPPET', 'key1 = value1', 'key2 = 0', 'key3 = true', ''].join("\n"))
      }
    end

    context 'keys and subkeys' do
      # Nested hashes are flattened into dotted property names.
      it {
        expect(subject).to run.with_params({
          'key1' => { 'subkey1' => 'value1', 'subkey2' => 0, },
          'key2' => true,
        }).and_return(['# THIS FILE IS MANAGED BY PUPPET', 'key1.subkey1 = value1', 'key1.subkey2 = 0', 'key2 = true', ''].join("\n"))
      }
    end

    context 'options header' do
      # The 'header' setting in the optional second argument replaces the
      # default managed-by-puppet banner.
      it {
        expect(subject).to run.with_params({
          'key1' => 'value1',
          'key2' => 0,
          'key3' => true,
        },
        {
          'header' => '# CUSTOM HEADER',
        }).and_return(['# CUSTOM HEADER', 'key1 = value1', 'key2 = 0', 'key3 = true', ''].join("\n"))
      }
    end
  end

  # The function must not mutate the caller's hash (it copies via to_a).
  it 'does not change the original hashes' do
    argument1 = { 'key1' => 'value1' }
    original1 = argument1.dup

    subject.execute(argument1)
    expect(argument1).to eq(original1)
  end
end
Loading