
Tweak ElasticSearch for logstash
bai committed Jun 24, 2015
1 parent 0e43e67 commit f123a73
Showing 1 changed file with 27 additions and 5 deletions.
32 changes: 27 additions & 5 deletions cookbooks/rubygems-logging/recipes/server_elasticsearch.rb
@@ -7,9 +7,23 @@
 node.default['elasticsearch']['cloud']['aws']['access_key'] = aws_credentials['access_key_id']
 node.default['elasticsearch']['cloud']['aws']['secret_key'] = aws_credentials['secret_access_key']
 node.default['elasticsearch']['cloud']['aws']['region'] = 'us-west-2'
-node.default['elasticsearch']['version'] = '1.5.0'
-node.default['elasticsearch']['allocated_memory'] = '4000m'
-node.default['elasticsearch']['bootstrap']['mlockall'] = true
+
+node.default['elasticsearch']['version'] = '1.6.0'
+
+node.default['elasticsearch']['plugins'] = {
+  'elasticsearch/marvel/latest' => {
+    'url' => 'https://download.elasticsearch.org/elasticsearch/marvel/marvel-latest.zip'
+  },
+  'lmenezes/elasticsearch-kopf' => {},
+}
+
+node.default['elasticsearch']['allocated_memory'] = "#{(node['memory']['total'].to_i * 0.5).floor / 1024}m"
+
+node.default['elasticsearch']['mlockall'] = node['memory']['total'].to_i >= 1048576
+
+# Allow recovery speeds of up to 500mb/sec across 12 concurrent streams
+node.default['elasticsearch']['indices.recovery.concurrent_streams'] = 12
+node.default['elasticsearch']['indices.recovery.max_bytes_per_sec'] = '500mb'
 
 # node.default['elasticsearch']['custom_config']['threadpool.search.type'] = 'fixed'
 # node.default['elasticsearch']['custom_config']['threadpool.search.size'] = '20'
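For reference, the new heap-size and mlockall attributes both derive from Ohai's node['memory']['total'], which is reported as a kilobyte string such as "16331060kB". A minimal standalone Ruby sketch of the same arithmetic (the sample memory value is illustrative, not taken from this commit):

# Illustrative only: mirrors the attribute expressions above outside of Chef.
total_kb = '16331060kB'.to_i                            # Ohai-style total memory in kB (sample value)
allocated_memory = "#{(total_kb * 0.5).floor / 1024}m"  # half of total RAM, expressed in megabytes
mlockall = total_kb >= 1048576                          # lock the heap only on nodes with at least 1 GB of RAM

puts allocated_memory  # => "7974m" for the sample value
puts mlockall          # => true

So a 4 GB node would get roughly a 2048m heap, replacing the previous fixed '4000m' allocation.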
@@ -35,6 +49,15 @@
 node.default['elasticsearch']['custom_config']['index.refresh_interval'] = '30s'
 node.default['elasticsearch']['custom_config']['index.translog.flush_threshold_ops'] = '50000'
 
+# Remove shard reallocation throttling/limits
+node.default['elasticsearch']['custom_config']['cluster.routing.allocation.node_concurrent_recoveries'] = '-1'
+node.default['elasticsearch']['custom_config']['cluster.routing.allocation.cluster_concurrent_rebalance'] = '-1'
+
+node.default['elasticsearch']['custom_config']['index.analysis.analyzer.default.type'] = 'keyword'
+node.default['elasticsearch']['custom_config']['index.analysis.analyzer.default.stopwords'] = '_none_'
+
+node.default['elasticsearch']['skip_restart'] = true
+
 node.default['elasticsearch']['gc_settings'] = <<-CONFIG
 -XX:+UseG1GC
 -XX:+HeapDumpOnOutOfMemoryError
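The custom_config hash is the cookbook's pass-through for raw elasticsearch.yml settings. Assuming each pair is written verbatim as a "key: value" line (an assumption about the template, not something shown in this commit), a small Ruby sketch of how the settings added above would come out:

# Illustrative sketch: renders custom_config pairs the way a flat
# "key: value" elasticsearch.yml template would; not the cookbook's actual code.
custom_config = {
  'cluster.routing.allocation.node_concurrent_recoveries'   => '-1',
  'cluster.routing.allocation.cluster_concurrent_rebalance' => '-1',
  'index.analysis.analyzer.default.type'                     => 'keyword',
  'index.analysis.analyzer.default.stopwords'                => '_none_',
}
puts custom_config.map { |key, value| "#{key}: #{value}" }.join("\n")

With the default analyzer set to keyword, string fields are indexed as single untokenized terms, which suits logstash-style exact-match fields.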
@@ -75,8 +98,7 @@
 include_recipe 'elasticsearch'
 include_recipe 'elasticsearch::ebs'
 include_recipe 'elasticsearch::data'
-
-install_plugin 'lmenezes/elasticsearch-kopf'
+include_recipe 'elasticsearch::plugins'
 
 easy_install_package 'elasticsearch-curator'
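The explicit install_plugin call is replaced by the elasticsearch::plugins recipe, which is driven by the node.default['elasticsearch']['plugins'] hash defined earlier in the diff. A rough sketch of that pattern (illustrative only; the recipe's actual resources and the exact bin/plugin invocation may differ):

# Illustrative iteration over a plugins attribute hash, mirroring the
# attribute shape used above; not the cookbook's actual implementation.
plugins = {
  'elasticsearch/marvel/latest' => {
    'url' => 'https://download.elasticsearch.org/elasticsearch/marvel/marvel-latest.zip'
  },
  'lmenezes/elasticsearch-kopf' => {},
}

plugins.each do |name, options|
  command = "bin/plugin --install #{name}"
  command += " --url #{options['url']}" if options['url']
  puts command  # e.g. "bin/plugin --install lmenezes/elasticsearch-kopf"
end

This keeps plugin choices in node attributes rather than hard-coded recipe calls, so kopf and Marvel are installed from the same list.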
