Merge pull request #216 from opencrvs/chore/upgrade-to-elastic-8

Upgrade Elastic stack from 7.17 to 8

makelicious authored Jul 30, 2024
2 parents dfbf4c6 + aa84a6c commit 05fbd63

Showing 13 changed files with 102 additions and 60 deletions.
9 changes: 9 additions & 0 deletions infrastructure/deployment/deploy.sh
@@ -386,6 +386,15 @@ echo
echo "Waiting 2 mins for mongo to deploy before working with data. Please note it can take up to 10 minutes for the entire stack to deploy in some scenarios."
echo

echo 'Setting up elastalert indices'

while true; do
if configured_ssh "/opt/opencrvs/infrastructure/elasticsearch/setup-elastalert-indices.sh"; then
break
fi
sleep 5
done

echo "Setting up Kibana config & alerts"

while true; do
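The new loop retries the Elastalert index setup over SSH until it succeeds. A hypothetical bounded variant (the attempt cap and interval below are illustrative, not part of this change) would fail the deployment instead of looping forever:

  # Sketch only: give up after a fixed number of attempts
  attempts=0
  until configured_ssh "/opt/opencrvs/infrastructure/elasticsearch/setup-elastalert-indices.sh"; do
    attempts=$((attempts + 1))
    if [ "$attempts" -ge 20 ]; then
      echo "Elastalert index setup did not succeed after $attempts attempts" >&2
      exit 1
    fi
    sleep 5
  done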
20 changes: 10 additions & 10 deletions infrastructure/docker-compose.deploy.yml
@@ -60,7 +60,7 @@ services:
- overlay_net

filebeat:
image: docker.elastic.co/beats/filebeat:7.17.0
image: docker.elastic.co/beats/filebeat:8.14.3
user: root
networks:
- overlay_net
@@ -85,7 +85,7 @@ services:
- 'traefik.enable=false'

metricbeat:
image: docker.elastic.co/beats/metricbeat:7.17.13
image: docker.elastic.co/beats/metricbeat:8.14.3
user: root
cap_add:
- SYS_PTRACE
@@ -128,7 +128,7 @@ services:
[
'curl',
'-u',
'elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}',
'kibana_system:${KIBANA_SYSTEM_PASSWORD}',
'-X',
'POST',
'http://kibana:5601/api/saved_objects/_import?overwrite=true',
@@ -156,7 +156,7 @@ services:
gelf-address: 'udp://127.0.0.1:12201'
tag: 'setup-kibana-config'
kibana:
image: docker.elastic.co/kibana/kibana:7.17.0
image: docker.elastic.co/kibana/kibana:8.14.3
restart: always
deploy:
labels:
@@ -173,8 +173,8 @@ services:
networks:
- overlay_net
environment:
- ELASTICSEARCH_USERNAME=elastic
- ELASTICSEARCH_PASSWORD=${ELASTICSEARCH_SUPERUSER_PASSWORD}
- ELASTICSEARCH_USERNAME=kibana_system
- ELASTICSEARCH_PASSWORD=${KIBANA_SYSTEM_PASSWORD}
configs:
- source: kibana.{{ts}}
target: /usr/share/kibana/config/kibana.yml
@@ -282,7 +282,6 @@ services:
- path.repo=/data/backups/elasticsearch
- cluster.name=docker-cluster
- network.host=0.0.0.0
- discovery.zen.minimum_master_nodes=1
- discovery.type=single-node
- xpack.security.enabled=true
- xpack.security.authc.api_key.enabled=true
@@ -365,6 +364,7 @@ services:
- APM_ELASTIC_PASSWORD=${ROTATING_APM_ELASTIC_PASSWORD}
- SEARCH_ELASTIC_USERNAME=search-user
- SEARCH_ELASTIC_PASSWORD=${ROTATING_SEARCH_ELASTIC_PASSWORD}
- KIBANA_SYSTEM_PASSWORD=${KIBANA_SYSTEM_PASSWORD}
- KIBANA_USERNAME=${KIBANA_USERNAME}
- KIBANA_PASSWORD=${KIBANA_PASSWORD}
volumes:
@@ -384,7 +384,7 @@ services:
gelf-address: 'udp://127.0.0.1:12201'
tag: 'setup-elasticsearch-users'
elastalert:
image: jertel/elastalert2:2.3.0
image: jertel/elastalert2:2.19.0
restart: unless-stopped
environment:
- ES_USERNAME=elastic
@@ -408,7 +408,7 @@ services:
tag: 'elastalert'

logstash:
image: logstash:7.17.0
image: logstash:8.14.3
command: logstash -f /etc/logstash/logstash.conf --verbose
ports:
- '12201:12201'
@@ -431,7 +431,7 @@ services:
- 'traefik.enable=false'
replicas: 1
apm-server:
image: docker.elastic.co/apm/apm-server:7.15.2
image: docker.elastic.co/apm/apm-server:7.17.22
cap_add: ['CHOWN', 'DAC_OVERRIDE', 'SETGID', 'SETUID']
cap_drop: ['ALL']
restart: always
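The compose changes bump Filebeat, Metricbeat, Kibana and Logstash to 8.14.3, keep APM Server on the 7.17 line, drop the discovery.zen.minimum_master_nodes setting that Elasticsearch 8 no longer accepts, and switch Kibana to the built-in kibana_system user, which Kibana 8 expects in place of the elastic superuser. A quick way to confirm the new credentials before Kibana starts (a sketch assuming the service names and overlay network used above):

  # Should return the kibana_system user document with HTTP 200
  docker run --rm --network=opencrvs_overlay_net curlimages/curl \
    -u "kibana_system:${KIBANA_SYSTEM_PASSWORD}" \
    "http://elasticsearch:9200/_security/_authenticate"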
13 changes: 11 additions & 2 deletions infrastructure/elasticsearch/roles/search_user.json
@@ -1,8 +1,17 @@
{
"cluster": ["manage"],
"indices": [
{
"names": ["ocrvs"],
"privileges": ["write", "create", "create_index", "delete", "delete_index", "read"]
"names": ["ocrvs", "ocrvs-*"],
"privileges": [
"write",
"create",
"create_index",
"delete",
"delete_index",
"read",
"manage"
]
}
]
}
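The search user's role now covers the ocrvs-* index pattern and adds the manage privilege. If the role ever needs to be applied by hand, the same JSON body can be sent to the Elasticsearch role API (a sketch; the role name search-user is an assumption taken from SEARCH_ELASTIC_USERNAME above, and the setup scripts normally handle this):

  # Sketch: create or update the role from the JSON definition shown above
  curl -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" \
    -X PUT "http://elasticsearch:9200/_security/role/search-user" \
    -H 'Content-Type: application/json' \
    -d @infrastructure/elasticsearch/roles/search_user.json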
44 changes: 44 additions & 0 deletions infrastructure/elasticsearch/setup-elastalert-indices.sh
@@ -0,0 +1,44 @@
#!/usr/bin/env bash

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
#
# OpenCRVS is also distributed under the terms of the Civil Registration
# & Healthcare Disclaimer located at http://opencrvs.org/license.
#
# Copyright (C) The OpenCRVS Authors located at https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS.

# Upgrading from 7 to 8 requires deleting elastalert indices. https://elastalert2.readthedocs.io/en/latest/recipes/faq.html#does-elastalert-2-support-elasticsearch-8

set -e

docker_command="docker run --rm --network=opencrvs_overlay_net curlimages/curl"

echo 'Waiting for availability of Elasticsearch'
ping_status_code=$($docker_command --connect-timeout 60 -u elastic:$ELASTICSEARCH_SUPERUSER_PASSWORD -o /dev/null -w '%{http_code}' "http://elasticsearch:9200")

if [ "$ping_status_code" -ne 200 ]; then
echo "Elasticsearch is not ready. API returned status code: $ping_status_code"
exit 1
fi



echo 'Scaling down Elastalert'

docker service scale opencrvs_elastalert=0

echo 'Deleting Elastalert indices'
indices='elastalert_status,elastalert_status_error,elastalert_status_past,elastalert_status_silence,elastalert_status_status'

delete_status_code=$($docker_command --connect-timeout 60 -u elastic:$ELASTICSEARCH_SUPERUSER_PASSWORD -o /dev/null -w '%{http_code}' "http://elasticsearch:9200/${indices}" -X DELETE)

if [ "$delete_status_code" -ne 200 ]; then
echo "Could not delete indices. API returned status code: $delete_status_code"
exit 1
fi

echo 'Scaling up Elastalert'
docker service scale opencrvs_elastalert=1
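Once Elastalert is scaled back up it recreates its writeback indices in the Elasticsearch 8 format. A sketch of a follow-up check, using the same throwaway curl container as the script:

  # List the recreated Elastalert writeback indices
  docker run --rm --network=opencrvs_overlay_net curlimages/curl \
    -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" \
    "http://elasticsearch:9200/_cat/indices/elastalert*?v"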

33 changes: 0 additions & 33 deletions infrastructure/elasticsearch/setup-helpers.sh
@@ -230,36 +230,3 @@ function ensure_settings {

return $result
}


function create_elastic_index {
local index_name=$1
local elasticsearch_host="${ELASTICSEARCH_HOST:-elasticsearch}"

local -a args=( '-s' '-D-' '-m15' '-w' '%{http_code}'
"http://${elasticsearch_host}:9200/${index_name}"
'-X' 'PUT'
'-H' 'Content-Type: application/json'
)

if [[ -n "${ELASTIC_PASSWORD:-}" ]]; then
args+=( '-u' "elastic:${ELASTIC_PASSWORD}" )
fi

local -i result=1
local output

output="$(curl "${args[@]}")"

echo "${output}"

if [[ "${output: -3}" -eq 200 || $output == *"resource_already_exists"* ]]; then
result=0
fi

if ((result)); then
echo -e "\n${output::-3}\n"
fi

return $result
}
3 changes: 0 additions & 3 deletions infrastructure/elasticsearch/setup-settings.sh
@@ -19,8 +19,5 @@ echo "-------- $(date) --------"
log 'Waiting for availability of Elasticsearch'
wait_for_elasticsearch

log "Creating index for Elasticsearch. Index: ocrvs"
create_elastic_index "ocrvs"

log "Updating replicas for Elasticsearch"
ensure_settings "{\"index\":{\"number_of_replicas\":0}}"
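With the explicit creation of the ocrvs index removed, setup-settings.sh only ensures the replica count. The same setting can be applied or verified manually with the index settings API (a sketch, not the exact request ensure_settings builds):

  # Sketch: disable replicas for all indices on the single-node cluster
  curl -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" \
    -X PUT "http://elasticsearch:9200/_settings" \
    -H 'Content-Type: application/json' \
    -d '{"index":{"number_of_replicas":0}}'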
1 change: 1 addition & 0 deletions infrastructure/elasticsearch/setup-users.sh
@@ -24,6 +24,7 @@ users_passwords=(
[$SEARCH_ELASTIC_USERNAME]="${SEARCH_ELASTIC_PASSWORD:-}"
[beats_system]="${METRICBEAT_ELASTIC_PASSWORD:-}"
[apm_system]="${APM_ELASTIC_PASSWORD:-}"
[kibana_system]="${KIBANA_SYSTEM_PASSWORD:-}"
[$KIBANA_USERNAME]="${KIBANA_PASSWORD:-}"
)

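Adding kibana_system to users_passwords means the setup script now also sets a password for that built-in user. The effect is equivalent to a direct call to the change-password endpoint of the security API (a sketch, not the helper's exact request):

  # Sketch: set the built-in kibana_system user's password
  curl -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" \
    -X POST "http://elasticsearch:9200/_security/user/kibana_system/_password" \
    -H 'Content-Type: application/json' \
    -d "{\"password\": \"${KIBANA_SYSTEM_PASSWORD}\"}"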
24 changes: 24 additions & 0 deletions infrastructure/environments/setup-environment.ts
@@ -653,6 +653,13 @@ const derivedVariables = [
type: 'disabled',
scope: 'ENVIRONMENT'
},
{
name: 'KIBANA_SYSTEM_PASSWORD',
valueLabel: 'KIBANA_SYSTEM_PASSWORD',
valueType: 'SECRET',
type: 'disabled',
scope: 'ENVIRONMENT'
},
{
name: 'MINIO_ROOT_USER',
valueLabel: 'MINIO_ROOT_USER',
@@ -1103,6 +1110,23 @@ const SPECIAL_NON_APPLICATION_ENVIRONMENTS = ['jump', 'backup']
),
scope: 'ENVIRONMENT' as const
},
{
name: 'KIBANA_SYSTEM_PASSWORD',
type: 'SECRET' as const,
didExist: findExistingValue(
'KIBANA_SYSTEM_PASSWORD',
'SECRET',
'ENVIRONMENT',
existingValues
),
value: findExistingOrDefine(
'KIBANA_SYSTEM_PASSWORD',
'SECRET',
'ENVIRONMENT',
generateLongPassword()
),
scope: 'ENVIRONMENT' as const
},
{
name: 'MINIO_ROOT_USER',
type: 'SECRET' as const,
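setup-environment.ts now provisions a KIBANA_SYSTEM_PASSWORD secret per environment, reusing an existing value or generating a long password when none exists. If the secret were managed by hand instead (a hypothetical manual step, assuming the GitHub CLI and an environment named production), it might look like:

  # Hypothetical: set the environment secret manually
  gh secret set KIBANA_SYSTEM_PASSWORD \
    --env production \
    --body "$(openssl rand -base64 32)"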
2 changes: 1 addition & 1 deletion infrastructure/monitoring/beats/metricbeat.yml
@@ -85,7 +85,7 @@ setup.kibana:
password: ${KIBANA_PASSWORD}

#============================== Xpack Monitoring ===============================
xpack.monitoring:
monitoring:
enabled: true
elasticsearch:
username: ${BEATS_USERNAME}
2 changes: 1 addition & 1 deletion infrastructure/monitoring/filebeat/filebeat.yml
@@ -60,7 +60,7 @@ setup.kibana:
password: ${ELASTICSEARCH_PASSWORD}

#============================== Xpack Monitoring ===============================
xpack.monitoring:
monitoring:
enabled: true
elasticsearch:

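In both Beats configurations the xpack.monitoring.* settings are renamed to monitoring.*, matching the 8.x naming. To confirm that self-monitoring data still arrives after the upgrade, the monitoring indices can be inspected (a sketch using the superuser credentials; index names may vary by stack version):

  # List the monitoring indices written by the Beats
  docker run --rm --network=opencrvs_overlay_net curlimages/curl \
    -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" \
    "http://elasticsearch:9200/_cat/indices/.monitoring-*?v"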
1 change: 0 additions & 1 deletion infrastructure/monitoring/kibana/kibana.yml
@@ -53,7 +53,6 @@ monitoring.ui.container.elasticsearch.enabled: true
xpack.encryptedSavedObjects.encryptionKey: '{{KIBANA_ENCRYPTION_KEY}}'
xpack.reporting.encryptionKey: '{{KIBANA_ENCRYPTION_KEY}}'
xpack.actions.preconfiguredAlertHistoryEsIndex: true
xpack.infra.sources.default.logAlias: 'logs-*,filebeat-*,kibana_sample_data_logs*,logstash*'
# If your Elasticsearch is protected with basic authentication, these settings provide
# the username and password that the Kibana server uses to perform maintenance on the Kibana
# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which
2 changes: 1 addition & 1 deletion infrastructure/monitoring/kibana/setup-config.sh
@@ -34,4 +34,4 @@ $docker_command --connect-timeout 60 -u elastic:$ELASTICSEARCH_SUPERUSER_PASSWOR
$docker_command --connect-timeout 60 -u elastic:$ELASTICSEARCH_SUPERUSER_PASSWORD "$kibana_alerting_api_url" | docker run --rm -i --network=opencrvs_overlay_net ghcr.io/jqlang/jq -r '.data[].id' | while read -r id; do
$docker_command --connect-timeout 60 -X POST -H 'kbn-xsrf: true' -u elastic:$ELASTICSEARCH_SUPERUSER_PASSWORD "http://kibana:5601/api/alerting/rule/$id/_disable"
$docker_command --connect-timeout 60 -X POST -H 'kbn-xsrf: true' -u elastic:$ELASTICSEARCH_SUPERUSER_PASSWORD "http://kibana:5601/api/alerting/rule/$id/_enable"
done
done
8 changes: 0 additions & 8 deletions infrastructure/run-migrations.sh
@@ -25,13 +25,5 @@ elasticsearch_host() {
fi
}

create_elastic_index () {
local index_name=$1
echo "Creating ElasticSearch Index: ${index_name}"
docker run --rm --network=opencrvs_overlay_net appropriate/curl curl -XPUT "http://$(elasticsearch_host)/$index_name" -v
}

create_elastic_index "ocrvs"

# run migration by restarting migration service
docker service update --force --update-parallelism 1 --update-delay 30s opencrvs_migration
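With the ad-hoc index creation gone, run-migrations.sh relies solely on force-restarting the migration service. Its progress can be followed with (a sketch):

  # Watch the forced update of the migration service converge
  docker service ps opencrvs_migration --no-trunc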
