#!/usr/bin/env bash

# 3 options:
# - docker compose
# - docker "simple"
# - classic installation
#   > macOS
#   > debian/ubuntu
#   > other linux
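
# Usage (illustrative; the file name below is only an example):
#   cd /path/to/your/fab-manager && bash elastic_upgrade.sh
# The script must be run from the Fab-manager installation folder; it detects which of the
# installation types listed above is in use and drives the ElasticSearch upgrade accordingly.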

config()
{
  if [ "$(whoami)" = "root" ]
  then
    echo "It is not recommended to run this script as root. As a normal user, you will be prompted for elevation when needed."
    read -rp "Continue anyway? (y/n) " confirm </dev/tty
    if [[ "$confirm" = "n" ]]; then exit 1; fi
  else
    if ! command -v sudo
    then
      echo "Please install and configure sudo before running this script."
      echo "sudo was not found, exiting..."
      exit 1
    elif ! groups | grep sudo; then
      echo "Please add your current user to the sudoers."
      echo "You can run the following as root: \"usermod -aG sudo $(whoami)\", then logout and login again"
      echo "sudo was not configured, exiting..."
      exit 1
    fi
    if ! groups | grep docker; then
      echo "Please add your current user to the docker group."
      echo "You can run the following as root: \"usermod -aG docker $(whoami)\", then logout and login again"
      echo "current user is not allowed to use docker, exiting..."
      exit 1
    fi
  fi

  echo "detecting curl..."
  if ! command -v curl
  then
    echo "Please install curl before running this script."
    echo "curl was not found, exiting..."
    exit 1
  fi

  echo "detecting jq..."
  if ! command -v jq
  then
    echo "Please install jq before running this script."
    echo "jq was not found, exiting..."
    exit 1
  fi

  if ! command -v awk || ! [[ $(awk -W version) =~ ^GNU ]]
  then
    echo "Please install GNU Awk before running this script."
    echo "gawk was not found, exiting..."
    exit 1
  fi

  echo "checking memory..."
  mem=$(free -mt | grep Total | awk '{print $2}')
  if [ "$mem" -lt 4000 ]
  then
    read -rp "Not enough memory to perform the upgrade. Would you like to add the necessary swap? (y/n) " swap </dev/tty
    if [ "$swap" = "y" ]
    then
      local swap_value=$((4096 - mem))
      sudo fallocate -l "${swap_value}M" /swapfile
      sudo chmod 600 /swapfile
      sudo mkswap /swapfile
      sudo swapon /swapfile
      echo '/swapfile none swap sw 0 0' | sudo tee -a /etc/fstab
      echo 'vm.swappiness=10' | sudo tee -a /etc/sysctl.conf
      echo 'vm.vfs_cache_pressure=50' | sudo tee -a /etc/sysctl.conf
    else
      echo "Please upgrade memory to 4GB or more to allow the upgrade to run."
      free -h
      exit 7
    fi
  fi
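
  # Informational: you can double-check the newly enabled swap with `swapon --show` or `free -h`.
  # Note that the sysctl entries appended above only take effect after a reboot unless they are
  # applied immediately with `sudo sysctl -p`.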

  FM_PATH=$(pwd)
  TYPE="NOT-FOUND"
  read -rp "Is Fab-manager installed at \"$FM_PATH\"? (y/n) " confirm </dev/tty
  if [ "$confirm" = "y" ]
  then
    # checking disk space (minimum required = 1168323KB)
    space=$(df "$FM_PATH" | awk '/[0-9]%/{print $(NF-2)}')
    if [ "$space" -lt 1258291 ]
    then
      echo "Not enough free disk space to perform the upgrade. Please free at least 1.2GB of disk space and try again."
      df -h "$FM_PATH"
      exit 7
    fi
    if [ -f "$FM_PATH/config/application.yml" ]
    then
      ES_HOST=$(grep ELASTICSEARCH_HOST "$FM_PATH/config/application.yml" | awk '{print $2}')
    elif [ -f "$FM_PATH/config/env" ]
    then
      ES_HOST=$(grep ELASTICSEARCH_HOST "$FM_PATH/config/env" | awk '{split($0,a,"="); print a[2]}')
    else
      echo "Fab-manager's environment file was not found, please run this script from the installation folder."
      exit 1
    fi
    ES_IP=$(getent ahostsv4 "$ES_HOST" | awk '{ print $1 }' | uniq)
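
    # For reference: `getent ahostsv4 <hostname>` prints the resolved IPv4 address(es) in the first
    # column, one line per address and socket type, so the awk/uniq pipeline above keeps the bare IP.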
  else
    echo "Please run this script from the Fab-manager's installation folder."
    exit 1
  fi
}

test_docker_compose()
{
  if [[ -f "$FM_PATH/docker-compose.yml" ]]
  then
    docker-compose ps | grep elastic
    if [[ $? = 0 ]]
    then
      TYPE="DOCKER-COMPOSE"
      local container_id=$(docker-compose ps | grep elastic | awk '{print $1}')
      ES_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$container_id")
    fi
  fi
}

test_docker()
{
  if command -v docker
  then
    docker ps | grep elasticsearch:1.7
    if [[ $? = 0 ]]
    then
      local containers=$(docker ps | grep elasticsearch:1.7)
      docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(echo "$containers" | awk '{print $1}') | grep "$ES_IP"
      if [[ $? = 0 ]]; then TYPE="DOCKER"; fi
    fi
  fi
}

test_classic()
{
  if [ "$ES_IP" = "127.0.0.1" ] || [ "$ES_IP" = "::1" ]
  then
    whereis -b elasticsearch | grep "/"
    if [[ $? = 0 ]]; then TYPE="CLASSIC"; fi
  fi
}

test_running()
{
  local http_res=$(curl -I "$ES_IP:9200" 2>/dev/null | head -n 1 | cut -d$' ' -f2)
  if [ "$http_res" = "200" ]
  then
    echo "ONLINE"
  else
    echo "OFFLINE"
  fi
}
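
# For reference: the check above reads the HTTP status code from the first response line of
# `curl -I "$ES_IP:9200"`, which typically looks like "HTTP/1.1 200 OK" when ElasticSearch is up.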

test_version()
{
  local version=$(curl "$ES_IP:9200" 2>/dev/null | grep number | awk '{print $3}')
  case "$version" in
    *1.7*)
      echo "1.7"
      ;;
    *2.4*)
      echo "2.4"
      ;;
    *5.6*)
      echo "5.6"
      ;;
    *6.2*)
      echo "6.2"
      ;;
    *)
      echo "$version"
  esac
}
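
# Informational: `curl "$ES_IP:9200"` returns the cluster banner as JSON, whose "version" object
# holds a "number" field (e.g. a line like `"number" : "1.7.5",` -- value shown for illustration);
# the grep/awk pipeline above extracts that value to decide which upgrade step applies.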

detect_installation()
{
  echo "Detecting installation type..."
  test_docker_compose
  if [[ "$TYPE" = "DOCKER-COMPOSE" ]]
  then
    echo "Docker-compose installation detected."
  else
    test_docker
    if [[ "$TYPE" = "DOCKER" ]]
    then
      echo "Classical docker installation detected."
    else
      test_classic
      if [[ "$TYPE" = "CLASSIC" ]]
      then
        echo "Local installation detected on the host system."
      fi
    fi
  fi

  if [[ "$TYPE" = "NOT-FOUND" ]]
  then
    echo "ElasticSearch 1.7 was not found on the current system, exiting..."
    exit 2
  else
    echo -n "Detecting online status... "
    if [[ "$TYPE" != "NOT-FOUND" ]]
    then
      STATUS=$(test_running)
      echo "$STATUS"
      if [ "$STATUS" = "OFFLINE" ]
      then
        echo "Your ElasticSearch instance is offline. Please check the logs and verify that no problems prevent the upgrade..."
        if [ "$TYPE" = "CLASSIC" ]
        then
          echo "Note: you can use \`sudo journalctl -u elasticsearch.service\` or \`vi /var/log/elasticsearch/elasticsearch.log.\$(date --rfc-3339=date)\` to view the logs, depending on your installation"
        else
          echo "Note: you can use \`docker logs CONTAINER\` to view the logs"
        fi
        exit 2
      fi
    fi
  fi
}

error_index_status()
{
  echo "Your ElasticSearch installation contains indices whose status is not \"green\", and this cannot be solved automatically."
  echo "Please resolve these statuses before continuing..."
  curl "$ES_IP:9200/_cat/indices?v" 2>/dev/null | grep --color -E "yellow|red|$"
  exit 6
}

ensure_initial_status_green()
{
  echo "Checking the status of your ElasticSearch indices..."
  local state=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $1}' | sort | uniq)
  if [ "$state" != "green" ]
  then
    local replicas=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $5}' | sort | uniq)
    if [ "$replicas" != "0" ]
    then
      local indices=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $3}')
      for index in $indices # do not surround $indices with quotes
      do
        curl -XPUT "$ES_IP:9200/$index/_settings" 2>/dev/null -H 'Content-Type: application/json' -d '{
          "index": {
            "number_of_replicas": 0
          }
        }'
      done
      local final_state=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $1}' | sort | uniq)
      if [ "$final_state" != "green" ]; then error_index_status; fi
    else
      error_index_status
    fi
  fi
}
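
# Background: on the pre-5.x ElasticSearch versions handled here, `_cat/indices` prints the columns
# `health status index pri rep docs.count docs.deleted store.size pri.store.size`, which is why the
# function above reads the health with `awk '{print $1}'`, the index name with `$3` and the replica
# count with `$5`. On a single-node setup, unassigned replicas keep indices "yellow", so forcing
# "number_of_replicas" to 0 is what allows the cluster to reach the "green" state.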

wait_for_online()
{
  local counter=0
  echo -n "Waiting for the ElasticSearch instance to come online"
  STATUS=$(test_running)
  while [ "$STATUS" != "ONLINE" ]
  do
    echo -n "."
    sleep 1
    STATUS=$(test_running)
    ((++counter))
    if [ "$counter" -eq 120 ]
    then
      echo -e "\nThe ElasticSearch instance did not come online within 2 minutes, please check the logs for any errors. Exiting..."
      exit 8
    fi
  done
  echo -e "\n"
}

wait_for_green_status()
{
  echo -n "Waiting for the ElasticSearch indices to reach the green status"
  local state=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $1}' | sort | uniq)
  while [ "$state" != "green" ]
  do
    echo -n "."
    sleep 1
    state=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $1}' | sort | uniq)
  done
  echo -e "\n"
}

prepare_upgrade()
{
  curl -XPUT "$ES_IP:9200/_cluster/settings?pretty" 2>/dev/null -H 'Content-Type: application/json' -d '{
    "transient": {
      "cluster.routing.allocation.enable": "none"
    }
  }'
  curl -XPOST "$ES_IP:9200/_flush/synced?pretty" 2>/dev/null
}
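
# Rationale: disabling shard allocation and requesting a synced flush before stopping the node follows
# ElasticSearch's documented upgrade procedure; it avoids needless shard rebalancing while the node is
# down and speeds up shard recovery once the upgraded node rejoins the cluster.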

reenable_allocation()
{
  curl -XPUT "$ES_IP:9200/_cluster/settings?pretty" -H 'Content-Type: application/json' -d '{
    "transient": {
      "cluster.routing.allocation.enable": "all"
    }
  }'
}

upgrade_compose()
{
  local current=$1
  local target=$2
  echo -e "\nUpgrading the docker-compose installation from $current to $target..."
  prepare_upgrade
  docker-compose stop elasticsearch
  docker-compose rm -f elasticsearch
  local image="elasticsearch:$target"
  if [ "$target" = '6.2' ]; then image="docker.elastic.co\/elasticsearch\/elasticsearch-oss:6.2.3"; fi
  sed -i.bak "s/image: elasticsearch:$current/image: $image/g" "$FM_PATH/docker-compose.yml"
  if ! grep -qe "ES_JAVA_OPTS" docker-compose.yml
  then
    sed -i.bak "/image: $image/s/.*/&\n    environment:\n      - \"ES_JAVA_OPTS=-Xms512m -Xmx512m\"/" "$FM_PATH/docker-compose.yml"
  fi
  if ! grep -qe "ulimits" docker-compose.yml
  then
    sed -i.bak "/image: $image/s/.*/&\n    ulimits:\n      memlock:\n        soft: -1\n        hard: -1/" "$FM_PATH/docker-compose.yml"
  fi
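
  # For illustration only: after the substitutions above, the elasticsearch service in
  # docker-compose.yml is expected to look roughly like this (indentation and tag depend on your file):
  #   elasticsearch:
  #     image: elasticsearch:2.4
  #     environment:
  #       - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
  #     ulimits:
  #       memlock:
  #         soft: -1
  #         hard: -1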
  if [ "$target" = '2.4' ]
  then
    # get the current data directory
    dir=$(awk 'BEGIN { FS="\n"; RS="";} { match($0, /image: elasticsearch:2\.4(\n|.)+volumes:(\n|.)+(-.*elasticsearch\/data)/, lines); FS="[ :]+"; RS="\r\n"; split(lines[3], line); print line[2] }' "$FM_PATH/docker-compose.yml")
    # set the configuration directory
    dir=$(echo "${dir//[$'\t\r\n ']}/config")
    # insert the configuration directory into the docker-compose bindings
    awk "BEGIN { FS=\"\n\"; RS=\"\";} { print gensub(/(image: elasticsearch:2\.4(\n|.)+volumes:(\n|.)+(-.*elasticsearch\/data))/, \"\\\\1\n      - ${dir}:/usr/share/elasticsearch/config\", \"g\") }" "$FM_PATH/docker-compose.yml" > "$FM_PATH/.awktmpfile" && mv "$FM_PATH/.awktmpfile" "$FM_PATH/docker-compose.yml"
    abs_dir=$(echo "$dir" | sed "s^\${PWD}^$FM_PATH^")
    echo -e "\nCopying the ElasticSearch 2.4 configuration files from GitHub to $abs_dir..."
    mkdir -p "$abs_dir"
    curl -sSL https://raw.githubusercontent.com/sleede/fab-manager/master/docker/elasticsearch.yml > "$abs_dir/elasticsearch.yml"
    curl -sSL https://raw.githubusercontent.com/sleede/fab-manager/master/docker/log4j2.properties > "$abs_dir/log4j2.properties"
  fi

  docker-compose pull
  docker-compose up -d
  wait_for_online
  wait_for_green_status

  # check the status and the version
  local version=$(test_version)
  if [ "$STATUS" = "ONLINE" ] && [ "$version" = "$target" ]; then
    echo "Installation of ElasticSearch $target was successful."
  else
    echo "Unable to find an active ElasticSearch $target instance, something may have gone wrong, exiting..."
    echo "status: $STATUS, version: $version"
    exit 4
  fi
}

upgrade_docker()
{
  local current=$1
  local target=$2
  echo -e "\nUpgrading the docker installation from $current to $target..."
  local containers=$(docker ps | grep "elasticsearch:$current")
  # get the container id
  local id=$(docker inspect -f '{{.Id}} {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(echo "$containers" | awk '{print $1}') | grep "$ES_IP" | awk '{print $1}')
  # get the container name
  local name=$(docker inspect -f '{{.Name}}' "$id" | sed s:^/::g)
  # get the container network name
  local network=$(docker inspect -f '{{.NetworkSettings.Networks}}' "$id" | sed 's/map\[\(.*\):0x[a-f0-9]*\]/\1/')
  # get the volume mappings
  local volumes=$(docker inspect -f '{{.Mounts}}' "$id" | sed 's/} {/\n/g' | sed 's/^\[\?{\?bind[[:blank:]]*\([^[:blank:]]*\)[[:blank:]]*\([^[:blank:]]*\)[[:blank:]]*true \(rprivate\)\?}\?]\?$/-v \1:\2/g')
  local mounts=$(echo "$volumes" | sed -e ':a' -e 'N' -e '$!ba' -e 's/\n/ /g')
  # get the mapped ports
  local ports=$(docker inspect -f '{{.NetworkSettings.Ports}}' "$id" | sed 's!\([0-9]*\)/tcp:\[{0\.0\.0\.0 \([0-9]*\)}\]!\1:\2!g' | sed 's/^map\[\(.*\)\]/\1/' | sed 's/^map\[\(.*\)\]/\1/' | sed 's/ / -p /' | sed 's/^/-p /')

  prepare_upgrade

  # stop the current elastic
  docker stop "$name"
  docker rm -f "$name"

  # run the target elastic
  local image="elasticsearch:$target"
  local image_name="$image"
  if [ "$target" = '6.2' ]
  then
    image_name="elasticsearch-oss:$target"
    image="docker.elastic.co/elasticsearch/$image_name"
  elif [ "$target" = '2.4' ]
  then
    configdir=$(echo "$volumes" | grep config | awk -F'[ :]' '{print $2}')
    echo -e "\nCopying the ElasticSearch 2.4 configuration files from $(pwd)/docker to $configdir..."
    sudo cp docker/elasticsearch.yml "$configdir"
    sudo cp docker/log4j2.properties "$configdir"
  fi
  docker pull "$image"
  echo docker run --restart=always -d --name="$name" --network="$network" --ip="$ES_IP" "$mounts" "$ports" "$image_name" | bash

  wait_for_online
  wait_for_green_status

  # check the status and the version
  local version=$(test_version)
  if [ "$STATUS" = "ONLINE" ] && [ "$version" = "$target" ]; then
    echo "Installation of ElasticSearch $target was successful."
  else
    echo "Unable to find an active ElasticSearch $target instance, something may have gone wrong, exiting..."
    echo "status: $STATUS, version: $version"
    exit 4
  fi
}
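
# Implementation note: the container is re-created through `echo docker run ... | bash` so that
# $mounts and $ports, which each hold several space-separated options, are word-split back into
# individual arguments instead of being passed as single quoted strings.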

unsupported_message()
{
  local version=$1
  echo "Automated upgrade of your ElasticSearch installation is not supported on your system."
  echo "Please refer to your vendor's instructions to install ElasticSearch $version"
  echo "For more information: https://www.elastic.co/guide/en/elasticsearch/reference/$version/setup-upgrade.html"
  exit 5
}

upgrade_classic()
{
  local target=$1
  local system=$(uname -s)
  case "$system" in
    Linux*)
      if [ -f /etc/os-release ]
      then
        . /etc/os-release
        if [ "$ID" = 'debian' ] || [[ "$ID_LIKE" = *'debian'* ]]
        then
          # Debian compatible
          echo -e "\nUpdating ElasticSearch to $target"
          prepare_upgrade
          # stop elasticsearch
          if command -v systemctl
          then
            sudo systemctl stop elasticsearch.service
          else
            sudo /etc/init.d/elasticsearch stop
          fi
          # proceed with the package upgrade
          wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
          case "$target" in
            "2.4")
              echo "deb http://packages.elastic.co/elasticsearch/2.x/debian stable main" | sudo tee /etc/apt/sources.list.d/elasticsearch-2.x.list
              ;;
            "5.6")
              echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-5.x.list
              ;;
            "6.2")
              echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-6.x.list
              ;;
          esac
          sudo apt-get update && sudo apt-get install --only-upgrade elasticsearch
          # restart the elasticsearch service
          if command -v systemctl
          then
            sudo systemctl start elasticsearch.service
          else
            sudo /etc/init.d/elasticsearch start
          fi
        else
          unsupported_message "$target"
        fi
      fi
      ;;
    Darwin*)
      # macOS
      echo -e "\nUpdating ElasticSearch to $target"
      prepare_upgrade
      brew services stop elasticsearch
      brew update
      case "$target" in
        "2.4")
          brew install elasticsearch@2.4
          ;;
        "5.6")
          brew install elasticsearch@5.6
          ;;
        "6.2")
          brew install elasticsearch
          ;;
      esac
      brew services start elasticsearch
      ;;
    *)
      unsupported_message "$target"
      ;;
  esac

  wait_for_online
  wait_for_green_status

  # check the status and the version
  local version=$(test_version)
  if [ "$STATUS" = "ONLINE" ] && [ "$version" = "$target" ]; then
    echo "Installation of ElasticSearch $target was successful."
  else
    echo "Unable to find an active ElasticSearch $target instance, something may have gone wrong, exiting..."
    echo "status: $STATUS, version: $version"
    exit 4
  fi
}
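
# Informational: the Debian branch above mirrors Elastic's APT-based instructions (import the signing
# key, add the version-specific repository, then `apt-get install --only-upgrade elasticsearch`),
# while the macOS branch relies on Homebrew's versioned elasticsearch formulas.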

reindex_indices()
{
  # get the number of documents (before elastic 5.x, docs.count is at column 6)
  local docs=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{s+=$6} END {printf "%.0f", s}')
  # get all indices
  local indices=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $3}')

  local migration_indices=""
  for index in $indices # do not surround $indices with quotes
  do
    # get the mapping of the existing index
    local mapping=$(curl "http://$ES_IP:9200/$index/_mapping" 2>/dev/null | jq -c -M -r ".$index")
    local definition=$(echo "$mapping" '{
      "settings": {
        "index": {
          "number_of_shards": 1,
          "number_of_replicas": 0,
          "refresh_interval": -1
        }
      }
    }' | jq -s add)
    local migration_index="$index""_$1"
    migration_indices+="$migration_index,"
    # create the temporary migration index with the previous mapping
    curl -XPUT "http://$ES_IP:9200/$migration_index/" 2>/dev/null -H 'Content-Type: application/json' -d "$definition"
    # reindex the data content to the new migration index
    curl -XPOST "$ES_IP:9200/_reindex?pretty" 2>/dev/null -H 'Content-Type: application/json' -d '{
      "source": {
        "index": "'"$index"'"
      },
      "dest": {
        "index": "'"$migration_index"'"
      },
      "script": {
        "inline": "ctx._source.remove('"'"'_id'"'"')"
      }
    }'
  done

  echo "Indices are reindexing. This may take a while, waiting for it to finish..."
  # first, wait for all indices to reach the green status
  wait_for_green_status
  # then, wait for all documents to be reindexed
  local new_docs=$(curl "$ES_IP:9200/_cat/indices?index=$migration_indices" 2>/dev/null | awk '{s+=$6} END {printf "%.0f", s}')
  while [ "$new_docs" != "$docs" ]
  do
    echo -ne "\rdocs: $docs, reindexed: $new_docs"
    sleep 1
    new_docs=$(curl "$ES_IP:9200/_cat/indices?index=$migration_indices" 2>/dev/null | awk '{s+=$6} END {printf "%.0f", s}')
  done
  echo -e "\nReindex completed, deleting the previous indices..."
  for index in $indices # do not surround $indices with quotes
  do
    curl -XDELETE "$ES_IP:9200/$index?pretty" 2>/dev/null
  done
  reenable_allocation
}
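
# Naming note (illustrative): with reindex_indices '24', an index named e.g. "fablab" (hypothetical
# name) is copied into "fablab_24"; reindex_final_indices '24' later copies it back to "fablab" and
# deletes the suffixed copy, so the original index names survive the chain of upgrades.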

reindex_final_indices()
{
  local previous=$1
  # get the number of documents (from elastic 5.x, docs.count is at column 7)
  local docs=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{s+=$7} END {printf "%.0f", s}')
  # get all indices
  local indices=$(curl "$ES_IP:9200/_cat/indices" 2>/dev/null | awk '{print $3}')

  local final_indices=""
  for index in $indices # do not surround $indices with quotes
  do
    # get the mapping of the existing index
    local mapping=$(curl "http://$ES_IP:9200/$index/_mapping" 2>/dev/null | jq -c -M -r ".$index")
    local definition=$(echo "$mapping" '{
      "settings": {
        "index": {
          "number_of_shards": 1,
          "number_of_replicas": 0,
          "refresh_interval": -1
        }
      }
    }' | jq -s add)
    local final_index=$(echo "$index" | sed "s/\(.*\)_$previous$/\1/")
    final_indices+="$final_index,"
    # create the final index with the previous mapping
    curl -XPUT "http://$ES_IP:9200/$final_index" 2>/dev/null -H 'Content-Type: application/json' -d "$definition"
    # reindex the data content to the final index
    curl -XPOST "$ES_IP:9200/_reindex?pretty" 2>/dev/null -H 'Content-Type: application/json' -d '{
      "source": {
        "index": "'"$index"'"
      },
      "dest": {
        "index": "'"$final_index"'"
      }
    }'
  done

  echo "Indices are reindexing. This may take a while, waiting for it to finish..."
  # first, wait for all indices to reach the green status
  wait_for_green_status
  # then, wait for all documents to be reindexed
  local new_docs=$(curl "$ES_IP:9200/_cat/indices?index=$final_indices" 2>/dev/null | awk '{s+=$7} END {printf "%.0f", s}')
  while [ "$new_docs" != "$docs" ]
  do
    echo -ne "\rdocs: $docs, reindexed: $new_docs"
    sleep 1
    new_docs=$(curl "$ES_IP:9200/_cat/indices?index=$final_indices" 2>/dev/null | awk '{s+=$7} END {printf "%.0f", s}')
  done
  echo -e "\nReindex completed, deleting the previous indices..."
  for index in $indices # do not surround $indices with quotes
  do
    curl -XDELETE "$ES_IP:9200/$index?pretty" 2>/dev/null
  done
  reenable_allocation
}

start_upgrade()
{
  # $1: current version
  # $2: target version
  case "$TYPE" in
    "DOCKER-COMPOSE")
      upgrade_compose "$1" "$2"
      ;;
    "DOCKER")
      upgrade_docker "$1" "$2"
      ;;
    "CLASSIC")
      upgrade_classic "$2"
      ;;
    *)
      echo "Unexpected ElasticSearch installation $TYPE"
      exit 3
  esac
}

function trap_ctrlc()
{
  echo "Ctrl^C, exiting..."
  exit 2
}

upgrade_elastic()
{
  config
  detect_installation
  read -rp "Continue with the upgrade? (y/n) " confirm </dev/tty
  if [[ "$confirm" = "y" ]]; then
    trap "trap_ctrlc" 2 # SIGINT
    ensure_initial_status_green
    start_upgrade '1.7' '2.4'
    reindex_indices '24'
    start_upgrade '2.4' '5.6'
    reindex_final_indices '24'
  fi
}

upgrade_elastic "$@"