diff --git a/devops/windows/base-docker-linux/z-stack-2-reboot-hub-images-db-init.sh b/devops/windows/base-docker-linux/z-stack-2-reboot-hub-images-db-init.sh new file mode 100644 index 000000000..b8fca10e1 --- /dev/null +++ b/devops/windows/base-docker-linux/z-stack-2-reboot-hub-images-db-init.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -e +set -u + +echo +echo "Down stack" +source ./helper/scripts/d-stack-08-rm.sh +source ./helper/scripts/d-stack-09-wait-removed.sh + +echo +echo "Up app containers" +source ./helper/scripts/d-stack-01-create-dirs.sh +sleep 2 +source ./helper/scripts/d-stack-02-deploy-0.sh +sleep 2 +source ./helper/scripts/d-stack-03-up-hub-containers.sh + diff --git a/devops/windows/base-docker-wsl/conf/env/conf-env-high-1-pc.template b/devops/windows/base-docker-wsl/conf/env/conf-env-high-1-pc.template index 6a77450e3..4f7d7611a 100644 --- a/devops/windows/base-docker-wsl/conf/env/conf-env-high-1-pc.template +++ b/devops/windows/base-docker-wsl/conf/env/conf-env-high-1-pc.template @@ -30,6 +30,9 @@ export DATA_DGRAPH_ALPHA_01_DIR=${PROJECT_DATA_DIR}/dgraph/alpha-01 export DATA_DGRAPH_ALPHA_02_DIR=${PROJECT_DATA_DIR}/dgraph/alpha-02 export DATA_DGRAPH_ALPHA_03_DIR=${PROJECT_DATA_DIR}/dgraph/alpha-03 export DATA_POSTGRESQL_DIR=${PROJECT_DATA_DIR}/postgres +export DATA_POSTGRESQL_DB_DIR=${PROJECT_DATA_DIR}/postgres_db +export POSTGRES_BACKUP_DIRECTORY=${PROJECT_DATA_DIR}/backups/postgres +export DGRAPH_BACKUP_DIRECTORY=${PROJECT_DATA_DIR}/backups/dgraph export SCALE_KEYCLOAK_TEST_SERVER=${SCALE_KEYCLOAK_TEST_SERVER} export SCALE_NGINX=${SCALE_NGINX} diff --git a/devops/windows/base-docker-wsl/conf/env/conf-env-low-1-pc.template b/devops/windows/base-docker-wsl/conf/env/conf-env-low-1-pc.template index 4892331c5..3f2440c58 100644 --- a/devops/windows/base-docker-wsl/conf/env/conf-env-low-1-pc.template +++ b/devops/windows/base-docker-wsl/conf/env/conf-env-low-1-pc.template @@ -26,6 +26,9 @@ export DATA_KAFKA_01_DIR=${PROJECT_DATA_DIR}/kafka-01 export DATA_DGRAPH_ZERO_01_DIR=${PROJECT_DATA_DIR}/dgraph/zero-01 export DATA_DGRAPH_ALPHA_01_DIR=${PROJECT_DATA_DIR}/dgraph/alpha-01 export DATA_POSTGRESQL_DIR=${PROJECT_DATA_DIR}/postgres +export DATA_POSTGRESQL_DB_DIR=${PROJECT_DATA_DIR}/postgres_db +export POSTGRES_BACKUP_DIRECTORY=${PROJECT_DATA_DIR}/backups/postgres +export DGRAPH_BACKUP_DIRECTORY=${PROJECT_DATA_DIR}/backups/dgraph export SCALE_KEYCLOAK_TEST_SERVER=${SCALE_KEYCLOAK_TEST_SERVER} export SCALE_NGINX=${SCALE_NGINX} diff --git a/devops/windows/base-docker-wsl/helper/bootstrapper/bootstrapper-docker.sh b/devops/windows/base-docker-wsl/helper/bootstrapper/bootstrapper-docker.sh new file mode 100644 index 000000000..5f54a3c99 --- /dev/null +++ b/devops/windows/base-docker-wsl/helper/bootstrapper/bootstrapper-docker.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -e +set -u + +pushd . + SCRIPT_DIR=$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P) + cd ${SCRIPT_DIR}/../.. 
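  # From the base-docker-wsl directory the script sources the generated conf.env and
  # forwards every CLI argument it received to /bootstrapper.sh inside the running
  # bootstrapper container, so database initialisation can be driven from the host.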
+ + java_args="${@:1}" + source ./conf.env + docker exec $(docker ps -q -f name=bootstrapper) /bootstrapper.sh $java_args + +popd \ No newline at end of file diff --git a/devops/windows/base-docker-wsl/helper/bootstrapper/bootstrapper.sh b/devops/windows/base-docker-wsl/helper/bootstrapper/bootstrapper.sh new file mode 100644 index 000000000..ffc9d2622 --- /dev/null +++ b/devops/windows/base-docker-wsl/helper/bootstrapper/bootstrapper.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +pushd "$(dirname "$0")" + # Creating conf.env file + pushd ./docker/conf/env || exit + source ./create-env-linux-high-1.sh + popd || exit + + source ./source/conf.env + + java_args="${@:1}" + + pushd ../../JeMPI_Apps/JeMPI_Bootstrapper + mvn compile exec:java -Dexec.mainClass="org.jembi.jempi.bootstrapper.BootstrapperCLI" -Dexec.args="$java_args" + popd +popd \ No newline at end of file diff --git a/devops/windows/base-docker-wsl/helper/scripts/d-stack-01-create-dirs.sh b/devops/windows/base-docker-wsl/helper/scripts/d-stack-01-create-dirs.sh index 05cfe7c6c..682be5abd 100644 --- a/devops/windows/base-docker-wsl/helper/scripts/d-stack-01-create-dirs.sh +++ b/devops/windows/base-docker-wsl/helper/scripts/d-stack-01-create-dirs.sh @@ -27,6 +27,11 @@ pushd . mkdir -p ${DATA_POSTGRESQL_DIR} cp conf/postgres/*.* ${DATA_POSTGRESQL_DIR}/. + mkdir -p ${DATA_POSTGRESQL_DB_DIR} + sudo chown -R 1001:1001 ${DATA_POSTGRESQL_DB_DIR} + sudo chmod -R 770 ${DATA_POSTGRESQL_DB_DIR} + + echo popd diff --git a/devops/windows/base-docker-wsl/helper/scripts/d-stack-03-up-hub-containers.sh b/devops/windows/base-docker-wsl/helper/scripts/d-stack-03-up-hub-containers.sh index b82b72c0c..55c56ebf5 100644 --- a/devops/windows/base-docker-wsl/helper/scripts/d-stack-03-up-hub-containers.sh +++ b/devops/windows/base-docker-wsl/helper/scripts/d-stack-03-up-hub-containers.sh @@ -23,11 +23,4 @@ pushd . 
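The two bootstrapper wrappers above expose the same CLI: `bootstrapper-docker.sh` runs it inside the deployed container, while `bootstrapper.sh` compiles and runs `BootstrapperCLI` from source via Maven. A hedged usage sketch, using the `data resetAll` subcommand that bootstrapper.ps1 later in this diff passes to the same CLI (other subcommands are not shown here):

```bash
# From devops/windows/base-docker-wsl/helper/bootstrapper/
# Re-initialise the backing databases through the containerised bootstrapper:
./bootstrapper-docker.sh data resetAll

# Or run the CLI straight from the source tree (compiles JeMPI_Bootstrapper first):
./bootstrapper.sh data resetAll
```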
docker service scale ${STACK_NAME}_ratel=${SCALE_RATEL} - pushd helper/topics - source ./topics-create.sh - source ./topics-list.sh - popd - pushd helper/postgres - source ./create-schema.sh - popd popd diff --git a/devops/windows/base-docker-wsl/z-stack-2-reboot-hub-images-db-init.sh b/devops/windows/base-docker-wsl/z-stack-2-reboot-hub-images-db-init.sh new file mode 100644 index 000000000..b8fca10e1 --- /dev/null +++ b/devops/windows/base-docker-wsl/z-stack-2-reboot-hub-images-db-init.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -e +set -u + +echo +echo "Down stack" +source ./helper/scripts/d-stack-08-rm.sh +source ./helper/scripts/d-stack-09-wait-removed.sh + +echo +echo "Up app containers" +source ./helper/scripts/d-stack-01-create-dirs.sh +sleep 2 +source ./helper/scripts/d-stack-02-deploy-0.sh +sleep 2 +source ./helper/scripts/d-stack-03-up-hub-containers.sh + diff --git a/devops/windows/deployment/backup_restore/dgraph-backup.sh b/devops/windows/deployment/backup_restore/dgraph-backup.sh new file mode 100644 index 000000000..43510e471 --- /dev/null +++ b/devops/windows/deployment/backup_restore/dgraph-backup.sh @@ -0,0 +1,80 @@ +#!/bin/bash +echo "$PWD" +source ../../base-docker-wsl/conf.env +# Load Environment Variables for DGraph Alpha and Zero Nodes +DGRAPH_ALPHA_HOSTS="${DGRAPH_HOSTS:-localhost}" +DGRAPH_ALPHA_PORTS="${DGRAPH_PORTS:-8080}" +DGRAPH_ZERO_HOSTS="${DGRAPH_ZERO_HOSTS:-localhost}" +DGRAPH_ZERO_PORTS="${DGRAPH_ZERO_PORTS:-5080}" + +# Load Environment Variables for Data Directories +DATA_DGRAPH_ZERO_01_DIR="${DATA_DGRAPH_ZERO_01_DIR}" +DATA_DGRAPH_ALPHA_01_DIR="${DATA_DGRAPH_ALPHA_01_DIR}" +DATA_DGRAPH_ALPHA_02_DIR="${DATA_DGRAPH_ALPHA_02_DIR:-}" # Optional +DATA_DGRAPH_ALPHA_03_DIR="${DATA_DGRAPH_ALPHA_03_DIR:-}" # Optional +BACKUP_PATH="${DGRAPH_BACKUP_DIRECTORY}/$(date +%Y%m%d_%H%M%S)" +REMOTE_SERVER="${DGRAPH_BACKUP_REMOTE_SERVER}" +REMOTE_PATH="${DGRAPH_BACKUP_REMOTE_PATH}" + +# Create Backup Directory if it doesn't exist +[ ! -d "$BACKUP_PATH" ] && mkdir -p "$BACKUP_PATH" + +LOG_FILE="${BACKUP_PATH}/dgraph_backup_$(date +%Y%m%d_%H%M%S).log" + +# Function to Backup DGraph Alpha and Zero Nodes +backup_dgraph_node() { + local host=$1 + local port=$2 + + echo "$(date) - Starting backup for DGraph node at ${host}:${port}" >> "${LOG_FILE}" + # Replace with actual backup command for the node. Example: + # curl "http://${host}:${port}/admin/backup" + echo "$(date) - Backup completed for DGraph node at ${host}:${port}" >> "${LOG_FILE}" +} + +# Function to Backup DGraph Directory +backup_dgraph_dir() { + local dir=$1 + local dir_name=$(basename "$dir") + echo "Starting Dgraph database Backup..." + echo "$(date) - Starting backup for DGraph directory at $dir" >> "${LOG_FILE}" + tar -czvf "${BACKUP_PATH}/${dir_name}_$(date +%Y%m%d_%H%M%S).tar.gz" -C "$dir" . + echo "$(date) - Backup completed for DGraph directory at $dir" >> "${LOG_FILE}" + echo "Database Dgraph Backup completed." 
+} + + +# Backup DGraph Nodes (Alphas and Zeros) +IFS=',' read -r -a alpha_hosts <<< "$DGRAPH_ALPHA_HOSTS" +IFS=',' read -r -a alpha_ports <<< "$DGRAPH_ALPHA_PORTS" +IFS=',' read -r -a zero_hosts <<< "$DGRAPH_ZERO_HOSTS" +IFS=',' read -r -a zero_ports <<< "$DGRAPH_ZERO_PORTS" + +for i in "${!alpha_hosts[@]}"; do + backup_dgraph_node "${alpha_hosts[i]}" "${alpha_ports[i]}" +done + +for i in "${!zero_hosts[@]}"; do + backup_dgraph_node "${zero_hosts[i]}" "${zero_ports[i]}" +done + +# Backup DGraph Directories +[ -d "$DATA_DGRAPH_ZERO_01_DIR" ] && backup_dgraph_dir "$DATA_DGRAPH_ZERO_01_DIR" +[ -d "$DATA_DGRAPH_ALPHA_01_DIR" ] && backup_dgraph_dir "$DATA_DGRAPH_ALPHA_01_DIR" +[ -d "$DATA_DGRAPH_ALPHA_02_DIR" ] && backup_dgraph_dir "$DATA_DGRAPH_ALPHA_02_DIR" +[ -d "$DATA_DGRAPH_ALPHA_03_DIR" ] && backup_dgraph_dir "$DATA_DGRAPH_ALPHA_03_DIR" + +# Function to Copy Backup to Remote Server +copy_to_remote() { + if [ -n "${REMOTE_SERVER}" ] && [ -n "${REMOTE_PATH}" ]; then + echo "$(date) - Starting remote transfer" >> "${LOG_FILE}" + scp "${BACKUP_PATH}/*_$(date +%Y%m%d).tar.gz" ${REMOTE_SERVER}:${REMOTE_PATH} + echo "$(date) - Remote transfer completed" >> "${LOG_FILE}" + else + echo "$(date) - Remote server details not set. Skipping remote transfer." >> "${LOG_FILE}" + fi +} + +# Main Execution +copy_to_remote +chmod -R 777 "$BACKUP_PATH" diff --git a/devops/windows/deployment/backup_restore/dgraph-restore.sh b/devops/windows/deployment/backup_restore/dgraph-restore.sh new file mode 100644 index 000000000..8768ece94 --- /dev/null +++ b/devops/windows/deployment/backup_restore/dgraph-restore.sh @@ -0,0 +1,87 @@ +#!/bin/bash +source ../conf.env +#Backup Folder Name +while true; do + # Ask the user to enter a folder name + echo "Backup folder Path:- ${DGRAPH_BACKUP_DIRECTORY}" + pushd ${DGRAPH_BACKUP_DIRECTORY} + echo + echo "Recent 5 Backups list" + ls -lt --time=creation --sort=time | grep '^d' | tail -n 5 + echo + popd + read -p "Please enter your Dgraph Backup Folder Name: " BACKUP_FOLDER_NAME + + # Check if the folder exists + if [ -d "${DGRAPH_BACKUP_DIRECTORY}/$BACKUP_FOLDER_NAME" ]; then + echo "Folder '$BACKUP_FOLDER_NAME' exists!" + break # Exit the loop if the folder exists + else + echo "Folder '$BACKUP_FOLDER_NAME' does not exist, at ${DGRAPH_BACKUP_DIRECTORY}. " + echo "Please try again" + fi +done + +pushd ../deployment/reboot + source d-stack-3-reboot.sh +popd +echo "Please wait for sometime..." +sleep 60 +# BACKUP_FOLDER_NAME="20240202_123952" +# Load Environment Variables for DGraph Alpha and Zero Nodes +DGRAPH_ALPHA_HOSTS="${DGRAPH_HOSTS:-localhost}" +DGRAPH_ALPHA_PORTS="${DGRAPH_PORTS:-8080}" +DGRAPH_ZERO_HOSTS="${DGRAPH_ZERO_HOSTS:-localhost}" +DGRAPH_ZERO_PORTS="${DGRAPH_ZERO_PORTS:-5080}" + +# Load Environment Variables for Data Directories +DATA_DGRAPH_ZERO_01_DIR="${DATA_DGRAPH_ZERO_01_DIR}" +DATA_DGRAPH_ALPHA_01_DIR="${DATA_DGRAPH_ALPHA_01_DIR}" +DATA_DGRAPH_ALPHA_02_DIR="${DATA_DGRAPH_ALPHA_02_DIR:-}" # Optional +DATA_DGRAPH_ALPHA_03_DIR="${DATA_DGRAPH_ALPHA_03_DIR:-}" # Optional +BACKUP_PATH="${DGRAPH_BACKUP_DIRECTORY}/$BACKUP_FOLDER_NAME" + +REMOTE_SERVER="${DGRAPH_BACKUP_REMOTE_SERVER:-""}" +REMOTE_PATH="${DGRAPH_BACKUP_REMOTE_PATH:-""}" +# Function to Restore DGraph Directory +restore_dgraph_dir() { + local dir=$1 + local backup_file=$2 + echo $dir + if [ "$(ls -A $dir)" ]; then + echo "Directory $dir is not empty. Clearing existing files." 
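    # The stale contents are removed below, then the chosen archive is unpacked into
    # $dir; --strip-components=1 drops the archive's leading path component so the
    # backed-up files land directly in the data directory.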
+ sudo rm -rf $dir/* + fi + + echo “$(date) - Starting restore for DGraph directory at $dir from $backup_file” + sudo tar -xzvf ${backup_file} -C $dir --strip-components=1 + echo “$(date) - Restore completed for DGraph directory at $dir” +} +# Restore Zero Nodes +for backup_file in ${BACKUP_PATH}/zero*.tar.gz; do + echo $backup_file + # Assuming the first directory is for zero nodes + restore_dgraph_dir $DATA_DGRAPH_ZERO_01_DIR $backup_file +done +# Restore Alpha Nodes +for backup_file in ${BACKUP_PATH}/alpha*.tar.gz; do + # Assuming the first directory is for alpha nodes + restore_dgraph_dir $DATA_DGRAPH_ALPHA_01_DIR $backup_file +done + +copy_from_remote() { + if [ -n "${REMOTE_SERVER}" ] && [ -n "${REMOTE_PATH}" ]; then + echo "$(date) - Starting remote copy" + scp ${REMOTE_SERVER}:${REMOTE_PATH}/*_$(date +%Y%m%d).tar.gz "${BACKUP_PATH}/" + echo "$(date) - Remote copy completed" + else + echo "$(date) - Remote server details not set. Skipping remote copy." + fi +} +# Main Execution +copy_from_remote +echo "Rebooting JeMPI" +pushd ../deployment/reboot + echo $PWD + source d-stack-3-reboot.sh +popd \ No newline at end of file diff --git a/devops/windows/deployment/backup_restore/generate_logrotate_conf.sh b/devops/windows/deployment/backup_restore/generate_logrotate_conf.sh new file mode 100644 index 000000000..60ce21fbc --- /dev/null +++ b/devops/windows/deployment/backup_restore/generate_logrotate_conf.sh @@ -0,0 +1,44 @@ +#!/bin/bash +source ../conf.env +# Load Backup Directory from Environment Variable +POSTGRES_BACKUP_PATH="${POSTGRES_BACKUP_DIRECTORY}" +OLD_LOGS_DIR="${BACKUP_PATH}/old_logs" + +# Create Logrotate Configuration File +cat << EOF > /etc/logrotate.d/postgres-backup +$POSTGRES_BACKUP_PATH/backup_*.log { + rotate 30 + daily + missingok + notifempty + compress + delaycompress + create 640 root adm + dateext + dateformat -%Y%m%d + olddir $OLD_LOGS_DIR +} +EOF + +echo "Logrotate configuration for PostgreSQL backup updated." + +DGRAPH_BACKUP_PATH="${DGRAPH_BACKUP_DIRECTORY}" +OLD_LOGS_DIR="${BACKUP_PATH}/old_logs" + +# Create Logrotate Configuration File +cat << EOF > /etc/logrotate.d/dgraph-backup +$DGRAPH_BACKUP_PATH/backup_*.log { + rotate 30 + daily + missingok + notifempty + compress + delaycompress + create 640 root adm + dateext + dateformat -%Y%m%d + olddir $OLD_LOGS_DIR +} +EOF + +echo "Logrotate configuration for DGraph backup updated." diff --git a/devops/windows/deployment/backup_restore/postgres-backup.sh b/devops/windows/deployment/backup_restore/postgres-backup.sh new file mode 100644 index 000000000..3e39bec5c --- /dev/null +++ b/devops/windows/deployment/backup_restore/postgres-backup.sh @@ -0,0 +1,55 @@ +#!/bin/bash +source ../../base-docker-wsl/conf.env +# Load Database Credentials from Environment Variables + +DB_NAME="${POSTGRESQL_DATABASE}" +DB_USER="${POSTGRESQL_USERNAME}" +DB_PASSWORD="${POSTGRESQL_PASSWORD}" +DB_HOST="${POSTGRES_HOST:-localhost}" +BACKUP_PATH="${POSTGRES_BACKUP_DIRECTORY}/$(date +%Y%m%d_%H%M%S)" +OLD_LOGS_DIR="${BACKUP_PATH}/old_logs" # Directory to store old logs + +databases=("$POSTGRESQL_DATABASE" "$POSTGRESQL_USERS_DB" "$POSTGRESQL_NOTIFICATIONS_DB" "$POSTGRESQL_AUDIT_DB" "$POSTGRESQL_KC_TEST_DB") + +# Check and Create Backup Directory and Old Logs Directory +[ ! -d "$BACKUP_PATH" ] && mkdir -p "$BACKUP_PATH" +[ ! 
-d "$OLD_LOGS_DIR" ] && mkdir -p "$OLD_LOGS_DIR" + +LOG_FILE="${BACKUP_PATH}/$(date +%Y%m%d_%H%M%S).log" + +# Check for Remote Server Details +REMOTE_SERVER="${POSTGRES_BACKUP_REMOTE_SERVER}" +REMOTE_PATH="${POSTGRES_BACKUP_REMOTE_PATH}" +PGPORT="${PGPORT:-5432}" + +# Function to Perform Backup +backup_database() { + echo "Starting Postgres database Backup..." + # Loop through each database and dump it + for db in "${databases[@]}"; do + echo "db.. $db " + backup_file="${BACKUP_PATH}/${db}--$(date +%Y%m%d_%H%M%S).sql" + + echo "$(date) - Starting backup for database: ${db}" >> "${LOG_FILE}" + PGPASSWORD=$DB_PASSWORD pg_dump -h $DB_HOST -U $DB_USER -d $db -F c -f "${BACKUP_PATH}/${db}--$(date +%Y%m%d_%H%M%S).sql" + echo "$(date) - Backup completed for database: ${db}" >> "${LOG_FILE}" + done + echo "Database Postgres Backup completed." +} + +echo Function to Copy Backup to Remote Server +copy_to_remote() { + if [ -n "${REMOTE_SERVER}" ] && [ -n "${REMOTE_PATH}" ]; then + for db in "${databases[@]}"; do + echo "$(date) - Starting remote transfer" >> "${LOG_FILE}" + scp "${BACKUP_PATH}/${db}_$(date +%Y%m%d_%H%M%S).sql" ${REMOTE_SERVER}:${REMOTE_PATH} + echo "$(date) - Remote transfer completed" >> "${LOG_FILE}" + done + else + echo "$(date) - Remote server details not set. Skipping remote transfer." >> "${LOG_FILE}" + fi +} + +# # Main Execution +backup_database +copy_to_remote diff --git a/devops/windows/deployment/backup_restore/postgres-restore.sh b/devops/windows/deployment/backup_restore/postgres-restore.sh new file mode 100644 index 000000000..082a9b68e --- /dev/null +++ b/devops/windows/deployment/backup_restore/postgres-restore.sh @@ -0,0 +1,68 @@ +#!/bin/bash +source ../conf.env +#Backup Folder Name +while true; do + # Ask the user to enter a folder name + echo "Backup folder Path:- ${POSTGRES_BACKUP_DIRECTORY}" + pushd ${POSTGRES_BACKUP_DIRECTORY} + echo + echo "Recent 5 Backups list" + ls -lt --time=creation --sort=time | grep '^d' | tail -n 5 + echo + popd + read -p "Please enter your Postgres Backup Folder Name: " BACKUP_FOLDER_NAME + + # Check if the folder exists + if [ -d "${POSTGRES_BACKUP_DIRECTORY}/$BACKUP_FOLDER_NAME" ]; then + echo "Folder '$BACKUP_FOLDER_NAME' exists!" + break # Exit the loop if the folder exists + else + echo "Folder '$BACKUP_FOLDER_NAME' does not exist, at ${DGRAPH_BACKUP_DIRECTORY}. " + echo "Please try again" + fi +done +# PostgreSQL settings from environment variables +DB_NAME="${POSTGRESQL_DATABASE}" +PGHOST="${POSTGRES_HOST:-localhost}" +PGPORT="${PGPORT:-5432}" +PGUSER="${POSTGRESQL_USERNAME}" +PGPASSWORD="${POSTGRESQL_PASSWORD}" +PGDATABASE="${POSTGRESQL_DATABASE}" + +BACKUP_DIR="${POSTGRES_BACKUP_DIRECTORY}/$BACKUP_FOLDER_NAME" + +databases=("$POSTGRESQL_DATABASE" "$POSTGRESQL_USERS_DB" "$POSTGRESQL_NOTIFICATIONS_DB" "$POSTGRESQL_AUDIT_DB" "$POSTGRESQL_KC_TEST_DB") + + +for db in "${databases[@]}"; do + # Check if the database exists + PGPASSWORD="$PGPASSWORD" psql -U "$PGUSER" -h "$PGHOST" -p $PGPORT -lqt | cut -d \| -f 1 | grep -qw "$db" + + if [ $? -ne 0 ]; then + # Create the database if it doesn't exist + PGPASSWORD="$PGPASSWORD" psql -U "$PGUSER" -h "$PGHOST" -p $PGPORT -c "CREATE DATABASE $db;" postgres + echo "Database $db created." + else + echo "Database $db already exists." 
+ fi + echo +done + +echo "Restoring databases for "${databases[@]}"" +echo +for backup_file in "$BACKUP_DIR"/*.sql; do + + if [ -f "$backup_file" ]; then + DB_NAME=$(basename "$backup_file" | cut -d'-' -f1) + echo "Restoring $DB_NAME with sql :- $backup_file" + + PGPASSWORD="$PGPASSWORD" pg_restore -U "$PGUSER" -h "$PGHOST" -d "$DB_NAME" -F c --clean --if-exists "$backup_file" + + echo "Restore completed for $DB_NAME" + else + echo "Backup file $backup_file not found." + fi + echo +done + +echo "All backups restored. postgres" diff --git a/devops/windows/deployment/common/allow_port.ps1 b/devops/windows/deployment/common/allow_port.ps1 new file mode 100644 index 000000000..9be63a5ff --- /dev/null +++ b/devops/windows/deployment/common/allow_port.ps1 @@ -0,0 +1,13 @@ +# Define the port number +$portNumber = "5432" + +# Define the rule name +$ruleName = "Allow_Port_$portNumber" + +# Create an inbound rule to allow traffic on the specified port +New-NetFirewallRule -DisplayName $ruleName -Direction Inbound -Protocol TCP -LocalPort $portNumber -Action Allow + +# Create an outbound rule to allow traffic on the specified port +New-NetFirewallRule -DisplayName $ruleName -Direction Outbound -Protocol TCP -LocalPort $portNumber -Action Allow + +Write-Host "Firewall rule to allow traffic on port $portNumber has been created." diff --git a/devops/windows/deployment/common/swarm-leave.sh b/devops/windows/deployment/common/swarm-leave.sh new file mode 100644 index 000000000..373df87cb --- /dev/null +++ b/devops/windows/deployment/common/swarm-leave.sh @@ -0,0 +1,7 @@ +#!/bin/bash +# Set JEMPI_HOME environment variable +export JEMPI_HOME=$(pwd) +echo "Setting JEMPI_HOME to: $JEMPI_HOME"​ +cd $JEMPI_HOME/devops/windows/base-docker-wsl +echo "Leaving Swarm on node1" +source $JEMPI_HOME/devops/windows/base-docker-wsl/b-swarm-3-leave.sh \ No newline at end of file diff --git a/devops/windows/deployment/deploy-local-windows.ps1 b/devops/windows/deployment/deploy-local-windows.ps1 new file mode 100644 index 000000000..8357f1d9b --- /dev/null +++ b/devops/windows/deployment/deploy-local-windows.ps1 @@ -0,0 +1,168 @@ + +cd ../../../ +$currentPath = $PWD.Path +Write-Host "Current directory: $currentPath" +# Define the URL of the MSI file +$nodeUrl = "https://nodejs.org/dist/v20.10.0/node-v20.10.0-x64.msi" +$nodeAppName="node.exe" + +$sbtUrl = "https://github.com/sbt/sbt/releases/download/v1.9.7/sbt-1.9.7.msi" +$sbtAppName="sbt.exe" + +# $javaUrl = "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.8.1+1/OpenJDK17U-jdk_x64_windows_hotspot_17.0.8.1_1.msi" +$javaUrl = "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.1+12/OpenJDK21U-jdk_x64_windows_hotspot_21.0.1_12.msi" +$javaAppName="java.exe" + + +# Define the local path where you want to save the MSI file + +function installApp() { + param ( + [string]$url, + [string]$appName + ) + + $localPath = "$env:USERPROFILE\Downloads\$appName" + Write-Host "$appName is Downloaded at:- $localPath" + + # Download the MSI file + Invoke-WebRequest -Uri $url -OutFile $localPath + + # Install the MSI file + # Start-Process -FilePath $appName -ArgumentList "/i", "$localPath", "/quiet" -Wait + + Remove-Item -Path $localPath +} + +# Start WSL in a new window +Start-Process wsl.exe -WindowStyle Normal +# Wait for WSL to start + +Push-Location $currentPath/devops/windows/deployment/common + Write-Host "Current directory: $PWD.path" + .\allow_port.ps1 -Wait +Pop-Location + + +# Display menu options +Write-Host "Select an option for 
local deployment:" +Write-Host "1. Deploy JeMPI (For Fresh Start)" +Write-Host "2. Build and Reboot." +Write-Host "3. Restart JeMPI." +Write-Host "4. Stop JeMPI." +Write-Host "5. Backup Postgres & Dgraph." +Write-Host "6. Restore Postgres & Dgraph." +Write-Host "7. Destroy JeMPI (This process will wipe all data)." +Write-Host "8. Install Prerequisites." + +# Get user input +$choice = Read-Host "Enter the number of your choice" +$wslPath = wsl.exe pwd +Write-Host "Path in WSL:- $wslPath" +# Process the user's choice +switch ($choice) { + '1' { + Write-Host $wslPath + Write-Host "Deploying JeMPI " + wsl -d Ubuntu $wslPath/devops/windows/deployment/deploy-local-wsl.sh -Wait + Start-Sleep -Seconds 30 + + Push-Location $currentPath/JeMPI_Apps/JeMPI_Configuration + Write-Host "Current directory: $PWD.path" + .\create.ps1 reference\config-reference.json + Pop-Location + + Push-Location $currentPath/devops/windows/run--base-docker-wsl + Write-Host "start-with-bootstraper.ps1" + # .\bootstrapper.ps1 -Wait + .\start-with-bootstraper.ps1 -Wait + + Write-Host "Script completed." + Write-Host "Running file: start-ui.ps1" + .\start-ui.ps1 -Wait + Pop-Location + Write-Host "Script completed." + } + '2' { + Write-Host "Build and Reboot" + wsl -d Ubuntu $wslPath/devops/windows/deployment/deploy-local-wsl.sh + Push-Location $currentPath/JeMPI_Apps/JeMPI_Configuration + .\create.ps1 reference\config-reference.json + Pop-Location + + Push-Location $currentPath/devops/windows/run--base-docker-wsl + Write-Host "start-with-bootstraper.ps1" + .\start.ps1 -Wait + + Write-Host "Script completed." + Write-Host "Running file: start-ui.ps1" + .\start-ui.ps1 -Wait + + Write-Host "Script completed." + Pop-Location + } + '3' { + Push-Location $currentPath/devops/windows/run--base-docker-wsl + Write-Host "Down the JeMPI." + Write-Host "Running file: stop.ps1" + .\stop.ps1 -Wait + Write-Host "Build and Reboot" + Write-Host "Running file: start.ps1" + .\start.ps1 -Wait + + Write-Host "Script completed." + Write-Host "Running file: start-ui.ps1" + .\start-ui.ps1 -Wait + + Write-Host "Script completed." + Pop-Location + } + '4' { + Push-Location $currentPath/devops/windows/run--base-docker-wsl + Write-Host "Down the JeMPI." + Write-Host "Running file: stop.ps1" + .\stop.ps1 -Wait + Pop-Location + } + '5' { + Write-Host "Database Backup." + Push-Location $currentPath/devops/windows/deployment/backup_restore/ + wsl -d Ubuntu $wslPath/devops/windows/deployment/backup_restore/dgraph-backup.sh + wsl -d Ubuntu $wslPath/devops/windows/deployment/backup_restore/postgres-backup.sh + Pop-Location + + } + '6' { + Write-Host "Restore Backup." + } + '7' { + $confirmation = Read-Host "Are you sure, Do you want to Destroy? (Ctrl+Y for Yes, any other key for No)" + if ($confirmation -eq [char]25) { + # Proceed with the script logic + Write-Host "Proceeding with the script..." + Write-Host "Down the JeMPI." + Write-Host "Running file: stop.ps1" + Push-Location $currentPath/devops/windows/run--base-docker-wsl + .\stop.ps1 -Wait + Pop-Location + wsl -d Ubuntu $wslPath/devops/windows/deployment/common/swarm-leave.sh + } else { + Write-Host "Exiting the script..." + exit + } + + } + '8' { + Push-Location $currentPath/devops/windows/run--base-docker-wsl + Write-Host "Installing required softwares" + installApp $nodeUrl $nodeAppName + installApp $sbtUrl $sbtAppName + installApp $javaUrl $javaAppName + Pop-Location + } + default { + Write-Host "Invalid choice. Please enter a valid option." 
+ } +} + + diff --git a/devops/windows/deployment/deploy-local-wsl.sh b/devops/windows/deployment/deploy-local-wsl.sh new file mode 100644 index 000000000..d5990f59e --- /dev/null +++ b/devops/windows/deployment/deploy-local-wsl.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# Set JEMPI_HOME environment variable +export JEMPI_HOME=$(pwd) +echo "Setting JEMPI_HOME to: $JEMPI_HOME"​ +# Check if Docker is already installed +if command -v docker &> /dev/null; then + echo "Docker is already installed." +else + # Install Docker + sudo apt-get update + sudo apt-get install -y docker.io + + # Add your user to the docker group to run Docker without sudo + sudo usermod -aG docker $USER + + echo "Docker has been installed." +fi + + + +# Navigate to environment configuration directory +echo "Navigate to environment configuration directory" +cd $JEMPI_HOME/devops/windows/base-docker-wsl/conf/env +dos2unix * +source $JEMPI_HOME/devops/windows/base-docker-wsl/conf/env/create-env-linux-low-1.sh + +# Running Docker helper scripts +echo "Running Docker helper scripts " +cd $JEMPI_HOME/devops/windows/base-docker-wsl/helper/scripts/ +dos2unix * +source $JEMPI_HOME/devops/windows/base-docker-wsl/helper/scripts/x-swarm-a-set-insecure-registries.sh + +cd $JEMPI_HOME + +# Navigate to Docker directory + +# Pull Docker images from hub +echo "Pull Docker images from hub" +cd $JEMPI_HOME/devops/windows/base-docker-wsl +source $JEMPI_HOME/devops/windows/base-docker-wsl/a-images-1-pull-from-hub.sh + +if docker info | grep -q "Swarm: active"; then + echo "Docker Swarm is running." +else + echo "Docker Swarm is not running." + echo "Initialize Swarm on node1" + source $JEMPI_HOME/devops/windows/base-docker-wsl/b-swarm-1-init-node1.sh + +fi + +# Create Docker registry +echo "Create Docker registry" +source $JEMPI_HOME/devops/windows/base-docker-wsl/c-registry-1-create.sh + +# Push Docker images to the registry +echo "Push Docker images to the registry" +source $JEMPI_HOME/devops/windows/base-docker-wsl/c-registry-2-push-hub-images.sh + +# Build and reboot the entire stack +echo "Build and reboot the entire stack" +# yes | source $JEMPI_HOME/devops/windows/base-docker-wsl/z-stack-2-reboot-hub-images.sh +yes | source $JEMPI_HOME/devops/windows/base-docker-wsl/z-stack-2-reboot-hub-images-db-init + diff --git a/devops/windows/run--base-docker-wsl/bootstrapper.ps1 b/devops/windows/run--base-docker-wsl/bootstrapper.ps1 new file mode 100644 index 000000000..072ae100d --- /dev/null +++ b/devops/windows/run--base-docker-wsl/bootstrapper.ps1 @@ -0,0 +1,132 @@ +$script_path = $MyInvocation.MyCommand.Path +$script_dir = Split-Path $script_path +Set-Location $script_dir + +$linux_server_ip = ((wsl hostname -I) -split " ")[0] + +Write-Host $linux_server_ip + + +$bootstrapper_folder = '.\app_data\bootstrapper' +$def_bootstrapper_kafka_client_id = "-DKAFKA_CLIENT_ID=client-id-bootstrapper" +$def_bootstrapper_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-bootstrapper" + + +$kafka1_ip = $linux_server_ip +$postgresql_ip = $linux_server_ip +$postgresql_port = 5432 +$dgraph_hosts = $linux_server_ip +$dgraph_ports = '9080' + +$api_ip = 'localhost' +$api_http_port = 50000 + +$etl_ip = 'localhost' +$etl_http_port = 50001 + +$controller_ip = 'localhost' +$controller_http_port = 50002 + +$linker_ip = 'localhost' +$linker_http_port = 50003 + +$jempi_apps_dir = "..\..\..\..\..\JeMPI_Apps" + +$async_receiver_folder = '.\app_data\async_receiver' +$etl_folder = '.\app_data\etl' +$controller_folder = '.\app_data\controller' +$linker_folder = 
'.\app_data\linker' +$api_folder = '.\app_data\api' + +$def_kafka_bootstrap_servers = "-DKAFKA_BOOTSTRAP_SERVERS=" + $kafka1_ip + ":9094" +$def_postgresql_ip = "-DPOSTGRESQL_IP=" + $postgresql_ip +$def_postgresql_port = "-DPOSTGRESQL_PORT=" + $postgresql_port +$def_postgresql_user = "-DPOSTGRESQL_USER=`"postgres`"" +$def_postgresql_password = "-DPOSTGRESQL_PASSWORD=`"postgres`"" +$def_dgraph_hosts = "-DDGRAPH_HOSTS=" + $dgraph_hosts +$def_dgraph_ports = "-DDGRAPH_PORTS=" + $dgraph_ports +$def_etl_ip = "-DETL_IP=" + $etl_ip +$def_etl_http_port = "-DETL_HTTP_PORT=" + $etl_http_port +$def_controller_ip = "-DCONTROLLER_IP=" + $controller_ip +$def_controller_http_port = "-DCONTROLLER_HTTP_PORT=" + $controller_http_port +$def_linker_ip = "-DLINKER_IP=" + $linker_ip +$def_linker_http_port = "-DLINKER_HTTP_PORT=" + $linker_http_port +$def_api_ip = "-DAPI_IP=" + $api_ip +$def_api_http_port = "-DAPI_HTTP_PORT=" + $api_http_port + +$async_receiver_jar = "-jar " + $jempi_apps_dir + "\JeMPI_AsyncReceiver\target\AsyncReceiver-1.0-SNAPSHOT-spring-boot.jar" +$def_async_reveiver_log4j_level = "-DLOG4J2_LEVEL=DEBUG" +$def_async_receiver_kafka_client_id = "-DKAFKA_CLIENT_ID=client-id-syncrx" + +$etl_jar = "-jar " + $jempi_apps_dir + "\JeMPI_ETL\target\ETL-1.0-SNAPSHOT-spring-boot.jar" +$def_etl_log4j_level = "-DLOG4J2_LEVEL=DEBUG" +$def_etl_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-etl" + +$controller_jar = "-jar " + $jempi_apps_dir + "\JeMPI_Controller\target\Controller-1.0-SNAPSHOT-spring-boot.jar" +$def_controller_log4j_level = "-DLOG4J2_LEVEL=DEBUG" +$def_controller_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-ctrl" +$def_controller_kafka_client_id = "-DKAFKA_CLIENT_ID=client-id-ctrl" + +$linker_jar = "-jar " + $jempi_apps_dir + "\JeMPI_Linker\target\Linker-1.0-SNAPSHOT-spring-boot.jar" +$def_linker_log4j_level = "-DLOG4J2_LEVEL=TRACE" +$def_linker_kafka_application_id_interactions = "-DKAFKA_APPLICATION_ID_INTERACTIONS=app-id-lnk1" +$def_linker_kafka_application_id_mu = "-DKAFKA_APPLICATION_ID_MU=app-id-lnk2" +$def_linker_kafka_client_id_notifications = "-DKAFKA_CLIENT_ID_NOTIFICATIONS=client-id-lnk3" +$def_linker_match_threshold = "-DLINKER_MATCH_THRESHOLD=0.65" +$def_linker_match_threshold_margin = "-DLINKER_MATCH_THRESHOLD_MARGIN=0.1" + +$api_jar = "-jar " + $jempi_apps_dir + "\JeMPI_API\target\API-1.0-SNAPSHOT-spring-boot.jar" +$def_api_log4j_level = "-DLOG4J2_LEVEL=TRACE" +$def_api_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-api" + +$bootstrapper_jar = "-jar " + $jempi_apps_dir + "\JeMPI_Bootstrapper\target\Bootstrapper-1.0-SNAPSHOT-spring-boot.jar" +$def_postgresql_postgres = "-DPOSTGRESQL_DATABASE=postgres" +$def_postgresql_user_db = "-DPOSTGRESQL_USERS_DB=users_db" +$def_postgresql_notifications_db = "-DPOSTGRESQL_NOTIFICATIONS_DB=notifications_db" +$def_postgresql_audit_db = "-DPOSTGRESQL_AUDIT_DB=audit_db" +$def_postgresql_kc_test_db = "-DPOSTGRESQL_KC_TEST_DB=kc_test_db" + + Write-Host "Starting BootStrapper App" + +$bootstrapper_handle = Start-Process -FilePath java ` + -ArgumentList $def_linker_log4j_level, ` + $def_postgresql_ip, ` + $def_postgresql_port, ` + $def_postgresql_user, ` + $def_postgresql_password, ` + $def_kafka_bootstrap_servers, ` + $def_linker_kafka_application_id_interactions, ` + $def_linker_kafka_application_id_mu, ` + $def_linker_kafka_client_id_notifications, ` + $def_dgraph_hosts, ` + $def_dgraph_ports, ` + $def_linker_http_port, ` + $def_linker_match_threshold, ` + $def_linker_match_threshold_margin, ` + $def_etl_ip, ` + 
$def_etl_http_port, ` + $def_controller_ip, ` + $def_controller_http_port, ` + $def_linker_ip, ` + $def_linker_http_port, ` + $def_api_ip, ` + $def_api_http_port, ` + $def_postgresql_user_db, ` + $def_postgresql_notifications_db, ` + $def_postgresql_audit_db, ` + $def_postgresql_kc_test_db, ` + $def_postgresql_postgres, ` + $def_bootstrapper_kafka_application_id, ` + '-server', ` + '--enable-preview', ` + $bootstrapper_jar , ` + 'data', ` + 'resetAll' ` + -WindowStyle Normal ` + -WorkingDirectory $bootstrapper_folder ` + -Debug ` + -Verbose ` + -PassThru ` + -RedirectStandardError 'bootstrapper_stderr.txt' ` + -RedirectStandardOutput 'bootstrapper_stdout.txt' +$bootstrapper_handle | Export-Clixml -Path (Join-Path './' 'bootstapper.xml') diff --git a/devops/windows/run--base-docker-wsl/start-with-bootstraper.ps1 b/devops/windows/run--base-docker-wsl/start-with-bootstraper.ps1 new file mode 100644 index 000000000..6464009f1 --- /dev/null +++ b/devops/windows/run--base-docker-wsl/start-with-bootstraper.ps1 @@ -0,0 +1,297 @@ +$script_path = $MyInvocation.MyCommand.Path +$script_dir = Split-Path $script_path +Set-Location $script_dir + +$linux_server_ip = ((wsl hostname -I) -split " ")[0] + +Copy-Item ..\..\..\JeMPI_Apps\JeMPI_Configuration\config-api.json ..\..\..\JeMPI_Apps\JeMPI_API\src\main\resources\config-api.json +Copy-Item ..\..\..\JeMPI_Apps\JeMPI_Configuration\config-api.json ..\..\..\JeMPI_Apps\JeMPI_API_KC\src\main\resources\config-api.json + +$kafka1_ip = $linux_server_ip +$postgresql_ip = $linux_server_ip +$postgresql_port = 5432 +$dgraph_hosts = $linux_server_ip +$dgraph_ports = '9080' + +$api_ip = 'localhost' +$api_http_port = 50000 + +$etl_ip = 'localhost' +$etl_http_port = 50001 + +$controller_ip = 'localhost' +$controller_http_port = 50002 + +$linker_ip = 'localhost' +$linker_http_port = 50003 + +$jempi_apps_dir = "..\..\..\..\..\JeMPI_Apps" + +$async_receiver_folder = '.\app_data\async_receiver' +$etl_folder = '.\app_data\etl' +$controller_folder = '.\app_data\controller' +$linker_folder = '.\app_data\linker' +$api_folder = '.\app_data\api' + +$def_kafka_bootstrap_servers = "-DKAFKA_BOOTSTRAP_SERVERS=" + $kafka1_ip + ":9094" +$def_postgresql_ip = "-DPOSTGRESQL_IP=" + $postgresql_ip +$def_postgresql_port = "-DPOSTGRESQL_PORT=" + $postgresql_port +$def_postgresql_user = "-DPOSTGRESQL_USER=`"postgres`"" +$def_postgresql_password = "-DPOSTGRESQL_PASSWORD=`"postgres`"" +$def_postgresql_notifications_db = "-DPOSTGRESQL_NOTIFICATIONS_DB=`"notifications_db`"" +$def_postgresql_audit_db = "-DPOSTGRESQL_AUDIT_DB=`"audit_db`"" + +$def_dgraph_hosts = "-DDGRAPH_HOSTS=" + $dgraph_hosts +$def_dgraph_ports = "-DDGRAPH_PORTS=" + $dgraph_ports +$def_etl_ip = "-DETL_IP=" + $etl_ip +$def_etl_http_port = "-DETL_HTTP_PORT=" + $etl_http_port +$def_controller_ip = "-DCONTROLLER_IP=" + $controller_ip +$def_controller_http_port = "-DCONTROLLER_HTTP_PORT=" + $controller_http_port +$def_linker_ip = "-DLINKER_IP=" + $linker_ip +$def_linker_http_port = "-DLINKER_HTTP_PORT=" + $linker_http_port +$def_api_ip = "-DAPI_IP=" + $api_ip +$def_api_http_port = "-DAPI_HTTP_PORT=" + $api_http_port + +$async_receiver_jar = "-jar " + $jempi_apps_dir + "\JeMPI_AsyncReceiver\target\AsyncReceiver-1.0-SNAPSHOT-spring-boot.jar" +$def_async_reveiver_log4j_level = "-DLOG4J2_LEVEL=DEBUG" +$def_async_receiver_kafka_client_id = "-DKAFKA_CLIENT_ID=client-id-syncrx" + +$etl_jar = "-jar " + $jempi_apps_dir + "\JeMPI_ETL\target\ETL-1.0-SNAPSHOT-spring-boot.jar" +$def_etl_log4j_level = "-DLOG4J2_LEVEL=DEBUG" 
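bootstrapper.ps1 above, like the start scripts that follow, configures the JeMPI apps entirely through `-D` JVM system properties before handing the jar its `data resetAll` arguments. For orientation, a trimmed bash rendering of that bootstrapper invocation; `localhost` stands in for the WSL IP the script detects, and the credentials are the local-development defaults hard-coded in the script:

```bash
java \
  -DLOG4J2_LEVEL=TRACE \
  -DPOSTGRESQL_IP=localhost -DPOSTGRESQL_PORT=5432 \
  -DPOSTGRESQL_USER=postgres -DPOSTGRESQL_PASSWORD=postgres \
  -DPOSTGRESQL_DATABASE=postgres -DPOSTGRESQL_USERS_DB=users_db \
  -DPOSTGRESQL_NOTIFICATIONS_DB=notifications_db \
  -DPOSTGRESQL_AUDIT_DB=audit_db -DPOSTGRESQL_KC_TEST_DB=kc_test_db \
  -DKAFKA_BOOTSTRAP_SERVERS=localhost:9094 \
  -DKAFKA_APPLICATION_ID=app-id-bootstrapper \
  -DDGRAPH_HOSTS=localhost -DDGRAPH_PORTS=9080 \
  --enable-preview \
  -jar JeMPI_Apps/JeMPI_Bootstrapper/target/Bootstrapper-1.0-SNAPSHOT-spring-boot.jar \
  data resetAll
```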
+$def_etl_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-etl" + +$controller_jar = "-jar " + $jempi_apps_dir + "\JeMPI_Controller\target\Controller-1.0-SNAPSHOT-spring-boot.jar" +$def_controller_log4j_level = "-DLOG4J2_LEVEL=DEBUG" +$def_controller_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-ctrl" +$def_controller_kafka_client_id = "-DKAFKA_CLIENT_ID=client-id-ctrl" + +$linker_jar = "-jar " + $jempi_apps_dir + "\JeMPI_Linker\target\Linker-1.0-SNAPSHOT-spring-boot.jar" +$def_linker_log4j_level = "-DLOG4J2_LEVEL=TRACE" +$def_linker_kafka_application_id_interactions = "-DKAFKA_APPLICATION_ID_INTERACTIONS=app-id-lnk1" +$def_linker_kafka_application_id_mu = "-DKAFKA_APPLICATION_ID_MU=app-id-lnk2" +$def_linker_kafka_client_id_notifications = "-DKAFKA_CLIENT_ID_NOTIFICATIONS=client-id-lnk3" +$def_linker_match_threshold = "-DLINKER_MATCH_THRESHOLD=0.65" +$def_linker_match_threshold_margin = "-DLINKER_MATCH_THRESHOLD_MARGIN=0.1" + +$api_jar = "-jar " + $jempi_apps_dir + "\JeMPI_API\target\API-1.0-SNAPSHOT-spring-boot.jar" +$def_api_log4j_level = "-DLOG4J2_LEVEL=TRACE" +$def_api_kafka_application_id = "-DKAFKA_APPLICATION_ID=app-id-api" + + + + + +# +# build UI apps +# + + + +# BUILD UI +#npm install -g yarn serve +#Push-Location ..\..\..\JeMPI_Apps\JeMPI_UI +# yarn install --frozen-lockfile +# yarn build +#Pop-Location + + +# +# build apps +# +Push-Location ..\..\..\JeMPI_Apps + Copy-Item JeMPI_Configuration\config-api.json JeMPI_API\src\main\resources\. + mvn clean + mvn package +Pop-Location + +# +# Start BootStrapper and Create databases +# +.\bootstrapper.ps1 -Wait +# + +# +# start async receiver +# +if (Test-path $async_receiver_folder\csv) { + Write-Host ${async_receiver_folder}'\csv exists' +} else { + New-Item $async_receiver_folder\csv -ItemType Directory + Write-Host 'Folder Created successfully' +} +$async_handle = Start-Process -FilePath java ` + -ArgumentList $def_async_reveiver_log4j_level, ` + $def_kafka_bootstrap_servers, ` + $def_async_receiver_kafka_client_id, ` + '-server', ` + '--enable-preview', ` + $async_receiver_jar ` + -WindowStyle Normal ` + -WorkingDirectory $async_receiver_folder ` + -Debug ` + -Verbose ` + -PassThru ` + -RedirectStandardError 'async_stderr.txt' +# -RedirectStandardOutput 'async_stdout.txt' +$async_handle | Export-Clixml -Path (Join-Path './' 'async_handle.xml') + +# +# start etl +# +if (Test-path $etl_folder) { + Write-Host ${etl_folder}' exists' +} else { + New-Item $etl_folder -ItemType Directory + Write-Host 'Folder Created successfully' +} +$etl_handle = Start-Process -FilePath java ` + -ArgumentList $def_etl_log4j_level, ` + $def_kafka_bootstrap_servers, ` + $def_etl_kafka_application_id, ` + $def_etl_ip, ` + $def_etl_http_port, ` + $def_controller_ip, ` + $def_controller_http_port, ` + $def_linker_ip, ` + $def_linker_http_port, ` + '-server', ` + '--enable-preview', ` + $etl_jar ` + -WindowStyle Normal ` + -WorkingDirectory $etl_folder ` + -Debug ` + -Verbose ` + -PassThru ` + -RedirectStandardError 'etl_stderr.txt' + # -RedirectStandardOutput 'etl_stdout.txt' +$etl_handle | Export-Clixml -Path (Join-Path './' 'etl_handle.xml') + + +# +# start controller +# +if (Test-path $controller_folder) { + Write-Host ${controller_folder}' exists' +} else { + New-Item $controller_folder -ItemType Directory + Write-Host 'Folder Created successfully' +} +$controller_handle = Start-Process -FilePath java ` + -ArgumentList $def_controller_log4j_level, ` + $def_postgresql_ip, ` + $def_postgresql_port, ` + $def_postgresql_user, ` + 
$def_postgresql_password, ` + $def_postgresql_notifications_db, ` + $def_postgresql_audit_db, ` + $def_kafka_bootstrap_servers, ` + $def_controller_kafka_application_id, ` + $def_controller_kafka_client_id, ` + $def_controller_http_port, ` + $def_dgraph_hosts, ` + $def_dgraph_ports, ` + $def_etl_ip, ` + $def_etl_http_port, ` + $def_controller_ip, ` + $def_controller_http_port, ` + $def_linker_ip, ` + $def_linker_http_port, ` + '-server', ` + '--enable-preview', ` + $controller_jar ` + -WindowStyle Normal ` + -WorkingDirectory $controller_folder ` + -Debug ` + -Verbose ` + -PassThru ` + -RedirectStandardError 'controller_stderr.txt' + # -RedirectStandardOutput 'controller_stdout.txt' +$controller_handle | Export-Clixml -Path (Join-Path './' 'controller_handle.xml') + + +# +# start linker +# +if (Test-path $linker_folder) { + Write-Host ${linker_folder}' exists' +} else { + New-Item $linker_folder -ItemType Directory + Write-Host 'Folder Created successfully' +} +$linker_handle = Start-Process -FilePath java ` + -ArgumentList $def_linker_log4j_level, ` + $def_postgresql_ip, ` + $def_postgresql_port, ` + $def_postgresql_user, ` + $def_postgresql_password, ` + $def_postgresql_notifications_db, ` + $def_kafka_bootstrap_servers, ` + $def_linker_kafka_application_id_interactions, ` + $def_linker_kafka_application_id_mu, ` + $def_linker_kafka_client_id_notifications, ` + $def_dgraph_hosts, ` + $def_dgraph_ports, ` + $def_linker_http_port, ` + $def_linker_match_threshold, ` + $def_linker_match_threshold_margin, ` + $def_etl_ip, ` + $def_etl_http_port, ` + $def_controller_ip, ` + $def_controller_http_port, ` + $def_linker_ip, ` + $def_linker_http_port, ` + $def_api_ip, ` + $def_api_http_port, ` + '-server', ` + '--enable-preview', ` + $linker_jar ` + -WindowStyle Normal ` + -WorkingDirectory $linker_folder ` + -Debug ` + -Verbose ` + -PassThru ` + -RedirectStandardError 'linker_stderr.txt' + # -RedirectStandardOutput 'linker_stdout.txt' +$linker_handle | Export-Clixml -Path (Join-Path './' 'linker_handle.xml') + + +# +# start api +# +if (Test-path $api_folder) { + Write-Host ${api_folder}' exists' +} else { + New-Item $api_folder -ItemType Directory + Write-Host 'Folder Created successfully' +} +$api_handle = Start-Process -FilePath java ` + -ArgumentList $def_api_log4j_level, ` + $def_postgresql_ip, ` + $def_postgresql_port, ` + $def_postgresql_user, ` + $def_postgresql_password, ` + $def_postgresql_notifications_db, ` + $def_postgresql_audit_db, ` + $def_kafka_bootstrap_servers, ` + $def_api_kafka_application_id, ` + $def_dgraph_hosts, ` + $def_dgraph_ports, ` + $def_etl_ip, ` + $def_etl_http_port, ` + $def_controller_ip, ` + $def_controller_http_port, ` + $def_linker_ip, ` + $def_linker_http_port, ` + $def_api_ip, ` + $def_api_http_port, ` + '-server', ` + '--enable-preview', ` + $api_jar ` + -WindowStyle Normal ` + -WorkingDirectory $api_folder ` + -Debug ` + -Verbose ` + -PassThru ` + -RedirectStandardError 'api_stderr.txt' + # -RedirectStandardOutput 'api_stdout.txt' +$api_handle | Export-Clixml -Path (Join-Path './' 'api_handle.xml') diff --git a/devops/windows/run--base-docker-wsl/start.ps1 b/devops/windows/run--base-docker-wsl/start.ps1 index e821548e2..b6a821201 100644 --- a/devops/windows/run--base-docker-wsl/start.ps1 +++ b/devops/windows/run--base-docker-wsl/start.ps1 @@ -38,7 +38,9 @@ $def_postgresql_ip = "-DPOSTGRESQL_IP=" + $postgresql $def_postgresql_port = "-DPOSTGRESQL_PORT=" + $postgresql_port $def_postgresql_user = "-DPOSTGRESQL_USER=`"postgres`"" $def_postgresql_password = 
"-DPOSTGRESQL_PASSWORD=`"postgres`"" -$def_postgresql_notifications_db = "-DPOSTGRESQL_DATABASE=`"notifications`"" +$def_postgresql_notifications_db = "-DPOSTGRESQL_NOTIFICATIONS_DB=`"notifications_db`"" +$def_postgresql_audit_db = "-DPOSTGRESQL_AUDIT_DB=`"audit_db`"" + $def_dgraph_hosts = "-DDGRAPH_HOSTS=" + $dgraph_hosts $def_dgraph_ports = "-DDGRAPH_PORTS=" + $dgraph_ports $def_etl_ip = "-DETL_IP=" + $etl_ip @@ -153,7 +155,7 @@ $etl_handle = Start-Process -FilePath java ` -Verbose ` -PassThru ` -RedirectStandardError 'etl_stderr.txt' -# -RedirectStandardOutput 'etl_stdout.txt' + # -RedirectStandardOutput 'etl_stdout.txt' $etl_handle | Export-Clixml -Path (Join-Path './' 'etl_handle.xml') @@ -173,10 +175,13 @@ $controller_handle = Start-Process -FilePath java ` $def_postgresql_user, ` $def_postgresql_password, ` $def_postgresql_notifications_db, ` + $def_postgresql_audit_db, ` $def_kafka_bootstrap_servers, ` $def_controller_kafka_application_id, ` $def_controller_kafka_client_id, ` $def_controller_http_port, ` + $def_dgraph_hosts, ` + $def_dgraph_ports, ` $def_etl_ip, ` $def_etl_http_port, ` $def_controller_ip, ` @@ -192,7 +197,7 @@ $controller_handle = Start-Process -FilePath java ` -Verbose ` -PassThru ` -RedirectStandardError 'controller_stderr.txt' -# -RedirectStandardOutput 'controller_stdout.txt' + # -RedirectStandardOutput 'controller_stdout.txt' $controller_handle | Export-Clixml -Path (Join-Path './' 'controller_handle.xml') @@ -238,7 +243,7 @@ $linker_handle = Start-Process -FilePath java ` -Verbose ` -PassThru ` -RedirectStandardError 'linker_stderr.txt' -# -RedirectStandardOutput 'linker_stdout.txt' + # -RedirectStandardOutput 'linker_stdout.txt' $linker_handle | Export-Clixml -Path (Join-Path './' 'linker_handle.xml') @@ -258,6 +263,7 @@ $api_handle = Start-Process -FilePath java ` $def_postgresql_user, ` $def_postgresql_password, ` $def_postgresql_notifications_db, ` + $def_postgresql_audit_db, ` $def_kafka_bootstrap_servers, ` $def_api_kafka_application_id, ` $def_dgraph_hosts, ` @@ -279,5 +285,5 @@ $api_handle = Start-Process -FilePath java ` -Verbose ` -PassThru ` -RedirectStandardError 'api_stderr.txt' -# -RedirectStandardOutput 'api_stdout.txt' + # -RedirectStandardOutput 'api_stdout.txt' $api_handle | Export-Clixml -Path (Join-Path './' 'api_handle.xml')