diff --git a/.gitignore b/.gitignore
index 7ccc567..6d66c12 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,5 @@ logs/
master.zip
original.release.json
stageworkshop-master/
+venv/
+.idea/
\ No newline at end of file
diff --git a/CHANGELOG.MD b/CHANGELOG.MD
new file mode 100644
index 0000000..fe8524d
--- /dev/null
+++ b/CHANGELOG.MD
@@ -0,0 +1,86 @@
+
+Changelog stageworkshop
+
+* Unreleased
+
+* Version 2.0.6-ci.14
+
+Changes made
+
+* 2019-04-12 willem@nutanix.com
+
+Stageworkshop part
+ * Changed the following files:
+ * scripts/lib.pe.sh; Changed the way we check if the PC is running
+ Changed the PE to PC registration
+
+ * scripts/lib.pc.sh; Before we can enable calm we need to make sure the system is ready for it.
+ * scripts/lib.common.sh; Removed the acli as 5.8.2 has an error on that. All on nuclei now.
+ * stage_workshop.sh; Re-enabled calm workshop (AOS and PC 5.8.2)
+
+Call back server part
+
+ * No changes
+
+
+Changes made
+
+
+* 2019-04-04 willem@nutanix.com
+
+Stageworkshop part
+ * Changed the following files:
+
+ * scripts/lib.pc.sh; Added LCM upgrades using API calls and not files. Also added Karbon enable
+
+Call back server part
+
+ * No changes
+
+
+Changes made
+
+
+* 2019-03-21 willem@nutanix.com
+
+Stageworkshop part
+ * Changed the following files:
+
+ * scripts/lib.pc.sh; Added a loop function for the waiting for Calm to be enabled before starting the LCM.
+
+Call back server part
+
+ * No changes
+
+
+* 2019-03-20 willem@nutanix.com
+
+Stageworkshop part
+ * Changed the following files:
+
+ * release.json; Changed all the 13 to 14 as a number
+    * scripts/lib.pc.sh; Added a loop function for the LCM part to follow the progress of the API call. Also added an extra wait of 5 minutes so that the "Enable Application" is done before we start the LCM! Otherwise it will crash.
+    * scripts/lib.pe.sh; Added a line that if it's being called by we- something, it changes lib.common.sh to we-lib.common.sh
+
+ * Copied for keeping the original the following files:
+
+ * scripts/lib.pc.org.sh
+
+ * Created extra files for a server reporting system (centralised logging using curl)
+
+ * we_stage_workshop.sh; the new version of the staging workshop script. This one holds the lines for the centralised feedback
+ * we_push_centos_cl_disk.sh; small script to push the CentOS Image that is needed in the TS2019 workshops
+    * scripts/we-ts2019.sh; has more debug info fired for the scripts run at PE and PC timeframe (nohup bash -x)
+    * scripts/we-lib.common.sh; contains extra functions for the call back to the server
+
+
+Call back server part
+ * Reason for this server is to have the possibility to send the logging/steps of the stageworkshop script to a centralised server. The "tool" is based on python.
+
+ * Created files and their goal:
+
+    * logserver/logserver.py; Listener for the messages from the scripts.
+ * usage: python3 logserver.py 3000 to start the logserver on port 3000 (used by the scripts for now)
+ * logserver/webserver.py; Webserver to show the messages of the different running scripts
+        * usage: python3 webserver.py to start the webserver, which defaults to port 5000.
+
+
diff --git a/README.md b/README.md
index 4605b54..dc7be73 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,8 @@ After HPoC Foundation, you can have push-button Calm in about half an hour!
# Table of Contents #
-- [Available Workshops](#available-workshops)
+- [How To Workshop](#how-to-workshop)
+- [Available Workshops](#available-workshops)
- [HPoC Cluster Reservation](#hpoc-cluster-reservation)
- [Staging Your HPoC](#staging-your-hpoc)
- [Interactive Usage](#interactive-usage)
@@ -17,6 +18,10 @@ After HPoC Foundation, you can have push-button Calm in about half an hour!
---
+## How To Workshop ##
+
+Please review the How To Workshop for the latest instructions: http://ntnx.tips/howto
+
## Available Workshops ##
1. Calm Introduction Workshop (AOS/AHV 5.5+)
diff --git a/bootstrap.sh b/bootstrap.sh
index 1d74e94..d94931a 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -3,11 +3,14 @@
# Example use from a Nutanix CVM:
# curl --remote-name --location https://raw.githubusercontent.com/nutanixworkshops/stageworkshop/master/bootstrap.sh && sh ${_##*/}
#
+# Development/Beta version
+# curl --remote-name --location https://raw.githubusercontent.com/jncox/stageworkshop/master/bootstrap.sh && sh ${_##*/}
+#
# For testing:
# curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/}
if [[ -z ${SOURCE} ]]; then
- ORGANIZATION=nutanixworkshops
+ ORGANIZATION=jncox
REPOSITORY=stageworkshop
BRANCH=master
else
@@ -48,6 +51,16 @@ _ERROR=0
if (( ${_ERROR} == 1 )); then
echo "Error ${_ERROR}: This script should be run on a Nutanix CVM!"
#echo RESTORE:
+
+#########################################################################
+### Added to verify user is Nutanix 7/22/2019 from mlavi version of file.
+#########################################################################
+
+ exit ${_ERROR}
+elif [[ $(whoami) != 'nutanix' ]]; then
+ _ERROR=50
+ echo "PBC-50: This guardrail can be relaxed with proper testing for the future."
+ echo "Error ${_ERROR}: This script should be run as user nutanix!"
exit ${_ERROR}
fi
@@ -102,7 +115,7 @@ elif [[ ! -d ${REPOSITORY}-${BRANCH} ]]; then
fi
pushd ${REPOSITORY}-${BRANCH}/ \
- && chmod -R u+x *sh
+ && chmod -R u+x *.sh
if [[ -e release.json ]]; then
echo -e "\n${ARCHIVE}::$(basename $0) release: $(grep FullSemVer release.json | awk -F\" '{print $4}')"
@@ -131,7 +144,7 @@ cat <= 5.9, starting LCM inventory...
2018-12-26 16:05:26|96508|lcm|inventory _test=|500|```
- PE> ncli multicluster add-to-multicluster external-ip-address-or-svm-ips=$PC_HOST username=admin password=yaknow
+ - Notify bart.grootzevert when fixed
+ - 2019-02-20 21:28:12|4424|pc_configure|PC>=5.10, manual join PE to PC = |Cluster registration is currently in progress. This operation may take a while.
+Error: The username or password entered is incorrect.|
- ADC2 wonky
- 2019-02-15 16:12:08|20294|pe_auth|Adjusted directory-url=ldap://10.42.23.40:389 because AOS-5.10.0.1 >= 5.9
diff --git a/hol_stageworkshop.sh b/hol_stageworkshop.sh
new file mode 100755
index 0000000..1556bac
--- /dev/null
+++ b/hol_stageworkshop.sh
@@ -0,0 +1,237 @@
+#!/usr/bin/env bash
+# use bash -x to debug command substitution and evaluation instead of echo.
+DEBUG=
+
+# Source Workshop common routines + global variables
+source scripts/lib.common.sh
+source scripts/global.vars.sh
+begin
+
+# For WORKSHOPS keyword mappings to scripts and variables, please use:
+# - Calm || Bootcamp || Citrix || Summit
+# - PC #.#
+WORKSHOPS=(\
+"Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \
+"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \
+"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \
+"Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \
+"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \
+"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \
+"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \
+"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \
+"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \
+#"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \
+#"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \
+#"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \
+#"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \
+#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \
+) # Adjust function stage_clusters, below, for file/script mappings as needed
+
+
+function log() {
+ local _caller
+
+ _caller=$(echo -n "$(caller 0 | awk '{print $2}')")
+ echo "$(date '+%Y-%m-%d %H:%M:%S')|$$|${_caller}|${1}"
+}
+
+function checkStagingIsDone
+{
+ #Set Variables
+ pcIP=${1}
+ clusterPW=${2}
+ local _sleep=20m
+ local _attempts=7
+ local _loop=0
+ local _test
+ local _error=77
+
+
+#if the snc_bootcamp.sh script is still on the CVM, then the cluster is not yet ready
+ while true ; do
+ (( _loop++ ))
+ _test=$(sshpass -p "nutanix/4u" ssh -o StrictHostKeyChecking=no nutanix@$pcIP [[ -f /home/nutanix/.staging_complete ]] && echo "ready" || echo "notready")
+
+ if [ "$_test" == "ready" ]; then
+ log "CVM with IP of $nodeIP is ready"
+ return 0
+ elif (( _loop > _attempts )); then
+ log "Warning ${_error} @${pcIP}: Giving up after ${_loop} tries."
+ return ${_error}
+ else
+ log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep}..."
+ sleep ${_sleep}
+ fi
+ done
+}
+
+function stage_clusters() {
+ # Adjust map below as needed with $WORKSHOPS
+ local _cluster
+ local _container
+ local _dependency
+ local _fields
+ local _libraries='global.vars.sh lib.common.sh '
+ local _pe_launch # will be transferred and executed on PE
+ local _pc_launch # will be transferred and executed on PC
+ local _sshkey=${SSH_PUBKEY}
+ #local _wc_arg='--lines'
+ local _wc_arg=${WC_ARG}
+ local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]}
+
+ # Map to latest and greatest of each point release
+ # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download
+ # TODO: make WORKSHOPS and map a JSON configuration file?
+
+ ## Set script vars since we know what versions we want to use
+ export PC_VERSION="${PC_CURRENT_VERSION}"
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='snc_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+
+
+ dependencies 'install' 'sshpass'
+
+
+ # Send configuration scripts to remote clusters and execute Prism Element script
+ # shellcheck disable=2206
+ PE_HOST=${1}
+ PE_PASSWORD=${2}
+ EMAIL=${3}
+ idcluster=${4}
+
+ mysql --login-path=local -sN<<<"Use hol; UPDATE cluster SET fk_idclusterstatus = (SELECT idclusterstatus from clusterstatus WHERE cstatus = \"Staging\") WHERE idcluster = \"${idcluster}\";" 2>&1
+ echo "Node $nodeIP with cluster ID of $idcluster marked as staging"
+
+ pe_configuration_args "${_pc_launch}"
+
+ . /opt/scripts/stageworkshop/scripts/global.vars.sh # re-import for relative settings
+
+ prism_check 'PE' 60
+
+ if [[ -d cache ]]; then
+ pushd cache || true
+ for _dependency in ${JQ_PACKAGE} ${SSHPASS_PACKAGE}; do
+ if [[ -e ${_dependency} ]]; then
+ log "Sending cached ${_dependency} (optional)..."
+ remote_exec 'SCP' 'PE' "${_dependency}" 'OPTIONAL'
+ fi
+ done
+ popd || true
+ fi
+
+ if (( $? == 0 )) ; then
+ log "Sending configuration script(s) to PE@${PE_HOST}"
+ else
+ _error=15
+ log "Error ${_error}: Can't reach PE@${PE_HOST}"
+ exit ${_error}
+ fi
+
+ if [[ -e ${RELEASE} ]]; then
+ log "Adding release version file..."
+ _libraries+=" ../${RELEASE}"
+ fi
+
+ pushd /opt/scripts/stageworkshop/scripts \
+ && remote_exec 'SCP' 'PE' "${_libraries} ${_pe_launch} ${_pc_launch}" \
+ && popd || exit
+
+ # For Calm container updates...
+ if [[ -d cache/pc-${PC_VERSION}/ ]]; then
+ log "Uploading PC updates in background..."
+ pushd cache/pc-${PC_VERSION} \
+ && pkill scp || true
+ for _container in epsilon nucalm ; do
+ if [[ -f ${_container}.tar ]]; then
+ remote_exec 'SCP' 'PE' ${_container}.tar 'OPTIONAL' &
+ fi
+ done
+ popd || exit
+ else
+ log "No PC updates found in cache/pc-${PC_VERSION}/"
+ fi
+
+ if [[ -f ${_sshkey} ]]; then
+ log "Sending ${_sshkey} for addition to cluster..."
+ remote_exec 'SCP' 'PE' ${_sshkey} 'OPTIONAL'
+ fi
+
+ log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}"
+ remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &"
+ unset PE_CONFIGURATION
+
+ # shellcheck disable=SC2153
+ cat < Gear > Cluster Lockdown,
+ the following will fail silently, use ssh nutanix@{PE|PC} instead.
+
+ $ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\
+ ${SSH_OPTS} \\
+ nutanix@${PE_HOST} 'date; tail -f ${_pe_launch%%.sh}.log'
+ You can login to PE to see tasks in flight and eventual PC registration:
+ https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440/
+
+EOM
+
+ if (( "$(echo ${_libraries} | grep -i lib.pc | wc ${_wc_arg})" > 0 )); then
+ # shellcheck disable=2153
+ cat <&1
+ echo "Node $nodeIP with cluster ID of $idcluster marked as ready. RC is $rc"
+
+ elif [ $rc -eq 77 ] ; then
+ #Update Database to mark cluster as Error when the staging script is no longer on the CVM
+
+ mysql --login-path=local -sN<<<"Use hol; UPDATE cluster SET fk_idclusterstatus = (SELECT idclusterstatus from clusterstatus WHERE cstatus = \"Error\") WHERE idcluster = \"${idcluster}\";" 2>&1
+ echo "Node $nodeIP with cluster ID of $idcluster marked as ERROR. RC is $rc"
+
+ fi
+
+ finish
+ exit
+}
+
+function pe_configuration_args() {
+ local _pc_launch="${1}"
+
+ PE_CONFIGURATION="EMAIL=${EMAIL} PRISM_ADMIN=${PRISM_ADMIN} PE_PASSWORD=${PE_PASSWORD} PE_HOST=${PE_HOST} PC_LAUNCH=${_pc_launch} PC_VERSION=${PC_VERSION}"
+}
+
+
+#__main__
+
+# Source Workshop common routines + global variables
+. /opt/scripts/stageworkshop/scripts/lib.common.sh
+. /opt/scripts/stageworkshop/scripts/global.vars.sh
+begin
+
+
+# shellcheck disable=SC2213
+
+
+#stage_clusters "${1}" "${2}" "${3}"
+
+mysql --login-path=local -sN<<<"Use hol; SELECT idcluster,nodeIP,peIP,dsIP,clusterPW,clustername FROM cluster WHERE fk_idclusterstatus = (SELECT idclusterstatus from clusterstatus WHERE cstatus = \"Created\");" | while read idcluster nodeIP peIP dsIP clusterPW clustername; do
+ stage_clusters "$peIP" "$clusterPW" "nutanixexpo@gmail.com" "$idcluster" &
+done
diff --git a/hooks/autohook.sh b/hooks/autohook.sh
old mode 100755
new mode 100644
diff --git a/hooks/pre-commit/01-release b/hooks/pre-commit/01-release
deleted file mode 120000
index d5316c0..0000000
--- a/hooks/pre-commit/01-release
+++ /dev/null
@@ -1 +0,0 @@
-../scripts/semver_release.sh
\ No newline at end of file
diff --git a/hooks/pre-commit/01-release b/hooks/pre-commit/01-release
new file mode 100644
index 0000000..05eb028
--- /dev/null
+++ b/hooks/pre-commit/01-release
@@ -0,0 +1,5 @@
+XSym
+0028
+5863f36caffa453eb1b2b296b337da2a
+../scripts/semver_release.sh
+
\ No newline at end of file
diff --git a/hooks/scripts/semver_release.sh b/hooks/scripts/semver_release.sh
old mode 100755
new mode 100644
diff --git a/quick.txt b/quick.txt
new file mode 100644
index 0000000..c75bee7
--- /dev/null
+++ b/quick.txt
@@ -0,0 +1 @@
+10.42.9.37|techX2019!|willem@nutanix.com
diff --git a/release.json b/release.json
index ade92c8..0ef8c0b 100644
--- a/release.json
+++ b/release.json
@@ -1,32 +1,33 @@
{
"Major": 2,
"Minor": 0,
- "Patch": 6,
- "PreReleaseTag": "ci.11",
- "PreReleaseTagWithDash": "-ci.11",
+ "Patch": 7,
+ "PreReleaseTag": "ci.15",
+ "PreReleaseTagWithDash": "-ci.15",
"PreReleaseLabel": "ci",
- "PreReleaseNumber": 11,
+ "PreReleaseNumber": 15,
"BuildMetaData": "",
"BuildMetaDataPadded": "",
- "FullBuildMetaData": "Branch.master.Sha.b47788a3bdc14b8fd8852a1fefccf73a125d038c",
- "MajorMinorPatch": "2.0.6",
- "SemVer": "2.0.6-ci.11",
- "LegacySemVer": "2.0.6-ci11",
- "LegacySemVerPadded": "2.0.6-ci0011",
+ "FullBuildMetaData": "Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc",
+ "MajorMinorPatch": "2.0.7",
+ "SemVer": "2.0.6-ci.15",
+ "LegacySemVer": "2.0.7-ci15",
+ "LegacySemVerPadded": "2.0.7-ci0015",
"AssemblySemVer": "2.0.6.0",
"AssemblySemFileVer": "2.0.6.0",
- "FullSemVer": "2.0.6-ci.11",
- "InformationalVersion": "2.0.6-ci.11+Branch.master.Sha.b47788a3bdc14b8fd8852a1fefccf73a125d038c",
+ "FullSemVer": "2.0.6-ci.14",
+ "InformationalVersion": "2.0.6-ci.14+Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc",
"BranchName": "master",
- "Sha": "b47788a3bdc14b8fd8852a1fefccf73a125d038c",
- "ShortSha": "b47788a",
- "NuGetVersionV2": "2.0.6-ci0011",
- "NuGetVersion": "2.0.6-ci0011",
- "NuGetPreReleaseTagV2": "ci0011",
- "NuGetPreReleaseTag": "ci0011",
- "CommitsSinceVersionSource": 11,
- "CommitsSinceVersionSourcePadded": "0011",
- "CommitDate": "2019-02-15",
- "PrismCentralStable": "5.8.2",
- "PrismCentralDev": "5.10.1.1"
+ "Sha": "3d62e775126b97ddac481a1fcc81920d42d998fc",
+ "ShortSha": "3d62e77",
+ "NuGetVersionV2": "2.0.6-ci0014",
+ "NuGetVersion": "2.0.6-ci0014",
+ "NuGetPreReleaseTagV2": "ci0014",
+  "NuGetPreReleaseTag": "ci0014",
+ "CommitsSinceVersionSource": 14,
+ "CommitsSinceVersionSourcePadded": "0014",
+ "CommitDate": "2019-03-20",
+ "PrismCentralStable": "5.11.2.1",
+ "PrismCentralCurrent": "5.17.0.3",
+ "PrismCentralDev": "5.17.0.3"
}
diff --git a/scripts/Consolidated_Storage_bootcamp.sh b/scripts/Consolidated_Storage_bootcamp.sh
new file mode 100755
index 0000000..b771590
--- /dev/null
+++ b/scripts/Consolidated_Storage_bootcamp.sh
@@ -0,0 +1,193 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requestite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ deploy_peer_mgmt_server "${PMC}" \
+ && deploy_peer_agent_server "${AGENTA}" \
+ && deploy_peer_agent_server "${AGENTB}"
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ Win10v1903.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ CentOS7.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'PE_PASSWORD'
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ args_required 'PE_HOST'
+
+ dependencies 'install' 'jq' \
+ && files_install
+
+ log "PE = https://${PE_HOST}:9440"
+ ;;
+esac
+
+finish
diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh
new file mode 100755
index 0000000..7808960
--- /dev/null
+++ b/scripts/all_bootcamp.sh
@@ -0,0 +1,185 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requestite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.219"
+ export _external_nw_name="${1}"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && create_era_container \
+ && era_network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30 \
+ && deploy_era \
+ && deploy_mssql \
+ && deploy_oracle_19c
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ deploy_peer_mgmt_server "${PMC}" \
+ && deploy_peer_agent_server "${AGENTA}" \
+ && deploy_peer_agent_server "${AGENTB}"
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export _prio_images_arr=(\
+ Windows2016_10202020.qcow2 \
+ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \
+ )
+
+ export QCOW2_IMAGES=(\
+ CentOS7_05272020.qcow2 \
+ Windows2019.qcow2 \
+ Win10v1909_10192020.qcow2 \
+ WinTools_05272020.qcow2 \
+ Linux_ToolsVM_05272020.qcow2
+ LinuxMint_ToolsVM.qcow2 \
+ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \
+ veeam/VeeamAHVProxy2.0.404.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ Windows2016.iso \
+ FrameCCA-3.0.0.iso \
+ FrameGuestAgentInstaller_1.0.2.8.iso \
+ Nutanix-VirtIO-1.1.5.iso \
+ veeam/VBR_10.0.0.4442.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && priority_images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && configure_era \
+ && upload_citrix_calm_blueprint \
+ && sleep 30 \
+ && images \
+ && seedPC \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/app_modernization_bootcamp.sh b/scripts/app_modernization_bootcamp.sh
new file mode 100755
index 0000000..7aa5df2
--- /dev/null
+++ b/scripts/app_modernization_bootcamp.sh
@@ -0,0 +1,156 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requestite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+
+ # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be
+ if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then
+ pc_smtp
+ fi
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && upload_karbon_calm_blueprint \
+ && sleep 30 \
+ && upload_CICDInfra_calm_blueprint \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/basic_bootcamp.sh b/scripts/basic_bootcamp.sh
new file mode 100755
index 0000000..da45b26
--- /dev/null
+++ b/scripts/basic_bootcamp.sh
@@ -0,0 +1,153 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ Win10v1903.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ Windows2016.iso \
+ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && pc_project \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh
new file mode 100755
index 0000000..b7fd3e6
--- /dev/null
+++ b/scripts/bootcamp.sh
@@ -0,0 +1,156 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter
+ # if [ ! -z DEBUG ]; then
+ # bash_cmd='bash'
+ # else
+ # bash_cmd='bash -x'
+ # fi
+ # _command="EMAIL=${EMAIL} \
+ # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES"
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ files_install && sleep 30
+
+ create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30
+
+ file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && object_store \
+ && karbon_image_download \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && seedPC \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/calm_bootcamp.sh b/scripts/calm_bootcamp.sh
new file mode 100755
index 0000000..dd39b23
--- /dev/null
+++ b/scripts/calm_bootcamp.sh
@@ -0,0 +1,161 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoDC'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+
+    # If we run this in a non-HPOC environment we must skip the SMTP config as we have no idea what the SMTP server will be
+ if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then
+ pc_smtp
+ fi
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && upload_karbon_calm_blueprint \
+ && sleep 30 \
+ && upload_CICDInfra_calm_blueprint \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/calm.sh b/scripts/calm_iaas_bootcamp.sh
similarity index 64%
rename from scripts/calm.sh
rename to scripts/calm_iaas_bootcamp.sh
index 6222290..a9610fd 100755
--- a/scripts/calm.sh
+++ b/scripts/calm_iaas_bootcamp.sh
@@ -18,6 +18,8 @@ case ${1} in
PE | pe )
. lib.pe.sh
+ export AUTH_SERVER='AutoAD'
+
args_required 'PE_HOST PC_LAUNCH'
ssh_pubkey & # non-blocking, parallel suitable
@@ -31,16 +33,25 @@ case ${1} in
if (( $? == 0 )) ; then
pc_install "${NW1_NAME}" \
&& prism_check 'PC' \
- && pc_configure \
- && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq'
- log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
- log "PE = https://${PE_HOST}:9440"
- log "PC = https://${PC_HOST}:9440"
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
- files_install & # parallel, optional. Versus: $0 'files' &
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
- finish
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
else
finish
_error=18
@@ -50,6 +61,19 @@ case ${1} in
;;
PC | pc )
. lib.pc.sh
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
dependencies 'install' 'jq' || exit 13
ssh_pubkey & # non-blocking, parallel suitable
@@ -66,6 +90,12 @@ case ${1} in
log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
pe_determine ${1}
. global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
fi
if [[ ! -z "${2}" ]]; then # hidden bonus
@@ -85,13 +115,14 @@ case ${1} in
ssp_auth \
&& calm_enable \
&& lcm \
- && images \
+ && pc_project \
+ && flow_enable \
&& pc_cluster_img_import \
+ && images \
&& prism_check 'PC'
log "Non-blocking functions (in development) follow."
- pc_project
- flow_enable
+ #pc_project
pc_admin
# ntnx_download 'AOS' # function in lib.common.sh
diff --git a/scripts/cicd_bootcamp.sh b/scripts/cicd_bootcamp.sh
new file mode 100755
index 0000000..e891314
--- /dev/null
+++ b/scripts/cicd_bootcamp.sh
@@ -0,0 +1,157 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+
+    # If we run this in a non-HPOC environment we must skip the SMTP config as we have no idea what the SMTP server will be
+ if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then
+ pc_smtp
+ fi
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && upload_karbon_calm_blueprint \
+ && sleep 30 \
+ && upload_CICDInfra_calm_blueprint \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/citrix.sh b/scripts/citrix.sh
deleted file mode 100644
index f837313..0000000
--- a/scripts/citrix.sh
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env bash
-# -x
-
-#__main()__________
-
-# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables
-. /etc/profile.d/nutanix_env.sh
-. lib.common.sh
-. global.vars.sh
-begin
-
-args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION'
-
-#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
-
-log "Adding key to ${1} VMs..."
-ssh_pubkey & # non-blocking, parallel suitable
-
-# Some parallelization possible to critical path; not much: would require pre-requestite checks to work!
-
-case ${1} in
- PE | pe )
- . lib.pe.sh
-
- log "Configure PE role mapping"
- ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${DOMAIN_NAME}" entity-values="${DOMAIN_ADMIN_GROUP}"
-
- log "Creating Reverse Lookup Zone on DC VM"
- remote_exec 'ssh' 'AUTH_SERVER' "samba-tool dns zonecreate dc1 ${HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart"
- log 'Create custom OUs...'
- remote_exec 'ssh' 'AUTH_SERVER' "apt install ldb-tools -y -q"
- remote_exec 'ssh' 'AUTH_SERVER' "cat << EOF > ous.ldif
-dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local
-changetype: add
-objectClass: top
-objectClass: organizationalunit
-description: Non-Persistent Desktop OU
-
-dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local
-changetype: add
-objectClass: top
-objectClass: organizationalunit
-description: Persistent Desktop OU
-
-dn: OU=XenAppServer,DC=NTNXLAB,DC=local
-changetype: add
-objectClass: top
-objectClass: organizationalunit
-description: XenApp Server OU
-
-EOF"
- remote_exec 'ssh' 'AUTH_SERVER' "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart"
-
- log "Create PE user account XD for MCS Plugin"
- ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com
- ncli user grant-cluster-admin-role user-name=xd
-
- log "Get UUIDs from cluster:"
- NET_UUID=$(acli net.get ${NW1_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs)
- log "${NW1_NAME} UUID is ${NET_UUID}"
- CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs)
- log "${STORAGE_DEFAULT} UUID is ${CONTAINER_UUID}"
-
- log "Download AFS image from ${AFS_SRC_URL}"
- wget -nv ${AFS_SRC_URL}
- log "Download AFS metadata JSON from ${AFS_META_URL}"
- wget -nv ${AFS_META_URL}
- log "Stage AFS"
- ncli software upload file-path=/home/nutanix/${AFS_SRC_URL##*/} meta-file-path=/home/nutanix/${AFS_META_URL##*/} software-type=FILE_SERVER
- log "Delete AFS sources to free some space"
- rm ${AFS_SRC_URL##*/} ${AFS_META_URL##*/}
-
- curl -u admin:${PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST https://127.0.0.1:9440/api/nutanix/v3/prism_central -d "${DEPLOY_BODY}"
- log "Waiting for PC deployment to complete (Sleeping 15m)"
- sleep 900
- log "Sending PC configuration script"
- pc_send_file stage_citrixhow_pc.sh
-
- # Execute that file asynchroneously remotely (script keeps running on CVM in the background)
- log "Launching PC configuration script"
- pc_remote_exec "PE_PASSWORD=${PE_PASSWORD} nohup bash /home/nutanix/stage_citrixhow_pc.sh >> pcconfig.log 2>&1 &"
-
- dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
- && pe_license \
- && pe_init \
- && network_configure \
- && authentication_source \
- && pe_auth \
- && pc_install "${NW1_NAME}" \
- && prism_check 'PC'
-
- if (( $? == 0 )) ; then
- pc_configure \
- && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq'
-
- log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
- log "PE = https://${PE_HOST}:9440"
- log "PC = https://${PC_HOST}:9440"
-
- finish
- else
- finish
- log "Error 18: in main functional chain, exit!"
- exit 18
- fi
- ;;
- PC | pc )
- . lib.pc.sh
-
- #PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz'
-
- # Set Prism Central Password to Prism Element Password
- # log "Setting PC password to PE password"
- # ncli user reset-password user-name="admin" password="${PE_PASSWORD}"
-
- # Prism Central upgrade
- #log "Download PC upgrade image: ${PC_UPGRADE_URL##*/}"
- #wget -nv ${PC_UPGRADE_URL}
-
- #log "Prepare PC upgrade image"
- #tar -xzf ${PC_UPGRADE_URL##*/}
- #rm ${PC_UPGRADE_URL##*/}
-
- #log "Upgrade PC"
- #cd /home/nutanix/install ; ./bin/cluster -i . -p upgrade
-
- log "PC Configuration complete on $(date)"
-
- dependencies 'install' 'sshpass' && dependencies 'install' 'jq' || exit 13
-
- pc_passwd
-
- export NUCLEI_SERVER='localhost'
- export NUCLEI_USERNAME="${PRISM_ADMIN}"
- export NUCLEI_PASSWORD="${PE_PASSWORD}"
- # nuclei -debug -username admin -server localhost -password nx2Tech704\! vm.list
-
- ntnx_cmd # check cli services available?
-
- if [[ ! -z "${2}" ]]; then
- # hidden bonus
- log "Don't forget: $0 first.last@nutanixdc.local%password"
- calm_update && exit 0
- fi
-
- export ATTEMPTS=2
- export SLEEP=10
-
- pc_init \
- && pc_ui \
- && pc_auth \
- && pc_smtp
-
- ssp_auth \
- && calm_enable \
- && images \
- && flow_enable \
- && prism_check 'PC'
-
- unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
-
- if (( $? == 0 )); then
- #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
- #&&
- log "PC = https://${PC_HOST}:9440"
- finish
- else
- _error=19
- log "Error ${_error}: failed to reach PC!"
- exit ${_error}
- fi
- ;;
-esac
diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh
new file mode 100755
index 0000000..09a7dd0
--- /dev/null
+++ b/scripts/citrix_bootcamp.sh
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export _prio_images_arr=(\
+ Windows2016.qcow2 \
+ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \
+ )
+
+ export QCOW2_IMAGES=(\
+ Win10v1903.qcow2 \
+ WinToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && pc_project \
+ && priority_images \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && upload_citrix_calm_blueprint \
+ && sleep 30 \
+ && seedPC \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh
new file mode 100755
index 0000000..51a7819
--- /dev/null
+++ b/scripts/era_bootcamp.sh
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+ # Networking needs for Era Bootcamp
+ #export NW2_NAME='EraManaged'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.209"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && create_era_container \
+ && era_network_configure \
+ && authentication_source \
+ && pe_auth \
+ && deploy_era \
+ && deploy_mssql \
+ && deploy_oracle_19c
+
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export _prio_images_arr=(\
+ )
+
+ export QCOW2_IMAGES=(\
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ CentOS7_05272020.qcow2 \
+ Win10v1903_05272020.qcow2 \
+ Win10v1909_05272020.qcow2 \
+ WinTools_05272020.qcow2 \
+ Linux_ToolsVM_05272020.qcow2
+ LinuxMint_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && priority_images \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && configure_era \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/era_cluster_id.json b/scripts/era_cluster_id.json
new file mode 100644
index 0000000..6e53be7
--- /dev/null
+++ b/scripts/era_cluster_id.json
@@ -0,0 +1 @@
+{"id":"a6b659e6-48ee-4a0b-9048-cb1a079c460c","name":"EraCluster","uniqueName":"ERACLUSTER","description":"Era Bootcamp Cluster","cloudType":"NTNX","dateCreated":null,"dateModified":null,"ownerId":"eac70dbf-22fb-462b-9498-949796ca1f73","status":"UP","version":"v2","hypervisorType":"AHV","hypervisorVersion":"5.15","properties":[{"ref_id":"a6b659e6-48ee-4a0b-9048-cb1a079c460c","name":"ERA_STORAGE_CONTAINER","value":"Era","description":null},{"ref_id":"a6b659e6-48ee-4a0b-9048-cb1a079c460c","name":"RESOURCE_CONFIG","value":"{\"storageThresholdPercentage\":95.0,\"memoryThresholdPercentage\":95.0}","description":null}],"referenceCount":0,"ip":"10.42.7.37","username":"admin","password":"techX2019!","cloudInfo":null,"resourceConfig":null}
diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh
new file mode 100755
index 0000000..c68b117
--- /dev/null
+++ b/scripts/era_mssql_bootcamp.sh
@@ -0,0 +1,153 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+ # Networking needs for Era Bootcamp
+ #export NW2_NAME='EraManaged'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.209"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && create_era_container \
+ && era_network_configure \
+ && authentication_source \
+ && pe_auth \
+ && deploy_era \
+ && deploy_mssql
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export _prio_images_arr=(\
+ )
+
+ export QCOW2_IMAGES=(\
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && sleep 30 \
+ && priority_images \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && configure_era \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+esac
diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh
new file mode 100755
index 0000000..68db5eb
--- /dev/null
+++ b/scripts/era_oracle_bootcamp.sh
@@ -0,0 +1,153 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+ # Networking needs for Era Bootcamp
+ #export NW2_NAME='EraManaged'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.209"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && create_era_container \
+ && era_network_configure \
+ && authentication_source \
+ && pe_auth \
+ && deploy_era \
+ && deploy_oracle_19c
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export _prio_images_arr=(\
+ )
+
+ export QCOW2_IMAGES=(\
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && sleep 30 \
+ && priority_images \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && configure_era \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+esac
diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh
new file mode 100755
index 0000000..5b28cdf
--- /dev/null
+++ b/scripts/era_postgres_bootcamp.sh
@@ -0,0 +1,163 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+ # Networking needs for Era Bootcamp
+ #export NW2_NAME='EraManaged'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.209"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && create_era_container \
+ && era_network_configure \
+ && authentication_source \
+ && pe_auth \
+ && deploy_era
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export _prio_images_arr=(\
+ ERA-Server-build-1.2.1.qcow2 \
+ )
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && sleep 30 \
+ && priority_images \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && configure_era \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh
new file mode 100755
index 0000000..b771590
--- /dev/null
+++ b/scripts/files_bootcamp.sh
@@ -0,0 +1,193 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ deploy_peer_mgmt_server "${PMC}" \
+ && deploy_peer_agent_server "${AGENTA}" \
+ && deploy_peer_agent_server "${AGENTB}"
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ Win10v1903.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ CentOS7.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'PE_PASSWORD'
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ args_required 'PE_HOST'
+
+ dependencies 'install' 'jq' \
+ && files_install
+
+ log "PE = https://${PE_HOST}:9440"
+ ;;
+esac
+
+finish
diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh
new file mode 100755
index 0000000..a6ff49f
--- /dev/null
+++ b/scripts/frame_bootcamp.sh
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter
+ # if [ ! -z DEBUG ]; then
+ # bash_cmd='bash'
+ # else
+ # bash_cmd='bash -x'
+ # fi
+ # _command="EMAIL=${EMAIL} \
+ # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES"
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ Win10v1903.qcow2 \
+ WinToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ FrameCCA-3.0.0.iso \
+ FrameGuestAgentInstaller_1.0.2.7.iso \
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && pc_project \
+ && images \
+ && flow_enable \
+ && seedPC \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh
old mode 100644
new mode 100755
index a686a11..bd84ff1
--- a/scripts/global.vars.sh
+++ b/scripts/global.vars.sh
@@ -1,176 +1,484 @@
#!/usr/bin/env bash
# shellcheck disable=SC2034
- RELEASE='release.json'
-# Sync the following to lib.common.sh::ntnx_download-Case=PC
-# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails
-# - Find ${PC_VERSION} in the Additional Releases section on the lower right side
-# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL
- PC_DEV_VERSION='5.10.1.1'
- PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.1.1/pcdeploy-5.10.1.1.json'
-PC_STABLE_VERSION='5.8.2'
-PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json'
-# Sync the following to lib.common.sh::ntnx_download-Case=FILES
-# Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA
-# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side
-# - Provide "Upgrade Metadata File" URL to FILES_METAURL
- FILES_VERSION='3.2.0.1'
- FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
- # 2019-02-15: override until metadata URL fixed
- # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
- FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
- # Revert by overriding again...
- FILES_VERSION='3.2.0'
- FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json'
-
+RELEASE='release.json'
+PC_DEV_VERSION='pc.2020.11'
+PC_CURRENT_VERSION='pc.2020.9'
+PC_STABLE_VERSION='5.17.0.3'
+FILES_VERSION='3.7.2.1'
+FILE_ANALYTICS_VERSION='2.2.0'
NTNX_INIT_PASSWORD='nutanix/4u'
- PRISM_ADMIN='admin'
- SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub"
- STORAGE_POOL='SP01'
- STORAGE_DEFAULT='Default'
- STORAGE_IMAGES='Images'
-
- # Conventions for *_REPOS arrays -- the URL must end with either:
- # - trailing slash, which imples _IMAGES argument to function repo_source()
- # - or full package filename.
-
- # https://stedolan.github.io/jq/download/#checksums_and_signatures
- JQ_REPOS=(\
- 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \
- )
- QCOW2_REPOS=(\
- 'http://10.21.250.221/images/tech-enablement/' \
- 'http://10.21.250.221/images/ahv/techsummit/' \
- 'http://10.132.128.50:81/share/saved-images/' \
- 'https://s3.amazonaws.com/get-ahv-images/' \
- ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share
- QCOW2_IMAGES=(\
+PRISM_ADMIN='admin'
+SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub"
+STORAGE_POOL='SP01'
+STORAGE_DEFAULT='Default'
+STORAGE_IMAGES='Images'
+STORAGE_ERA='Era'
+ATTEMPTS=40
+SLEEP=60
+PrismOpsServer='PrismOpsLabUtilityServer'
+SeedPC='seedPC202011.zip'
+BasicUnattend='basic_unattend.xml'
+CALM_RSA_KEY_FILE='calm_rsa_key.env'
+
+Citrix_Blueprint='CitrixBootcampInfra.json'
+Beam_Blueprint=''
+Karbon_Blueprint='KarbonClusterDeployment.json'
+CICDInfra_Blueprint='CICD_Infra.json'
+
+# Curl and SSH settings
+CURL_OPTS='--insecure --silent --show-error' # --verbose'
+CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null"
+CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}"
+SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null'
+SSH_OPTS+=' -q' # -v'
+
+####################################################
+# Users for Tools VMs and Source VMs
+###################################################
+
+USERS=(\
+ User01 \
+ User02 \
+ User03 \
+ User04 \
+ User05 \
+ User06 \
+)
+
+####################################################
+# Era VARs
+###################################################
+
+ERA_Blueprint='EraServerDeployment.json'
+ERAServerImage='ERA-Server-build-2.1.0.qcow2'
+ERAServerName='EraServer'
+ERA_USER="admin"
+#ERA_PASSWORD="nutanix/4u"
+ERA_PASSWORD="${PE_PASSWORD}"
+ERA_Default_PASSWORD="Nutanix/4u"
+ERA_NETWORK="Secondary"
+ERA_Container_RF="2"
+
+MSSQL_SourceVM="MSSQLSourceVM"
+MSSQL_SourceVM_Image1="MSSQL_1_01132021"
+MSSQL_SourceVM_Image2="MSSQL_2_01132021"
+
+Oracle_12c_SourceVM="Oracle12cSource"
+Oracle_12c_SourceVM_BootImage="12c_bootdisk"
+Oracle_12c_SourceVM_Image1="12c_disk1"
+Oracle_12c_SourceVM_Image2="12c_disk2"
+Oracle_12c_SourceVM_Image3="12c_disk3"
+Oracle_12c_SourceVM_Image4="12c_disk4"
+Oracle_12c_SourceVM_Image5="12c_disk5"
+Oracle_12c_SourceVM_Image6="12c_disk6"
+
+Oracle_19c_SourceVM="Oracle19cSource"
+Oracle_19c_SourceVM_BootImage="19c-bootdisk"
+Oracle_19c_SourceVM_Image1="19c-disk1"
+Oracle_19c_SourceVM_Image2="19c-disk2"
+Oracle_19c_SourceVM_Image3="19c-disk3"
+Oracle_19c_SourceVM_Image4="19c-disk4"
+Oracle_19c_SourceVM_Image5="19c-disk5"
+Oracle_19c_SourceVM_Image6="19c-disk6"
+Oracle_19c_SourceVM_Image7="19c-disk7"
+Oracle_19c_SourceVM_Image8="19c-disk8"
+Oracle_19c_SourceVM_Image9="19c-disk9"
+
+####################################################
+# 3rd Party images used at GTS or Add-On Labs
+###################################################
+#Peer Software
+PeerMgmtServer='Windows2016-PeerMgmt-30sep20'
+PeerAgentServer='Windows2016-PeerAgent-30sep20'
+PMC="PeerMgmt"
+AGENTA="PeerAgent-Files"
+AGENTB="PeerAgent-Win"
+
+#Hycu
+HycuServer='HYCU-4.0.3-Demo'
+
+#Veeam
+VeeamServer=''
+
+##################################
+#
+# Look for JQ, AutoDC, and QCOW2 Repos in DC specific below.
+#
+##################################
+
+_prio_images_arr=(\
+ ERA-Server-build-1.2.1.qcow2 \
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \
+)
+
+QCOW2_IMAGES=(\
CentOS7.qcow2 \
Windows2016.qcow2 \
Windows2012R2.qcow2 \
Windows10-1709.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ ERA-Server-build-1.2.1.qcow2 \
+ MSSQL-2016-VM.qcow2 \
+ hycu-3.5.0-6253.qcow2 \
+ VeeamAvailability_1.0.457.vmdk \
+ move3.2.0.qcow2 \
+)
+ISO_IMAGES=(\
CentOS7.iso \
Windows2016.iso \
Windows2012R2.iso \
Windows10.iso \
- Nutanix-VirtIO-1.1.3.iso \
- 'https://s3.amazonaws.com/technology-boot-camp/ERA-Server-build-1.0.0-21edfbc990a8f3424fed146d837483cb1a00d56d.qcow2' \
- 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \
- )
- # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso
- # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2
-
- # https://pkgs.org/download/sshpass
- # https://sourceforge.net/projects/sshpass/files/sshpass/
- SSHPASS_REPOS=(\
- 'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \
- )
+ Nutanix-VirtIO-1.1.5.iso \
+ VeeamBR_9.5.4.2615.Update4.iso \
+)
# shellcheck disable=2206
- OCTET=(${PE_HOST//./ }) # zero index
- IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]}
+OCTET=(${PE_HOST//./ }) # zero index
+IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]}
DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1))
- PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2))
- DNS_SERVERS='8.8.8.8'
- NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org'
- NW1_NAME='Primary'
- NW1_VLAN=0
-# Assuming HPOC defaults
- NW1_SUBNET="${IPV4_PREFIX}.1/25"
- NW1_DHCP_START="${IPV4_PREFIX}.50"
- NW1_DHCP_END="${IPV4_PREFIX}.125"
-# https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F
-SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com'
- SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com'
- SMTP_SERVER_PORT=25
-
- AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file
- AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))"
- LDAP_PORT=389
- AUTH_FQDN='ntnxlab.local'
- AUTH_DOMAIN='NTNXLAB'
+PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2))
+FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22))
+PrismOpsServer_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 5))"
+ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 7))
+CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8))
+BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+BUCKETS_VIP="${IPV4_PREFIX}.17"
+OBJECTS_NW_START="${IPV4_PREFIX}.18"
+OBJECTS_NW_END="${IPV4_PREFIX}.21"
+DNS_SERVERS='8.8.8.8'
+NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org'
+SUBNET_MASK="255.255.255.128"
+
+# Getting the network ready
+
+NW1_NAME='Primary'
+NW1_VLAN=0
+
+# TODO: Need to make changes to the network configuration if we are running against a single Node Cluster
+# https://confluence.eng.nutanix.com:8443/pages/viewpage.action?spaceKey=SEW&title=Bootcamps%3A+Networking+Scheme
+
+case "${OCTET[3]}" in
+
+ 7 ) # We are in Partition 1
+ NW1_SUBNET="${IPV4_PREFIX}.1/26"
+ NW1_GATEWAY="${IPV4_PREFIX}.1"
+ NW1_DHCP_START="${IPV4_PREFIX}.38"
+ NW1_DHCP_END="${IPV4_PREFIX}.58"
+ NW2_NAME=''
+ NW2_VLAN=''
+ NW2_SUBNET=''
+ NW2_DHCP_START=''
+ NW2_DHCP_END=''
+ NW3_NAME=''
+ NW3_NETMASK=''
+ NW3_START=""
+ NW3_END=""
+ ERA_NETWORK="Primary"
+ ERA_Container_RF="1"
+ BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25))
+ BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26))
+ OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27))
+ OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30))
+ ;;
+
+ 71 ) # We are in Partition 2
+ NW1_SUBNET="${IPV4_PREFIX}.65/26"
+ NW1_GATEWAY="${IPV4_PREFIX}.65"
+ NW1_DHCP_START="${IPV4_PREFIX}.102"
+ NW1_DHCP_END="${IPV4_PREFIX}.122"
+ NW2_NAME=''
+ NW2_VLAN=''
+ NW2_SUBNET=''
+ NW2_DHCP_START=''
+ NW2_DHCP_END=''
+ NW3_NAME=''
+ NW3_NETMASK=''
+ NW3_START=""
+ NW3_END=""
+ ERA_NETWORK="Primary"
+ ERA_Container_RF="1"
+ BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25))
+ BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26))
+ OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27))
+ OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30))
+ ;;
+
+ 135 ) # We are in Partition 3
+ NW1_SUBNET="${IPV4_PREFIX}.129/26"
+ NW1_GATEWAY="${IPV4_PREFIX}.129"
+ NW1_DHCP_START="${IPV4_PREFIX}.166"
+ NW1_DHCP_END="${IPV4_PREFIX}.186"
+ NW2_NAME=''
+ NW2_VLAN=''
+ NW2_SUBNET=''
+ NW2_DHCP_START=''
+ NW2_DHCP_END=''
+ NW3_NAME=''
+ NW3_NETMASK=''
+ NW3_START=""
+ NW3_END=""
+ ERA_NETWORK="Primary"
+ ERA_Container_RF="1"
+ BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25))
+ BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26))
+ OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27))
+ OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30))
+ ;;
+
+ 199 ) # We are in Partition 4
+ NW1_SUBNET="${IPV4_PREFIX}.193/26"
+ NW1_GATEWAY="${IPV4_PREFIX}.193"
+ NW1_DHCP_START="${IPV4_PREFIX}.230"
+ NW1_DHCP_END="${IPV4_PREFIX}.250"
+ NW2_NAME=''
+ NW2_VLAN=''
+ NW2_SUBNET=''
+ NW2_DHCP_START=''
+ NW2_DHCP_END=''
+ NW3_NAME=''
+ NW3_NETMASK=''
+ NW3_START=""
+ NW3_END=""
+ ERA_NETWORK="Primary"
+ ERA_Container_RF="1"
+ BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25))
+ BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26))
+ OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27))
+ OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30))
+ ;;
+
+
+ * ) # For normal clusters
+ NW1_SUBNET="${IPV4_PREFIX}.1/25"
+ NW1_GATEWAY="${IPV4_PREFIX}.1"
+ NW1_DHCP_START="${IPV4_PREFIX}.50"
+ NW1_DHCP_END="${IPV4_PREFIX}.125"
+
+ NW2_NAME='Secondary'
+ NW2_VLAN=$((OCTET[2]*10+1))
+ NW2_SUBNET="${IPV4_PREFIX}.129/25"
+ NW2_GATEWAY="${IPV4_PREFIX}.129"
+ NW2_DHCP_START="${IPV4_PREFIX}.132"
+ NW2_DHCP_END="${IPV4_PREFIX}.253"
+
+ NW3_NAME='EraManaged'
+ NW3_NETMASK='255.255.255.128'
+ NW3_START="${IPV4_PREFIX}.210"
+ NW3_END="${IPV4_PREFIX}.253"
+ ;;
+
+esac
+
+# Stuff needed for object_store
+#OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects'
+VLAN=${OCTET[2]}
+NETWORK="${OCTET[0]}.${OCTET[1]}"
+
+SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com'
+SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com'
+SMTP_SERVER_PORT=25
+
+AUTH_SERVER='AutoAD' # default; TODO:180 refactor AUTH_SERVER choice to input file
+AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 4))"
+LDAP_PORT=389
+AUTH_FQDN='ntnxlab.local'
+AUTH_DOMAIN='NTNXLAB'
AUTH_ADMIN_USER='administrator@'${AUTH_FQDN}
AUTH_ADMIN_PASS='nutanix/4u'
AUTH_ADMIN_GROUP='SSP Admins'
- AUTODC_REPOS=(\
- 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \
- 'https://s3.amazonaws.com/get-ahv-images/AutoDC-04282018.qcow2' \
- 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \
- # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \
- 'http://10.59.103.143:8000/autodc-2.0.qcow2' \
-)
-# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42)
+
+# For Nutanix HPOC/Marketing clusters (RTP 10.55, PHX 10.42, PHX 10.38)
# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema
case "${OCTET[0]}.${OCTET[1]}" in
- 10.20 ) #Marketing: us-west = SV
- DNS_SERVERS='10.21.253.10'
- ;;
- 10.21 ) #HPOC: us-west = SV
- if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then
- log 'GPU cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters'
- exit 0
- fi
-
- # backup cluster; override relative IP addressing
- if (( ${OCTET[2]} == 249 )); then
- AUTH_HOST="${IPV4_PREFIX}.118"
- PC_HOST="${IPV4_PREFIX}.119"
- fi
- DNS_SERVERS='10.21.253.10,10.21.253.11'
- NW2_NAME='Secondary'
- NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 ))
- NW2_SUBNET="${IPV4_PREFIX}.129/25"
- NW2_DHCP_START="${IPV4_PREFIX}.132"
- NW2_DHCP_END="${IPV4_PREFIX}.253"
- ;;
10.55 ) # HPOC us-east = DUR
- DNS_SERVERS='10.21.253.11'
- NW2_NAME='Secondary'
- NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 ))
- NW2_SUBNET="${IPV4_PREFIX}.129/25"
- NW2_DHCP_START="${IPV4_PREFIX}.132"
- NW2_DHCP_END="${IPV4_PREFIX}.253"
+ PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.11.json'
+ PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.11.tar'
+ PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.9.json'
+ PC_CURRENT_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar'
+ PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json'
+ PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar'
+ FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json'
+ FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2'
+ FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json'
+ FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2'
+ JQ_REPOS=(\
+ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \
+ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \
+ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \
+ )
+ SSHPASS_REPOS=(\
+ 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \
+ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \
+ )
+ QCOW2_REPOS=(\
+ 'http://10.55.251.38/workshop_staging/' \
+ 'https://s3.amazonaws.com/get-ahv-images/' \
+ )
+ AUTODC_REPOS=(\
+ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
+ )
+ AUTOAD_REPOS=(\
+ 'http://10.55.251.38/workshop_staging/AutoAD.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \
+ )
+ PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip'
+ BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/'
+ DNS_SERVERS='10.55.251.10,10.55.251.11'
+ ERA_PRIMARY_DNS='10.55.251.10'
+ ERA_SECONDARY_DNS='10.55.251.11'
+ OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects'
;;
10.42 ) # HPOC us-west = PHX
- DNS_SERVERS='10.42.196.10'
- NW2_NAME='Secondary'
- NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 ))
- NW2_SUBNET="${IPV4_PREFIX}.129/25"
- NW2_DHCP_START="${IPV4_PREFIX}.132"
- NW2_DHCP_END="${IPV4_PREFIX}.253"
- QCOW2_IMAGES=(\
- CentOS7.qcow2 \
- Windows2016.qcow2 \
- Windows2012R2.qcow2 \
- Windows10-1709.qcow2 \
- ToolsVM.qcow2 \
- CentOS7.iso \
- Windows2012R2.iso \
- SQLServer2014SP3.iso \
- Nutanix-VirtIO-1.1.3.iso \
- acs-centos7.qcow2 \
- acs-ubuntu1604.qcow2 \
- xtract-vm-2.0.3.qcow2 \
- ERA-Server-build-1.0.1.qcow2 \
- sherlock-k8s-base-image_320.qcow2 \
- hycu-3.5.0-6138.qcow2 \
- VeeamAvailability_1.0.457.vmdk \
- VeeamBR_9.5.4.2615.Update4.iso \
+ PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.11.json'
+ PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.11.tar'
+ PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json'
+ PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar'
+ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json'
+ PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar'
+ FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json'
+ FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2'
+ FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json'
+ FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2'
+ JQ_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \
+ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \
+ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \
+ )
+ SSHPASS_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \
+ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \
+ )
+ QCOW2_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/' \
+ 'https://s3.amazonaws.com/get-ahv-images/' \
+ )
+ AUTODC_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
+ )
+ AUTOAD_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \
+ )
+ PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip'
+ BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/'
+ DNS_SERVERS='10.42.196.10,10.42.194.10'
+ ERA_PRIMARY_DNS='10.42.196.10'
+ ERA_SECONDARY_DNS='10.42.194.10'
+ OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects'
+ ;;
+ 10.38 ) # HPOC us-west = PHX 1-Node Clusters
+ PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.11.json'
+ PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.11.tar'
+ PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json'
+ PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar'
+ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json'
+ PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar'
+ FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json'
+ FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2'
+ FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json'
+ FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2'
+ JQ_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \
+ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \
+ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \
+ )
+ SSHPASS_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \
+ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \
)
+ QCOW2_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/' \
+ 'https://s3.amazonaws.com/get-ahv-images/' \
+ )
+ AUTODC_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
+ )
+ AUTOAD_REPOS=(\
+ 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \
+ )
+ PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip'
+ BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/'
+ DNS_SERVERS="10.42.196.10,10.42.194.10"
+ ERA_PRIMARY_DNS='10.42.196.10'
+ ERA_SECONDARY_DNS='10.42.194.10'
+ OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects'
+
+ # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary
+ if [[ ${OCTET[2]} -gt 169 ]]; then
+ NW2_VLAN=$((OCTET[2]*10+3))
+ fi
+ ;;
+  10.136 ) # HPOC India = BLR (Bangalore)
+ PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.11.json'
+ PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.11.tar'
+ PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.9.json'
+ PC_CURRENT_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar'
+ PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json'
+ PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar'
+ FILES_METAURL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json'
+ FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2'
+ FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json'
+ FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2'
+ JQ_REPOS=(\
+ 'http://10.136.239.13/workshop_staging/jq-linux64.dms' \
+ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \
+ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \
+ )
+ SSHPASS_REPOS=(\
+ 'http://10.136.239.13/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \
+ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \
+ )
+ QCOW2_REPOS=(\
+ 'http://10.136.239.13/workshop_staging/' \
+ 'https://s3.amazonaws.com/get-ahv-images/' \
+ )
+ AUTODC_REPOS=(\
+ 'http://10.136.239.13/workshop_staging/AutoDC2.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
+ )
+ AUTOAD_REPOS=(\
+ 'http://10.136.239.13/workshop_staging/AutoAD.qcow2' \
+ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \
+ )
+ PC_DATA='http://10.136.239.13/workshop_staging/seedPC.zip'
+    BLUEPRINT_URL='http://10.136.239.13/workshop_staging/CalmBlueprints/'
+ DNS_SERVERS='10.136.239.10,10.136.239.11'
+ ERA_PRIMARY_DNS='10.136.239.10'
+ ERA_SECONDARY_DNS='10.136.239.11'
+ OBJECTS_OFFLINE_REPO='http://10.136.239.13/workshop_staging/objects'
;;
10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR
- DNS_SERVERS='10.132.71.40'
- NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17"
- NW1_DHCP_START="${IPV4_PREFIX}.100"
- NW1_DHCP_END="${IPV4_PREFIX}.250"
- # PC deploy file local override, TODO:30 make an PC_URL array and eliminate
- PC_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar
- PC_DEV_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json
- PC_STABLE_METAURL=${PC_DEV_METAURL}
-
- QCOW2_IMAGES=(\
+ JQ_REPOS=(\
+ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \
+ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \
+ )
+ QCOW2_REPOS=(\
+        'https://s3.amazonaws.com/get-ahv-images/' \
+ )
+ AUTODC_REPOS=(\
+ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
+ )
+
+ DNS_SERVERS='10.132.71.40'
+ NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17"
+ NW1_DHCP_START="${IPV4_PREFIX}.100"
+ NW1_DHCP_END="${IPV4_PREFIX}.250"
+ # PC deploy file local override, TODO:30 make an PC_URL array and eliminate
+ PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar
+ PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json
+ PC_STABLE_METAURL=${PC_CURRENT_METAURL}
+
+ QCOW2_IMAGES=(\
Centos7-Base.qcow2 \
Centos7-Update.qcow2 \
Windows2012R2.qcow2 \
@@ -182,18 +490,6 @@ case "${OCTET[0]}.${OCTET[1]}" in
;;
esac
-HTTP_CACHE_HOST='localhost'
-HTTP_CACHE_PORT=8181
-
- ATTEMPTS=40
- SLEEP=60 # pause (in seconds) between ATTEMPTS
-
- CURL_OPTS='--insecure --silent --show-error' # --verbose'
-CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null"
-CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}"
- SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null'
- SSH_OPTS+=' -q' # -v'
-
# Find operating system and set dependencies
if [[ -e /etc/lsb-release ]]; then
# Linux Standards Base
@@ -205,7 +501,10 @@ elif [[ $(uname -s) == 'Darwin' ]]; then
OS_NAME='Darwin'
fi
-WC_ARG='--lines'
+WC_ARG='-l'
if [[ ${OS_NAME} == 'Darwin' ]]; then
WC_ARG='-l'
fi
+if [[ ${OS_NAME} == 'alpine' ]]; then
+ WC_ARG='-l'
+fi
diff --git a/scripts/images_only.sh b/scripts/images_only.sh
new file mode 100755
index 0000000..45c3b42
--- /dev/null
+++ b/scripts/images_only.sh
@@ -0,0 +1,102 @@
+#!/usr/bin/env bash
+#####################################################
+# Images Only distribution script                   #
+# ------------------------------------------------- #
+# Willem Essenstam - 0.1 - 15 March 2020 #
+# Initial version #
+#####################################################
+
+#__main()__________
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. we-lib.common.sh
+. global.vars.sh
+
+# Try to figure out what workshop we have run
+# Which log files do we have?
+log_files=$(ls *.log)
+
+images_arr=("CentOS7.qcow2" "Windows2012R2.qcow2" "Windows10-1709.qcow2" "WinToolsVM.qcow2" "Linux_ToolsVM.qcow2" \
+ "ERA-Server-build-1.2.1.qcow2" "MSSQL-2016-VM.qcow2" "hycu-3.5.0-6253.qcow2" "VeeamAvailability_1.0.457.vmdk" "move3.2.0.qcow2" \
+ "AutoXD.qcow2" "CentOS7.iso" "Windows2016.iso" "Windows2012R2.iso" "Windows10.iso" "Nutanix-VirtIO-1.1.5.iso" "SQLServer2014SP3.iso" \
+ "XenApp_and_XenDesktop_7_18.iso" "VeeamBR_9.5.4.2615.Update4.iso" "Windows2016.qcow2" "ERA-Server-build-1.2.1.qcow2" "Win10v1903.qcow2" \
+ "Linux_ToolsVM.qcow2" "move-3.4.1.qcow2" "GTSOracle/19c-april/19c-bootdisk.qcow2" "GTSOracle/19c-april/19c-disk1.qcow2" "GTSOracle/19c-april/19c-disk2.qcow2" \
+ "GTSOracle/19c-april/19c-disk3.qcow2" "GTSOracle/19c-april/19c-disk4.qcow2" "GTSOracle/19c-april/19c-disk5.qcow2" "GTSOracle/19c-april/19c-disk6.qcow2" \
+ "GTSOracle/19c-april/19c-disk7.qcow2" "GTSOracle/19c-april/19c-disk8.qcow2" "GTSOracle/19c-april/19c-disk9.qcow2" "HYCU/Mine/HYCU-4.0.3-Demo.qcow2" \
+ "veeam/VeeamAHVProxy2.0.404.qcow2" "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" "FrameCCA-2.1.6.iso" "FrameCCA-2.1.0.iso" "FrameGuestAgentInstaller_1.0.2.2_7930.iso" \
+ "veeam/VBR_10.0.0.4442.iso")
+
+if [[ $log_files == *"snc_bootcamp"* ]]; then
+ # We have found snc_bootcamp has been run
+ workshop="snc_bootcamp"
+ send_img_array=(${images_arr[@]:0:20})
+elif [[ $log_files == *"basic_bootcamp"* ]]; then
+ # We have found basic_bootcamp has been run
+ workshop="basic_bootcamp"
+ send_img_array=(${images_arr[13]} ${images_arr[0]} ${images_arr[15]} ${images_arr[14]})
+elif [[ $log_files == *"privatecloud_bootcamp"* ]]; then
+ # We have found privatecloud_bootcamp has been run
+ workshop="privatecloud_bootcamp"
+elif [[ $log_files == *"era_bootcamp"* ]]; then
+ # We have found era_bootcamp has been run
+ workshop="era_bootcamp"
+elif [[ $log_files == *"files_bootcamp"* ]]; then
+ # We have found files_bootcamp has been run
+ workshop="files_bootcamp"
+elif [[ $log_files == *"calm_bootcamp"* ]]; then
+ # We have found calm_bootcamp has been run
+ workshop="calm_bootcamp"
+elif [[ $log_files == *"citrix_bootcam"* ]]; then
+ # We have found citrix_bootcamp has been run
+ workshop="citrix_bootcamp"
+elif [[ $log_files == *"frame_bootcamp"* ]]; then
+ # We have found frame_bootcamp has been run
+ workshop="frame_bootcamp"
+elif [[ $log_files == *"bootcamp"* ]]; then
+    # We have found that the bootcamp has been run
+ workshop="bootcamp"
+ send_img_array=(${images_arr[@]:0:20})
+elif [[ $log_files == *"ts2020"* ]]; then
+    # We have found that the ts2020 has been run
+ workshop="ts2020"
+ send_img_array=(${images_arr[0]} ${images_arr[@]:20:41})
+fi
+
+# Make the right images avail for the different workshops based on the one we found from the log file
+case $workshop in
+ "snc_bootcamp")
+ echo "Found the SNC_Bootcamp has run."
+ ;;
+ "basic_bootcamp")
+ echo "basic_bootcamp found"
+ ;;
+ "privatecloud_bootcamp")
+ echo "privatecloud_bootcamp found"
+ ;;
+ "era_bootcamp")
+ echo "Era_bootcamp found"
+ ;;
+ "files_bootcamp")
+ echo "files_bootcamp found"
+ ;;
+ "calm_bootcamp")
+ echo "calm_bootcamp found"
+ ;;
+ "citrix_bootcamp")
+ echo "citrix_bootcamp found"
+ ;;
+ "frame_bootcamp")
+ echo "frame_bootcamp found"
+ ;;
+ "bootcamp")
+ echo "bootcamp found"
+ ;;
+ esac
+
+
+
+
+
+
+
+
diff --git a/scripts/karbon_bootcamp.sh b/scripts/karbon_bootcamp.sh
new file mode 100755
index 0000000..a9610fd
--- /dev/null
+++ b/scripts/karbon_bootcamp.sh
@@ -0,0 +1,145 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+        && log "Remote asynchronous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && pc_project \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/leap_addon_bootcamp.sh b/scripts/leap_addon_bootcamp.sh
new file mode 100755
index 0000000..ae50ab7
--- /dev/null
+++ b/scripts/leap_addon_bootcamp.sh
@@ -0,0 +1,145 @@
+#!/usr/bin/env bash
+ #-x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export _external_nw_name="${1}"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+        && log "Remote asynchronous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && pc_project \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh
index c0393c8..6fe35d7 100755
--- a/scripts/lib.common.sh
+++ b/scripts/lib.common.sh
@@ -1,6 +1,16 @@
#!/usr/bin/env bash
# dependencies: dig
+##################################################################################
+# List of date, who and change made to the file
+# --------------------------------------------------------------------------------
+# 12-04-2019 - Willem Essenstam
+# Changed the run_once function so it checks not on lines in the log file but
+# on if the PC is configured by trying to log in using the set password
+##################################################################################
+
+##################################################################################
+
function args_required() {
local _argument
local _error=88
@@ -23,6 +33,8 @@ function args_required() {
fi
}
+##################################################################################
+
function begin() {
local _release
@@ -33,6 +45,8 @@ function begin() {
log "$(basename ${0})${_release} start._____________________"
}
+##################################################################################
+
function dependencies {
local _argument
local _error
@@ -136,6 +150,8 @@ function dependencies {
esac
}
+##################################################################################
+
function dns_check() {
local _dns
local _error
@@ -158,6 +174,8 @@ function dns_check() {
fi
}
+##################################################################################
+
function download() {
local _attempts=5
local _error=0
@@ -200,6 +218,8 @@ function download() {
done
}
+##################################################################################
+
function fileserver() {
local _action=${1} # REQUIRED
local _host=${2} # REQUIRED, TODO: default to PE?
@@ -231,11 +251,6 @@ function fileserver() {
remote_exec 'ssh' ${_host} \
"python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}"
-
- # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \
- # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2
- #AutoDC2: pending
- #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable
popd || exit
;;
'stop' )
@@ -245,28 +260,117 @@ function fileserver() {
esac
}
+##################################################################################
+
+
function finish() {
log "${0} ran for ${SECONDS} seconds._____________________"
echo
}
+##################################################################################
+# Images install
+##################################################################################
+
function images() {
# https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html
- local _cli='acli'
+ local _cli='nuclei'
local _command
local _http_body
local _image
local _image_type
local _name
- local _source='source_url'
+ local _source='source_uri'
local _test
- which "$_cli"
- if (( $? > 0 )); then
- _cli='nuclei'
- _source='source_uri'
+#######################################
+# For doing ISO IMAGES
+#######################################
+
+for _image in "${ISO_IMAGES[@]}" ; do
+
+ # log "DEBUG: ${_image} image.create..."
+ if [[ ${_cli} == 'nuclei' ]]; then
+ _test=$(source /etc/profile.d/nutanix_env.sh \
+ && ${_cli} image.list 2>&1 \
+ | grep -i complete \
+ | grep "${_image}")
+ #else
+ # _test=$(source /etc/profile.d/nutanix_env.sh \
+ # && ${_cli} image.list 2>&1 \
+ # | grep "${_image}")
fi
+ if [[ ! -z ${_test} ]]; then
+ log "Skip: ${_image} already complete on cluster."
+ else
+ _command=''
+ _name="${_image}"
+
+ if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then
+ log 'Bypass multiple repo source checks...'
+ SOURCE_URL="${_image}"
+ else
+ repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]!
+ fi
+
+ if [[ -z "${SOURCE_URL}" ]]; then
+ _error=30
+ log "Warning ${_error}: didn't find any sources for ${_image}, continuing..."
+ # exit ${_error}
+ fi
+
+ # TODO:0 TOFIX: acs-centos ugly override for today...
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then
+ _name=acs-centos
+ fi
+
+ if [[ ${_cli} == 'acli' ]]; then
+ _image_type='kIsoImage'
+ _command+=" ${_name} annotation=${_image} image_type=${_image_type} \
+ container=${STORAGE_IMAGES} architecture=kX86_64 wait=true"
+ else
+ _command+=" name=${_name} description=\"${_image}\""
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ _http_body=$(cat <&1 &
+ if (( $? != 0 )); then
+ log "Warning: Image submission: $?. Continuing..."
+ #exit 10
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ log "NOTE: image.uuid = RUNNING, but takes a while to show up in:"
+ log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State"
+ fi
+ fi
+ fi
+
+done
+
+#######################################
+# For doing Disk IMAGES
+#######################################
+
for _image in "${QCOW2_IMAGES[@]}" ; do
# log "DEBUG: ${_image} image.create..."
@@ -275,10 +379,233 @@ function images() {
&& ${_cli} image.list 2>&1 \
| grep -i complete \
| grep "${_image}")
+
+ fi
+
+ if [[ ! -z ${_test} ]]; then
+ log "Skip: ${_image} already complete on cluster."
+ else
+ _command=''
+ _name="${_image}"
+
+ if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then
+ log 'Bypass multiple repo source checks...'
+ SOURCE_URL="${_image}"
+ else
+ repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]!
+ fi
+
+ if [[ -z "${SOURCE_URL}" ]]; then
+ _error=30
+ log "Warning ${_error}: didn't find any sources for ${_image}, continuing..."
+ # exit ${_error}
+ fi
+
+ # TODO:0 TOFIX: acs-centos ugly override for today...
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then
+ _name=acs-centos
+ fi
+
+ if [[ ${_cli} == 'acli' ]]; then
+ _image_type='kDiskImage'
+ _command+=" ${_name} annotation=${_image} image_type=${_image_type} \
+ container=${STORAGE_IMAGES} architecture=kX86_64 wait=true"
+ else
+ _command+=" name=${_name} description=\"${_image}\""
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ _http_body=$(cat <&1 &
+ if (( $? != 0 )); then
+ log "Warning: Image submission: $?. Continuing..."
+ #exit 10
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ log "NOTE: image.uuid = RUNNING, but takes a while to show up in:"
+ log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State"
+ fi
+ fi
+ fi
+
+ done
+
+}
+
+###############################################################################################
+# Priority Images that need to be uploaded and controlled before we move to the mass upload
+###############################################################################################
+
+function priority_images(){
+
+
+ local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure "
+
+ # Set the correct High Perf FileServer
+ #if [[ ${OCTET[1]} == '42' ]] || [[ ${OCTET[1]} == '38' ]]; then
+ # SOURCE_URL="10.42.38.10/images"
+ #else
+ # SOURCE_URL="10.55.76.10"
+ #fi
+
+ log "Grabbing the priority files from the ${QCOW2_REPOS} fileserver..."
+
+ for _image in "${_prio_images_arr[@]}"; do
+ if [[ ${_image} == *"iso"* ]]; then
+ DISK_TYPE="ISO_IMAGE"
else
+ DISK_TYPE="DISK_IMAGE"
+ fi
+ _http_body=$(cat <&1 \
+ | grep -i complete \
+ | grep "${_image}")
+ #else
+ # _test=$(source /etc/profile.d/nutanix_env.sh \
+ # && ${_cli} image.list 2>&1 \
+ # | grep "${_image}")
+ fi
+
+ if [[ ! -z ${_test} ]]; then
+ log "Skip: ${_image} already complete on cluster."
+ else
+ _command=''
+ _name="${_image}"
+
+ if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then
+ log 'Bypass multiple repo source checks...'
+ SOURCE_URL="${_image}"
+ else
+ repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]!
+ fi
+
+ if [[ -z "${SOURCE_URL}" ]]; then
+ _error=30
+ log "Warning ${_error}: didn't find any sources for ${_image}, continuing..."
+ # exit ${_error}
+ fi
+
+ # TODO:0 TOFIX: acs-centos ugly override for today...
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then
+ _name=acs-centos
+ fi
+
+ if [[ ${_cli} == 'acli' ]]; then
+ _image_type='kIsoImage'
+ _command+=" ${_name} annotation=${_image} image_type=${_image_type} \
+ container=${STORAGE_ERA} architecture=kX86_64 wait=true"
+ else
+ _command+=" name=${_name} description=\"${_image}\""
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ _http_body=$(cat <&1 &
+ if (( $? != 0 )); then
+ log "Warning: Image submission: $?. Continuing..."
+ #exit 10
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ log "NOTE: image.uuid = RUNNING, but takes a while to show up in:"
+ log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State"
+ fi
+ fi
+ fi
+
+done
+
+#######################################
+# For doing Disk IMAGES
+#######################################
+
+ for _image in "${QCOW2_IMAGES[@]}" ; do
+
+ # log "DEBUG: ${_image} image.create..."
+ if [[ ${_cli} == 'nuclei' ]]; then
_test=$(source /etc/profile.d/nutanix_env.sh \
&& ${_cli} image.list 2>&1 \
+ | grep -i complete \
| grep "${_image}")
+
fi
if [[ ! -z ${_test} ]]; then
@@ -287,7 +614,7 @@ function images() {
_command=''
_name="${_image}"
- if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then
+ if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then
log 'Bypass multiple repo source checks...'
SOURCE_URL="${_image}"
else
@@ -301,18 +628,14 @@ function images() {
fi
# TODO:0 TOFIX: acs-centos ugly override for today...
- if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then
_name=acs-centos
fi
if [[ ${_cli} == 'acli' ]]; then
_image_type='kDiskImage'
- if (( $(echo "${SOURCE_URL}" | grep -i -e 'iso$' | wc --lines ) > 0 )); then
- _image_type='kIsoImage'
- fi
-
_command+=" ${_name} annotation=${_image} image_type=${_image_type} \
- container=${STORAGE_IMAGES} architecture=kX86_64 wait=true"
+ container=${STORAGE_ERA} architecture=kX86_64 wait=true"
else
_command+=" name=${_name} description=\"${_image}\""
fi
@@ -350,8 +673,11 @@ EOF
fi
done
+
}
+##################################################################################
+
function log() {
local _caller
@@ -359,6 +685,9 @@ function log() {
echo "$(date '+%Y-%m-%d %H:%M:%S')|$$|${_caller}|${1}"
}
+##################################################################################
+
+
function ntnx_cmd() {
local _attempts=25
local _error=10
@@ -372,7 +701,7 @@ function ntnx_cmd() {
_hold=$(source /etc/profile ; nuclei cluster.list 2>&1)
_status=$?
- if (( $(echo "${_hold}" | grep websocket | wc --lines) > 0 )); then
+ if (( $(echo "${_hold}" | grep websocket | wc -l) > 0 )); then
log "Warning: Zookeeper isn't up yet."
elif (( ${_status} > 0 )); then
log "${_status} = ${_hold}, uh oh!"
@@ -391,6 +720,9 @@ function ntnx_cmd() {
done
}
+##################################################################################
+
+
function ntnx_download() {
local _checksum
local _error
@@ -404,23 +736,28 @@ function ntnx_download() {
if [[ "${PC_VERSION}" == "${PC_DEV_VERSION}" ]]; then
_meta_url="${PC_DEV_METAURL}"
+ _source_url="${PC_DEV_URL}"
+ elif [[ "${PC_VERSION}" == "${PC_CURRENT_VERSION}" ]]; then
+ _meta_url="${PC_CURRENT_METAURL}"
+ _source_url="${PC_CURRENT_URL}"
else
_meta_url="${PC_STABLE_METAURL}"
+ _source_url="${PC_STABLE_URL}"
fi
- if [[ -z ${_meta_url} ]]; then
- _error=22
- log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!"
- log 'Sync the following to global.var.sh...'
- log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails'
- log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side"
- log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.'
- exit ${_error}
- fi
-
- if [[ ! -z ${PC_URL} ]]; then
- _source_url="${PC_URL}"
- fi
+ #if [[ -z ${_meta_url} ]]; then
+ # _error=22
+ # log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!"
+ # log 'Sync the following to global.var.sh...'
+ # log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails'
+ # log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side"
+ # log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.'
+ # exit ${_error}
+ #fi
+
+ #if [[ ! -z ${PC_URL} ]]; then
+ # _source_url="${PC_URL}"
+ #fi
;;
'NOS' | 'nos' | 'AOS' | 'aos')
# TODO:70 nos is a prototype
@@ -458,6 +795,24 @@ function ntnx_download() {
_source_url="${FILES_URL}"
fi
;;
+ FILE_ANALYTICS | file_analytics )
+ args_required 'FILE_ANALYTICS_VERSION'
+ _meta_url="${FILE_ANALYTICS_METAURL}"
+
+ if [[ -z ${_meta_url} ]]; then
+ _error=22
+ log "Error ${_error}: unsupported FILES_VERSION=${FILE_ANALYTICS_VERSION}!"
+ log 'Sync the following to global.var.sh...'
+ log 'Browse to https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA'
+ log " - Find ${FILE_ANALYTICS_VERSION} in the Additional Releases section on the lower right side"
+ log ' - Provide the metadata URL option to this function, both case stanzas.'
+ exit ${_error}
+ fi
+
+ if [[ ! -z ${FILE_ANALYTICS_URL} ]]; then
+ _source_url="${FILE_ANALYTICS_URL}"
+ fi
+ ;;
* )
_error=88
log "Error ${_error}:: couldn't determine software-type ${_ncli_softwaretype}!"
@@ -477,7 +832,7 @@ function ntnx_download() {
_source_url=$(cat ${_meta_url##*/} | jq -r .download_url_cdn)
fi
- if (( $(pgrep curl | wc --lines | tr -d '[:space:]') > 0 )); then
+ if (( $(pgrep curl | wc -l | tr -d '[:space:]') > 0 )); then
pkill curl
fi
log "Retrieving Nutanix ${_ncli_softwaretype} bits..."
@@ -498,16 +853,19 @@ function ntnx_download() {
file-path="$(pwd)/${_source_url##*/}" \
meta-file-path="$(pwd)/${_meta_url##*/}"
- if (( $? == 0 )) ; then
- log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..."
- rm -f ${_source_url##*/} ${_meta_url##*/}
- else
- _error=3
- log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}."
- exit ${_error}
- fi
+ #if (( $? == 0 )) ; then
+ # log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..."
+ # rm -fr ${_source_url##*/} ${_meta_url##*/}
+ #else
+ # _error=3
+ # log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}."
+ # exit ${_error}
+ #fi
}
+##################################################################################
+
+
function pe_determine() {
# ${1} REQUIRED: run on 'PE' or 'PC'
local _error
@@ -550,6 +908,7 @@ function pe_determine() {
PE | pe )
CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name)
PE_HOST=$(echo ${_hold} | jq -r .data.clusterExternalIPAddress)
+ PE_CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name)
;;
PC | Pc | pc )
CLUSTER_NAME=$(echo ${_hold} | jq -r .name)
@@ -562,6 +921,9 @@ function pe_determine() {
fi
}
+##################################################################################
+
+
function prism_check {
# Argument ${1} = REQUIRED: PE or PC
# Argument ${2} = OPTIONAL: number of attempts
@@ -627,6 +989,9 @@ function prism_check {
done
}
+##################################################################################
+
+
function remote_exec() {
# Argument ${1} = REQUIRED: ssh or scp
# Argument ${2} = REQUIRED: PE, PC, or AUTH_SERVER
@@ -716,6 +1081,9 @@ function remote_exec() {
done
}
+##################################################################################
+
+
function repo_source() {
# https://stackoverflow.com/questions/1063347/passing-arrays-as-parameters-in-bash#4017175
local _candidates=("${!1}") # REQUIRED
@@ -735,31 +1103,32 @@ function repo_source() {
if [[ -z ${_package} ]]; then
_suffix=${_candidates[0]##*/}
- if (( $(echo "${_suffix}" | grep . | wc --lines) > 0)); then
+ if (( $(echo "${_suffix}" | grep . | wc -l) > 0)); then
log "Convenience: omitted package argument, added package=${_package}"
_package="${_suffix}"
fi
fi
# Prepend your local HTTP cache...
- _candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" )
+ #_candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" )
while (( ${_index} < ${#_candidates[@]} ))
do
+ echo ${_candidates[${_index}]}
unset SOURCE_URL
# log "DEBUG: ${_index} ${_candidates[${_index}]}, OPTIONAL: _package=${_package}"
_url=${_candidates[${_index}]}
if [[ -z ${_package} ]]; then
- if (( $(echo "${_url}" | grep '/$' | wc --lines) == 0 )); then
+ if (( $(echo "${_url}" | grep '/$' | wc -l) == 0 )); then
log "error ${_error}: ${_url} doesn't end in trailing slash, please correct."
exit ${_error}
fi
- elif (( $(echo "${_url}" | grep '/$' | wc --lines) == 1 )); then
+ elif (( $(echo "${_url}" | grep '/$' | wc -l) == 1 )); then
_url+="${_package}"
fi
- if (( $(echo "${_url}" | grep '^nfs' | wc --lines) == 1 )); then
+ if (( $(echo "${_url}" | grep '^nfs' | wc -l) == 1 )); then
log "warning: TODO: cURL can't test nfs URLs...assuming a pass!"
export SOURCE_URL="${_url}"
break
@@ -783,6 +1152,22 @@ function repo_source() {
fi
}
+##################################################################################
+
+
+function run_once() {
+ # Try to login to the PC UI using an API and use the NEW to be password so we can check if PC config has run....
+ _Configured_PC=$(curl -X POST https://${PC_HOST}:9440/api/nutanix/v3/clusters/list --user ${PRISM_ADMIN}:${PE_PASSWORD} -H 'Content-Type: application/json' -d '{ "kind": "cluster" }' --insecure --silent | grep "AUTHENTICATION_REQUIRED" | wc -l)
+ if [[ $_Configured_PC -lt 1 ]]; then
+ _error=2
+ log "Warning ${_error}: ${PC_LAUNCH} already ran and configured PRISM Central, exit!"
+ exit ${_error}
+ fi
+}
+
+##################################################################################
+
+
function ssh_pubkey() {
local _dir
local _directories=(\
@@ -815,3 +1200,38 @@ function ssh_pubkey() {
log "IDEMPOTENCY: found pubkey ${_name}"
fi
}
+
+###############################################################################################################################################################################
+# Routine to be run/loop till yes we are ok.
+###############################################################################################################################################################################
+# Need to grab the percentage_complete value including the status to make disissions
+
+# TODO: Also look at the status!!
+
+function loop(){
+
+ local _attempts=45
+ local _loops=0
+ local _sleep=60
+ local _url_progress='https://localhost:9440/api/nutanix/v3/tasks'
+ local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure "
+
+ echo ${_task_id}
+ # What is the progress of the taskid??
+ while true; do
+ (( _loops++ ))
+ # Get the progress of the task
+ _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}/${_task_id} | jq '.percentage_complete' 2>nul | tr -d \")
+
+ if (( ${_progress} == 100 )); then
+ log "The step has been succesfuly run"
+ break;
+ elif (( ${_loops} > ${_attempts} )); then
+ log "Warning ${_error} @${1}: Giving up after ${_loop} tries."
+ return ${_error}
+ else
+ log "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds"
+ sleep ${_sleep}
+ fi
+ done
+}
diff --git a/scripts/lib.pc.org.sh b/scripts/lib.pc.org.sh
new file mode 100755
index 0000000..8a0ebaa
--- /dev/null
+++ b/scripts/lib.pc.org.sh
@@ -0,0 +1,531 @@
+#!/usr/bin/env bash
+# -x
+# Dependencies: curl, ncli, nuclei, jq
+
+function calm_update() {
+ local _attempts=12
+ local _calm_bin=/usr/local/nutanix/epsilon
+ local _container
+ local _error=19
+ local _loop=0
+ local _sleep=10
+ local _url=http://${AUTH_HOST}:8080
+
+ if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then
+ log "Bypassing download of updated containers."
+ else
+ dependencies 'install' 'sshpass' || exit 13
+ remote_exec 'ssh' 'AUTH_SERVER' \
+ 'if [[ ! -e nucalm.tar ]]; then smbclient -I 10.21.249.12 \\\\pocfs\\images --user ${1} --command "prompt ; cd /Calm-EA/pc-'${PC_VERSION}'/ ; mget *tar"; echo; ls -lH *tar ; fi' \
+ 'OPTIONAL'
+
+ while true ; do
+ (( _loop++ ))
+ _test=$(curl ${CURL_HTTP_OPTS} ${_url} \
+ | tr -d \") # wonderful addition of "" around HTTP status code by cURL
+
+ if (( ${_test} == 200 )); then
+ log "Success reaching ${_url}"
+ break;
+ elif (( ${_loop} > ${_attempts} )); then
+ log "Warning ${_error} @${1}: Giving up after ${_loop} tries."
+ return ${_error}
+ else
+ log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..."
+ sleep ${_sleep}
+ fi
+ done
+
+ download ${_url}/epsilon.tar
+ download ${_url}/nucacallm.tar
+ fi
+
+ if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then
+ ls -lh ${HOME}/*tar
+ mkdir ${HOME}/calm.backup || true
+ cp ${_calm_bin}/*tar ${HOME}/calm.backup/ \
+ && genesis stop nucalm epsilon \
+ && docker rm -f "$(docker ps -aq)" || true \
+ && docker rmi -f "$(docker images -q)" || true \
+ && cp ${HOME}/*tar ${_calm_bin}/ \
+ && cluster start # ~75 seconds to start both containers
+
+ for _container in epsilon nucalm ; do
+ local _test=0
+ while (( ${_test} < 1 )); do
+ _test=$(docker ps -a | grep ${_container} | grep -i healthy | wc --lines)
+ done
+ done
+ fi
+}
+
+function flow_enable() {
+ ## (API; Didn't work. Used nuclei instead)
+ ## https://localhost:9440/api/nutanix/v3/services/microseg
+ ## {"state":"ENABLE"}
+ # To disable flow run the following on PC: nuclei microseg.disable
+
+ log "Enable Nutanix Flow..."
+ nuclei microseg.enable 2>/dev/null
+ nuclei microseg.get_status 2>/dev/null
+}
+
+function lcm() {
+ local _http_body
+ local _pc_version
+ local _test
+
+ # shellcheck disable=2206
+ _pc_version=(${PC_VERSION//./ })
+
+ if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then
+ log "PC_VERSION ${PC_VERSION} >= 5.9, starting LCM inventory..."
+
+ _http_body='{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}'
+
+ _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \
+ https://localhost:9440/PrismGateway/services/rest/v1/genesis)
+ log "inventory _test=|${_test}|"
+ fi
+}
+
+function pc_admin() {
+ local _http_body
+ local _test
+ local _admin_user='marklavi'
+
+ _http_body=$(cat <= 5.9"
+ if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then
+ _http_body+=$(cat < 0 )); then
+ log "Warning: password not reset: $?."# exit 10
+ fi
+ # TOFIX: nutanix@PC Linux account password change as well?
+
+ # local _old_pw='nutanix/4u'
+ # local _http_body=$(cat <= 5 && ${_pc_version[1]} >= 10 && ${_test} != 500 )); then
+ log "PC_VERSION ${PC_VERSION} >= 5.10, setting favorites..."
+
+ _json=$(cat </dev/null | grep ${_name} | wc --lines)
+ if (( ${_count} > 0 )); then
+ nuclei project.delete ${_name} confirm=false 2>/dev/null
+ else
+ log "Warning: _count=${_count}"
+ fi
+
+ log "Creating ${_name}..."
+ nuclei project.create name=${_name} description='test from NuCLeI!' 2>/dev/null
+ _uuid=$(. /etc/profile.d/nutanix_env.sh \
+ && nuclei project.get ${_name} format=json 2>/dev/null \
+ | jq .metadata.project_reference.uuid | tr -d '"')
+ log "${_name}.uuid = ${_uuid}"
+
+ # - project.get mark.lavi.test
+ # - project.update mark.lavi.test
+ # spec.resources.account_reference_list.kind= or .uuid
+ # spec.resources.default_subnet_reference.kind=
+ # spec.resources.environment_reference_list.kind=
+ # spec.resources.external_user_group_reference_list.kind=
+ # spec.resources.subnet_reference_list.kind=
+ # spec.resources.user_reference_list.kind=
+
+ # {"spec":{"access_control_policy_list":[],"project_detail":{"name":"mark.lavi.test1","resources":{"external_user_group_reference_list":[],"user_reference_list":[],"environment_reference_list":[],"account_reference_list":[],"subnet_reference_list":[{"kind":"subnet","name":"Primary","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"},{"kind":"subnet","name":"Secondary","uuid":"4689bc7f-61dd-4527-bc7a-9d737ae61322"}],"default_subnet_reference":{"kind":"subnet","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"}},"description":"test from NuCLeI!"},"user_list":[],"user_group_list":[]},"api_version":"3.1","metadata":{"creation_time":"2018-06-22T03:54:59Z","spec_version":0,"kind":"project","last_update_time":"2018-06-22T03:55:00Z","uuid":"1be7f66a-5006-4061-b9d2-76caefedd298","categories":{},"owner_reference":{"kind":"user","name":"admin","uuid":"00000000-0000-0000-0000-000000000000"}}}
+}
diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh
index b368308..31b02a3 100755
--- a/scripts/lib.pc.sh
+++ b/scripts/lib.pc.sh
@@ -2,103 +2,407 @@
# -x
# Dependencies: curl, ncli, nuclei, jq
-function calm_update() {
- local _attempts=12
- local _calm_bin=/usr/local/nutanix/epsilon
- local _container
- local _error=19
- local _loop=0
- local _sleep=10
- local _url=http://${AUTH_HOST}:8080
-
- if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then
- log "Bypassing download of updated containers."
+###############################################################################################################################################################################
+# 12th of April 2019 - Willem Essenstam
+# Added a "-d" character in the flow_enable so the command would run.
+# Changed the Karbon Eanable function so it also checks that Karbon has been enabled. Some small typos changed so the Karbon part should work
+#
+# 31-05-2019 - Willem Essenstam
+# Added the download bits for the Centos Image for Karbon
+###############################################################################################################################################################################
+
+###############################################################################################################################################################################
+# Routine to mark PC has finished staging
+###############################################################################################################################################################################
+
+function finish_staging() {
+ log "Staging is complete. Writing to .staging_complete"
+ touch .staging_complete
+ date >> .staging_complete
+}
+
+
+###############################################################################################################################################################################
+# Routine to enable Flow
+###############################################################################################################################################################################
+
+function flow_enable() {
+ local _attempts=30
+ local _loops=0
+ local _sleep=60
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+ local _url_flow='https://localhost:9440/api/nutanix/v3/services/microseg'
+
+ # Create the JSON payload
+ _json_data='{"state":"ENABLE"}'
+
+ log "Enable Nutanix Flow..."
+
+ # Enabling Flow and put the task id in a variable
+ _task_id=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow | jq '.task_uuid' | tr -d \")
+
+ # Try one more time then fail, but continue
+ if [ -z $_task_id ]; then
+ log "Flow not yet enabled. Will retry...."
+ _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow | jq '.task_uuid' | tr -d \")
+
+ if [ -z $_task_id ]; then
+ log "Flow still not enabled.... ***Not retrying. Please enable via UI.***"
+ fi
+ else
+ loop ${_task_id}
+ log "Flow has been Enabled..."
+ fi
+
+
+
+}
+
+
+
+###############################################################################################################################################################################
+# Routine to start the LCM Inventory and the update.
+###############################################################################################################################################################################
+
+function lcm() {
+
+ local _url_lcm='https://localhost:9440/PrismGateway/services/rest/v1/genesis'
+ local _url_progress='https://localhost:9440/api/nutanix/v3/tasks'
+ local _url_groups='https://localhost:9440/api/nutanix/v3/groups'
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+
+ # Reset the variables we use so we're not adding extra values to the arrays
+ unset uuid_arr
+ unset version_ar
+
+ # Inventory download/run
+ _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \")
+
+ # If there has been a reply (task_id) then the URL has accepted by PC
+ # Changed (()) to [] so it works....
+ if [ -z "$_task_id" ]; then
+ log "LCM Inventory start has encountered an eror..."
+ else
+ log "LCM Inventory started.."
+ set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run
+
+ # Run the progess checker
+ loop
+
+ #################################################################
+ # Grab the json from the possible to be updated UUIDs and versions and save local in reply_json.json
+ #################################################################
+
+ # Need loop so we can create the full json more dynamical
+
+ # Issue is taht after the LCM inventory the LCM will be updated to a version 2.0 and the API call needs to change!!!
+ # We need to figure out if we are running V1 or V2!
+ lcm_version=$(curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"get_config\"}}"}' ${_url_lcm} | jq '.value' | tr -d \\ | sed 's/^"\(.*\)"$/\1/' | sed 's/.return/return/g' | jq '.return.lcm_cpdb_table_def_list.entity' | tr -d \"| grep "lcm_entity_v2" | wc -l)
+
+ if [ $lcm_version -lt 1 ]; then
+ log "LCM Version 1 found.."
+ # V1: Run the Curl command and save the oputput in a temp file
+ curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json
+
+ # Fill the uuid array with the correct values
+ uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " "))
+
+ # Grabbing the versions of the UUID and put them in a versions array
+ for uuid in "${uuid_arr[@]}"
+ do
+ version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | tr -d \"))
+ done
+ else
+ log "LCM Version 2 found.."
+
+ #''_V2: run the other V2 API call to get the UUIDs of the to be updated software parts
+ # Grab the installed version of the software first UUIDs
+ curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_entity_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "id"}, {"attribute": "uuid"}, {"attribute": "entity_model"}, {"attribute": "version"}, {"attribute": "location_id"}, {"attribute": "entity_class"}, {"attribute": "description"}, {"attribute": "last_updated_time_usecs"}, {"attribute": "request_version"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "entity_type"}, {"attribute": "single_group_uuid"}],"query_name": "lcm:EntityGroupModel","grouping_attribute": "location_id","filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_uuid.json
+
+ # Fill the uuid array with the correct values
+ uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="uuid") | .values[0].values[0]' reply_json_uuid.json | sort -u | tr "\"" " " | tr -s " "))
+
+ # Grab the available updates from the PC after LCMm has run
+ curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"}, {"attribute": "entity_class"}, {"attribute": "status"}, {"attribute": "version"}, {"attribute": "dependencies"},{"attribute": "single_group_uuid"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "order"}],"query_name": "lcm:VersionModel","filter_criteria": "_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_ver.json
+
+ # Grabbing the versions of the UUID and put them in a versions array
+ for uuid in "${uuid_arr[@]}"
+ do
+ # Get the latest version from the to be updated uuid. Put always a value in the array otherwise we loose/have skewed verrsions to products
+ version=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \"))
+ # If no version upgrade available add a blank item in the array
+ if [[ -z $version ]]; then
+ version='NA'
+ fi
+ version_ar+=($version)
+ done
+ # Copy the right info into the to be used array
+ fi
+
+ # Set the parameter to create the ugrade plan
+ # Create the curl json string '-d blablablablabla' so we can call the string and not the full json data line
+ # Begin of the JSON data payload
+ _json_data="-d "
+ _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",["
+
+ # Combine the two created UUID and Version arrays to the full needed data using a loop
+ count=0
+ while [ $count -lt ${#uuid_arr[@]} ]
+ do
+ if [[ ${version_ar[$count]} != *"NA"* ]]; then
+ _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"],"
+ log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}"
+ fi
+ let count=count+1
+ done
+
+ # Remove the last "," as we don't need it.
+ _json_data=${_json_data%?};
+
+ # Last part of the JSON data payload
+ _json_data+="]]}}\"}"
+
+ # Run the generate plan task
+ _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm})
+
+ # Notify the log server that the LCM has created a plan
+ log "LCM Inventory has created a plan"
+
+ # Reset the loop counter so we restart the amount of loops we need to run
+ set _loops=0
+
+ # As the new json for the perform the upgrade only needs to have "generate_plan" changed into "perform_update" we use sed...
+ _json_data=$(echo $_json_data | sed -e 's/generate_plan/perform_update/g')
+
+
+ # Run the upgrade to have the latest versions
+ _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \")
+
+ # If there has been a reply task_id then the URL has accepted by PC
+ if [ -z "$_task_id" ]; then
+ # There has been an error!!!
+ log "LCM Upgrade has encountered an error!!!!"
+ else
+ # Notify the logserver that we are starting the LCM Upgrade
+ log "LCM Upgrade starting...Process may take up to 45 minutes!!!"
+
+ # Run the progess checker
+ loop
+ fi
+ fi
+
+ # Remove the temp json files as we don't need it anymore
+ #rm -rf reply_json.json
+ #rm -rf reply_json_ver.json
+ #rm -rf reply_json_uuid.json
+
+}
+
+###############################################################################################################################################################################
+# Routine to enable Karbon
+###############################################################################################################################################################################
+
+function karbon_enable() {
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+ local _loop=0
+ local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}"
+ local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} "
+ local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis"
+
+ # Start the enablement process
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l)
+
+ # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled...
+ if [[ $_response -eq 1 ]]; then
+ # Check if Karbon has been enabled
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l)
+ while [ $_response -ne 1 ]; do
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l)
+ done
+ log "Karbon has been enabled."
else
- dependencies 'install' 'sshpass' || exit 13
- remote_exec 'ssh' 'AUTH_SERVER' \
- 'if [[ ! -e nucalm.tar ]]; then smbclient -I 10.21.249.12 \\\\pocfs\\images --user ${1} --command "prompt ; cd /Calm-EA/pc-'${PC_VERSION}'/ ; mget *tar"; echo; ls -lH *tar ; fi' \
- 'OPTIONAL'
-
- while true ; do
- (( _loop++ ))
- _test=$(curl ${CURL_HTTP_OPTS} ${_url} \
- | tr -d \") # wonderful addition of "" around HTTP status code by cURL
-
- if (( ${_test} == 200 )); then
- log "Success reaching ${_url}"
- break;
- elif (( ${_loop} > ${_attempts} )); then
- log "Warning ${_error} @${1}: Giving up after ${_loop} tries."
- return ${_error}
+ log "Retrying to enable Karbon one more time."
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l)
+ if [[ $_response -eq 1 ]]; then
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l)
+ if [ $_response -lt 1 ]; then
+ log "Karbon isn't enabled. Please use the UI to enable it."
else
- log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..."
- sleep ${_sleep}
+ log "Karbon has been enabled."
fi
- done
+ fi
+ fi
+}
- download ${_url}/epsilon.tar
- download ${_url}/nucacallm.tar
+###############################################################################################################################################################################
+# Download Karbon CentOS Image
+###############################################################################################################################################################################
+
+function karbon_image_download() {
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+ local _loop=0
+ local _cookies=''NTNX_IGW_SESSION': resp.cookies['NTNX_IGW_SESSION']'
+ local _startDownload="https://localhost:9440/karbon/acs/image/download"
+ local _getuuidDownload="https://localhost:9440/karbon/acs/image/list"
+
+ # Create the Basic Authentication using the base64 command
+ _auth=$(echo "admin:${PE_PASSWORD}" | base64)
+
+ # Call the UUID URL so we have the right UUID for the image
+ uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic ${_auth}" https://localhost:9440/karbon/acs/image/list $CURL_HTTP_OPTS | jq '.[0].uuid' | tr -d \/\")
+ log "UUID for The Karbon image is: $uuid"
+
+ # Use the UUID to download the image
+ response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS})
+
+ if [ -z $response ]; then
+ log "Download of the CentOS image for Karbon has not been started. Trying one more time..."
+ response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS})
+ if [ -z $response ]; then
+ log "Download of CentOS image for Karbon failed... Please run manually."
+ fi
+ else
+ log "Download of CentOS image for Karbon has started..."
fi
+}
+
+###############################################################################################################################################################################
+# Routine to enable Objects
+###############################################################################################################################################################################
- if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then
- ls -lh ${HOME}/*tar
- mkdir ${HOME}/calm.backup || true
- cp ${_calm_bin}/*tar ${HOME}/calm.backup/ \
- && genesis stop nucalm epsilon \
- && docker rm -f "$(docker ps -aq)" || true \
- && docker rmi -f "$(docker images -q)" || true \
- && cp ${HOME}/*tar ${_calm_bin}/ \
- && cluster start # ~75 seconds to start both containers
-
- for _container in epsilon nucalm ; do
- local _test=0
- while (( ${_test} < 1 )); do
- _test=$(docker ps -a | grep ${_container} | grep -i healthy | wc --lines)
- done
+function objects_enable() {
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+ local _loops=0
+ local _json_data_set_enable="{\"state\":\"ENABLE\"}"
+ local _json_data_check="{\"entity_type\":\"objectstore\"}"
+ local _httpURL_check="https://localhost:9440/oss/api/nutanix/v3/groups"
+ local _httpURL="https://localhost:9440/api/nutanix/v3/services/oss"
+ local _maxtries=30
+
+ # Start the enablement process
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL})
+ log "Enabling Objects....."
+
+ # The response should be a Task UUID
+ if [[ ! -z $_response ]]; then
+ # Check if OSS has been enabled
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_check ${_httpURL_check}| grep "objectstore" | wc -l)
+ while [ $_response -ne 1 ]; do
+ _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_check ${_httpURL_check}| grep "objectstore" | wc -l)
+ if [[ ${_loops} -ne ${_maxtries} ]]; then
+ sleep 10
+ (( _loops++ ))
+ else
+ log "Objects isn't enabled. Please use the UI to enable it."
+ break
+ fi
 done
+ log "Objects has been enabled."
+ else
+ log "Objects isn't enabled. Please use the UI to enable it."
 fi
 }
-function flow_enable() {
- ## (API; Didn't work. Used nuclei instead)
- ## https://localhost:9440/api/nutanix/v3/services/microseg
- ## {"state":"ENABLE"}
- # To disable flow run the following on PC: nuclei microseg.disable
+###############################################################################################################################################################################
+# Create an object store called ntnx-objects in the ntnxlab.local domain
+###############################################################################################################################################################################
- log "Enable Nutanix Flow..."
- nuclei microseg.enable 2>/dev/null
- nuclei microseg.get_status 2>/dev/null
-}
+function object_store() {
+ local _attempts=30
+ local _loops=0
+ local _sleep=60
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+ local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list'
+ local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores'
+ local _url_oss_check='https://localhost:9440/oss/api/nutanix/v3/objectstores/list'
-function lcm() {
- local _http_body
- local _pc_version
- local _test
- # shellcheck disable=2206
- _pc_version=(${PC_VERSION//./ })
+ # Enable Dark Site Repo and wait 3 seconds
+ #mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO}
+ #sleep 3
+ # Confirm airgap is enabled
+ #_response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l)
+
+ #if [ $_response -eq 1 ]; then
+ # log "Objects dark site staging successfully enabled. Response is $_response. "
+ #else
+ # log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response."
+ #fi
+
+ # Payload for the _json_data
+ _json_data='{"kind":"subnet"}'
+
+ # Get the json data and split into CLUSTER_UUID and Primary_Network_UUID
+ CLUSTER_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[].spec | select (.name=="Primary") | .cluster_reference.uuid' | tr -d \")
+ echo ${CLUSTER_UUID}
+
+ PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \")
+ echo ${PRIM_NETWORK_UUID}
- if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then
- log "PC_VERSION ${PC_VERSION} >= 5.9, starting LCM inventory..."
+ echo "BUCKETS_DNS_IP: ${BUCKETS_DNS_IP}, BUCKETS_VIP: ${BUCKETS_VIP}, OBJECTS_NW_START: ${OBJECTS_NW_START}, OBJECTS_NW_END: ${OBJECTS_NW_END}"
+ sleep 5
+ _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx-objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"'
+ _json_data_oss+=${CLUSTER_UUID}
+ _json_data_oss+='"},"buckets_infra_network_dns":"'
+ _json_data_oss+=${BUCKETS_DNS_IP}
+ _json_data_oss+='","buckets_infra_network_vip":"'
+ _json_data_oss+=${BUCKETS_VIP}
+ _json_data_oss+='","buckets_infra_network_reference":{"kind":"subnet","uuid":"'
+ _json_data_oss+=${PRIM_NETWORK_UUID}
+ _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"'
+ _json_data_oss+=${PRIM_NETWORK_UUID}
+ _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"'
+ _json_data_oss+=${OBJECTS_NW_START}
+ _json_data_oss+='","ipv4_end":"'
+ _json_data_oss+=${OBJECTS_NW_END}
+ _json_data_oss+='"}}}}'
- _http_body='value: "{".oid":"LifeCycleManager",".method":"lcm_framework_rpc",".kwargs":{"method_class":"LcmFramework","method":"perform_inventory","args":["http://download.nutanix.com/lcm/2.0"]}}"'
+ # Set the right VLAN dynamically so we are configuring in the right network
+ _json_data_oss=${_json_data_oss//VLANX/${VLAN}}
+ _json_data_oss=${_json_data_oss//NETWORKX/${NETWORK}}
- _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \
- https://localhost:9440/PrismGateway/services/rest/v1/genesis)
- log "inventory _test=|${_test}|"
+ #curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss
+ _createresponse=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_oss ${_url_oss})
+ log "Creating Object Store....."
+
+ # The response should be a Task UUID
+ if [[ ! -z $_createresponse ]]; then
+ # Check if Object store is deployed
+ _response=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_url_oss_check}| grep "ntnx-objects" | wc -l)
+ while [ $_response -ne 1 ]; do
+ log "Object Store not yet created. $_loops/$_attempts... sleeping 10 seconds"
+ _response=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_url_oss_check}| grep "ntnx-objects" | wc -l)
+ if [[ $_loops -ne 30 ]]; then
+ _createresponse=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_oss ${_url_oss})
+ sleep 10
+ (( _loops++ ))
+ else
+ log "Objects store ntnx-objects not created. Please use the UI to create it."
+ break
+ fi
+ done
+ log "Objects store been created."
+ else
+ log "Objects store could not be created. Please use the UI to create it."
fi
+
}
+
+###############################################################################################################################################################################
+# Routine for PC_Admin
+###############################################################################################################################################################################
+
function pc_admin() {
local _http_body
local _test
- local _admin_user='marklavi'
+ local _admin_user='nathan'
_http_body=$(cat <= 5.9"
- if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then
- _http_body+=$(cat < /dev/null 2>&1)
+ _setup=$(/home/nutanix/lab/initialize_lab.sh ${PC_HOST} admin ${PE_PASSWORD} ${PE_HOST} nutanix ${PE_PASSWORD} > /dev/null 2>&1)
+ log "Running Setup Script|$_setup"
+
+ popd
+}
+
+###############################################################################################################################################################################
+# Routine to set up the SSP authentication to use the AutoDC server
+###############################################################################################################################################################################
+
function ssp_auth() {
args_required 'AUTH_SERVER AUTH_HOST AUTH_ADMIN_USER AUTH_ADMIN_PASS'
@@ -292,14 +675,11 @@ function ssp_auth() {
local _ssp_connect
log "Find ${AUTH_SERVER} uuid"
- _ldap_uuid=$(PATH=${PATH}:${HOME}; curl ${CURL_POST_OPTS} \
- --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{ "kind": "directory_service" }' \
- https://localhost:9440/api/nutanix/v3/directory_services/list \
- | jq -r .entities[0].metadata.uuid)
+ _ldap_uuid=$(PATH=${PATH}:${HOME}; curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{ "kind": "directory_service" }' 'https://localhost:9440/api/nutanix/v3/directory_services/list' | jq -r .entities[0].metadata.uuid)
log "_ldap_uuid=|${_ldap_uuid}|"
# TODO:110 get directory service name _ldap_name
- _ldap_name=${AUTH_SERVER}
+ _ldap_name=${AUTH_DOMAIN}
# TODO:140 bats? test ldap connection
log "Connect SSP Authentication (spec-ssp-authrole.json)..."
@@ -414,46 +794,79 @@ EOF
}
EOF
)
- _ssp_connect=$(curl ${CURL_POST_OPTS} \
- --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data "${_http_body}" \
- https://localhost:9440/api/nutanix/v3/directory_services/${_ldap_uuid})
+ _ssp_connect=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services/${_ldap_uuid})
log "_ssp_connect=|${_ssp_connect}|"
}
+###############################################################################################################################################################################
+# Routine to enable Calm and proceed only if Calm is enabled
+###############################################################################################################################################################################
+
function calm_enable() {
local _http_body
local _test
+ local _sleep=30
+ local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
log "Enable Nutanix Calm..."
- _http_body=$(cat <nul | tr -d \")
+ if [[ ${_progress} == "ENABLED" ]]; then
+ log "Calm has been Enabled..."
+ break;
+ else
+ log "Still enabling Calm.....Sleeping ${_sleep} seconds"
+ sleep ${_sleep}
+ fi
+ done
}
+
+
+
+
+###############################################################################################################################################################################
+# Routine to make changes to the PC UI; Colors, naming and the Welcome Banner
+###############################################################################################################################################################################
+
function pc_ui() {
# http://vcdx56.com/2017/08/change-nutanix-prism-ui-login-screen/
local _http_body
local _json
local _pc_version
local _test
-
+#{"type":"WELCOME_BANNER","username":"system_data","key":"welcome_banner_content","value":"${PRISM_ADMIN}:${PE_PASSWORD}@${CLUSTER_NAME}"} \
_json=$(cat </dev/null | grep ${_name} | wc --lines)
- if (( ${_count} > 0 )); then
- nuclei project.delete ${_name} confirm=false 2>/dev/null
+ VMName="CentOS"
+
+ Log "Creating ${VMName}"
+
+HTTP_JSON_BODY=$(cat </dev/null
- _uuid=$(. /etc/profile.d/nutanix_env.sh \
- && nuclei project.get ${_name} format=json 2>/dev/null \
- | jq .metadata.project_reference.uuid | tr -d '"')
- log "${_name}.uuid = ${_uuid}"
-
- # - project.get mark.lavi.test
- # - project.update mark.lavi.test
- # spec.resources.account_reference_list.kind= or .uuid
- # spec.resources.default_subnet_reference.kind=
- # spec.resources.environment_reference_list.kind=
- # spec.resources.external_user_group_reference_list.kind=
- # spec.resources.subnet_reference_list.kind=
- # spec.resources.user_reference_list.kind=
-
- # {"spec":{"access_control_policy_list":[],"project_detail":{"name":"mark.lavi.test1","resources":{"external_user_group_reference_list":[],"user_reference_list":[],"environment_reference_list":[],"account_reference_list":[],"subnet_reference_list":[{"kind":"subnet","name":"Primary","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"},{"kind":"subnet","name":"Secondary","uuid":"4689bc7f-61dd-4527-bc7a-9d737ae61322"}],"default_subnet_reference":{"kind":"subnet","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"}},"description":"test from NuCLeI!"},"user_list":[],"user_group_list":[]},"api_version":"3.1","metadata":{"creation_time":"2018-06-22T03:54:59Z","spec_version":0,"kind":"project","last_update_time":"2018-06-22T03:55:00Z","uuid":"1be7f66a-5006-4061-b9d2-76caefedd298","categories":{},"owner_reference":{"kind":"user","name":"admin","uuid":"00000000-0000-0000-0000-000000000000"}}}
+ log "${VMName} Deployment Completed"
+
+
+
+
+#set +x
+
+}
+
+#########################################################################################################################################
+# Routine to configure Era
+#########################################################################################################################################
+
+function configure_era() {
+ local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure "
+
+#set -x
+
+log "Starting Era Config"
+
+log "PE Cluster IP |${PE_HOST}|"
+log "EraServer IP |${ERA_HOST}|"
+
+## Create the EraManaged network inside Era ##
+log "Reset Default Era Password"
+
+ _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/update" --data '{ "password": "'${ERA_PASSWORD}'"}' | jq -r '.status' | tr -d \")
+
+log "Password Reset |${_reset_passwd}|"
+
+## Accept EULA ##
+log "Accept Era EULA"
+
+ _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/validate" --data '{ "eulaAccepted": true }' | jq -r '.status' | tr -d \")
+
+log "Accept EULA |${_accept_eula}|"
+
+## Register Cluster ##
+log "Register ${CLUSTER_NAME} with Era"
+
+HTTP_JSON_BODY=$(cat < cluster.json
+
+ _task_id=$(curl -k -H 'Content-Type: multipart/form-data' -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/clusters/${_era_cluster_id}/json" -F file="@"cluster.json)
+
+## Add the Secondary Network inside Era ##
+log "Create ${NW2_NAME} DHCP/IPAM Network"
+
+ _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/resources/networks" --data '{"name": "'${NW2_NAME}'","type": "DHCP",
+ "clusterId":"'${_era_cluster_id}'"}' | jq -r '.id' | tr -d \")
+
+log "Created ${NW2_NAME} Network with Network ID |${_dhcp_network_id}|"
+
+## Create the EraManaged network inside Era ##
+log "Create ${NW3_NAME} Static Network"
+
+HTTP_JSON_BODY=$(cat <"$tmp" && mv "$tmp" $JSONFile)
+ fi
+
+ # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data; this is included on export but is invalid on import. (affects all BPs being imported)
+ tmp_removal=$(mktemp)
+ $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile)
+
+ # GET BP NAME (affects all BPs being imported)
+ # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all
+ blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile)
+ blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix "
+ blueprint_name="${blueprint_name#\"}" # will remove the prefix "
+
+ if [ $blueprint_name == 'null' ]; then
+ echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?"
+ exit 0
+ else
+ # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload
+ echo "Uploading the updated blueprint: $blueprint_name..."
+
+ path_to_file=$JSONFile
+ bp_name=$blueprint_name
+ project_uuid=$project_uuid
+
+ upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid "https://localhost:9440/api/nutanix/v3/blueprints/import_file")
+
+ #if the upload_result var is not empty then let's say it was successful
+ if [ -z "$upload_result" ]; then
+ echo "Upload for $bp_name did not finish."
+ else
+ echo "Upload for $bp_name finished."
+ echo "-----------------------------------------"
+ # echo "Result: $upload_result"
+ fi
+ fi
+
+ echo "Finished uploading ${BLUEPRINT}!"
+
+ #Getting the Blueprint UUID
+ CITRIX_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CitrixBootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+
+ echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID"
+
+ echo "Update Blueprint and writing to temp file"
+ echo "${CALM_PROJECT} network UUID: ${project_uuid}"
+ echo "DOMAIN=${DOMAIN}"
+ echo "AD_IP=${AD_IP}"
+ echo "PE_IP=${PE_IP}"
+ echo "DDC_IP=${DDC_IP}"
+ echo "CVM_NETWORK=${CVM_NETWORK}"
+ echo "SERVER_IMAGE=${SERVER_IMAGE}"
+ echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}"
+ echo "CITRIX_IMAGE=${CITRIX_IMAGE}"
+ echo "CITRIX_IMAGE_UUID=${CITRIX_IMAGE_UUID}"
+ echo "NETWORK_UUID=${NETWORK_UUID}"
+
+ DOWNLOADED_JSONFile="${BLUEPRINT}-${CITRIX_BLUEPRINT_UUID}.json"
+ UPDATED_JSONFile="${BLUEPRINT}-${CITRIX_BLUEPRINT_UUID}-updated.json"
+
+ # GET The Blueprint so it can be updated
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile}
+
+ cat $DOWNLOADED_JSONFile \
+ | jq -c 'del(.status)' \
+ | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$DOMAIN\")" \
+ | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[1].value = \"$AD_IP\")" \
+ | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[2].value = \"$PE_IP\")" \
+ | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[6].value = \"$DDC_IP\")" \
+ | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[4].value = \"$CVM_NETWORK\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.name = \"$CITRIX_IMAGE\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.uuid = \"$CITRIX_IMAGE_UUID\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \
+ | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$LOCAL_PASSWORD\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \
+ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$DOMAIN_CREDS_PASSWORD\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \
+ | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value = \"$PE_CREDS_PASSWORD\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \
+ | jq -c -r "(.spec.resources.credential_definition_list[3].secret.value = \"$SQL_CREDS_PASSWORD\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[3].secret.attrs.is_secret_modified = "true")' \
+ > $UPDATED_JSONFile
+
+ echo "Saving Credentials Edits with PUT"
+
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}"
+
+ echo "Finished Updating Credentials"
+
+ # GET The Blueprint payload
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Citrix Infra", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json
+
+ # Launch the BLUEPRINT
+
+ echo "Launching the Era Server Application"
+
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch"
+
+ echo "Finished Launching the Citrix Infra Application"
+
+}
+
+###############################################################################################################################################################################
+# Routine to upload Era Calm Blueprint and set variables
+###############################################################################################################################################################################
+
+function upload_era_calm_blueprint() {
+ local DIRECTORY="/home/nutanix/era"
+ local BLUEPRINT=${ERA_Blueprint}
+ local CALM_PROJECT="BootcampInfra"
+ local ERA_IP=${ERA_HOST}
+ local PE_IP=${PE_HOST}
+ local CLSTR_NAME="none"
+ local CTR_UUID=${_storage_default_uuid}
+ local CTR_NAME=${STORAGE_DEFAULT}
+ local NETWORK_NAME=${NW1_NAME}
+ local VLAN_NAME=${NW1_VLAN}
+ local ERAADMIN_PASSWORD="nutanix/4u"
+ local PE_CREDS_PASSWORD="${PE_PASSWORD}"
+ #local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE})
+ local DOWNLOAD_BLUEPRINTS
+ local ERA_IMAGE="ERA-Server-build-1.2.1.qcow2"
+ local ERA_IMAGE_UUID
+ local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure"
+ local _loops="0"
+ local _maxtries="75"
+
+
+ echo "Starting Era Blueprint Deployment"
+
+ mkdir $DIRECTORY
+
+ echo "Getting Era Image UUID"
+ #Getting the IMAGE_UUID -- When changing the image, make sure to change the name filter as well
+ _loops="0"
+ _maxtries="75"
+
+ ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.1.qcow2' | wc -l)
+ # The response should be a Task UUID
+ while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do
+ log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds"
+ sleep 60
+ ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.1.qcow2' | wc -l)
+ (( _loops++ ))
+ done
+ if [[ $_loops -lt $_maxtries ]]; then
+ log "Image has been uploaded."
+ ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+ else
+ log "Image is not upload, please check."
+ fi
+
+
+ echo "ERA Image UUID = $ERA_IMAGE_UUID"
+ echo "-----------------------------------------"
+
+ echo "Getting NETWORK UUID"
+
+ NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+
+ echo "NETWORK UUID = $NETWORK_UUID"
+ echo "-----------------------------------------"
+
+ # download the blueprint
+ DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT})
+ log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}"
+
+ # ensure the directory that contains the blueprints to be imported is not empty
+ if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then
+ echo "There are no .json files found in the directory provided."
+ exit 0
+ fi
+
+ if [ $CALM_PROJECT != 'none' ]; then
+
+ # curl command needed:
+ # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid'
+
+ # make API call and store project_uuid
+ project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid')
+
+ echo "Projet UUID = $project_uuid"
+
+ if [ -z "$project_uuid" ]; then
+ # project wasn't found
+ # exit at this point as we don't want to assume all blueprints should then hit the 'default' project
+ echo "Project $CALM_PROJECT was not found. Please check the name and retry."
+ exit 0
+ else
+ echo "Project $CALM_PROJECT exists..."
+ fi
+ fi
+
+ # update the user with script progress...
+
+ echo "Starting blueprint updates and then Uploading to Calm..."
+
+ # read the entire JSON file from the directory
+ JSONFile="${DIRECTORY}/${BLUEPRINT}"
+
+ echo "Currently updating blueprint $JSONFile..."
+
+ # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint
+ tmp=$(mktemp)
+
+ # ADD PROJECT , we need to add it into the JSON data
+ if [ $CALM_PROJECT != 'none' ]; then
+ # add the new attributes to the JSON and overwrite the old JSON file with the new one
+ $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile)
+ fi
+
+ # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data; this is included on export but is invalid on import. (affects all BPs being imported)
+ tmp_removal=$(mktemp)
+ $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile)
+
+ # GET BP NAME (affects all BPs being imported)
+ # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all
+ blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile)
+ blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix "
+ blueprint_name="${blueprint_name#\"}" # will remove the prefix "
+
+ if [ $blueprint_name == 'null' ]; then
+ echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?"
+ exit 0
+ else
+ # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload
+ echo "Uploading the updated blueprint: $blueprint_name..."
+
+ path_to_file=$JSONFile
+ bp_name=$blueprint_name
+ project_uuid=$project_uuid
+
+ upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid)
+
+ #if the upload_result var is not empty then let's say it was succcessful
+ if [ -z "$upload_result" ]; then
+ echo "Upload for $bp_name did not finish."
+ else
+ echo "Upload for $bp_name finished."
+ echo "-----------------------------------------"
+ # echo "Result: $upload_result"
+ fi
+ fi
+
+ echo "Finished uploading ${BLUEPRINT}!"
+
+ #Getting the Blueprint UUID
+ ERA_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==EraServerDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+
+ echo "ERA Blueprint UUID = $ERA_BLUEPRINT_UUID"
+
+ echo "Update Blueprint and writing to temp file"
+
+ echo "${CALM_PROJECT} network UUID: ${project_uuid}"
+ echo "ERA_IP=${ERA_IP}"
+ echo "PE_IP=${PE_IP}"
+ echo "ERA_IMAGE=${ERA_IMAGE}"
+ echo "ERA_IMAGE_UUID=${ERA_IMAGE_UUID}"
+ echo "NETWORK_UUID=${NETWORK_UUID}"
+
+ DOWNLOADED_JSONFile="${BLUEPRINT}-${ERA_BLUEPRINT_UUID}.json"
+ UPDATED_JSONFile="${BLUEPRINT}-${ERA_BLUEPRINT_UUID}-updated.json"
+
+ # GET The Blueprint so it can be updated
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile}
+
+ cat $DOWNLOADED_JSONFile \
+ | jq -c 'del(.status)' \
+ | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$ERA_IP\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$ERA_IMAGE\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$ERA_IMAGE_UUID\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \
+ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \
+ | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$ERAADMIN_PASSWORD\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \
+ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PE_CREDS_PASSWORD\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \
+ | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \
+ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \
+ > $UPDATED_JSONFile
+
+ echo "Saving Credentials Edits with PUT"
+
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}"
+
+ echo "Finished Updating Credentials"
+
+ # GET The Blueprint payload
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Era Server", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json
+
+ # Launch the BLUEPRINT
+
+ echo "Launching the Era Server Application"
+
+ curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}/launch"
+
+ echo "Finished Launching the Era Server Application"
+
+}
+
+###############################################################################################################################################################################
+# Routine to upload Karbon Calm Blueprint and set variables
+###############################################################################################################################################################################
+
+function upload_karbon_calm_blueprint() {
+  # Downloads the Karbon Calm blueprint, assigns it to the BootcampInfra
+  # project, uploads it to Calm, injects the PE/PC credential secrets and
+  # launches the resulting application.
+  # Globals (read): Karbon_Blueprint, BLUEPRINT_URL, PE_HOST, PRISM_ADMIN,
+  #   PE_PASSWORD, NW1_NAME, NW1_VLAN, STORAGE_DEFAULT, _storage_default_uuid
+  # Outputs: progress messages to stdout / the log; writes working JSON files
+  #   into /home/nutanix/karbon and the current directory.
+  local DIRECTORY="/home/nutanix/karbon"
+  local BLUEPRINT=${Karbon_Blueprint}
+  local CALM_PROJECT="BootcampInfra"
+  local KARBON_IMAGE='ntnx-0.4'
+  local PE_IP=${PE_HOST}
+  local CLSTR_NAME="none"
+  local CTR_UUID=${_storage_default_uuid}
+  local CTR_NAME=${STORAGE_DEFAULT}
+  local NETWORK_NAME=${NW1_NAME}
+  local VLAN_NAME=${NW1_VLAN}
+  local PE_CREDS_PASSWORD="${PE_PASSWORD}"
+  local PC_CREDS_PASSWORD="${PE_PASSWORD}"
+  local DOWNLOAD_BLUEPRINTS
+  local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure"
+  local _loops="0"
+  local _maxtries="75"
+
+  echo "Starting Karbon Blueprint Deployment"
+
+  # -p: do not fail when the staging directory already exists (script re-runs)
+  mkdir -p "${DIRECTORY}"
+
+  # download the blueprint
+  DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT})
+  log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}"
+
+  # ensure the directory that contains the blueprints to be imported is not empty.
+  # NOTE: the previous check compared stdout of 'ls -l' against the "No such
+  # file" text, but that message goes to stderr, so it never matched; test the
+  # glob's exit status directly instead.
+  if ! ls "${DIRECTORY}"/*.json >/dev/null 2>&1; then
+    echo "There are no .json files found in the directory provided."
+    exit 0
+  fi
+
+  if [[ "${CALM_PROJECT}" != 'none' ]]; then
+    # look up the target project's UUID via the v3 projects/list API
+    project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid')
+
+    echo "Project UUID = $project_uuid"
+
+    if [ -z "$project_uuid" ]; then
+      # project wasn't found; exit here rather than silently falling back to
+      # the 'default' project
+      echo "Project $CALM_PROJECT was not found. Please check the name and retry."
+      exit 0
+    else
+      echo "Project $CALM_PROJECT exists..."
+    fi
+  fi
+
+  # update the user with script progress...
+  echo "Starting blueprint updates and then Uploading to Calm..."
+
+  JSONFile="${DIRECTORY}/${BLUEPRINT}"
+  echo "Currently updating blueprint $JSONFile..."
+
+  # jq cannot edit in place, so write to a temp file and move it back
+  tmp=$(mktemp)
+
+  # ADD PROJECT reference into the blueprint metadata.
+  # NOTE(review): $proj carries the project *name* into "kind"; the v3 API
+  # normally expects kind "project" — confirm against a working import.
+  if [[ "${CALM_PROJECT}" != 'none' ]]; then
+    jq --arg proj "$CALM_PROJECT" --arg proj_uuid "$project_uuid" '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' "$JSONFile" >"$tmp" && mv "$tmp" "$JSONFile"
+  fi
+
+  # REMOVE the "status" and "product_version" keys (if they exist): included
+  # on export but invalid on import
+  tmp_removal=$(mktemp)
+  jq 'del(.status) | del(.product_version)' "$JSONFile" >"$tmp_removal" && mv "$tmp_removal" "$JSONFile"
+
+  # GET BP NAME: 'null' means a corrupt/edited blueprint or not a blueprint at all
+  blueprint_name=$(jq -r '.spec.name' "$JSONFile")
+
+  if [[ "$blueprint_name" == 'null' ]]; then
+    echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?"
+    exit 0
+  else
+    # got the blueprint name, so it is probably a valid blueprint file
+    echo "Uploading the updated blueprint: $blueprint_name..."
+
+    path_to_file=$JSONFile
+    bp_name=$blueprint_name
+
+    upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@"$path_to_file" -F name="$bp_name" -F project_uuid="$project_uuid")
+
+    # an empty response body means the import call returned nothing at all
+    if [ -z "$upload_result" ]; then
+      echo "Upload for $bp_name did not finish."
+    else
+      echo "Upload for $bp_name finished."
+      echo "-----------------------------------------"
+      # echo "Result: $upload_result"
+    fi
+  fi
+
+  echo "Finished uploading ${BLUEPRINT}!"
+
+  # Getting the Blueprint UUID by filtering on the blueprint name inside Calm
+  KARBON_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==KarbonClusterDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+
+  echo "Karbon Blueprint UUID = $KARBON_BLUEPRINT_UUID"
+
+  echo "Update Blueprint and writing to temp file"
+  echo "${CALM_PROJECT} network UUID: ${project_uuid}"
+  echo "KARBON_BLUEPRINT_UUID=${KARBON_BLUEPRINT_UUID}"
+
+  DOWNLOADED_JSONFile="${BLUEPRINT}-${KARBON_BLUEPRINT_UUID}.json"
+  UPDATED_JSONFile="${BLUEPRINT}-${KARBON_BLUEPRINT_UUID}-updated.json"
+
+  # GET the blueprint so the credential secrets can be filled in
+  curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile}
+
+  # Inject the PE and PC passwords; is_secret_modified must be flagged "true"
+  # or the PUT ignores the new secret values
+  jq -c 'del(.status)' "$DOWNLOADED_JSONFile" \
+    | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$PE_CREDS_PASSWORD\")" \
+    | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \
+    | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PC_CREDS_PASSWORD\")" \
+    | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \
+    > $UPDATED_JSONFile
+
+  echo "Saving Credentials Edits with PUT"
+
+  curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}"
+
+  echo "Finished Updating Credentials"
+
+  # GET the blueprint payload and reshape it into a launch request
+  curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "KarbonClusterDeployment", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json
+
+  # Launch the BLUEPRINT
+  log "Sleep 30 seconds so the blueprint can settle in......"
+  sleep 30
+
+  log "Launching the Karbon Cluster Blueprint"
+
+  curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch"
+
+  log "Finished Launching the Karbon Cluster Deployment Blueprint"
+
+}
+
+###############################################################################################################################################################################
+# Routine to upload CICDInfra Calm Blueprint and set variables
+###############################################################################################################################################################################
+
+function upload_CICDInfra_calm_blueprint() {
+  # Downloads the CICD-Infra Calm blueprint, assigns it to the BootcampInfra
+  # project, uploads it to Calm and rewrites its NIC subnet references and
+  # SSH credential before saving the edits back with a PUT.
+  # Globals (read): CICDInfra_Blueprint, BLUEPRINT_URL, NW1_NAME, PRISM_ADMIN,
+  #   PE_PASSWORD
+  # Outputs: progress messages to stdout / the log; writes working JSON files
+  #   into /home/nutanix/cicdinfra and the current directory.
+  local DIRECTORY="/home/nutanix/cicdinfra"
+  local BLUEPRINT=${CICDInfra_Blueprint}
+  local CALM_PROJECT="BootcampInfra"
+  local NETWORK_NAME=${NW1_NAME}
+  local DOWNLOAD_BLUEPRINTS
+  local NETWORK_UUID
+  local SERVER_IMAGE="CentOS7.qcow2"
+  local SERVER_IMAGE_UUID
+  local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure"
+  local _loops="0"
+  local _maxtries="75"
+
+  echo "Starting CICDInfra Blueprint Deployment"
+
+  # -p: do not fail when the staging directory already exists (script re-runs)
+  mkdir -p "${DIRECTORY}"
+
+  # resolve the UUID of the 'Primary' subnet for the NIC rewrites below
+  NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+
+  echo "NETWORK UUID = $NETWORK_UUID"
+  echo "-----------------------------------------"
+
+  # download the blueprint
+  DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT})
+  log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}"
+
+  # ensure the directory that contains the blueprints to be imported is not empty.
+  # NOTE: the previous check compared stdout of 'ls -l' against the "No such
+  # file" text, but that message goes to stderr, so it never matched; test the
+  # glob's exit status directly instead.
+  if ! ls "${DIRECTORY}"/*.json >/dev/null 2>&1; then
+    echo "There are no .json files found in the directory provided."
+    exit 0
+  fi
+
+  if [[ "${CALM_PROJECT}" != 'none' ]]; then
+    # look up the target project's UUID via the v3 projects/list API
+    project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid')
+
+    echo "Project UUID = $project_uuid"
+
+    if [ -z "$project_uuid" ]; then
+      # project wasn't found; exit here rather than silently falling back to
+      # the 'default' project
+      echo "Project $CALM_PROJECT was not found. Please check the name and retry."
+      exit 0
+    else
+      echo "Project $CALM_PROJECT exists..."
+    fi
+  fi
+
+  # update the user with script progress...
+  echo "Starting blueprint updates and then Uploading to Calm..."
+
+  JSONFile="${DIRECTORY}/${BLUEPRINT}"
+  echo "Currently updating blueprint $JSONFile..."
+
+  # jq cannot edit in place, so write to a temp file and move it back
+  tmp=$(mktemp)
+
+  # ADD PROJECT reference into the blueprint metadata.
+  # NOTE(review): $proj carries the project *name* into "kind"; the v3 API
+  # normally expects kind "project" — confirm against a working import.
+  if [[ "${CALM_PROJECT}" != 'none' ]]; then
+    jq --arg proj "$CALM_PROJECT" --arg proj_uuid "$project_uuid" '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' "$JSONFile" >"$tmp" && mv "$tmp" "$JSONFile"
+  fi
+
+  # REMOVE the "status" and "product_version" keys (if they exist): included
+  # on export but invalid on import
+  tmp_removal=$(mktemp)
+  jq 'del(.status) | del(.product_version)' "$JSONFile" >"$tmp_removal" && mv "$tmp_removal" "$JSONFile"
+
+  # GET BP NAME: 'null' means a corrupt/edited blueprint or not a blueprint at all
+  blueprint_name=$(jq -r '.spec.name' "$JSONFile")
+
+  if [[ "$blueprint_name" == 'null' ]]; then
+    echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?"
+    exit 0
+  else
+    # got the blueprint name, so it is probably a valid blueprint file
+    echo "Uploading the updated blueprint: $blueprint_name..."
+
+    path_to_file=$JSONFile
+    bp_name=$blueprint_name
+
+    upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@"$path_to_file" -F name="$bp_name" -F project_uuid="$project_uuid" 'https://localhost:9440/api/nutanix/v3/blueprints/import_file')
+
+    # an empty response body means the import call returned nothing at all
+    if [ -z "$upload_result" ]; then
+      echo "Upload for $bp_name did not finish."
+    else
+      echo "Upload for $bp_name finished."
+      echo "-----------------------------------------"
+      # echo "Result: $upload_result"
+    fi
+  fi
+
+  echo "Finished uploading ${BLUEPRINT}!"
+
+  # Getting the Blueprint UUID by filtering on the blueprint name inside Calm
+  CICDInfra_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CICD_Infra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \")
+
+  echo "CICD Blueprint UUID = $CICDInfra_BLUEPRINT_UUID"
+
+  echo "Update Blueprint and writing to temp file"
+  echo "${CALM_PROJECT} network UUID: ${project_uuid}"
+  echo "NETWORK_UUID=${NETWORK_UUID}"
+
+  DOWNLOADED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}.json"
+  UPDATED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}-updated.json"
+
+  # GET the blueprint so the NICs and credential secret can be filled in
+  curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile}
+
+  # Point every substrate's NIC at the resolved subnet, then inject the SSH key.
+  # SECURITY NOTE(review): the RSA private key below is hardcoded in the script;
+  # it should be read from a protected file or secret store instead.
+  # is_secret_modified must be flagged "true" or the PUT ignores the new secret.
+  jq -c 'del(.status)' "$DOWNLOADED_JSONFile" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \
+    | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \
+    | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \
+    | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \
+    > $UPDATED_JSONFile
+
+  echo "Saving Credentials Edits with PUT"
+
+  curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}"
+
+  echo "Finished Updating Credentials"
+
+  echo "Finished CICDInfra Blueprint Deployment"
+
}
diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh
old mode 100644
new mode 100755
index 28fc3a5..755bb16
--- a/scripts/lib.pe.sh
+++ b/scripts/lib.pe.sh
@@ -2,6 +2,9 @@
# -x
# Dependencies: acli, ncli, jq, sshpass, curl, md5sum, pgrep, wc, tr, pkill
+###############################################################################################################################################################################
+# Routine to set the acli command
+###############################################################################################################################################################################
function acli() {
local _cmd
@@ -10,6 +13,9 @@ function acli() {
# DEBUG=1 && if [[ ${DEBUG} ]]; then log "$@"; fi
}
+###############################################################################################################################################################################
+# Routine to install the AutoDC and join the Domain
+###############################################################################################################################################################################
function authentication_source() {
local _attempts
local _error=13
@@ -30,8 +36,75 @@ function authentication_source() {
_pc_version=(${PC_VERSION//./ })
case "${AUTH_SERVER}" in
- 'ActiveDirectory')
- log "Manual setup = https://github.com/nutanixworkshops/labs/blob/master/setup/active_directory/active_directory_setup.rst"
+ 'AutoAD')
+ local _autoad_auth
+ local _autoad_index=1
+ local _autoad_release=1
+ local _autoad_service='samba-ad-dc'
+ local _autoad_restart="service ${_autoad_service} restart"
+ local _autoad_status="AD Is Running"
+ local _autoad_success="AD Is Running"
+
+
+ dns_check "dc.${AUTH_FQDN}"
+ _result=$?
+
+ if (( ${_result} == 0 )); then
+ log "${AUTH_SERVER}.IDEMPOTENCY: dc.${AUTH_FQDN} set, skip. ${_result}"
+ else
+ log "${AUTH_SERVER}.IDEMPOTENCY failed, no DNS record dc.${AUTH_FQDN}"
+
+ _error=12
+ _loop=0
+ _sleep=${SLEEP}
+
+ repo_source AUTOAD_REPOS[@]
+
+ if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${AUTH_SERVER}| wc --lines) == 0 )); then
+ log "Import ${AUTH_SERVER} image from ${SOURCE_URL}..."
+ acli image.create ${AUTH_SERVER} \
+ image_type=kDiskImage wait=true \
+ container=${STORAGE_IMAGES} source_url=${SOURCE_URL}
+ else
+ log "Image found, assuming ready. Skipping ${AUTH_SERVER} import."
+ fi
+
+ log "Create ${AUTH_SERVER} VM based on ${AUTH_SERVER} image"
+ acli "vm.create ${AUTH_SERVER} num_vcpus=2 num_cores_per_vcpu=1 memory=4G"
+ # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G
+ #acli "vm.disk_create ${AUTH_SERVER}${_autodc_release} cdrom=true empty=true"
+ acli "vm.disk_create ${AUTH_SERVER} clone_from_image=${AUTH_SERVER}"
+ acli "vm.nic_create ${AUTH_SERVER} network=${NW1_NAME} ip=${AUTH_HOST}"
+
+ log "Power on ${AUTH_SERVER} VM..."
+ acli "vm.on ${AUTH_SERVER}"
+
+ _attempts=45
+ _loop=0
+ _sleep=60
+
+ while true ; do
+ (( _loop++ ))
+
+ _test=$(curl ${CURL_OPTS} -X GET http://${AUTH_HOST}:8000/ | grep "${_autoad_success}")
+ if [[ "${_test}" == "${_autoad_success}" ]]; then
+ log "${AUTH_SERVER} is ready."
+ sleep ${_sleep}
+ break
+ elif (( ${_loop} > ${_attempts} )); then
+ log "Error ${_error}: ${AUTH_SERVER} VM running: giving up after ${_loop} tries."
+ #_result=$(source /etc/profile.d/nutanix_env.sh \
+ # && for _vm in $(source /etc/profile.d/nutanix_env.sh && acli vm.list | grep ${AUTH_SERVER}) ; do acli -y vm.delete $_vm; done)
+ # acli image.delete ${AUTH_SERVER}${_autodc_release}
+ #log "Remediate by deleting the ${AUTH_SERVER} VM from PE (just attempted by this script: ${_result}) and then running acli $_"
+ exit ${_error}
+ else
+ log "_test ${_loop}/${_attempts}=|${_test}|: sleep ${_sleep} seconds..."
+ sleep ${_sleep}
+ fi
+ done
+
+ fi
;;
'AutoDC')
local _autodc_auth
@@ -42,8 +115,10 @@ function authentication_source() {
local _autodc_status="systemctl show ${_autodc_service} --property=SubState"
local _autodc_success='SubState=running'
- if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then
- log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC-2.0..."
+ #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then
+ if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then
+ log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC2..."
+
_autodc_auth=" --username=${AUTH_ADMIN_USER} --password=${AUTH_ADMIN_PASS}"
_autodc_index=''
_autodc_release=2
@@ -51,15 +126,6 @@ function authentication_source() {
_autodc_restart="sleep 2 && service ${_autodc_service} stop && sleep 5 && service ${_autodc_service} start"
_autodc_status="service ${_autodc_service} status"
_autodc_success=' * status: started'
-
- # REVIEW: override global.vars
- export AUTODC_REPOS=(\
- 'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \
- 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \
- # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \
- 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
- 'http://10.59.103.143:8000/autodc-2.0.qcow2' \
- )
fi
dns_check "dc${_autodc_index}.${AUTH_FQDN}"
@@ -149,6 +215,16 @@ function authentication_source() {
fi
done
+ # Adding the needed group and users to the AutoDC that may be used. Calm would otherwise have no BootInfra Project
+ remote_exec 'SSH' 'AUTH_SERVER' \
+ 'samba-tool group add "SSP Custom"' \
+ 'OPTIONAL'
+ sleep ${_sleep}
+ remote_exec 'SSH' 'AUTH_SERVER' \
+ 'for i in `samba-tool user list | grep ^user`; do samba-tool group addmembers "SSP Custom" $i;done' \
+ 'OPTIONAL'
+ sleep ${_sleep}
+
fi
;;
'OpenLDAP')
@@ -157,6 +233,10 @@ function authentication_source() {
esac
}
+###############################################################################################################################################################################
+# Routine to get the Nutanix Files injected
+###############################################################################################################################################################################
+
function files_install() {
local _ncli_softwaretype='FILE_SERVER'
local _ncli_software_type='afs'
@@ -178,6 +258,255 @@ function files_install() {
fi
}
+###############################################################################################################################################################################
+# Routine to get the Nutanix File Analytics injected
+###############################################################################################################################################################################
+
+function file_analytics_install() {
+  # Stages the File Analytics software bundle on PE.
+  # Idempotent: skips the download when a COMPLETED entry matching
+  # FILE_ANALYTICS_VERSION already appears in 'ncli software list'.
+  # Globals (read): FILE_ANALYTICS_VERSION
+  # Returns: exits 13 if the jq dependency cannot be installed.
+  local _ncli_softwaretype='FILE_ANALYTICS'
+  local _ncli_software_type='file_analytics'
+  local _test
+
+  dependencies 'install' 'jq' || exit 13
+
+  log "IDEMPOTENCY: checking for ${_ncli_software_type} completed..."
+  _test=$(source /etc/profile.d/nutanix_env.sh \
+    && ncli --json=true software list \
+    | jq -r \
+      '.data[] | select(.softwareType == "'${_ncli_softwaretype}'") | select(.status == "COMPLETED") | .version')
+
+  if [[ ${_test} != "${FILE_ANALYTICS_VERSION}" ]]; then
+    # log wording fixed: this routine stages File Analytics, not Files
+    log "File Analytics ${FILE_ANALYTICS_VERSION} not completed. ${_test}"
+    ntnx_download "${_ncli_software_type}"
+  else
+    log "IDEMPOTENCY: File Analytics ${FILE_ANALYTICS_VERSION} already completed."
+  fi
+}
+
+###############################################################################################################################################################################
+# Create File Server
+###############################################################################################################################################################################
+
+function create_file_server() {
+ #local CURL_HTTP_OPTS=' --max-time 25 --silent --show-error --header Content-Type:application/json --header Accept:application/json --insecure '
+ local _fileserver_name="BootcampFS"
+ local _internal_nw_name="${1}"
+ local _internal_nw_uuid
+ local _external_nw_name="${2}"
+ local _external_nw_uuid
+ local _test
+ local _maxtries=30
+ local _tries=0
+ local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers"
+ local _grab_afs_version="https://localhost:9440/PrismGateway/services/rest/v1/upgrade/afs/softwares"
+ local _ntp_formatted="$(echo $NTP_SERVERS | sed -r 's/[^,]+/'\"'&'\"'/g')"
+
+ # Get dynamically the version of the AFS that has been installed
+ afs_version=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_grab_afs_version} | jq '.entities[] | select (.status=="COMPLETED") .version' | tr -d \")
+
+ log "Found installed version: $afs_version of Nutanix Files..."
+
+ echo "Get cluster network and storage container UUIDs..."
+ _internal_nw_uuid=$(acli net.get ${_internal_nw_name} \
+ | grep "uuid" | cut -f 2 -d ':' | xargs)
+ _external_nw_uuid=$(acli net.get ${_external_nw_name} \
+ | grep "uuid" | cut -f 2 -d ':' | xargs)
+ _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \
+ | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs)
+ echo "${_internal_nw_name} network UUID: ${_internal_nw_uuid}"
+ echo "${_external_nw_name} network UUID: ${_external_nw_uuid}"
+ echo "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}"
+
+ HTTP_JSON_BODY=$(cat <= 5 && ${_pc_version[1]} >= 10 )); then
- _test=$(ncli multicluster add-to-multicluster \
- external-ip-address-or-svm-ips=${PC_HOST} \
- username=${PRISM_ADMIN} password=${PE_PASSWORD})
- log "PC>=5.10, manual join PE to PC = |${_test}|"
+ # If we are being called via the we-*.sh, we need to change the lib.common.sh to we-lib.common.sh
+ if [[ ${PC_LAUNCH} != *"we-"* ]]; then
+ _dependencies+=" lib.common.sh"
+ else
+ _dependencies+=" we-lib.common.sh"
fi
if [[ -e ${RELEASE} ]]; then
@@ -246,15 +743,22 @@ function pc_configure() {
remote_exec 'SCP' 'PC' ${_container}.tar 'OPTIONAL' &
fi
done
+ #####################################################################################
+  ### Handing off to the PC for the rest of the installation
+ #####################################################################################
+ ## TODO: If DEBUG is set, we run the below command with bash -x
_command="EMAIL=${EMAIL} \
PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
- PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC"
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC"
log "Remote asynchroneous launch PC configuration script... ${_command}"
remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &"
log "PC Configuration complete: try Validate Staged Clusters now."
}
+###############################################################################################################################################################################
+# Routine to install the PC in the PE
+###############################################################################################################################################################################
function pc_install() {
local _ncli_softwaretype='PRISM_CENTRAL_DEPLOY'
local _nw_name="${1}"
@@ -273,8 +777,10 @@ function pc_install() {
log "Get cluster network and storage container UUIDs..."
_nw_uuid=$(acli "net.get ${_nw_name}" \
| grep "uuid" | cut -f 2 -d ':' | xargs)
- _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \
+ _storage_default_uuid=$(ncli container ls name=${STORAGE_IMAGES} \
| grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs)
+ #_storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \
+ # | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs)
log "${_nw_name} network UUID: ${_nw_uuid}"
log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}"
@@ -292,7 +798,7 @@ function pc_install() {
# shellcheck disable=2206
_pc_version=(${PC_VERSION//./ })
- if (( ${_pc_version[0]} = 5 && ${_pc_version[1]} <= 6 )); then
+ if (( ${_pc_version[0]} == 5 && ${_pc_version[1]} <= 6 )); then
_should_auto_register='"should_auto_register":true,'
fi
@@ -311,9 +817,9 @@ function pc_install() {
"data_disk_size_bytes":536870912000,
"nic_list":[{
"network_configuration":{
- "subnet_mask":"255.255.255.128",
+ "subnet_mask":"${SUBNET_MASK}",
"network_uuid":"${_nw_uuid}",
- "default_gateway":"${IPV4_PREFIX}.1"
+ "default_gateway":"${NW1_GATEWAY}"
},
"ip_list":["${PC_HOST}"]
}],
@@ -335,6 +841,9 @@ EOF
fi
}
+###############################################################################################################################################################################
+# Routine to set the PE to use the AutoDC for authentication
+###############################################################################################################################################################################
function pe_auth() {
local _aos
local _aos_version
@@ -378,16 +887,19 @@ function pe_auth() {
fi
}
+###############################################################################################################################################################################
+# Routine to set PE's initial configuration
+###############################################################################################################################################################################
function pe_init() {
args_required 'DATA_SERVICE_IP EMAIL \
SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \
STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \
SLEEP ATTEMPTS'
- if [[ `ncli cluster get-params | grep 'External Data' | \
- awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then
- log "IDEMPOTENCY: Data Services IP set, skip."
- else
+ #if [[ `ncli cluster get-params | grep 'External Data' | \
+ # awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then
+ # log "IDEMPOTENCY: Data Services IP set, skip."
+ #else
log "Configure SMTP"
ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \
from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS}
@@ -418,9 +930,12 @@ function pe_init() {
log "Set Data Services IP address to ${DATA_SERVICE_IP}"
ncli cluster edit-params external-data-services-ip-address=${DATA_SERVICE_IP}
- fi
+ #fi
}
+###############################################################################################################################################################################
+# Routine to accept the EULA and disable pulse
+###############################################################################################################################################################################
function pe_license() {
local _test
args_required 'CURL_POST_OPTS PE_PASSWORD'
@@ -461,6 +976,9 @@ function pe_license() {
fi
}
+###############################################################################################################################################################################
+# Routine to unregister PE from PC
+###############################################################################################################################################################################
function pc_unregister {
local _cluster_uuid
local _pc_uuid
@@ -491,6 +1009,9 @@ function pc_unregister {
pc_destroy
}
+###############################################################################################################################################################################
+# Routine to destroy the PC VM
+###############################################################################################################################################################################
function pc_destroy() {
local _vm
@@ -501,3 +1022,266 @@ function pc_destroy() {
acli vm.off ${_vm} && acli -y vm.delete ${_vm}
done
}
+
+###################################################################################################################################################
+# Routine to deploy PrismProServer
+###################################################################################################################################################
+
+function prism_pro_server_deploy() {
+# Import the Prism Ops server disk image (when absent) and deploy a VM from it.
+# Expects PrismOpsServer, QCOW2_REPOS, STORAGE_IMAGES, NW1_NAME and
+# PrismOpsServer_HOST to be set by the sourced global vars -- TODO confirm.
+if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PrismOpsServer} | wc --lines) == 0 )); then
+  log "Import ${PrismOpsServer} image from ${QCOW2_REPOS}..."
+  acli image.create ${PrismOpsServer} \
+    image_type=kDiskImage wait=true \
+    container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${PrismOpsServer}.qcow2"
+else
+  log "Image found, assuming ready. Skipping ${PrismOpsServer} import."
+fi
+
+### Deploy the Prism Ops server VM from the imported image (static IP) ###
+
+log "Create ${PrismOpsServer} VM based on ${PrismOpsServer} image"
+acli "vm.create ${PrismOpsServer} num_vcpus=2 num_cores_per_vcpu=1 memory=2G"
+# vmstat --wide --unit M --active # suggests 2G sufficient, was 4G
+#acli "vm.disk_create ${VMNAME} cdrom=true empty=true"
+acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}"
+#acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}"
+acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME} ip=${PrismOpsServer_HOST}"
+
+log "Power on ${PrismOpsServer} VM..."
+acli "vm.on ${PrismOpsServer}"
+
+}
+
+###################################################################################################################################################
+# Routine to create the Era Storage container for the Era Bootcamps.
+###################################################################################################################################################
+
+function create_era_container() {
+  # Create the dedicated Era storage container (compression on, 60 min delay).
+  log "Creating Era Storage Container"
+  ncli container create name="${STORAGE_ERA}" rf="${ERA_Container_RF}" sp-name="${STORAGE_POOL}" enable-compression=true compression-delay=60
+  # NOTE(review): not idempotent -- ncli errors if the container already exists.
+}
+
+#########################################################################################################################################
+# Routine to Create Era Server
+#########################################################################################################################################
+
+function deploy_era() {
+# Import the Era server image (when absent) and deploy the Era server VM.
+### Import Image ###
+# NOTE(review): source_url has no ".qcow2" suffix here, unlike sibling deploys -- confirm ERAServerImage already carries the extension.
+if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${ERAServerImage} | wc --lines) == 0 )); then
+  log "Import ${ERAServerImage} image from ${QCOW2_REPOS}..."
+  acli image.create ${ERAServerImage} \
+    image_type=kDiskImage wait=true \
+    container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${ERAServerImage}"
+else
+  log "Image found, assuming ready. Skipping ${ERAServerImage} import."
+fi
+
+### Deploy the Era server VM (1 vCPU x 4 cores, 4G RAM, static IP ERA_HOST) ###
+
+log "Create ${ERAServerName} VM based on ${ERAServerImage} image"
+acli "vm.create ${ERAServerName} num_vcpus=1 num_cores_per_vcpu=4 memory=4G"
+acli "vm.disk_create ${ERAServerName} clone_from_image=${ERAServerImage}"
+acli "vm.nic_create ${ERAServerName} network=${NW1_NAME} ip=${ERA_HOST}"
+
+log "Power on ${ERAServerName} VM..."
+acli "vm.on ${ERAServerName}"
+
+}
+
+
+#########################################################################################################################################
+# Routine to Create Era Bootcamp PreProvisioned MSSQL Server
+#########################################################################################################################################
+
+function deploy_mssql() {
+  # Import the two MSSQL source disk images (when absent), then create and power on one pre-provisioned MSSQL VM per entry in USERS.
+  if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${MSSQL_SourceVM_Image1} | wc --lines) == 0 )); then
+    log "Import ${MSSQL_SourceVM_Image1} image from ${QCOW2_REPOS}..."
+    # Fix: the presence check and log above referenced ${MSSQL_SourceVM_Image}, which is not set anywhere in this change (only Image1/Image2 are); an empty grep pattern broke the idempotency test. TODO confirm against global.vars.sh.
+    acli image.create ${MSSQL_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image1}.qcow2"
+    acli image.create ${MSSQL_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image2}.qcow2"
+  else
+    log "Image found, assuming ready. Skipping ${MSSQL_SourceVM} import."
+  fi
+  # One clone per user, named <user>_<MSSQL_SourceVM>, attached to NW1.
+  for _user in "${USERS[@]}" ; do
+
+    SourceVM="${_user}_${MSSQL_SourceVM}"
+
+    echo "## ${SourceVM} Creation_INPROGRESS ##"
+    acli "vm.create ${SourceVM} memory=2048M num_cores_per_vcpu=1 num_vcpus=2"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image1}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image2}"
+    acli "vm.nic_create ${SourceVM} network=${NW1_NAME}"
+    echo "## ${SourceVM} - Powering On ##"
+    acli "vm.on ${SourceVM}"
+    echo "## ${SourceVM} Creation_COMPLETE ##"
+
+  done
+
+}
+
+#########################################################################################################################################
+# Routine to Create Era Bootcamp PreProvisioned Oracle Server
+#########################################################################################################################################
+
+function deploy_oracle_12c() {
+  # Import the Oracle 12c boot + six data disk images (when absent), then create and power on one pre-provisioned Oracle 12c VM per entry in USERS.
+  if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_12c_SourceVM_BootImage} | wc --lines) == 0 )); then
+    log "Import ${Oracle_12c_SourceVM_BootImage} image from ${QCOW2_REPOS}..."
+    acli image.create ${Oracle_12c_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_BootImage}.qcow2"
+    acli image.create ${Oracle_12c_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image1}.qcow2"
+    acli image.create ${Oracle_12c_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image2}.qcow2"
+    acli image.create ${Oracle_12c_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image3}.qcow2"
+    acli image.create ${Oracle_12c_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image4}.qcow2"
+    acli image.create ${Oracle_12c_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image5}.qcow2"
+    acli image.create ${Oracle_12c_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image6}.qcow2"
+  else
+    log "Image found, assuming ready. Skipping ${Oracle_12c_SourceVM_BootImage} import."
+  fi
+  # Fix: the skip-log above referenced undefined ${Oracle_SourceVM}; it now mirrors deploy_oracle_19c and logs the boot image.
+  for _user in "${USERS[@]}" ; do
+    # One clone per user, named <user>_<Oracle_12c_SourceVM>.
+    SourceVM="${_user}_${Oracle_12c_SourceVM}"
+    # Fix: the Image2/Image3 disk lines below were missing the space before clone_from_image, so acli saw a bogus VM name and those two disks were never attached.
+    echo "## ${SourceVM} Creation_INPROGRESS ##"
+    acli "vm.create ${SourceVM} memory=4G num_cores_per_vcpu=2 num_vcpus=2"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_BootImage}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image1}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image2}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image3}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image4}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image5}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image6}"
+    acli "vm.nic_create ${SourceVM} network=${NW1_NAME}"
+    echo "## ${SourceVM} - Powering On ##"
+    acli "vm.on ${SourceVM}"
+    echo "## ${SourceVM} Creation_COMPLETE ##"
+
+  done
+
+}
+
+#########################################################################################################################################
+# Routine to Create Era Bootcamp PreProvisioned Oracle 19c Server
+#########################################################################################################################################
+
+function deploy_oracle_19c() {
+  # Import the Oracle 19c boot + nine data disk images (when absent), then create and power on one pre-provisioned Oracle 19c VM per entry in USERS.
+  if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_19c_SourceVM_BootImage} | wc --lines) == 0 )); then
+    log "Import ${Oracle_19c_SourceVM_BootImage} image from ${QCOW2_REPOS}..."
+    acli image.create ${Oracle_19c_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_BootImage}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image1}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image2}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image3}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image4}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image5}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image6}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image7} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image7}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image8} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image8}.qcow2"
+    acli image.create ${Oracle_19c_SourceVM_Image9} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image9}.qcow2"
+  else
+    log "Image found, assuming ready. Skipping ${Oracle_19c_SourceVM_BootImage} import."
+  fi
+  # One clone per user, named <user>_<Oracle_19c_SourceVM>.
+  for _user in "${USERS[@]}" ; do
+    # Boot disk plus nine data disks cloned from the imported images, NIC on NW1.
+    SourceVM="${_user}_${Oracle_19c_SourceVM}"
+
+    echo "## ${SourceVM} Creation_INPROGRESS ##"
+    acli "vm.create ${SourceVM} memory=8G num_cores_per_vcpu=1 num_vcpus=2"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_BootImage}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image1}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image2}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image3}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image4}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image5}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image6}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image7}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image8}"
+    acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image9}"
+    acli "vm.nic_create ${SourceVM} network=${NW1_NAME}"
+    echo "## ${SourceVM} - Powering On ##"
+    acli "vm.on ${SourceVM}"
+    echo "### ${SourceVM} Creation_COMPLETE ##"
+
+  done
+
+}
+
+###################################################################################################################################################
+# Routine to deploy the Peer Management Center
+###################################################################################################################################################
+# MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands
+function deploy_peer_mgmt_server() {
+  # Import the Peer Management Center image (when absent), sysprep-customize and deploy VM "$1".
+  if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerMgmtServer} | wc --lines) == 0 )); then
+    log "Import ${PeerMgmtServer} image from ${QCOW2_REPOS}..."
+    acli image.create ${PeerMgmtServer} \
+      image_type=kDiskImage wait=true \
+      container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}peer/${PeerMgmtServer}.qcow2"
+  else
+    log "Image found, assuming ready. Skipping ${PeerMgmtServer} import."
+  fi
+  echo "Creating temp folder and applying perms..."
+  mkdir -p /home/nutanix/peer_staging/ # fix: -p so a re-run does not error when the folder already exists
+  VMNAME=$1
+  ### Get sysprep config file ready ###
+  echo "${VMNAME} - Prepping sysprep config..."
+  wget http://10.42.194.11/workshop_staging/peer/unattend-pmc.xml -P /home/nutanix/peer_staging/ # NOTE(review): hardcoded staging host
+  mv /home/nutanix/peer_staging/unattend-pmc.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml
+  chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml
+  sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml
+  ### Deploy PMC Server ###
+  echo "${VMNAME} - Deploying VM..."
+  #log "Create ${VMNAME} VM based on ${IMAGENAME} image"
+  acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml"
+  acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}"
+  # MTM TODO replace net1 with appropriate variable
+  acli "vm.nic_create ${VMNAME} network=${NW1_NAME}"
+  #log "Power on ${VMNAME} VM..."
+  echo "${VMNAME} - Powering on..."
+  acli "vm.on ${VMNAME}"
+  echo "${VMNAME} - Deployed."
+}
+
+###################################################################################################################################################
+# Routine to deploy a Peer Agent
+###################################################################################################################################################
+# MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands
+function deploy_peer_agent_server() {
+  # Import the Peer Agent image (when absent), sysprep-customize and deploy VM "$1". Assumes /home/nutanix/peer_staging/ exists (or wget -P creates it) -- TODO confirm ordering vs deploy_peer_mgmt_server.
+  if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerAgentServer} | wc --lines) == 0 )); then
+    log "Import ${PeerAgentServer} image from ${QCOW2_REPOS}..."
+    acli image.create ${PeerAgentServer} \
+      image_type=kDiskImage wait=true \
+      container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}peer/${PeerAgentServer}.qcow2"
+  else
+    log "Image found, assuming ready. Skipping ${PeerAgentServer} import."
+  fi
+  VMNAME=$1
+  ### Get sysprep config file ready ###
+  echo "${VMNAME} - Prepping sysprep config..."
+  wget http://10.42.194.11/workshop_staging/peer/unattend-agent.xml -P /home/nutanix/peer_staging/
+  mv /home/nutanix/peer_staging/unattend-agent.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml
+  chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml
+  sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml
+  ### Deploy Agent Server ###
+  echo "${VMNAME} - Deploying VM..."
+  #log "Create ${VMNAME} VM based on ${IMAGENAME} image"
+  acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml"
+  acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}"
+  # MTM TODO replace net1 with appropriate variable
+  acli "vm.nic_create ${VMNAME} network=${NW1_NAME}"
+  #log "Power on ${VMNAME} VM..."
+  echo "${VMNAME} - Powering on..."
+  acli "vm.on ${VMNAME}"
+  echo "${VMNAME} - Deployed."
+}
diff --git a/scripts/lib.shell-convenience.sh b/scripts/lib.shell-convenience.sh
index 4ddc8aa..41f1806 100755
--- a/scripts/lib.shell-convenience.sh
+++ b/scripts/lib.shell-convenience.sh
@@ -180,14 +180,15 @@ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\
${SSH_OPTS} \\
nutanix@${PE_HOST}
-pkill -f calm ; tail -f calm*log
+pkill -f calm ; tail -f *log
EOF
echo 'rm -rf master.zip calm*.log stageworkshop-master/ && \'
echo ' curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh \'
echo ' && SOURCE=${_} 'EMAIL=${EMAIL} PE_PASSWORD=${_password}' sh ${_##*/} \'
- echo ' && tail -f ~/calm*.log'
+ echo ' && tail -f ~/*log'
echo -e "cd stageworkshop-master/scripts/ && \ \n PE_HOST=${PE_HOST} PE_PASSWORD='${_password}' PC_VERSION=${PC_DEV_VERSION} EMAIL=${EMAIL} ./calm.sh 'PE'"
+ echo "ncli multicluster add-to-multicluster external-ip-address-or-svm-ips=10.42.x.39 username=admin password='x'"
;;
AUTH | auth | ldap)
_host=${AUTH_HOST}
@@ -198,7 +199,7 @@ EOF
case "${2}" in
log | logs)
- _command='date; echo; tail -f calm*log'
+ _command='date; echo; tail -f *log'
;;
calm | inflight)
_command='ps -efww | grep calm'
diff --git a/scripts/localhost.sh b/scripts/localhost.sh
new file mode 100755
index 0000000..a2896cd
--- /dev/null
+++ b/scripts/localhost.sh
@@ -0,0 +1,99 @@
+
+
+HTTP_CACHE_HOST='localhost'
+HTTP_CACHE_PORT=8181
+# AutoDC/LDAP authentication defaults for the NTNXLAB lab domain.
+ AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file
+ AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))"
+ LDAP_PORT=389
+ AUTH_FQDN='ntnxlab.local'
+ AUTH_DOMAIN='NTNXLAB'
+AUTH_ADMIN_USER='administrator@'${AUTH_FQDN}
+AUTH_ADMIN_PASS='nutanix/4u'
+AUTH_ADMIN_GROUP='SSP Admins'
+ AUTODC_REPOS=(\
+   'http://10.42.8.50/images/AutoDC.qcow2' \
+   'http://10.42.8.50/images/AutoDC2.qcow2' \
+   'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \
+   'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \
+  # Alternate sources, currently disabled:
+ #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \
+ # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \
+ #'http://10.59.103.143:8000/autodc-2.0.qcow2' \
+
+)
+
+# Site-specific DNS/network overrides for Nutanix HPOC/Marketing clusters, keyed on the first two IP octets (10.20, 10.21, 10.55, 10.42, 10.132)
+# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema
+case "${OCTET[0]}.${OCTET[1]}" in
+  10.20 ) #Marketing: us-west = SV
+    DNS_SERVERS='10.21.253.10'
+    ;;
+  10.21 ) #HPOC: us-west = SV
+    if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then
+      log 'GPU cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters'
+      exit 0
+    fi
+
+    # backup cluster; override relative IP addressing
+    if (( ${OCTET[2]} == 249 )); then
+      AUTH_HOST="${IPV4_PREFIX}.118"
+      PC_HOST="${IPV4_PREFIX}.119"
+    fi
+
+    DNS_SERVERS='10.21.253.10,10.21.253.11'
+    NW2_NAME='Secondary'
+    NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 ))
+    NW2_SUBNET="${IPV4_PREFIX}.129/25"
+    NW2_DHCP_START="${IPV4_PREFIX}.132"
+    NW2_DHCP_END="${IPV4_PREFIX}.253"
+    ;;
+  10.55 ) # HPOC us-east = DUR
+    DNS_SERVERS='10.21.253.11'
+    NW2_NAME='Secondary'
+    NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 ))
+    NW2_SUBNET="${IPV4_PREFIX}.129/25"
+    NW2_DHCP_START="${IPV4_PREFIX}.132"
+    NW2_DHCP_END="${IPV4_PREFIX}.253"
+    ;;
+  10.42 ) # HPOC us-west = PHX
+    DNS_SERVERS='10.42.196.10'
+    NW2_NAME='Secondary'
+    NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 ))
+    NW2_SUBNET="${IPV4_PREFIX}.129/25"
+    NW2_DHCP_START="${IPV4_PREFIX}.132"
+    NW2_DHCP_END="${IPV4_PREFIX}.253"
+    ;;
+  10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR
+    DNS_SERVERS='10.132.71.40'
+    NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17"
+    NW1_DHCP_START="${IPV4_PREFIX}.100"
+    NW1_DHCP_END="${IPV4_PREFIX}.250"
+    # PC deploy file local override, TODO:30 make a PC_URL array and eliminate
+    PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar
+    PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json
+    PC_STABLE_METAURL=${PC_CURRENT_METAURL}
+
+    QCOW2_IMAGES=(\
+      Centos7-Base.qcow2 \
+      Centos7-Update.qcow2 \
+      Windows2012R2.qcow2 \
+      panlm-img-52.qcow2 \
+      kx_k8s_01.qcow2 \
+      kx_k8s_02.qcow2 \
+      kx_k8s_03.qcow2 \
+    )
+    ;;
+esac
+
+# NOTE(review): HTTP_CACHE_HOST/HTTP_CACHE_PORT were re-assigned here with the
+# same values already set at the top of this file; duplicates removed.
+
+ATTEMPTS=40
+SLEEP=60 # pause (in seconds) between ATTEMPTS
+
+CURL_OPTS='--insecure --silent --show-error' # --verbose'
+CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null"
+CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}"
+SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null'
+SSH_OPTS+=' -q' # -v'
diff --git a/scripts/objects_bootcamp.sh b/scripts/objects_bootcamp.sh
new file mode 100755
index 0000000..b771590
--- /dev/null
+++ b/scripts/objects_bootcamp.sh
@@ -0,0 +1,193 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require pre-requestite checks to work!
+
+case ${1} in
+  PE | pe )
+    . lib.pe.sh
+    # PE arm: stage PE (license, init, network, auth), deploy Prism Ops + Files + File Analytics, then install PC, hand off configuration, and deploy the Peer VMs.
+    export AUTH_SERVER='AutoAD'
+
+    args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION'
+    ssh_pubkey & # non-blocking, parallel suitable
+
+    dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+    && pe_license \
+    && pe_init \
+    && network_configure \
+    && authentication_source \
+    && pe_auth \
+    && prism_pro_server_deploy \
+    && files_install \
+    && sleep 30 \
+    && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+    && sleep 30 \
+    && file_analytics_install \
+    && sleep 30 \
+    && create_file_analytics_server \
+    && sleep 30
+
+  if (( $? == 0 )) ; then
+    pc_install "${NW1_NAME}" \
+    && prism_check 'PC' \
+
+    if (( $? == 0 )) ; then
+      _command="EMAIL=${EMAIL} \
+        PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+        PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+      cluster_check \
+      && log "Remote asynchroneous PC Image import script... ${_command}" \
+      && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+      pc_configure \
+      && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+      log "PE = https://${PE_HOST}:9440"
+      log "PC = https://${PC_HOST}:9440"
+      # Deploy the Peer Software management center and two agent VMs.
+      deploy_peer_mgmt_server "${PMC}" \
+      && deploy_peer_agent_server "${AGENTA}" \
+      && deploy_peer_agent_server "${AGENTB}"
+      #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+      #dependencies 'remove' 'sshpass'
+      finish
+    fi
+  else
+    finish
+    _error=18
+    log "Error ${_error}: in main functional chain, exit!"
+    exit ${_error}
+  fi
+
+  ;;
+  PC | pc )
+    . lib.pc.sh
+    # PC arm: intended to run on the deployed PC (launched remotely by pc_configure) -- TODO confirm.
+    #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+    #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+    #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+    #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+    export QCOW2_IMAGES=(\
+      Windows2016.qcow2 \
+      Win10v1903.qcow2 \
+      WinToolsVM.qcow2 \
+      Linux_ToolsVM.qcow2 \
+      CentOS7.qcow2 \
+    )
+    export ISO_IMAGES=(\
+      Nutanix-VirtIO-1.1.5.iso \
+    )
+    # Image lists consumed later in the staging chain (presumably by images/pc_cluster_img_import -- confirm).
+    run_once
+
+    dependencies 'install' 'jq' || exit 13
+
+    ssh_pubkey & # non-blocking, parallel suitable
+
+    pc_passwd
+    ntnx_cmd # check cli services available?
+
+    export NUCLEI_SERVER='localhost'
+    export NUCLEI_USERNAME="${PRISM_ADMIN}"
+    export NUCLEI_PASSWORD="${PE_PASSWORD}"
+    # nuclei -debug -username admin -server localhost -password x vm.list
+
+    if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+      log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+      pe_determine ${1}
+      . global.vars.sh # re-populate PE_HOST dependencies
+    else
+      CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+        jq -r .data[0].clusterDetails.clusterName)
+      if [[ ${CLUSTER_NAME} != '' ]]; then
+        log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+      fi
+    fi
+
+    if [[ ! -z "${2}" ]]; then # hidden bonus
+      log "Don't forget: $0 first.last@nutanixdc.local%password"
+      calm_update && exit 0
+    fi
+
+    export ATTEMPTS=2
+    export SLEEP=10
+
+    pc_init \
+    && pc_dns_add \
+    && pc_ui \
+    && pc_auth \
+    && pc_smtp
+
+    ssp_auth \
+    && calm_enable \
+    && objects_enable \
+    && lcm \
+    && pc_project \
+    && object_store \
+    && images \
+    && flow_enable \
+    && pc_cluster_img_import \
+    && prism_check 'PC'
+    _rv=$? # fix: capture the staging chain's status here -- pc_admin and unset below clobber $?, so the old check always passed
+    log "Non-blocking functions (in development) follow."
+    #pc_project
+    pc_admin
+    # ntnx_download 'AOS' # function in lib.common.sh
+
+    unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+    if (( ${_rv} == 0 )); then
+      #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+      #&&
+      log "PC = https://${PC_HOST}:9440"
+      finish
+    else
+      _error=19
+      log "Error ${_error}: failed to reach PC!"
+      exit ${_error}
+    fi
+    ;;
+  FILES | files | afs )
+    files_install
+    ;;
+esac
+#!/usr/bin/env bash
+# NOTE(review): this tail (second shebang onward) looks like a second script
+# accidentally concatenated onto the end of objects_bootcamp.sh -- when run it
+#__main()__________
+# re-sources the libs and re-runs files_install for the PE case; confirm intent.
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'PE_PASSWORD'
+
+case ${1} in
+  PE | pe )
+    . lib.pe.sh
+
+    args_required 'PE_HOST'
+
+    dependencies 'install' 'jq' \
+    && files_install
+
+    log "PE = https://${PE_HOST}:9440"
+    ;;
+esac
+
+finish
diff --git a/scripts/poc_workshop_base_staging.sh b/scripts/poc_workshop_base_staging.sh
new file mode 100755
index 0000000..4a4816e
--- /dev/null
+++ b/scripts/poc_workshop_base_staging.sh
@@ -0,0 +1,141 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+ export NW2_NAME='Xray'
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && pocguide_network_configure \
+ && authentication_source \
+ && pe_auth \
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export _prio_images_arr=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ )
+
+ export QCOW2_IMAGES=(\
+ Windows2019.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && priority_images \
+ && images \
+ && deploy_pocworkshop_vms \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+
+esac
diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh
new file mode 100755
index 0000000..cbc69e6
--- /dev/null
+++ b/scripts/privatecloud_bootcamp.sh
@@ -0,0 +1,171 @@
+#!/usr/bin/env bash
+ #-x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ export _external_nw_name="${1}"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \
+ veeam/VeeamAHVProxy2.0.404.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ veeam/VBR_10.0.0.4442.iso \
+ Windows2016.iso \
+ )
+
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && seedPC \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/splunk_bootcamp.sh b/scripts/splunk_bootcamp.sh
new file mode 100755
index 0000000..08bb292
--- /dev/null
+++ b/scripts/splunk_bootcamp.sh
@@ -0,0 +1,165 @@
+#!/usr/bin/env bash
+ #-x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ export AUTH_SERVER='AutoAD'
+
+ export _external_nw_name="${1}"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW1_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ #export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ #export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ #export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Nutanix-VirtIO-1.1.5.iso \
+ )
+
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/stage_citrixhow.sh b/scripts/stage_citrixhow.sh
deleted file mode 100644
index 6ae85a5..0000000
--- a/scripts/stage_citrixhow.sh
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/bin/bash
-#
-# Please configure according to your needs
-#
-function pc_remote_exec {
- sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null nutanix@10.21.${MY_HPOC_NUMBER}.39 "$@"
-}
-function pc_send_file {
- sshpass -p nutanix/4u scp -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null "$1" nutanix@10.21.${MY_HPOC_NUMBER}.39:/home/nutanix/"$1"
-}
-
-# Loging date format
-#Never:0 Make logging format configurable
-#MY_LOG_DATE='date +%Y-%m-%d %H:%M:%S'
-# Script file name
-MY_SCRIPT_NAME=`basename "$0"`
-# Derive HPOC number from IP 3rd byte
-#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1)
-MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}')
-array=(${MY_CVM_IP//./ })
-MY_HPOC_NUMBER=${array[2]}
-# HPOC Password (if commented, we assume we get that from environment)
-#MY_PE_PASSWORD='nx2TechXXX!'
-STORAGE_POOL='SP01'
-STORAGE_DEFAULT='Default'
-STORAGE_IMAGES='Images'
-MY_DOMAIN_FQDN='ntnxlab.local'
-MY_DOMAIN_NAME='NTNXLAB'
-MY_DOMAIN_USER='administrator@ntnxlab.local'
-MY_DOMAIN_PASS='nutanix/4u'
-MY_DOMAIN_ADMIN_GROUP='SSP Admins'
-MY_DOMAIN_URL="ldaps://10.21.${MY_HPOC_NUMBER}.40/"
-MY_PRIMARY_NET_NAME='Primary'
-MY_PRIMARY_NET_VLAN='0'
-MY_SECONDARY_NET_NAME='Secondary'
-MY_SECONDARY_NET_VLAN="${MY_HPOC_NUMBER}1"
-MY_PC_SRC_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central.tar'
-MY_PC_META_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central_metadata.json'
-MY_AFS_SRC_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable.qcow2'
-MY_AFS_META_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable-metadata.json'
-
-# From this point, we assume:
-# IP Range: 10.21.${MY_HPOC_NUMBER}.0/25
-# Gateway: 10.21.${MY_HPOC_NUMBER}.1
-# DNS: 10.21.253.10,10.21.253.11
-# Domain: nutanixdc.local
-# DHCP Pool: 10.21.${MY_HPOC_NUMBER}.50 - 10.21.${MY_HPOC_NUMBER}.120
-#
-# DO NOT CHANGE ANYTHING BELOW THIS LINE UNLESS YOU KNOW WHAT YOU'RE DOING!!
-#
-# Source Nutanix environments (for PATH and other things)
-source /etc/profile.d/nutanix_env.sh
-# Logging function
-function my_log {
- #echo `$MY_LOG_DATE`" $1"
- echo $(date "+%Y-%m-%d %H:%M:%S") $1
-}
-# Check if we got a password from environment or from the settings above, otherwise exit before doing anything
-if [[ -z ${MY_PE_PASSWORD+x} ]]; then
- my_log "No password provided, exiting"
- exit -1
-fi
-my_log "My PID is $$"
-my_log "Installing sshpass"
-sudo rpm -ivh https://fr2.rpmfind.net/linux/epel/7/x86_64/Packages/s/sshpass-1.06-1.el7.x86_64.rpm
-# Configure SMTP
-my_log "Configure SMTP"
-ncli cluster set-smtp-server address=nutanix-com.mail.protection.outlook.com from-email-address=cluster@nutanix.com port=25
-# Configure NTP
-my_log "Configure NTP"
-ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org
-# Rename default storage container to STORAGE_DEFAULT
-my_log "Rename default container to ${STORAGE_DEFAULT}"
-default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-')
-ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}"
-# Rename default storage pool to STORAGE_POOL
-my_log "Rename default storage pool to ${STORAGE_POOL}"
-default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g)
-ncli sp edit name="${default_sp}" new-name="${STORAGE_POOL}"
-# Check if there is a container named STORAGE_IMAGES, if not create one
-my_log "Check if there is a container named ${STORAGE_IMAGES}, if not create one"
-(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^${STORAGE_IMAGES}" 2>&1 > /dev/null) \
- && echo "Container ${STORAGE_IMAGES} already exists" \
- || ncli container create name="${STORAGE_IMAGES}" sp-name="${STORAGE_POOL}"
-# Set external IP address:
-#ncli cluster edit-params external-ip-address=10.21.${MY_HPOC_NUMBER}.37
-# Set Data Services IP address:
-my_log "Set Data Services IP address to 10.21.${MY_HPOC_NUMBER}.38"
-ncli cluster edit-params external-data-services-ip-address=10.21.${MY_HPOC_NUMBER}.38
-
-# Importing images
-MY_IMAGE="AutoDC"
-retries=1
-my_log "Importing ${MY_IMAGE} image"
-until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2 wait=true) =~ "complete" ]]; do
- let retries++
- if [ $retries -gt 5 ]; then
- my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation."
- acli vm.create STAGING-FAILED-${MY_IMAGE}
- break
- fi
- my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..."
- sleep 5
-done
-
-MY_IMAGE="CentOS"
-retries=1
-my_log "Importing ${MY_IMAGE} image"
-until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/CentOS7-04282018.qcow2 wait=true) =~ "complete" ]]; do
- let retries++
- if [ $retries -gt 5 ]; then
- my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation."
- acli vm.create STAGING-FAILED-${MY_IMAGE}
- break
- fi
- my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..."
- sleep 5
-done
-
-MY_IMAGE="Windows2012"
-retries=1
-my_log "Importing ${MY_IMAGE} image"
-until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows2012R2-04282018.qcow2 wait=true) =~ "complete" ]]; do
- let retries++
- if [ $retries -gt 5 ]; then
- my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation."
- acli vm.create STAGING-FAILED-${MY_IMAGE}
- break
- fi
- my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..."
- sleep 5
-done
-
-MY_IMAGE="Windows10"
-retries=1
-my_log "Importing ${MY_IMAGE} image"
-until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows10-1709-04282018.qcow2 wait=true) =~ "complete" ]]; do
- let retries++
- if [ $retries -gt 5 ]; then
- my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation."
- acli vm.create STAGING-FAILED-${MY_IMAGE}
- break
- fi
- my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..."
- sleep 5
-done
-
-MY_IMAGE="XenDesktop-7.15.iso"
-retries=1
-my_log "Importing ${MY_IMAGE} image"
-until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kIsoImage source_url=http://10.21.250.221/images/ahv/techsummit/XD715.iso wait=true) =~ "complete" ]]; do
- let retries++
- if [ $retries -gt 5 ]; then
- my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation."
- acli vm.create STAGING-FAILED-${MY_IMAGE}
- break
- fi
- my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..."
- sleep 5
-done
-
-# Remove existing VMs, if any
-my_log "Removing \"Windows 2012\" VM if it exists"
-acli -y vm.delete Windows\ 2012\ VM delete_snapshots=true
-my_log "Removing \"Windows 10\" VM if it exists"
-acli -y vm.delete Windows\ 10\ VM delete_snapshots=true
-my_log "Removing \"CentOS\" VM if it exists"
-acli -y vm.delete CentOS\ VM delete_snapshots=true
-
-# Remove Rx-Automation-Network network
-my_log "Removing \"Rx-Automation-Network\" Network if it exists"
-acli -y net.delete Rx-Automation-Network
-
-# Create primary network
-my_log "Create primary network:"
-my_log "Name: ${MY_PRIMARY_NET_NAME}"
-my_log "VLAN: ${MY_PRIMARY_NET_VLAN}"
-my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.1/25"
-my_log "Domain: ${MY_DOMAIN_NAME}"
-my_log "Pool: 10.21.${MY_HPOC_NUMBER}.50 to 10.21.${MY_HPOC_NUMBER}.125"
-acli net.create ${MY_PRIMARY_NET_NAME} vlan=${MY_PRIMARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.1/25
-acli net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME}
-acli net.add_dhcp_pool ${MY_PRIMARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.50 end=10.21.${MY_HPOC_NUMBER}.125
-
-# Create secondary network
-if [[ ${MY_SECONDARY_NET_NAME} ]]; then
- my_log "Create secondary network:"
- my_log "Name: ${MY_SECONDARY_NET_NAME}"
- my_log "VLAN: ${MY_SECONDARY_NET_VLAN}"
- my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.129/25"
- my_log "Domain: ${MY_DOMAIN_NAME}"
- my_log "Pool: 10.21.${MY_HPOC_NUMBER}.132 to 10.21.${MY_HPOC_NUMBER}.253"
- acli net.create ${MY_SECONDARY_NET_NAME} vlan=${MY_SECONDARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.129/25
- acli net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME}
- acli net.add_dhcp_pool ${MY_SECONDARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.132 end=10.21.${MY_HPOC_NUMBER}.253
-fi
-
-# Create AutoDC & power on
-my_log "Create DC VM based on AutoDC image"
-acli vm.create DC num_vcpus=2 num_cores_per_vcpu=1 memory=4G
-acli vm.disk_create DC cdrom=true empty=true
-acli vm.disk_create DC clone_from_image=AutoDC
-acli vm.nic_create DC network=${MY_PRIMARY_NET_NAME} ip=10.21.${MY_HPOC_NUMBER}.40
-my_log "Power on DC VM"
-acli vm.on DC
-
-# Need to wait for AutoDC to be up (30?60secs?)
-my_log "Waiting 60sec to give DC VM time to start"
-sleep 60
-
-# Configure PE external authentication
-my_log "Configure PE external authentication"
-ncli authconfig add-directory directory-type=ACTIVE_DIRECTORY connection-type=LDAP directory-url="${MY_DOMAIN_URL}" domain="${MY_DOMAIN_FQDN}" name="${MY_DOMAIN_NAME}" service-account-username="${MY_DOMAIN_USER}" service-account-password="${MY_DOMAIN_PASS}"
-
-# Configure PE role mapping
-my_log "Configure PE role mapping"
-ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${MY_DOMAIN_NAME}" entity-values="${MY_DOMAIN_ADMIN_GROUP}"
-
-# Reverse Lookup Zone
-my_log "Creating Reverse Lookup Zone on DC VM"
-sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \
-root@10.21.${MY_HPOC_NUMBER}.40 "samba-tool dns zonecreate dc1 ${MY_HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart"
-
-# Create custom OUs
-sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \
-root@10.21.${MY_HPOC_NUMBER}.40 "apt install ldb-tools -y -q"
-
-sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \
-root@10.21.${MY_HPOC_NUMBER}.40 "cat << EOF > ous.ldif
-dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local
-changetype: add
-objectClass: top
-objectClass: organizationalunit
-description: Non-Persistent Desktop OU
-
-dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local
-changetype: add
-objectClass: top
-objectClass: organizationalunit
-description: Persistent Desktop OU
-
-dn: OU=XenAppServer,DC=NTNXLAB,DC=local
-changetype: add
-objectClass: top
-objectClass: organizationalunit
-description: XenApp Server OU
-EOF"
-
-sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \
-root@10.21.${MY_HPOC_NUMBER}.40 "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart"
-
-# Provision local Prism account for XD MCS Plugin
-my_log "Create PE user account xd for MCS Plugin"
-ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com
-ncli user grant-cluster-admin-role user-name=xd
-
-# Get UUID from cluster
-my_log "Get UUIDs from cluster:"
-MY_NET_UUID=$(acli net.get ${MY_PRIMARY_NET_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs)
-my_log "${MY_PRIMARY_NET_NAME} UUID is ${MY_NET_UUID}"
-MY_CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs)
-my_log "${STORAGE_DEFAULT} UUID is ${MY_CONTAINER_UUID}"
-
-# Validate EULA on PE
-my_log "Validate EULA on PE"
-curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST \
- https://127.0.0.1:9440/PrismGateway/services/rest/v1/eulas/accept \
- -d '{
- "username": "SE",
- "companyName": "NTNX",
- "jobTitle": "SE"
-}'
-
-# Disable Pulse in PE
-my_log "Disable Pulse in PE"
-curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X PUT \
- https://127.0.0.1:9440/PrismGateway/services/rest/v1/pulse \
- -d '{
- "defaultNutanixEmail": null,
- "emailContactList": null,
- "enable": false,
- "enableDefaultNutanixEmail": false,
- "isPulsePromptNeeded": false,
- "nosVersion": null,
- "remindLater": null,
- "verbosityType": null
-}'
-
-# AFS Download
-my_log "Download AFS image from ${MY_AFS_SRC_URL}"
-wget -nv ${MY_AFS_SRC_URL}
-my_log "Download AFS metadata JSON from ${MY_AFS_META_URL}"
-wget -nv ${MY_AFS_META_URL}
-
-# Staging AFS
-my_log "Stage AFS"
-ncli software upload file-path=/home/nutanix/${MY_AFS_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_AFS_META_URL##*/} software-type=FILE_SERVER
-
-# Freeing up space
-my_log "Delete AFS sources to free some space"
-rm ${MY_AFS_SRC_URL##*/} ${MY_AFS_META_URL##*/}
-
-# Prism Central Download
-my_log "Download PC tarball from ${MY_PC_SRC_URL}"
-wget -nv ${MY_PC_SRC_URL}
-my_log "Download PC metadata JSON from ${MY_PC_META_URL}"
-wget -nv ${MY_PC_META_URL}
-
-# Staging Prism Central
-my_log "Stage Prism Central"
-ncli software upload file-path=/home/nutanix/${MY_PC_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_PC_META_URL##*/} software-type=PRISM_CENTRAL_DEPLOY
-
-# Freeing up space
-my_log "Delete PC sources to free some space"
-rm ${MY_PC_SRC_URL##*/} ${MY_PC_META_URL##*/}
-
-# Deploy Prism Central
-my_log "Deploy Prism Central"
-MY_DEPLOY_BODY=$(cat <> pcconfig.log 2>&1 &"
-my_log "Removing sshpass"
-sudo rpm -e sshpass
-my_log "PE Configuration complete"
diff --git a/scripts/stage_citrixhow_pc.sh b/scripts/stage_citrixhow_pc.sh
deleted file mode 100644
index 2cacfda..0000000
--- a/scripts/stage_citrixhow_pc.sh
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/bin/bash
-
-#MY_PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz'
-
-# Script file name
-MY_SCRIPT_NAME=`basename "$0"`
-
-# Source Nutanix environments (for PATH and other things)
-. /etc/profile.d/nutanix_env.sh
-. lib.common.sh # source common routines
-Dependencies 'install';
-
-# Derive HPOC number from IP 3rd byte
-#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1)
- MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}')
- array=(${MY_CVM_IP//./ })
-MY_HPOC_NUMBER=${array[2]}
-
-CURL_OPTS="${CURL_OPTS} --user admin:${MY_PE_PASSWORD}" #lib.common.sh initialized
-#CURL_OPTS="${CURL_OPTS} --verbose"
-
-# Set Prism Central Password to Prism Element Password
-my_log "Setting PC password to PE password"
-ncli user reset-password user-name="admin" password="${MY_PE_PASSWORD}"
-
-# Add NTP Server\
-my_log "Configure NTP on PC"
-ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org
-
-# Accept Prism Central EULA
-my_log "Validate EULA on PC"
-curl ${CURL_OPTS} \
- https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/eulas/accept \
- -d '{
- "username": "SE",
- "companyName": "NTNX",
- "jobTitle": "SE"
-}'
-
-# Disable Prism Central Pulse
-my_log "Disable Pulse on PC"
-curl ${CURL_OPTS} -X PUT \
- https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/pulse \
- -d '{
- "emailContactList":null,
- "enable":false,
- "verbosityType":null,
- "enableDefaultNutanixEmail":false,
- "defaultNutanixEmail":null,
- "nosVersion":null,
- "isPulsePromptNeeded":false,
- "remindLater":null
-}'
-
-# Prism Central upgrade
-#my_log "Download PC upgrade image: ${MY_PC_UPGRADE_URL##*/}"
-#wget -nv ${MY_PC_UPGRADE_URL}
-
-#my_log "Prepare PC upgrade image"
-#tar -xzf ${MY_PC_UPGRADE_URL##*/}
-#rm ${MY_PC_UPGRADE_URL##*/}
-
-#my_log "Upgrade PC"
-#cd /home/nutanix/install ; ./bin/cluster -i . -p upgrade
-
-my_log "PC Configuration complete on `$date`"
diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh
new file mode 100755
index 0000000..fc1a3a1
--- /dev/null
+++ b/scripts/ts2019.sh
@@ -0,0 +1,176 @@
+#!/usr/bin/env bash
+# -x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ #export PC_DEV_VERSION='5.10.2'
+ #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json'
+ #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar'
+ #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json'
+ #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar'
+ #export FILES_VERSION='3.2.0.1'
+ #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
+ #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
+ #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
+ #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.229"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ #export QCOW2_REPOS=(\
+ #'http://10.42.8.50/images/' \
+ #'https://s3.amazonaws.com/get-ahv-images/' \
+ #) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share
+ export QCOW2_IMAGES=(\
+ CentOS7.qcow2 \
+ Windows2016.qcow2 \
+ Windows2012R2.qcow2 \
+ Windows10-1709.qcow2 \
+ ToolsVM.qcow2 \
+ move-3.0.1.qcow2 \
+ ERA-Server-build-1.0.1.qcow2 \
+ sherlock-k8s-base-image_403.qcow2 \
+ hycu-3.5.0-6253.qcow2 \
+ VeeamAvailability_1.0.457.vmdk \
+ 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \
+ )
+ export ISO_IMAGES=(\
+ Windows2012R2.iso \
+ SQLServer2014SP3.iso \
+ Nutanix-VirtIO-1.1.3.iso \
+ VeeamBR_9.5.4.2615.Update4.iso \
+ )
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+
+  # If we run this in a non-HPOC environment we must skip the SMTP config as we have no idea what the SMTP server will be
+ if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then
+ pc_smtp
+ fi
+
+ ssp_auth \
+ && calm_enable \
+ && lcm \
+ && images \
+ && karbon_enable \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+ #IMAGES | images )
+ # . lib.pc.sh
+ #ts_images
+ #;;
+esac
diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh
new file mode 100755
index 0000000..f9f7022
--- /dev/null
+++ b/scripts/ts2020.sh
@@ -0,0 +1,214 @@
+#!/usr/bin/env bash
+ #-x
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+case ${1} in
+ PE | pe )
+ . lib.pe.sh
+
+ #export PC_DEV_VERSION='5.10.2'
+ #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json'
+ #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar'
+ #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json'
+ #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar'
+ #export FILES_VERSION='3.2.0.1'
+ #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
+ #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
+ #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
+ #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
+ export NW2_DHCP_START="${IPV4_PREFIX}.132"
+ export NW2_DHCP_END="${IPV4_PREFIX}.229"
+
+ export AUTH_SERVER='AutoAD'
+ export PrismOpsServer='GTSPrismOpsLabUtilityServer'
+ export SeedPC='GTSseedPC.zip'
+
+ export _external_nw_name="${1}"
+
+ args_required 'PE_HOST PC_LAUNCH'
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+ && pe_license \
+ && pe_init \
+ && network_configure \
+ && authentication_source \
+ && pe_auth \
+ && prism_pro_server_deploy \
+ && files_install \
+ && sleep 30 \
+ && create_file_server "${NW1_NAME}" "${NW2_NAME}" \
+ && sleep 30 \
+ && file_analytics_install \
+ && sleep 30 \
+ && create_file_analytics_server \
+ && sleep 30
+
+ if (( $? == 0 )) ; then
+ pc_install "${NW1_NAME}" \
+ && prism_check 'PC' \
+
+ if (( $? == 0 )) ; then
+ _command="EMAIL=${EMAIL} \
+ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+ cluster_check \
+ && log "Remote asynchroneous PC Image import script... ${_command}" \
+ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+ pc_configure \
+ && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+ log "PE = https://${PE_HOST}:9440"
+ log "PC = https://${PC_HOST}:9440"
+
+ deploy_peer_mgmt_server "${PMC}" \
+ && deploy_peer_agent_server "${AGENTA}" \
+ && deploy_peer_agent_server "${AGENTB}"
+ #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+ #dependencies 'remove' 'sshpass'
+ finish
+ fi
+ else
+ finish
+ _error=18
+ log "Error ${_error}: in main functional chain, exit!"
+ exit ${_error}
+ fi
+ ;;
+ PC | pc )
+ . lib.pc.sh
+
+ export BUCKETS_DNS_IP="${IPV4_PREFIX}.16"
+ export BUCKETS_VIP="${IPV4_PREFIX}.17"
+ export OBJECTS_NW_START="${IPV4_PREFIX}.18"
+ export OBJECTS_NW_END="${IPV4_PREFIX}.21"
+
+ export QCOW2_IMAGES=(\
+ ERA-Server-build-1.2.1.qcow2 \
+ Windows2016.qcow2 \
+ CentOS7.qcow2 \
+ Win10v1903.qcow2 \
+ WinToolsVM.qcow2 \
+ Linux_ToolsVM.qcow2 \
+ move-3.4.1.qcow2 \
+ MSSQL-2016-VM.qcow2 \
+ GTSOracle/19c-april/19c-bootdisk.qcow2 \
+ GTSOracle/19c-april/19c-disk1.qcow2 \
+ GTSOracle/19c-april/19c-disk2.qcow2 \
+ GTSOracle/19c-april/19c-disk3.qcow2 \
+ GTSOracle/19c-april/19c-disk4.qcow2 \
+ GTSOracle/19c-april/19c-disk5.qcow2 \
+ GTSOracle/19c-april/19c-disk6.qcow2 \
+ GTSOracle/19c-april/19c-disk7.qcow2 \
+ GTSOracle/19c-april/19c-disk8.qcow2 \
+ GTSOracle/19c-april/19c-disk9.qcow2 \
+ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \
+ veeam/VeeamAHVProxy2.0.404.qcow2 \
+ )
+ export ISO_IMAGES=(\
+ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \
+ Nutanix-VirtIO-1.1.5.iso \
+ FrameCCA-2.1.6.iso \
+ FrameCCA-2.1.0.iso \
+ FrameGuestAgentInstaller_1.0.2.2_7930.iso \
+ veeam/VBR_10.0.0.4442.iso \
+ )
+
+
+ run_once
+
+ dependencies 'install' 'jq' || exit 13
+
+ ssh_pubkey & # non-blocking, parallel suitable
+
+ pc_passwd
+ ntnx_cmd # check cli services available?
+
+ export NUCLEI_SERVER='localhost'
+ export NUCLEI_USERNAME="${PRISM_ADMIN}"
+ export NUCLEI_PASSWORD="${PE_PASSWORD}"
+ # nuclei -debug -username admin -server localhost -password x vm.list
+
+ if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+ log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+ pe_determine ${1}
+ . global.vars.sh # re-populate PE_HOST dependencies
+ else
+ CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+ jq -r .data[0].clusterDetails.clusterName)
+ if [[ ${CLUSTER_NAME} != '' ]]; then
+ log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+ fi
+ fi
+
+ if [[ ! -z "${2}" ]]; then # hidden bonus
+ log "Don't forget: $0 first.last@nutanixdc.local%password"
+ calm_update && exit 0
+ fi
+
+ export ATTEMPTS=2
+ export SLEEP=10
+
+ pc_init \
+ && pc_dns_add \
+ && pc_ui \
+ && pc_auth \
+ && pc_smtp
+
+ ssp_auth \
+ && calm_enable \
+ && karbon_enable \
+ && objects_enable \
+ && lcm \
+ && pc_project \
+ && object_store \
+ && karbon_image_download \
+ && priority_images \
+ && flow_enable \
+ && pc_cluster_img_import \
+ && upload_citrix_calm_blueprint \
+ && sleep 30 \
+ && upload_era_calm_blueprint \
+ && sleep 30 \
+ && upload_karbon_calm_blueprint \
+ && sleep 30 \
+ && upload_CICDInfra_calm_blueprint \
+ && seedPC \
+ && images \
+ && prism_check 'PC'
+
+ log "Non-blocking functions (in development) follow."
+ #pc_project
+ pc_admin
+ # ntnx_download 'AOS' # function in lib.common.sh
+
+ unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+ if (( $? == 0 )); then
+ #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+ #&&
+ log "PC = https://${PC_HOST}:9440"
+ finish
+ else
+ _error=19
+ log "Error ${_error}: failed to reach PC!"
+ exit ${_error}
+ fi
+ ;;
+ FILES | files | afs )
+ files_install
+ ;;
+esac
diff --git a/scripts/vmdisk2image-pc.sh b/scripts/vmdisk2image-pc.sh
old mode 100644
new mode 100755
diff --git a/scripts/we-lib.common.sh b/scripts/we-lib.common.sh
new file mode 100755
index 0000000..804f625
--- /dev/null
+++ b/scripts/we-lib.common.sh
@@ -0,0 +1,849 @@
+#!/usr/bin/env bash
+# dependencies: dig
+
+function args_required() {
+ local _argument
+ local _error=88
+
+ for _argument in ${1}; do
+ if [[ ${DEBUG} ]]; then
+ log "DEBUG: Checking ${_argument}..."
+ fi
+ _RESULT=$(eval "echo \$${_argument}")
+ if [[ -z ${_RESULT} ]]; then
+ log "Error ${_error}: ${_argument} not provided!"
+ exit ${_error}
+ elif [[ ${DEBUG} ]]; then
+ log "Non-error: ${_argument} for ${_RESULT}"
+ fi
+ done
+
+ if [[ ${DEBUG} ]]; then
+ log 'Success: required arguments provided.'
+ fi
+}
+
+function begin() {
+ local _release
+
+ if [[ -e ${RELEASE} ]]; then
+ _release=" release: $(grep FullSemVer ${RELEASE} | awk -F\" '{print $4}')"
+ fi
+
+ log "$(basename ${0})${_release} start._____________________"
+}
+
+function dependencies {
+ local _argument
+ local _error
+ local _index
+ local _jq_pkg=${JQ_REPOS[0]##*/}
+ local _sshpass_pkg=${SSHPASS_REPOS[0]##*/}
+
+ if [[ -z ${1} ]]; then
+ _error=20
+ log "Error ${_error}: missing install or remove verb."
+ exit ${_error}
+ elif [[ -z ${2} ]]; then
+ _error=21
+ log "Error ${_error}: missing package name."
+ exit ${_error}
+ elif [[ "${1}" != 'install' ]] && [[ "${1}" != 'remove' ]]; then
+ _error=20
+ log "Error ${_error}: wrong install or remove verb (case sensitive)."
+ exit ${_error}
+ fi
+
+ case "${1}" in
+ 'install')
+
+ if [[ -z $(which ${2}) ]]; then
+ log "Install ${2}..."
+ case "${2}" in
+ sshpass | ${_sshpass_pkg})
+ if [[ ( ${OS_NAME} == 'Ubuntu' || ${OS_NAME} == 'LinuxMint' ) ]]; then
+ sudo apt-get install --yes sshpass
+ elif [[ ${OS_NAME} == '"centos"' ]]; then
+ # TOFIX: assumption, probably on NTNX CVM or PCVM = CentOS7
+ if [[ ! -e ${_sshpass_pkg} ]]; then
+ repo_source SSHPASS_REPOS[@] ${_sshpass_pkg}
+ download ${SOURCE_URL}
+ fi
+ sudo rpm -ivh ${_sshpass_pkg}
+ if (( $? > 0 )); then
+ _error=31
+ log "Error ${_error}: cannot install ${2}."
+ exit ${_error}
+ fi
+ elif [[ ${OS_NAME} == 'Darwin' ]]; then
+ brew install https://raw.githubusercontent.com/kadwanev/bigboybrew/master/Library/Formula/sshpass.rb
+ fi
+ ;;
+ jq | ${_jq_pkg} )
+ if [[ ( ${OS_NAME} == 'Ubuntu' || ${OS_NAME} == 'LinuxMint' ) ]]; then
+ if [[ ! -e ${_jq_pkg} ]]; then
+ sudo apt-get install --yes jq
+ fi
+ elif [[ ${OS_NAME} == '"centos"' ]]; then
+ if [[ ! -e ${_jq_pkg} ]]; then
+ repo_source JQ_REPOS[@] ${_jq_pkg}
+ download ${SOURCE_URL}
+ fi
+ chmod u+x ${_jq_pkg} && ln -s ${_jq_pkg} jq
+
+ if [[ -d ${HOME}/bin ]]; then
+ mv jq* ${HOME}/bin/
+ else
+ PATH+=:$(pwd)
+ export PATH
+ fi
+ elif [[ ${OS_NAME} == 'Darwin' ]]; then
+ brew install jq
+ fi
+ ;;
+ esac
+
+ if (( $? > 0 )); then
+ _error=98
+ log "Error ${_error}: can't install ${2}."
+ exit ${_error}
+ fi
+ else
+ log "Success: found ${2}."
+ fi
+ ;;
+ 'remove')
+ if [[ ${OS_NAME} == '"centos"' ]]; then
+ log "Warning: assuming on PC or PE VM, removing ${2}..."
+ case "${2}" in
+ sshpass | ${_sshpass_pkg})
+ sudo rpm -e sshpass
+ ;;
+ jq | ${_jq_pkg} )
+ if [[ -d ${HOME}/bin ]]; then
+ pushd bin || true
+ rm -f jq ${_jq_pkg}
+ popd || true
+ else
+ rm -f jq ${_jq_pkg}
+ fi
+ ;;
+ esac
+ else
+ log "Feature: don't remove dependencies on Mac OS Darwin, Ubuntu, or LinuxMint."
+ fi
+ ;;
+ esac
+}
+
+function dns_check() {
+ local _dns
+ local _error
+ local _lookup=${1} # REQUIRED
+ local _test
+
+ if [[ -z ${_lookup} ]]; then
+ _error=43
+ log "Error ${_error}: missing lookup record!"
+ exit ${_error}
+ fi
+
+ _dns=$(dig +retry=0 +time=2 +short @${AUTH_HOST} ${_lookup})
+ _test=$?
+
+ if [[ ${_dns} != "${AUTH_HOST}" ]]; then
+ _error=44
+ log "Error ${_error}: result was ${_test}: ${_dns}"
+ return ${_error}
+ fi
+}
+
+function download() {
+ local _attempts=5
+ local _error=0
+ local _http_range_enabled # TODO:40 OPTIMIZATION: disabled '--continue-at -'
+ local _loop=0
+ local _output
+ local _sleep=2
+
+ if [[ -z ${1} ]]; then
+ _error=33
+ log "Error ${_error}: no URL to download!"
+ exit ${_error}
+ fi
+
+ while true ; do
+ (( _loop++ ))
+ log "${1}..."
+ _output=''
+ curl ${CURL_OPTS} ${_http_range_enabled} --remote-name --location ${1}
+ _output=$?
+ #DEBUG=1; if [[ ${DEBUG} ]]; then log "DEBUG: curl exited ${_output}."; fi
+
+ if (( ${_output} == 0 )); then
+ log "Success: ${1##*/}"
+ break
+ fi
+
+ if (( ${_loop} == ${_attempts} )); then
+ _error=11
+ log "Error ${_error}: couldn't download from: ${1}, giving up after ${_loop} tries."
+ exit ${_error}
+ elif (( ${_output} == 33 )); then
+ log "Web server doesn't support HTTP range command, purging and falling back."
+ _http_range_enabled=''
+ rm -f ${1##*/}
+ else
+ log "${_loop}/${_attempts}: curl=${_output} ${1##*/} sleep ${_sleep}..."
+ sleep ${_sleep}
+ fi
+ done
+}
+
+function fileserver() {
+ local _action=${1} # REQUIRED
+ local _host=${2} # REQUIRED, TODO: default to PE?
+ local _port=${3} # OPTIONAL
+ local _directory=${4} # OPTIONAL
+
+ if [[ -z ${1} ]]; then
+ _error=38
+ log "Error ${_error}: start or stop action required!"
+ exit ${_error}
+ fi
+ if [[ -z ${2} ]]; then
+ _error=39
+ log "Error ${_error}: host required!"
+ exit ${_error}
+ fi
+ if [[ -z ${3} ]]; then
+ _port=8181
+ fi
+ if [[ -z ${4} ]]; then
+ _directory=cache
+ fi
+
+ case ${_action} in
+ 'start' )
+ # Determine if on PE or PC with _host PE or PC, then _host=localhost
+ # ssh -nNT -R 8181:localhost:8181 nutanix@10.21.31.31
+ pushd ${_directory} || exit
+
+ remote_exec 'ssh' ${_host} \
+ "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}"
+
+ # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \
+ # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2
+ #AutoDC2: pending
+ #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable
+ popd || exit
+ ;;
+ 'stop' )
+ remote_exec 'ssh' ${_host} \
+ "kill -9 $(pgrep python -a | grep ${_port} | awk '{ print $1 }')" 'OPTIONAL'
+ ;;
+ esac
+}
+
+function finish() {
+ log "${0} ran for ${SECONDS} seconds._____________________"
+ echo
+}
+
+function images() {
+ # https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html
+ local _cli='acli'
+ local _command
+ local _http_body
+ local _image
+ local _image_type
+ local _name
+ local _source='source_url'
+ local _test
+
+ which "$_cli"
+ if (( $? > 0 )); then
+ _cli='nuclei'
+ _source='source_uri'
+ fi
+
+ for _image in "${QCOW2_IMAGES[@]}" ; do
+
+ # log "DEBUG: ${_image} image.create..."
+ if [[ ${_cli} == 'nuclei' ]]; then
+ _test=$(source /etc/profile.d/nutanix_env.sh \
+ && ${_cli} image.list 2>&1 \
+ | grep -i complete \
+ | grep "${_image}")
+ else
+ _test=$(source /etc/profile.d/nutanix_env.sh \
+ && ${_cli} image.list 2>&1 \
+ | grep "${_image}")
+ fi
+
+ if [[ ! -z ${_test} ]]; then
+ log "Skip: ${_image} already complete on cluster."
+ else
+ _command=''
+ _name="${_image}"
+
+ if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then
+ log 'Bypass multiple repo source checks...'
+ SOURCE_URL="${_image}"
+ else
+ repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]!
+ fi
+
+ if [[ -z "${SOURCE_URL}" ]]; then
+ _error=30
+ log "Warning ${_error}: didn't find any sources for ${_image}, continuing..."
+ # exit ${_error}
+ fi
+
+ # TODO:0 TOFIX: acs-centos ugly override for today...
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then
+ _name=acs-centos
+ fi
+
+ if [[ ${_cli} == 'acli' ]]; then
+ _image_type='kDiskImage'
+ if (( $(echo "${SOURCE_URL}" | grep -i -e 'iso$' | wc --lines ) > 0 )); then
+ _image_type='kIsoImage'
+ fi
+
+ _command+=" ${_name} annotation=${_image} image_type=${_image_type} \
+ container=${STORAGE_IMAGES} architecture=kX86_64 wait=true"
+ else
+ _command+=" name=${_name} description=\"${_image}\""
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ _http_body=$(cat <&1 &
+ if (( $? != 0 )); then
+ log "Warning: Image submission: $?. Continuing..."
+ #exit 10
+ fi
+
+ if [[ ${_cli} == 'nuclei' ]]; then
+ log "NOTE: image.uuid = RUNNING, but takes a while to show up in:"
+ log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State"
+ fi
+ fi
+ fi
+
+ done
+}
+
+# Function to encode the message in the POST as web encoding.
+function rawurlencode() {
+ local string="${1}"
+ local strlen=${#string}
+ local encoded=""
+ local pos c o
+
+ for (( pos=0 ; pos&1)
+ _status=$?
+
+ if (( $(echo "${_hold}" | grep websocket | wc --lines) > 0 )); then
+ log "Warning: Zookeeper isn't up yet."
+ elif (( ${_status} > 0 )); then
+ log "${_status} = ${_hold}, uh oh!"
+ else
+ log "Cluster info via nuclei seems good: ${_status}, moving on!"
+ break
+ fi
+
+ if (( ${_loop} == ${_attempts} )); then
+ log "Error ${_error}: couldn't determine cluster information, giving up after ${_loop} tries."
+ exit ${_error}
+ else
+ log "${_loop}/${_attempts}: hold=${_hold} sleep ${_sleep}..."
+ sleep ${_sleep}
+ fi
+ done
+}
+
+function ntnx_download() {
+ local _checksum
+ local _error
+ local _meta_url
+ local _ncli_softwaretype="${1}"
+ local _source_url
+
+ case "${_ncli_softwaretype}" in
+ PC | pc | PRISM_CENTRAL_DEPLOY )
+ args_required 'PC_VERSION'
+
+ if [[ "${PC_VERSION}" == "${PC_DEV_VERSION}" ]]; then
+ _meta_url="${PC_DEV_METAURL}"
+ else
+ _meta_url="${PC_STABLE_METAURL}"
+ fi
+
+ if [[ -z ${_meta_url} ]]; then
+ _error=22
+ log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!"
+ log 'Sync the following to global.var.sh...'
+ log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails'
+ log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side"
+ log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.'
+ exit ${_error}
+ fi
+
+ if [[ ! -z ${PC_URL} ]]; then
+ _source_url="${PC_URL}"
+ fi
+ ;;
+ 'NOS' | 'nos' | 'AOS' | 'aos')
+ # TODO:70 nos is a prototype
+ args_required 'AOS_VERSION AOS_UPGRADE'
+ _meta_url="${AOS_METAURL}"
+
+ if [[ -z ${_meta_url} ]]; then
+ _error=23
+ log "Error ${_error}: unsupported AOS_UPGRADE=${AOS_UPGRADE}!"
+ log 'Browse to https://portal.nutanix.com/#/page/releases/nosDetails'
+ log " - Find ${AOS_UPGRADE} in the Additional Releases section on the lower right side"
+ log ' - Provide the Upgrade metadata URL to this function for both case stanzas.'
+ exit ${_error}
+ fi
+
+ if [[ ! -z ${AOS_URL} ]]; then
+ _source_url="${AOS_URL}"
+ fi
+ ;;
+ FILES | files | AFS | afs )
+ args_required 'FILES_VERSION'
+ _meta_url="${FILES_METAURL}"
+
+ if [[ -z ${_meta_url} ]]; then
+ _error=22
+ log "Error ${_error}: unsupported FILES_VERSION=${FILES_VERSION}!"
+ log 'Sync the following to global.var.sh...'
+ log 'Browse to https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA'
+ log " - Find ${FILES_VERSION} in the Additional Releases section on the lower right side"
+ log ' - Provide the metadata URL option to this function, both case stanzas.'
+ exit ${_error}
+ fi
+
+ if [[ ! -z ${FILES_URL} ]]; then
+ _source_url="${FILES_URL}"
+ fi
+ ;;
+ * )
+ _error=88
+ log "Error ${_error}:: couldn't determine software-type ${_ncli_softwaretype}!"
+ exit ${_error}
+ ;;
+ esac
+
+ if [[ ! -e ${_meta_url##*/} ]]; then
+ log "Retrieving download metadata ${_meta_url##*/} ..."
+ download "${_meta_url}"
+ else
+ log "Warning: using cached download ${_meta_url##*/}"
+ fi
+
+ if [[ -z ${_source_url} ]]; then
+ dependencies 'install' 'jq' || exit 13
+ _source_url=$(cat ${_meta_url##*/} | jq -r .download_url_cdn)
+ fi
+
+ if (( $(pgrep curl | wc --lines | tr -d '[:space:]') > 0 )); then
+ pkill curl
+ fi
+ log "Retrieving Nutanix ${_ncli_softwaretype} bits..."
+ download "${_source_url}"
+
+ _checksum=$(md5sum ${_source_url##*/} | awk '{print $1}')
+ if [[ $(cat ${_meta_url##*/} | jq -r .hex_md5) != "${_checksum}" ]]; then
+
+ _error=2
+ log "Error ${_error}: md5sum ${_checksum} doesn't match on: ${_source_url##*/} removing and exit!"
+ rm -f ${_source_url##*/}
+ exit ${_error}
+ else
+ log "Success: ${_ncli_softwaretype} bits downloaded and passed MD5 checksum!"
+ fi
+
+ ncli software upload software-type=${_ncli_softwaretype} \
+ file-path="$(pwd)/${_source_url##*/}" \
+ meta-file-path="$(pwd)/${_meta_url##*/}"
+
+ if (( $? == 0 )) ; then
+ log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..."
+ rm -f ${_source_url##*/} ${_meta_url##*/}
+ else
+ _error=3
+ log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}."
+ exit ${_error}
+ fi
+}
+
+function pe_determine() {
+ # ${1} REQUIRED: run on 'PE' or 'PC'
+ local _error
+ local _hold
+
+ dependencies 'install' 'jq'
+
+  # ncli @PE and @PC yield different info! So PC uses nuclei.
+ case ${1} in
+ PE | pe )
+ _hold=$(source /etc/profile.d/nutanix_env.sh \
+ && ncli --json=true cluster info)
+ ;;
+ PC | Pc | pc )
+ # WORKAROUND: Entities non-JSON outputs by nuclei on lines 1-2...
+ _hold=$(source /etc/profile.d/nutanix_env.sh \
+ && export NUCLEI_SERVER='localhost' \
+ && export NUCLEI_USERNAME="${PRISM_ADMIN}" \
+ && export NUCLEI_PASSWORD="${PE_PASSWORD}" \
+ && nuclei cluster.list format=json 2>/dev/null \
+ | grep -v 'Entities :' \
+ | jq \
+ '.entities[].status | select(.state == "COMPLETE") | select(.resources.network.external_ip != null)'
+ )
+ ;;
+ *)
+ log 'Error: invoke with PC or PE argument.'
+ ;;
+ esac
+
+ #log "DEBUG: cluster info on ${1}. |${_hold}|"
+
+ if [[ -z "${_hold}" ]]; then
+ _error=12
+ log "Error ${_error}: couldn't resolve cluster info on ${1}. |${_hold}|"
+ args_required 'PE_HOST'
+ exit ${_error}
+ else
+ case ${1} in
+ PE | pe )
+ CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name)
+ PE_HOST=$(echo ${_hold} | jq -r .data.clusterExternalIPAddress)
+ ;;
+ PC | Pc | pc )
+ CLUSTER_NAME=$(echo ${_hold} | jq -r .name)
+ PE_HOST=$(echo ${_hold} | jq -r .resources.network.external_ip)
+ ;;
+ esac
+
+ export CLUSTER_NAME PE_HOST
+ log "Success: Cluster name=${CLUSTER_NAME}, PE external IP=${PE_HOST}"
+ fi
+}
+
+function prism_check {
+ # Argument ${1} = REQUIRED: PE or PC
+ # Argument ${2} = OPTIONAL: number of attempts
+ # Argument ${3} = OPTIONAL: number of seconds per cycle
+
+ args_required 'ATTEMPTS PE_PASSWORD SLEEP'
+
+ local _attempts=${ATTEMPTS}
+ local _error=77
+ local _host
+ local _loop=0
+ local _password="${PE_PASSWORD}"
+ local _pw_init='Nutanix/4u'
+ local _sleep=${SLEEP}
+ local _test=0
+
+ #shellcheck disable=2153
+ if [[ ${1} == 'PC' ]]; then
+ _host=${PC_HOST}
+ else
+ _host=${PE_HOST}
+ fi
+ if [[ ! -z ${2} ]]; then
+ _attempts=${2}
+ fi
+
+ while true ; do
+ (( _loop++ ))
+ _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${_password} \
+ -X POST --data '{ "kind": "cluster" }' \
+ https://${_host}:9440/api/nutanix/v3/clusters/list \
+ | tr -d \") # wonderful addition of "" around HTTP status code by cURL
+
+ if [[ ! -z ${3} ]]; then
+ _sleep=${3}
+ fi
+
+ if (( ${_test} == 401 )); then
+ log "Warning: unauthorized ${1} user or password on ${_host}."
+
+ if [[ ${1} == 'PC' && ${_password} != "${_pw_init}" ]]; then
+ _password=${_pw_init}
+ log "Warning @${1}: Fallback on ${_host}: try initial password next cycle..."
+ #_sleep=0 #break
+ elif [[ ${1} == 'PC' && ${_password} == "${_pw_init}" && ${PC_VERSION} == "${PC_DEV_VERSION}" ]]; then
+ _password=${PE_PASSWORD}
+ log "Warning @${1}-dev: Fallback on ${_host}: try PE cluster password next cycle..."
+ #_sleep=0 #break
+ fi
+
+ fi
+
+ if (( ${_test} == 200 )); then
+ log "@${1}: successful."
+ return 0
+ elif (( ${_loop} > ${_attempts} )); then
+ log "Warning ${_error} @${1}: Giving up after ${_loop} tries."
+ return ${_error}
+ else
+ log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..."
+ sleep ${_sleep}
+ fi
+ done
+}
+
+function remote_exec() {
+# Argument ${1} = REQUIRED: ssh or scp
+# Argument ${2} = REQUIRED: PE, PC, or AUTH_SERVER
+# Argument ${3} = REQUIRED: command configuration
+# Argument ${4} = OPTIONAL: populated with anything = allowed to fail
+
+ local _account='nutanix'
+ local _attempts=3
+ local _error=99
+ local _host
+ local _loop=0
+ local _password="${PE_PASSWORD}"
+ local _pw_init="${NTNX_INIT_PASSWORD}"
+ local _sleep=${SLEEP}
+ local _test=0
+
+ args_required 'SSH_OPTS'
+
+ # shellcheck disable=SC2153
+ case ${2} in
+ 'PE' )
+ _host=${PE_HOST}
+ ;;
+ 'PC' )
+ _host=${PC_HOST}
+ _password=${_pw_init}
+ ;;
+ 'AUTH_SERVER' )
+ _account='root'
+ _host=${AUTH_HOST}
+ _password=${_pw_init}
+ _sleep=7
+ ;;
+ esac
+
+ if [[ -z ${3} ]]; then
+ log 'Error ${_error}: missing third argument.'
+ exit ${_error}
+ fi
+
+ if [[ ! -z ${4} ]]; then
+ _attempts=1
+ _sleep=0
+ fi
+
+ while true ; do
+ (( _loop++ ))
+ case "${1}" in
+ 'SSH' | 'ssh')
+ #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform ${_account}@${_host} ${3}..."; fi
+ SSHPASS="${_password}" sshpass -e ssh -x ${SSH_OPTS} ${_account}@${_host} "${3}"
+ _test=$?
+ ;;
+ 'SCP' | 'scp')
+ #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform scp ${3} ${_account}@${_host}:"; fi
+ SSHPASS="${_password}" sshpass -e scp ${SSH_OPTS} ${3} ${_account}@${_host}:
+ _test=$?
+ ;;
+ *)
+ log "Error ${_error}: improper first argument, should be ssh or scp."
+ exit ${_error}
+ ;;
+ esac
+
+ if (( ${_test} > 0 )) && [[ -z ${4} ]]; then
+ _error=22
+ log "Error ${_error}: pwd=$(pwd), _test=${_test}, _host=${_host}"
+ exit ${_error}
+ fi
+
+ if (( ${_test} == 0 )); then
+ if [[ ${DEBUG} ]]; then log "${3} executed properly."; fi
+ return 0
+ elif (( ${_loop} == ${_attempts} )); then
+ if [[ -z ${4} ]]; then
+ _error=11
+ log "Error ${_error}: giving up after ${_loop} tries."
+ exit ${_error}
+ else
+ log "Optional: giving up."
+ break
+ fi
+ else
+ log "${_loop}/${_attempts}: _test=$?|${_test}| SLEEP ${_sleep}..."
+ sleep ${_sleep}
+ fi
+ done
+}
+
+function repo_source() {
+ # https://stackoverflow.com/questions/1063347/passing-arrays-as-parameters-in-bash#4017175
+ local _candidates=("${!1}") # REQUIRED
+ local _package="${2}" # OPTIONAL
+ local _error=29
+ local _http_code
+ local _index=0
+ local _suffix
+ local _url
+
+ if (( ${#_candidates[@]} == 0 )); then
+ log "Error ${_error}: Missing array!"
+ exit ${_error}
+ # else
+ # log "DEBUG: _candidates count is ${#_candidates[@]}"
+ fi
+
+ if [[ -z ${_package} ]]; then
+ _suffix=${_candidates[0]##*/}
+ if (( $(echo "${_suffix}" | grep . | wc --lines) > 0)); then
+ log "Convenience: omitted package argument, added package=${_package}"
+ _package="${_suffix}"
+ fi
+ fi
+ # Prepend your local HTTP cache...
+ _candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" )
+
+ while (( ${_index} < ${#_candidates[@]} ))
+ do
+ unset SOURCE_URL
+
+ # log "DEBUG: ${_index} ${_candidates[${_index}]}, OPTIONAL: _package=${_package}"
+ _url=${_candidates[${_index}]}
+
+ if [[ -z ${_package} ]]; then
+ if (( $(echo "${_url}" | grep '/$' | wc --lines) == 0 )); then
+ log "error ${_error}: ${_url} doesn't end in trailing slash, please correct."
+ exit ${_error}
+ fi
+ elif (( $(echo "${_url}" | grep '/$' | wc --lines) == 1 )); then
+ _url+="${_package}"
+ fi
+
+ if (( $(echo "${_url}" | grep '^nfs' | wc --lines) == 1 )); then
+ log "warning: TODO: cURL can't test nfs URLs...assuming a pass!"
+ export SOURCE_URL="${_url}"
+ break
+ fi
+
+ _http_code=$(curl ${CURL_OPTS} --max-time 5 --write-out '%{http_code}' --head ${_url} | tail -n1)
+
+ if [[ (( ${_http_code} == 200 )) || (( ${_http_code} == 302 )) ]]; then
+ export SOURCE_URL="${_url}"
+ log "Found, HTTP:${_http_code} = ${SOURCE_URL}"
+ break
+ fi
+ log " Lost, HTTP:${_http_code} = ${_url}"
+ ((_index++))
+ done
+
+ if [[ -z "${SOURCE_URL}" ]]; then
+ _error=30
+ log "Error ${_error}: didn't find any sources, last try was ${_url} with HTTP ${_http_code}."
+ exit ${_error}
+ fi
+}
+
+function run_once() {
+ # TODO: PC dependent
+ if [[ ! -z ${PC_LAUNCH} ]] && (( $(cat ${HOME}/${PC_LAUNCH%%.sh}.log | wc ${WC_ARG}) > 20 )); then
+ finish
+ _error=2
+ log "Warning ${_error}: ${PC_LAUNCH} already ran, exit!"
+ exit ${_error}
+ fi
+}
+
+function ssh_pubkey() {
+ local _dir
+ local _directories=(\
+ "${HOME}" \
+ "${HOME}/ssh_keys" \
+ "${HOME}/cache" \
+ )
+ local _name
+ local _test
+
+ args_required 'EMAIL SSH_PUBKEY'
+
+ _name=${EMAIL//\./_DOT_}
+ _name=${_name/@/_AT_}
+ _test=$(source /etc/profile.d/nutanix_env.sh \
+ && ncli cluster list-public-keys name=${_name})
+
+ if (( $(echo ${_test} | grep -i "Failed" | wc ${WC_ARG}) > 0 )); then
+ for _dir in "${_directories[@]}"; do
+ if [[ -e ${_dir}/${SSH_PUBKEY##*/} ]]; then
+ log "Note that a period and other symbols aren't allowed to be a key name."
+
+ log "Locally adding ${_dir}/${SSH_PUBKEY##*/} under ${_name} label..."
+ ncli cluster add-public-key name=${_name} file-path=${_dir}/${SSH_PUBKEY##*/} || true
+
+ break
+ fi
+ done
+ else
+ log "IDEMPOTENCY: found pubkey ${_name}"
+ fi
+}
diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh
new file mode 100755
index 0000000..86f6bf9
--- /dev/null
+++ b/scripts/we-ts2019.sh
@@ -0,0 +1,176 @@
+#!/usr/bin/env bash
+# Run via 'bash -x' (or add -x above) to trace execution while debugging.
+
+#__main()__________
+
+# Source Nutanix environment (PATH + aliases), then common routines + global variables
+. /etc/profile.d/nutanix_env.sh
+. we-lib.common.sh
+. global.vars.sh
+begin
+
+args_required 'EMAIL PE_PASSWORD PC_VERSION'
+
+#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization
+# Some parallelization possible to critical path; not much: would require prerequisite checks to work!
+
+# Dispatch on $1: PE = stage Prism Element then deploy PC; PC = configure
+# Prism Central (run remotely on the PC VM); FILES = deploy Nutanix Files only.
+case ${1} in
+  PE | pe )
+    . lib.pe.sh
+
+    export PC_DEV_VERSION='5.10.2'
+    export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json'
+    export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar'
+    #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json'
+    #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar'
+    export FILES_VERSION='3.2.0.1'
+    export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
+    export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
+    #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json'
+    #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2'
+    export NW2_DHCP_START="${IPV4_PREFIX}.132"
+    export NW2_DHCP_END="${IPV4_PREFIX}.229"
+
+    args_required 'PE_HOST PC_LAUNCH'
+    ssh_pubkey & # non-blocking, parallel suitable
+
+    dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \
+    && pe_license \
+    && pe_init \
+    && network_configure \
+    && authentication_source \
+    && pe_auth
+
+    # $? is the status of the &&-chain above (i.e. of the first failing step).
+    if (( $? == 0 )) ; then
+      pc_install "${NW1_NAME}" \
+      && prism_check 'PC' \
+
+      # NOTE(review): the trailing '\' above continues onto the blank line -- harmless but confusing.
+      if (( $? == 0 )) ; then
+        _command="EMAIL=${EMAIL} \
+          PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \
+          PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES"
+
+        cluster_check \
+        && log "Remote asynchroneous PC Image import script... ${_command}" \
+        && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" &
+
+        pc_configure \
+        && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!"
+        log "PE = https://${PE_HOST}:9440"
+        log "PC = https://${PC_HOST}:9440"
+
+        files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' &
+        #dependencies 'remove' 'sshpass'
+        finish
+      fi
+    else
+      finish
+      _error=18
+      log "Error ${_error}: in main functional chain, exit!"
+      exit ${_error}
+    fi
+  ;;
+  PC | pc )
+    . lib.pc.sh
+
+    export QCOW2_REPOS=(\
+      'http://10.42.8.50/images/' \
+      'https://s3.amazonaws.com/get-ahv-images/' \
+    ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share
+    export QCOW2_IMAGES=(\
+      CentOS7.qcow2 \
+      Windows2016.qcow2 \
+      Windows2012R2.qcow2 \
+      Windows10-1709.qcow2 \
+      ToolsVM.qcow2 \
+      Windows2012R2.iso \
+      SQLServer2014SP3.iso \
+      Nutanix-VirtIO-1.1.3.iso \
+      xtract-vm-2.0.3.qcow2 \
+      ERA-Server-build-1.0.1.qcow2 \
+      sherlock-k8s-base-image_403.qcow2 \
+      hycu-3.5.0-6253.qcow2 \
+      VeeamAvailability_1.0.457.vmdk \
+      VeeamBR_9.5.4.2615.Update4.iso \
+      'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \
+    )
+
+    dependencies 'install' 'jq' || exit 13
+
+    ssh_pubkey & # non-blocking, parallel suitable
+
+    pc_passwd
+    ntnx_cmd # check cli services available?
+
+    export NUCLEI_SERVER='localhost'
+    export NUCLEI_USERNAME="${PRISM_ADMIN}"
+    export NUCLEI_PASSWORD="${PE_PASSWORD}"
+    # nuclei -debug -username admin -server localhost -password x vm.list
+
+    if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX
+      log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|"
+      pe_determine ${1}
+      . global.vars.sh # re-populate PE_HOST dependencies
+    else
+      CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \
+        jq -r .data[0].clusterDetails.clusterName)
+      if [[ ${CLUSTER_NAME} != '' ]]; then
+        log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}."
+      fi
+    fi
+
+    if [[ ! -z "${2}" ]]; then # hidden bonus
+      log "Don't forget: $0 first.last@nutanixdc.local%password"
+      calm_update && exit 0
+    fi
+
+    export ATTEMPTS=2
+    export SLEEP=10
+
+    pc_init \
+    && pc_dns_add \
+    && pc_ui \
+    && pc_auth \
+    && pc_smtp
+
+    ssp_auth \
+    && calm_enable \
+    && lcm \
+    && images \
+    && prism_check 'PC'
+
+    log "Non-blocking functions (in development) follow."
+    # shellcheck disable=2206
+    _pc_version=(${PC_VERSION//./ })
+
+    # Commented out to take image handling back to the previous update.
+    #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then
+    #  log "PC<=5.8, Image imports..."
+    #  ts_images
+    #fi
+    pc_project
+    flow_enable
+    pc_admin
+    # ntnx_download 'AOS' # function in lib.common.sh
+
+    unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD
+
+    # NOTE(review): $? here reflects 'unset' above (always 0), so the error
+    # branch below is unreachable -- confirm which command this was meant to test.
+    if (( $? == 0 )); then
+      #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \
+      #&&
+      log "PC = https://${PC_HOST}:9440"
+      finish
+    else
+      _error=19
+      log "Error ${_error}: failed to reach PC!"
+      exit ${_error}
+    fi
+  ;;
+  FILES | files | afs )
+    files_install
+    ;;
+  #IMAGES | images )
+    # . lib.pc.sh
+    #ts_images
+    #;;
+esac
diff --git a/stage_workshop.sh b/stage_workshop.sh
index 82e2b78..af564bd 100755
--- a/stage_workshop.sh
+++ b/stage_workshop.sh
@@ -2,15 +2,37 @@
# use bash -x to debug command substitution and evaluation instead of echo.
DEBUG=
+# Source Workshop common routines + global variables
+. scripts/lib.common.sh
+. scripts/global.vars.sh
+begin
+
# For WORKSHOPS keyword mappings to scripts and variables, please use:
-# - Calm || Citrix || Summit
+# - Calm || Bootcamp || Citrix || Summit
# - PC #.#
WORKSHOPS=(\
-"Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)" \
-"Calm Workshop (AOS 5.8.x/AHV PC 5.10.x) = Stable (AutoDC2)" \
-"Calm Workshop (AOS 5.9+/AHV PC 5.10.x) = Development" \
-"Citrix Desktop on AHV Workshop (AOS/AHV 5.6)" \
-#"Tech Summit 2018" \
+"Basic / API Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Private Cloud Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Leap Add-On Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Databases Era with MSSQL Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Consolidated Storage Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Calm IaaS Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"SE POC Guide (AHV) Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+"In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \
+
) # Adjust function stage_clusters, below, for file/script mappings as needed
function stage_clusters() {
@@ -23,38 +45,127 @@ function stage_clusters() {
local _pe_launch # will be transferred and executed on PE
local _pc_launch # will be transferred and executed on PC
local _sshkey=${SSH_PUBKEY}
- local _wc_arg='--lines'
+ #local _wc_arg='--lines'
+ local _wc_arg=${WC_ARG}
local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]}
# Map to latest and greatest of each point release
# Metadata URLs MUST be specified in lib.common.sh function: ntnx_download
# TODO: make WORKSHOPS and map a JSON configuration file?
- if (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then
+ if (( $(echo ${_workshop} | grep -i "PC 2020.9" | wc ${WC_ARG}) > 0 )); then
export PC_VERSION="${PC_DEV_VERSION}"
- elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then
+ elif (( $(echo ${_workshop} | grep -i "PC 2020.9" | wc ${WC_ARG}) > 0 )); then
+ export PC_VERSION="${PC_CURRENT_VERSION}"
+ elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then
export PC_VERSION="${PC_STABLE_VERSION}"
- elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then
- export PC_VERSION=5.9.2
- elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then
- export PC_VERSION=5.7.1.1
- elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then
- export PC_VERSION=5.6.2
fi
# Map workshop to staging script(s) and libraries,
# _pe_launch will be executed on PE
- if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then
+ if (( $(echo ${_workshop} | grep -i "^Stage-All Bootcamps" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='all_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Consolidated Storage Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='Consolidated_Storage_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Basic / API Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='basic_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Private Cloud" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='privatecloud_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Leap Add-On" | wc ${WC_ARG}) > 0 )); then
_libraries+='lib.pe.sh lib.pc.sh'
- _pe_launch='calm.sh'
+ _pe_launch='leap_addon_bootcamp.sh'
_pc_launch=${_pe_launch}
fi
- if (( $(echo ${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then
- _pe_launch='stage_citrixhow.sh'
- _pc_launch='stage_citrixhow_pc.sh'
+ if (( $(echo ${_workshop} | grep -i "^Databases Era -Stage All- Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='era_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Databases Era with MSSQL Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='era_mssql_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Databases Era with Oracle Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='era_oracle_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Databases Era with Postgres Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='era_postgres_bootcamp.sh'
+ _pc_launch=${_pe_launch}
fi
- if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then
- _pe_launch='stage_ts18.sh'
- _pc_launch='stage_ts18_pc.sh'
+ if (( $(echo ${_workshop} | grep -i "^Files" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='files_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Objects" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='objects_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Calm Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='calm_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Calm IaaS" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='calm_iaas_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Karbon Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='karbon_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Citrix" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='citrix_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Frame" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='frame_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Private Cloud Splunk on AHV with Objects Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='splunk_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Cloud Native Application Modernization Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='app_modernization_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^Cloud Native CI/CD with Calm and Karbon Bootcamp" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='cicd_bootcamp.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^SE POC Guide (AHV) Staging" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='poc_workshop_base_staging.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i "^In Development Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='dev_privatecloud_bootcamp.sh'
+ _pc_launch=${_pe_launch}
fi
dependencies 'install' 'sshpass'
@@ -92,7 +203,13 @@ ______Warning -- curl time out indicates either:
- Foundation and initialization (Cluster IP API response) hasn't completed.
EoM
- prism_check 'PE' 60
+ _error=$(prism_check 'PE' '1')
+ # If we were unable to connect to the PRISM UI, send a message to the console and move to the next
+ if [[ ${_error} != *"successful"* ]]; then
+ log "We were unable to connect to the PRISM UI on ${PE_HOST}..."
+ continue
+ fi
+
if [[ -d cache ]]; then
pushd cache || true
@@ -143,7 +260,8 @@ EoM
fi
log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}"
- remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &"
+ ## TODO: If DEBUG is set, we run the below command with bash -x
+ remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &"
unset PE_CONFIGURATION
# shellcheck disable=SC2153
@@ -297,11 +415,6 @@ function select_workshop() {
#__main__
-# Source Workshop common routines + global variables
-. scripts/lib.common.sh
-. scripts/global.vars.sh
-begin
-
_VALIDATE='Validate Staged Clusters'
_CLUSTER_FILE='Cluster Input File'
CLUSTER_LIST=
diff --git a/sync_upstream.sh b/sync_upstream.sh
new file mode 100755
index 0000000..4dc256c
--- /dev/null
+++ b/sync_upstream.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+# Sync the local 'master' branch with the 'upstream' remote.
+# Abort on the first failing step so a failed fetch/checkout never merges.
+set -euo pipefail
+
+git fetch upstream
+git checkout master
+git merge upstream/master
diff --git a/test/LCM_Test/# GRABBING THE UUIDS b/test/LCM_Test/# GRABBING THE UUIDS
new file mode 100644
index 0000000..034e00f
--- /dev/null
+++ b/test/LCM_Test/# GRABBING THE UUIDS
@@ -0,0 +1,72 @@
+# GRABBING THE UUIDS -- NOTE(review): the 'Authorization: Basic' header below embeds real admin credentials; rotate and redact before sharing these notes.
+curl -X POST https://10.42.12.39:9440/api/nutanix/v3/groups -H 'Authorization: Basic YWRtaW46dGVjaFgyMDE5IQ==' -H 'Content-Type: application/json' -H 'Postman-Token: 113df335-5985-4145-bf7c-46aa97b2bef4' -H 'cache-control: no-cache' -d '{
+ "entity_type": "lcm_available_version",
+ "grouping_attribute": "entity_uuid",
+ "group_member_count": 1000,
+ "group_member_attributes": [
+ {
+ "attribute": "uuid"
+ },
+ {
+ "attribute": "entity_uuid"
+ },
+ {
+ "attribute": "entity_class"
+ },
+ {
+ "attribute": "status"
+ },
+ {
+ "attribute": "version"
+ },
+ {
+ "attribute": "dependencies"
+ },
+ {
+ "attribute": "order"
+ }
+ ]
+}' --insecure | jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid")' | sort -u | head -3
+
+
+
+# GRABBING THE Versions of the UUID
+
+curl -X POST https://10.42.12.39:9440/api/nutanix/v3/groups -H 'Authorization: Basic YWRtaW46dGVjaFgyMDE5IQ==' -H 'Content-Type: application/json' -H 'Postman-Token: 113df335-5985-4145-bf7c-46aa97b2bef4' -H 'cache-control: no-cache' -d '{
+ "entity_type": "lcm_available_version",
+ "grouping_attribute": "entity_uuid",
+ "group_member_count": 1000,
+ "group_member_attributes": [
+ {
+ "attribute": "uuid"
+ },
+ {
+ "attribute": "entity_uuid"
+ },
+ {
+ "attribute": "entity_class"
+ },
+ {
+ "attribute": "status"
+ },
+ {
+ "attribute": "version"
+ },
+ {
+ "attribute": "dependencies"
+ },
+ {
+ "attribute": "order"
+ }
+ ]
+}' --insecure | jq 'if .group_results[].entity_results[].data[].values[].values[0]=="03a6e4a2-fa0e-4698-b0c0-e142820a2e94" then
+> if .group_results[].entity_results[].data[].name=="version" then
+
+jq 'if .group_results[].entity_results[].data[].values[].values[0]=="03a6e4a2-fa0e-4698-b0c0-e142820a2e94" then
+if .group_results[].entity_results[].data[].name=="version" then
+.group_results[].entity_results[].data[].values[].values[0] end
+end'
+
+
+jq 'if .group_results[].entity_results[].data[].values[].values[0]=="03a6e4a2-fa0e-4698-b0c0-e142820a2e94" then if .group_results[].entity_results[].data[].name=="version" then .group_results[].entity_results[].data[].values[].values[0] end
+end'
\ No newline at end of file
diff --git a/test/LCM_Test/Run the LCM inventory b/test/LCM_Test/Run the LCM inventory
new file mode 100644
index 0000000..750588d
--- /dev/null
+++ b/test/LCM_Test/Run the LCM inventory
@@ -0,0 +1,47 @@
+# Run the LCM inventory
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}
+
+# Run the upgrade of the LCM-2 steps; 1) Generate a plan; 2) execute the plan
+
+# Generate Plan API
+https://10.42.9.39:9440/PrismGateway/services/rest/v1/genesis
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"generate_plan\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"639b6f37-06c8-4fe0-aeca-5b2c89e61fe6\",\"2.6.0.2\"],[\"dd69fc72-df7f-4195-bb28-6f74eafe353a\",\"2.6.0.2\"]]]}}"}
+
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"generate_plan\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"06ac7598-d25a-4ba6-a6de-e728446747a1\",\"2.6.0.3\"],[\"5f99949b-7ae6-4095-b23f-f959054f6099\",\"2.6.0.3\"]]]}}"}
+
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method":\"generate_plan\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"293c7506-565e-40a4-93e0-75f1749581ef\",\"2.6.0.3\"],[\"83ee21f9-3468-4693-9e63-a83a657ec6a3\",\"2.6.0.3\"]]]}}"}
+
+# Run the upgrade API
+https://10.42.9.39:9440/PrismGateway/services/rest/v1/genesis
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_update\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"639b6f37-06c8-4fe0-aeca-5b2c89e61fe6\",\"2.6.0.2\"],[\"dd69fc72-df7f-4195-bb28-6f74eafe353a\",\"2.6.0.2\"]]]}}"}
+
+
+
+# Progress API endpoint for all LCM steps where the API calls return an ID; entities -> percentageCompleted: (JSON value)
+https://10.42.9.39:9440/PrismGateway/services/rest/v1/progress_monitors?filterCriteria=parent_task_uuid%3D%3D817a37e2-9e57-4774-b889-57325485fd31
+817a37e2-9e57-4774-b889-57325485fd31 = UUID of the task that has been returned by the Execute plan
+%3D%3D means '=='
+
+# Set LCM auto update
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"configure\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",null,null,true]}}"}
+
+# Get the UUIDs and versions of Calm and PC.
+https://10.42.41.39:9440/api/nutanix/v3/groups
+{"entity_type":"lcm_available_version","group_member_attributes":[{"attribute":"uuid"},{"attribute":"entity_uuid"},{"attribute":"entity_class"},{"attribute":"status"},{"attribute":"version"},{"attribute":"dependencies"},{"attribute":"order"}],"query_name":"prism:BaseGroupModel"}
+This will deliver a massive nested JSON file....
+
+
+Get another look at the data in the cluster on existing versions in the PC.
+{"entity_type":"lcm_entity","group_member_count":1000,"group_member_attributes":[{"attribute":"id"},{"attribute":"uuid"},{"attribute":"entity_model"},{"attribute":"version"},{"attribute":"location_id"},{"attribute":"entity_class"},{"attribute":"description"},{"attribute":"last_updated_time_usecs"},{"attribute":"request_version"},{"attribute":"_master_cluster_uuid_"}],"query_name":"prism:LCMQueryModel","filter_criteria":"entity_class==PC;_master_cluster_uuid_==[no_val]"}
+
+ _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"${_first_uuid}\\\",\\\"${_first_version}\\\"],[\\\"${_sec_uuid}\\\",\\\"${_sec_version}\\\"]]]}}\"}" ${_url_lcm})
+
+ _task_id=`curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\k_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"perform_update\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"${_first_uuid}\\\",\\\"${_first_version}\\\"],[\\\"${_sec_uuid}\\\",\\\"${_sec_version}\\\"]]]}}\"}" ${_url_lcm}`
+
+Checking if Calm is enabled via:
+
+https://10.42.41.39:9440/api/nutanix/v3/services/nucalm/status
+Response is:
+{
+ "service_enablement_status": "ENABLED"
+}
\ No newline at end of file
diff --git a/test/LCM_Test/lcm_ops.out b/test/LCM_Test/lcm_ops.out
new file mode 100644
index 0000000..efa7e34
--- /dev/null
+++ b/test/LCM_Test/lcm_ops.out
@@ -0,0 +1,3728 @@
+2019-03-27 08:36:21 INFO zookeeper_session.py:113 lcm_ops_by_pc is attempting to connect to Zookeeper
+2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory NVMUtils
+2019-03-27 08:36:21 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto
+2019-03-27 08:36:21 INFO nvm_utils.py:55 IS PC VM Call: True
+2019-03-27 08:36:21 INFO nvm_utils.py:45 Retrieving PC Utilities.
+2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory StagingUtils
+2019-03-27 08:36:21 INFO zookeeper_session.py:113 lcm_ops_by_pc is attempting to connect to Zookeeper
+2019-03-27 08:36:21 INFO operations.py:222 Found operation kDownloadOp
+2019-03-27 08:36:21 INFO operations.py:222 Found operation kInventoryOp
+2019-03-27 08:36:21 INFO ergon_utils.py:308 Root task uuid 3c1b3233-2377-4209-8cc1-421c2cb8c228
+2019-03-27 08:36:21 INFO zookeeper_session.py:113 lcm_ops_by_pc is attempting to connect to Zookeeper
+2019-03-27 08:36:21 INFO staging_utils.py:33 Retrieving the Catalog staging utils.
+2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory FoundationClient
+2019-03-27 08:36:21 INFO foundation_rest_client.py:134 Getting foundation version
+2019-03-27 08:36:21 INFO foundation_rest_client.py:95 Making GET request to http://localhost:8000/foundation/version
+2019-03-27 08:36:21 ERROR foundation_rest_client.py:140 Failed to get foundation version. The ret code is and the response returned is
+2019-03-27 08:36:21 INFO foundation_client.py:114 Retrieving the Foundation Genesis Client.
+2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory PCUtils
+2019-03-27 08:36:21 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto
+2019-03-27 08:36:21 INFO nvm_utils.py:55 IS PC VM Call: True
+2019-03-27 08:36:21 INFO nvm_utils.py:45 Retrieving PC Utilities.
+2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory Downloader
+2019-03-27 08:36:21 INFO downloader.py:23 Retrieving the Catalog Downloader.
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:399 Waiting for leadership change event
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:127 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC.
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39]
+2019-03-27 08:36:21 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 0 [None]
+2019-03-27 08:36:21 INFO lcm_actions_helper.py:275 Using ZK WAL to store actions list
+2019-03-27 08:36:21 INFO actions.py:584 actionsType: 0
+2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: 9d565964-0c6d-4247-bde5-903e76a31fd1
+2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "release.karbon.update"
+status {
+ state: 1000
+ description: "Created LCM task WAL for an inventory sub-task"
+}
+env_list: "pc"
+
+2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740
+2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "nutanix.pc.update"
+status {
+ state: 1000
+ description: "Created LCM task WAL for an inventory sub-task"
+}
+env_list: "pc"
+
+2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8
+2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "release.epsilon.update"
+status {
+ state: 1000
+ description: "Created LCM task WAL for an inventory sub-task"
+}
+env_list: "pc"
+
+2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: b462c306-b2a6-48dd-8ecc-8237e3006d14
+2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "release.calm.update"
+status {
+ state: 1000
+ description: "Created LCM task WAL for an inventory sub-task"
+}
+env_list: "pc"
+
+2019-03-27 08:36:21 ERROR lcm_genesis.py:438 Failed to get host type
+2019-03-27 08:36:21 INFO actions.py:618 De-duplicated flag list: []
+2019-03-27 08:36:21 INFO lcm_actions_helper.py:93 action_list: []
+2019-03-27 08:36:21 INFO lcm_actions_helper.py:359 No actions found.
+2019-03-27 08:36:21 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC
+2019-03-27 08:36:21 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 30 [30]
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39]
+2019-03-27 08:36:21 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 30 [None]
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39]
+2019-03-27 08:36:21 INFO catalog_staging_utils.py:105 Staging module release.karbon.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:36:21 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:36:32 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1', '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8', '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655']
+2019-03-27 08:36:32 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:36:32 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:36:33 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1
+2019-03-27 08:36:33 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1
+2019-03-27 08:36:34 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73
+2019-03-27 08:36:34 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73
+2019-03-27 08:36:35 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:36:35 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:36:36 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8
+2019-03-27 08:36:36 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8
+2019-03-27 08:36:37 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655
+2019-03-27 08:36:37 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655
+2019-03-27 08:36:38 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:36:38 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:36:38 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation
+2019-03-27 08:36:38 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 40 [40]
+2019-03-27 08:36:38 INFO lcm_ops_by_pc:160 State [1002], Handler [_perform_operation_by_pc], PC - [10.42.12.39]
+2019-03-27 08:36:38 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 40 [None]
+2019-03-27 08:36:38 INFO lcm_ops_by_pc:250 Performing 101 operation
+2019-03-27 08:36:39 INFO lcm_ops_by_pc:295 Output: DEBUG: Currently installed version of karbon-core is 0.8.2
+DEBUG: Currently installed version of karbon-ui is 0.8.2
+##START##
+PC,None,Karbon,Karbon container service,0.8.2,1
+##END##
+
+2019-03-27 08:36:39 INFO lcm_ops_by_pc:302 Inventory result: ['PC', 'None', 'Karbon', 'Karbon container service', '0.8.2', '1']
+2019-03-27 08:36:39 INFO cpdb_utils.py:1143 Creating new entity: c9ee6d12-7141-453f-a345-115e392e27e1
+2019-03-27 08:36:39 INFO cpdb_utils.py:1161 Family: , Class: PC, Model: Karbon
+2019-03-27 08:36:39 INFO cpdb_utils.py:888 No available version tables exists to delete
+2019-03-27 08:36:39 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC
+2019-03-27 08:36:39 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 60 [60]
+2019-03-27 08:36:39 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39]
+2019-03-27 08:36:39 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 60 [None]
+2019-03-27 08:36:39 INFO lcm_ops_by_pc:322 Performing clean up post operation
+2019-03-27 08:36:40 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation
+2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 70 [70]
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39]
+2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 70 [None]
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:333 Deferring post-actions till the last task of the batch.
+2019-03-27 08:36:40 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC
+2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 3, Percentage: 100 [100]
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:127 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC.
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39]
+2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 0 [None]
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:192 Pre-actions already executed. Skipping.
+2019-03-27 08:36:40 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC
+2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 30 [30]
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39]
+2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 30 [None]
+2019-03-27 08:36:40 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39]
+2019-03-27 08:36:40 INFO catalog_staging_utils.py:105 Staging module nutanix.pc.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:36:40 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:36:48 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/216663c2-9dde-42f3-be4d-6b9ee86476d9', '/home/nutanix/tmp/lcm_staging/3203a9de-6158-4830-9dee-19eb1c13e250', '/home/nutanix/tmp/lcm_staging/3c5ee449-6b6e-4640-890f-c546f794ca6a', '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b']
+2019-03-27 08:36:48 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/216663c2-9dde-42f3-be4d-6b9ee86476d9
+2019-03-27 08:36:48 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/216663c2-9dde-42f3-be4d-6b9ee86476d9
+2019-03-27 08:36:49 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/3203a9de-6158-4830-9dee-19eb1c13e250
+2019-03-27 08:36:49 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/3203a9de-6158-4830-9dee-19eb1c13e250
+2019-03-27 08:36:50 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/3c5ee449-6b6e-4640-890f-c546f794ca6a
+2019-03-27 08:36:50 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/3c5ee449-6b6e-4640-890f-c546f794ca6a
+2019-03-27 08:36:51 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:36:51 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:36:52 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:36:52 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:36:52 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation
+2019-03-27 08:36:52 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 40 [40]
+2019-03-27 08:36:52 INFO lcm_ops_by_pc:160 State [1002], Handler [_perform_operation_by_pc], PC - [10.42.12.39]
+2019-03-27 08:36:52 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 40 [None]
+2019-03-27 08:36:52 INFO lcm_ops_by_pc:250 Performing 101 operation
+2019-03-27 08:36:55 INFO lcm_ops_by_pc:295 Output: ##START##
+PC CORE CLUSTER,None,PC,PC version,5.10.2,1
+##END##
+
+2019-03-27 08:36:55 INFO lcm_ops_by_pc:302 Inventory result: ['PC CORE CLUSTER', 'None', 'PC', 'PC version', '5.10.2', '1']
+2019-03-27 08:36:55 INFO cpdb_utils.py:1143 Creating new entity: d6edff2c-f59f-4754-978b-06b6237796b4
+2019-03-27 08:36:55 INFO cpdb_utils.py:1161 Family: , Class: PC CORE CLUSTER, Model: PC
+2019-03-27 08:36:55 INFO cpdb_utils.py:888 No available version tables exists to delete
+2019-03-27 08:36:55 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC
+2019-03-27 08:36:55 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 60 [60]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39]
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 60 [None]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:322 Performing clean up post operation
+2019-03-27 08:36:56 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 70 [70]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39]
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 70 [None]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:333 Deferring post-actions till the last task of the batch.
+2019-03-27 08:36:56 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 3, Percentage: 100 [100]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:127 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC.
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39]
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 0 [None]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:192 Pre-actions already executed. Skipping.
+2019-03-27 08:36:56 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 30 [30]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39]
+2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 30 [None]
+2019-03-27 08:36:56 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39]
+2019-03-27 08:36:56 INFO catalog_staging_utils.py:105 Staging module release.epsilon.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:36:56 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:37:06 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771', '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8']
+2019-03-27 08:37:06 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:37:06 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:37:07 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771
+2019-03-27 08:37:07 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771
+2019-03-27 08:37:08 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8
+2019-03-27 08:37:08 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8
+2019-03-27 08:37:09 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:37:09 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:37:10 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8
+2019-03-27 08:37:10 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8
+2019-03-27 08:37:11 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:37:11 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:37:11 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation
+2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 40 [40]
+2019-03-27 08:37:11 INFO lcm_ops_by_pc:160 State [1002], Handler [_perform_operation_by_pc], PC - [10.42.12.39]
+2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 40 [None]
+2019-03-27 08:37:11 INFO lcm_ops_by_pc:250 Performing 101 operation
+2019-03-27 08:37:11 INFO lcm_ops_by_pc:295 Output: ##START##
+PC,None,Epsilon,PC Container based orchestration engine service,2.5.1,1
+##END##
+
+2019-03-27 08:37:11 INFO lcm_ops_by_pc:302 Inventory result: ['PC', 'None', 'Epsilon', 'PC Container based orchestration engine service', '2.5.1', '1']
+2019-03-27 08:37:11 INFO cpdb_utils.py:1143 Creating new entity: 758942f1-d42d-4d49-99fc-b73e2f2dca30
+2019-03-27 08:37:11 INFO cpdb_utils.py:1161 Family: , Class: PC, Model: Epsilon
+2019-03-27 08:37:11 INFO cpdb_utils.py:888 No available version tables exists to delete
+2019-03-27 08:37:11 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC
+2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 60 [60]
+2019-03-27 08:37:11 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39]
+2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 60 [None]
+2019-03-27 08:37:11 INFO lcm_ops_by_pc:322 Performing clean up post operation
+2019-03-27 08:37:12 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation
+2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 70 [70]
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39]
+2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 70 [None]
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:333 Deferring post-actions till the last task of the batch.
+2019-03-27 08:37:12 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC
+2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 3, Percentage: 100 [100]
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:127 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC.
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39]
+2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 0 [None]
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:192 Pre-actions already executed. Skipping.
+2019-03-27 08:37:12 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC
+2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 30 [30]
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39]
+2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 30 [None]
+2019-03-27 08:37:12 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39]
+2019-03-27 08:37:12 INFO catalog_staging_utils.py:105 Staging module release.calm.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:37:12 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:37:21 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac', '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867', '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc', '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c']
+2019-03-27 08:37:21 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac
+2019-03-27 08:37:22 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac
+2019-03-27 08:37:22 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867
+2019-03-27 08:37:23 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867
+2019-03-27 08:37:23 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc
+2019-03-27 08:37:24 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc
+2019-03-27 08:37:24 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:37:25 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:37:25 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:37:26 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:37:26 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:37:26 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:37:26 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation
+2019-03-27 08:37:26 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 40 [40]
+2019-03-27 08:37:26 INFO lcm_ops_by_pc:160 State [1002], Handler [_perform_operation_by_pc], PC - [10.42.12.39]
+2019-03-27 08:37:26 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 40 [None]
+2019-03-27 08:37:26 INFO lcm_ops_by_pc:250 Performing 101 operation
+2019-03-27 08:37:27 INFO lcm_ops_by_pc:295 Output: ##START##
+PC,None,Calm,PC Container based cloud application lifecycle management service,2.4.0,1
+##END##
+
+2019-03-27 08:37:27 INFO lcm_ops_by_pc:302 Inventory result: ['PC', 'None', 'Calm', 'PC Container based cloud application lifecycle management service', '2.4.0', '1']
+2019-03-27 08:37:27 INFO cpdb_utils.py:1143 Creating new entity: 03a6e4a2-fa0e-4698-b0c0-e142820a2e94
+2019-03-27 08:37:27 INFO cpdb_utils.py:1161 Family: , Class: PC, Model: Calm
+2019-03-27 08:37:27 INFO cpdb_utils.py:888 No available version tables exists to delete
+2019-03-27 08:37:27 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC
+2019-03-27 08:37:27 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 60 [60]
+2019-03-27 08:37:27 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39]
+2019-03-27 08:37:27 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 60 [None]
+2019-03-27 08:37:27 INFO lcm_ops_by_pc:322 Performing clean up post operation
+2019-03-27 08:37:28 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation
+2019-03-27 08:37:28 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 70 [70]
+2019-03-27 08:37:28 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39]
+2019-03-27 08:37:28 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 70 [None]
+2019-03-27 08:37:28 INFO lcm_actions_helper.py:275 Using ZK WAL to store actions list
+2019-03-27 08:37:28 INFO actions.py:584 actionsType: 1
+2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: 9d565964-0c6d-4247-bde5-903e76a31fd1
+2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "release.karbon.update"
+status {
+ state: 1004
+ description: "Finished to execute post-actions on PC"
+}
+env_list: "pc"
+
+2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740
+2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "nutanix.pc.update"
+status {
+ state: 1004
+ description: "Finished to execute post-actions on PC"
+}
+env_list: "pc"
+
+2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8
+2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "release.epsilon.update"
+status {
+ state: 1004
+ description: "Finished to execute post-actions on PC"
+}
+env_list: "pc"
+
+2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: b462c306-b2a6-48dd-8ecc-8237e3006d14
+2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5"
+reference_name: "release.calm.update"
+status {
+ state: 1003
+ description: "Finished to perform cleanup on PC post operation"
+ operation_done: true
+}
+env_list: "pc"
+
+2019-03-27 08:37:28 ERROR lcm_genesis.py:438 Failed to get host type
+2019-03-27 08:37:28 INFO actions.py:618 De-duplicated flag list: []
+2019-03-27 08:37:28 INFO lcm_actions_helper.py:93 action_list: []
+2019-03-27 08:37:28 INFO lcm_actions_helper.py:359 No actions found.
+2019-03-27 08:37:28 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC
+2019-03-27 08:37:28 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 3, Percentage: 100 [100]
+2019-03-27 08:37:28 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful
+2019-03-27 08:37:28 INFO lcm_ops_by_pc:381 LCM operation 101 for 304 is successful
+2019-03-27 08:37:51 DEBUG zookeeper_session.py:90 Using host_port_list: zk1:9876
+2019-03-27 08:37:51 INFO zookeeper_session.py:113 lcm_ops_for_rim is attempting to connect to Zookeeper
+2019-03-27 08:37:51 INFO lcm_ops_for_rim:143 Setting Factories for RIM operations!
+2019-03-27 08:37:51 INFO base_factory.py:63 Setting factory NVMUtils implementation
+2019-03-27 08:37:51 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto
+2019-03-27 08:37:51 INFO nvm_utils.py:55 IS PC VM Call: True
+2019-03-27 08:37:51 INFO nvm_utils.py:45 Retrieving PC Utilities.
+2019-03-27 08:37:51 INFO base_factory.py:63 Setting factory PCUtils implementation
+2019-03-27 08:37:51 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto
+2019-03-27 08:37:51 INFO nvm_utils.py:55 IS PC VM Call: True
+2019-03-27 08:37:51 INFO nvm_utils.py:45 Retrieving PC Utilities.
+2019-03-27 08:37:51 INFO lcm_ops_for_rim:105 Fetching available versions from repository
+2019-03-27 08:37:51 INFO repository_image_utils.py:345 Performing Inventory for Repository Image
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:299 Fetching entities currently inventoried
+2019-03-27 08:37:51 INFO lcm_ops_for_rim:173 Waiting for leadership change event
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "c9ee6d12-7141-453f-a345-115e392e27e1"
+location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66"
+entity_class: "PC"
+entity_model: "Karbon"
+version: "0.8.2"
+hw_family: ""
+description: "Karbon container service"
+count: 1
+last_detected_time_usecs: 1553700999578310
+ to entity set
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "d6edff2c-f59f-4754-978b-06b6237796b4"
+location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66"
+entity_class: "PC CORE CLUSTER"
+entity_model: "PC"
+version: "5.10.2"
+hw_family: ""
+description: "PC version"
+count: 1
+last_detected_time_usecs: 1553701015964970
+ to entity set
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30"
+location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66"
+entity_class: "PC"
+entity_model: "Epsilon"
+version: "2.5.1"
+hw_family: ""
+description: "PC Container based orchestration engine service"
+count: 1
+last_detected_time_usecs: 1553701031786357
+ to entity set
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66"
+entity_class: "PC"
+entity_model: "Calm"
+version: "2.4.0"
+hw_family: ""
+description: "PC Container based cloud application lifecycle management service"
+count: 1
+last_detected_time_usecs: 1553701047595510
+ to entity set
+2019-03-27 08:37:51 INFO repository_image_utils.py:316 Entity Dict created is as follows: defaultdict(, {('PC', 'CALM'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66'], ('PC', 'EPSILON'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66'], ('PC', 'KARBON'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66'], ('PC CORE CLUSTER', 'PC'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66']})
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:364 Fetching modules of type: update
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:374 Looping over all the update modules to perform inventory only on those modules which support repository image
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:387 Managed Entity with entity class: PC and entity model: Karbon supports repository image module
+2019-03-27 08:37:51 INFO repository_image_utils.py:1086 Retrieving module with reference name release.karbon.repository_image
+2019-03-27 08:37:51 INFO repository_image_utils.py:779 Getting tag list for: release.karbon.repository_image
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:783 Tag List for release.karbon.repository_image is []
+2019-03-27 08:37:51 INFO repository_image_utils.py:756 Getting flag list for: release.karbon.repository_image
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:760 Flag List for release.karbon.repository_image is []
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:893 Building repository_image_base_url
+2019-03-27 08:37:51 DEBUG zookeeper_session.py:90 Using host_port_list: zk1:9876
+2019-03-27 08:37:51 INFO zookeeper_session.py:113 lcm_ops_for_rim is attempting to connect to Zookeeper
+2019-03-27 08:37:51 DEBUG configuration.py:158 Found config version: "2.1.5579"
+url: "http://download.nutanix.com/lcm/2.0"
+auto_update_enabled: false
+lcm_standalone_ui_enabled: false
+lcm_pc_enabled: false
+deprecated_software_entities: "Firmware"
+
+2019-03-27 08:37:51 INFO configuration.py:137 Read config: {'url': u'http://download.nutanix.com/lcm/2.0', 'deprecated_software_entities': [u'Firmware'], 'lcm_standalone_ui_enabled': False, 'lcm_pc_enabled': False, 'version': u'2.1.5579', 'auto_update_enabled': False}
+2019-03-27 08:37:51 DEBUG repository_image_utils.py:896 URL present in config is: http://download.nutanix.com/lcm/2.0
+2019-03-27 08:37:51 INFO repository_image_utils.py:900 Repository Image Base URL built by framework: http://download.nutanix.com/lcm/2.0/builds/
+2019-03-27 08:37:51 INFO repository_image_utils.py:84 Repository Image Verification as a feature is disabled or not supported in the current AOS
+2019-03-27 08:37:51 INFO repository_image_utils.py:831 Parameters created for inventory by repository image module:{'public_key_location': '/home/nutanix/cluster/config/lcm/lcm_public.pem', 'flag_list': [], 'base_url': u'http://download.nutanix.com/lcm/2.0/builds/', 'entity_model': u'Karbon', 'rim_verification_support': False, 'tag_list': [], 'proxy_env': {'JETTY_HOME': '/usr/local/nutanix/jetty', 'MY_SERVICE_NAME': 'genesis', 'ZOO_LOG_DIR': '/home/nutanix/data/logs', 'ZOO_START_LOG_FILE': '/home/nutanix/data/logs/zookeeper.log', 'CATALINA_OUT': '/home/nutanix/data/logs/catalina.out', 'LESSOPEN': '||/usr/bin/lesspipe.sh %s', 'LOGNAME': 'nutanix', 'USER': 'nutanix', 'HOME': '/home/nutanix', 'NUTANIX_BINARY_LOG_DIR': '/home/nutanix/data/binary_logs', 'PATH': '/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin', 'CASSANDRA_HOME': '/usr/local/nutanix/apache-cassandra', 'LANG': 'en_US.UTF-8', 'CATALINA_BASE': '/home/nutanix/prism', 'SHELL': '/bin/bash', 'NUTANIX_START_LOG_FILE': '/home/nutanix/data/logs/startup.log', 'JDK_HOME': '/usr/lib/jvm/jre-1.8.0', 'SHLVL': '2', 'NUTANIX_BASE_DIR': '/usr/local/nutanix', 'HISTSIZE': '1000', 'GEVENT_RESOLVER': 'ares', 'ZOOKEEPER_HOST_PORT_LIST': 'zk1:9876', 'NUTANIX_LOG_DIR': '/home/nutanix/data/logs', 'MY_EXTERNAL_IP': '10.42.12.39', 'LIBVIRT_DEFAULT_URI': 
'qemu+ssh://root@192.168.5.1/system?no_verify=1', 'JAVA_HOME': '/usr/lib/jvm/jre-1.8.0', 'CASSANDRA_INCLUDE': '/home/nutanix/config/cassandra/cassandra.in.sh', 'XDG_RUNTIME_DIR': '/run/user/1000', 'CORE_PATTERN': '|/home/nutanix/serviceability/bin/coredump.py %p %s %e', 'PERL_LWP_SSL_VERIFY_HOSTNAME': '0', 'SSH_KEY': '/home/nutanix/.ssh/id_rsa', 'TOMCAT_HOME': '/usr/local/nutanix/apache-tomcat', 'LC_ALL': 'en_US.UTF-8', 'XDG_SESSION_ID': 'c1', '_': '/home/nutanix/cluster/bin/genesis', 'ZOOCFGDIR': '/home/nutanix/config/zookeeper', 'GLOG_max_log_size': '100', 'CASSANDRA_MAX_HEAP_SIZE': '2560M', 'HOSTNAME': 'ntnx-10-42-12-39-a-pcvm', 'CASSANDRA_CONF': '/home/nutanix/config/cassandra', 'HISTCONTROL': 'ignoredups', 'PWD': '/home/nutanix', 'MAIL': '/var/spool/mail/nutanix'}}
+2019-03-27 08:37:51 INFO repository_image_utils.py:421 Getting Version List for release.karbon.repository_image repository_image_module
+2019-03-27 08:37:51 INFO catalog_staging_utils.py:105 Staging module release.karbon.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:37:51 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:37:51 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:51 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging
+2019-03-27 08:37:52 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0
+2019-03-27 08:37:52 DEBUG ssh_client.py:188 Executing mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:52 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 mkdir -p /home/nutanix/tmp/lcm_staging
+2019-03-27 08:37:52 DEBUG ssh_client.py:203 Executed mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0
+2019-03-27 08:37:52 DEBUG cpdb_utils.py:267 Module list is
+2019-03-27 08:37:52 DEBUG cpdb_utils.py:820 Found modules: [{'location': u'c88e6e66-f152-49bc-adce-0b44827cdcaa', 'digest': u'd3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c'}, {'location': u'd7cfc33d-2f6d-408e-af8d-f67c9b8ffb67', 'digest': u'48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c'}, {'location': u'8512cf7d-c1a5-4da0-922c-7c6c1ec16859', 'digest': u'77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12'}, {'location': u'60dd8442-1902-4b0a-a16a-1d3248069c6c', 'digest': u'f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e'}, {'location': u'f80da7c4-eb51-4df8-95ca-c92cfbdf5afa', 'digest': u'd51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978'}, {'location': u'469d6ddf-2093-4585-b93c-bfad0aea61b7', 'digest': u'bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7'}]
+2019-03-27 08:37:52 DEBUG lcm_catalog.py:197 Found catalog items: GTwMh8bBe, ꊝaEP'_, QCpw, O
+&.NSqT&, p.LFP{Vp,
+&(LrdCF
+2019-03-27 08:37:52 DEBUG catalog_utils.py:262 Getting file from uuid: "\226GT\356\332wM\030\221h\235\2628bBe"
+name: "release.karbon.update"
+annotation: "d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c"
+item_type: kLCM
+version: 0
+opaque: "\n@d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "Wz6GM3E,\227\r\364\254\226\306\377\352"
+ source_list {
+ file_uuid: "\276\246C-\352_B\205\261\344>\245\230\236\344\350"
+ }
+}
+global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252"
+ version: 0
+}
+
+2019-03-27 08:37:52 DEBUG lcm_catalog.py:283 Found file uuids ['bea6432d-ea5f-4285-b1e4-3ea5989ee4e8']
+2019-03-27 08:37:52 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:37:53 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ'}
+2019-03-27 08:37:53 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\226GT\356\332wM\030\221h\235\2628bBe"
+name: "release.karbon.update"
+annotation: "d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c"
+item_type: kLCM
+version: 0
+opaque: "\n@d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "Wz6GM3E,\227\r\364\254\226\306\377\352"
+ source_list {
+ file_uuid: "\276\246C-\352_B\205\261\344>\245\230\236\344\350"
+ }
+}
+global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ'
+file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:53 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ'
+file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:53 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ'
+file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:54 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ'
+file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:37:54 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275"
+name: "nutanix.tools"
+annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c"
+item_type: kLCM
+version: 0
+opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t"
+ source_list {
+ file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+"
+ }
+}
+global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+ version: 0
+}
+
+2019-03-27 08:37:54 DEBUG lcm_catalog.py:283 Found file uuids ['48310501-3d7a-4be1-a3ed-9d990e39222b']
+2019-03-27 08:37:54 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:37:54 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw'}
+2019-03-27 08:37:54 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275"
+name: "nutanix.tools"
+annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c"
+item_type: kLCM
+version: 0
+opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t"
+ source_list {
+ file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+"
+ }
+}
+global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:54 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:54 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:55 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:37:55 DEBUG catalog_utils.py:262 Getting file from uuid: "\262\367Q\375\363\341Cp\276w\005\300\365\355\206\352"
+name: "release.karbon.update_tools"
+annotation: "77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12"
+item_type: kLCM
+version: 0
+opaque: "\n@77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "j\310\212\237]\250C\246\225\212\375y\357\350s\225"
+ source_list {
+ file_uuid: "R\261\316|]\234Mn\267\343`\355og\'\241"
+ }
+}
+global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY"
+ version: 0
+}
+
+2019-03-27 08:37:55 DEBUG lcm_catalog.py:283 Found file uuids ['52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1']
+2019-03-27 08:37:55 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:37:56 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA'}
+2019-03-27 08:37:56 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\262\367Q\375\363\341Cp\276w\005\300\365\355\206\352"
+name: "release.karbon.update_tools"
+annotation: "77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12"
+item_type: kLCM
+version: 0
+opaque: "\n@77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "j\310\212\237]\250C\246\225\212\375y\357\350s\225"
+ source_list {
+ file_uuid: "R\261\316|]\234Mn\267\343`\355og\'\241"
+ }
+}
+global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA'
+file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:56 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA'
+file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:56 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA'
+file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:57 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA'
+file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:37:57 DEBUG catalog_utils.py:262 Getting file from uuid: "\375O\n\026&.NS\232qT\273\363\201&\322"
+name: "release.karbon.repository_image"
+annotation: "d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978"
+item_type: kLCM
+version: 0
+opaque: "\n@d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "(r\345\272)\221D\206\242\230\277\257q\271\355\034"
+ source_list {
+ file_uuid: "\231\304\200\302\364TO\213\276\215 \273\322\004Ls"
+ }
+}
+global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372"
+ version: 0
+}
+
+2019-03-27 08:37:57 DEBUG lcm_catalog.py:283 Found file uuids ['99c480c2-f454-4f8b-be8d-20bbd2044c73']
+2019-03-27 08:37:57 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:37:58 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ'}
+2019-03-27 08:37:58 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\375O\n\026&.NS\232qT\273\363\201&\322"
+name: "release.karbon.repository_image"
+annotation: "d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978"
+item_type: kLCM
+version: 0
+opaque: "\n@d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "(r\345\272)\221D\206\242\230\277\257q\271\355\034"
+ source_list {
+ file_uuid: "\231\304\200\302\364TO\213\276\215 \273\322\004Ls"
+ }
+}
+global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ'
+file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:58 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ'
+file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:58 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ'
+file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:59 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ'
+file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:37:59 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\304p\022.\353L\225\224FP{V\377p\035"
+name: "release.karbon.precheck"
+annotation: "bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7"
+item_type: kLCM
+version: 0
+opaque: "\n@bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "69\323\026\270\327N\200\273\365L8\214\031\325?"
+ source_list {
+ file_uuid: "\333q\255\303\337\265Bg\215P\336\220@\377\346U"
+ }
+}
+global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267"
+ version: 0
+}
+
+2019-03-27 08:37:59 DEBUG lcm_catalog.py:283 Found file uuids ['db71adc3-dfb5-4267-8d50-de9040ffe655']
+2019-03-27 08:37:59 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:37:59 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ'}
+2019-03-27 08:37:59 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\304p\022.\353L\225\224FP{V\377p\035"
+name: "release.karbon.precheck"
+annotation: "bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7"
+item_type: kLCM
+version: 0
+opaque: "\n@bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "69\323\026\270\327N\200\273\365L8\214\031\325?"
+ source_list {
+ file_uuid: "\333q\255\303\337\265Bg\215P\336\220@\377\346U"
+ }
+}
+global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:37:59 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:37:59 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:00 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:00 DEBUG catalog_utils.py:262 Getting file from uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F"
+name: "release.linux.tools"
+annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e"
+item_type: kLCM
+version: 0
+opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "wl\207P\271OK8\263\327\214ox\207[\236"
+ source_list {
+ file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214"
+ }
+}
+global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+ version: 0
+}
+
+2019-03-27 08:38:00 DEBUG lcm_catalog.py:283 Found file uuids ['9fa74857-28e6-4f6e-9c6f-fa4aa379a38c']
+2019-03-27 08:38:00 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:01 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA'}
+2019-03-27 08:38:01 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F"
+name: "release.linux.tools"
+annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e"
+item_type: kLCM
+version: 0
+opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "wl\207P\271OK8\263\327\214ox\207[\236"
+ source_list {
+ file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214"
+ }
+}
+global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:01 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:01 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:02 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:02 DEBUG ssh_client.py:188 Executing ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:02 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 ls "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:02 DEBUG ssh_client.py:203 Executed ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:02 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1', '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8', '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655']
+2019-03-27 08:38:02 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:02 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:02 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:03 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:03 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:03 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:03 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:03 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39: rv 0
+2019-03-27 08:38:03 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1
+2019-03-27 08:38:03 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:03 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:04 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:04 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1
+2019-03-27 08:38:04 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:04 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1
+2019-03-27 08:38:04 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 on 10.42.12.39: rv 0
+2019-03-27 08:38:04 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73
+2019-03-27 08:38:04 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:04 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:05 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:05 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73
+2019-03-27 08:38:05 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:05 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73
+2019-03-27 08:38:05 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 on 10.42.12.39: rv 0
+2019-03-27 08:38:05 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:05 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:05 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:06 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:06 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:06 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:06 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:06 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39: rv 0
+2019-03-27 08:38:06 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8
+2019-03-27 08:38:06 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:06 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:07 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:07 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8
+2019-03-27 08:38:07 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:07 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8
+2019-03-27 08:38:07 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 on 10.42.12.39: rv 0
+2019-03-27 08:38:07 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655
+2019-03-27 08:38:07 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:07 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:08 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:08 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655
+2019-03-27 08:38:08 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:08 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655
+2019-03-27 08:38:08 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 on 10.42.12.39: rv 0
+2019-03-27 08:38:08 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:38:08 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:38:08 INFO repository_image_utils.py:182 Repository Image Path is as follows: release.karbon.repository_image
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:108 Repository Image Module Path: release.karbon.repository_image
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:113 Version List returned is: [Status: available Image: karbon.tar.gz Version: 1.0.0 Flag List: [] Update Library List: []]
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:429 Versions List is: [Status: available Image: karbon.tar.gz Version: 1.0.0 Flag List: [] Update Library List: []]
+2019-03-27 08:38:08 INFO repository_image_utils.py:441 Managed Entity with entity_class: PC and entity_model: Karbon is a pc entity
+2019-03-27 08:38:08 DEBUG cpdb_utils.py:572 [(UUID('c9ee6d12-7141-453f-a345-115e392e27e1'), ), (UUID('d6edff2c-f59f-4754-978b-06b6237796b4'), ), (UUID('758942f1-d42d-4d49-99fc-b73e2f2dca30'), ), (UUID('03a6e4a2-fa0e-4698-b0c0-e142820a2e94'), )]
+2019-03-27 08:38:08 DEBUG cpdb_utils.py:573 location_id of entity: pc:e636212f-3d79-4a4b-8e78-afa94c05cb66
+2019-03-27 08:38:08 INFO repository_image_utils.py:257 Updating the managed entity with versions
+2019-03-27 08:38:08 INFO repository_image_utils.py:269 Updating the available versions table with versions from repository
+2019-03-27 08:38:08 INFO cpdb_utils.py:888 No available version tables exists to delete
+2019-03-27 08:38:08 DEBUG cpdb_utils.py:1201 Saved index 0
+2019-03-27 08:38:08 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:08 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 51d5c46b-c2dc-4f13-9b9b-b2b46fdacd81, object:
+uuid: "51d5c46b-c2dc-4f13-9b9b-b2b46fdacd81"
+entity_uuid: "c9ee6d12-7141-453f-a345-115e392e27e1"
+version: "1.0.0"
+status: "available"
+dependencies: "[]"
+order: 1
+entity_class: "PC"
+
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:382 Managed Entity with entity class: PC CORE CLUSTER and entity model: PC does not support repository image module
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:387 Managed Entity with entity class: PC and entity model: Epsilon supports repository image module
+2019-03-27 08:38:08 INFO repository_image_utils.py:1086 Retrieving module with reference name release.epsilon.repository_image
+2019-03-27 08:38:08 INFO repository_image_utils.py:779 Getting tag list for: release.epsilon.repository_image
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:783 Tag List for release.epsilon.repository_image is []
+2019-03-27 08:38:08 INFO repository_image_utils.py:756 Getting flag list for: release.epsilon.repository_image
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:760 Flag List for release.epsilon.repository_image is []
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:893 Building repository_image_base_url
+2019-03-27 08:38:08 DEBUG configuration.py:158 Found config version: "2.1.5579"
+url: "http://download.nutanix.com/lcm/2.0"
+auto_update_enabled: false
+lcm_standalone_ui_enabled: false
+lcm_pc_enabled: false
+deprecated_software_entities: "Firmware"
+
+2019-03-27 08:38:08 INFO configuration.py:137 Read config: {'url': u'http://download.nutanix.com/lcm/2.0', 'deprecated_software_entities': [u'Firmware'], 'lcm_standalone_ui_enabled': False, 'lcm_pc_enabled': False, 'version': u'2.1.5579', 'auto_update_enabled': False}
+2019-03-27 08:38:08 DEBUG repository_image_utils.py:896 URL present in config is: http://download.nutanix.com/lcm/2.0
+2019-03-27 08:38:08 INFO repository_image_utils.py:900 Repository Image Base URL built by framework: http://download.nutanix.com/lcm/2.0/builds/
+2019-03-27 08:38:08 INFO repository_image_utils.py:84 Repository Image Verification as a feature is disabled or not supported in the current AOS
+2019-03-27 08:38:08 INFO repository_image_utils.py:831 Parameters created for inventory by repository image module:{'public_key_location': '/home/nutanix/cluster/config/lcm/lcm_public.pem', 'flag_list': [], 'base_url': u'http://download.nutanix.com/lcm/2.0/builds/', 'entity_model': u'Epsilon', 'rim_verification_support': False, 'tag_list': [], 'proxy_env': {'JETTY_HOME': '/usr/local/nutanix/jetty', 'MY_SERVICE_NAME': 'genesis', 'ZOO_LOG_DIR': '/home/nutanix/data/logs', 'ZOO_START_LOG_FILE': '/home/nutanix/data/logs/zookeeper.log', 'CATALINA_OUT': '/home/nutanix/data/logs/catalina.out', 'LESSOPEN': '||/usr/bin/lesspipe.sh %s', 'LOGNAME': 'nutanix', 'USER': 'nutanix', 'HOME': '/home/nutanix', 'NUTANIX_BINARY_LOG_DIR': '/home/nutanix/data/binary_logs', 'PATH': '/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin', 'CASSANDRA_HOME': '/usr/local/nutanix/apache-cassandra', 'LANG': 'en_US.UTF-8', 'CATALINA_BASE': '/home/nutanix/prism', 'SHELL': '/bin/bash', 'NUTANIX_START_LOG_FILE': '/home/nutanix/data/logs/startup.log', 'JDK_HOME': '/usr/lib/jvm/jre-1.8.0', 'SHLVL': '2', 'NUTANIX_BASE_DIR': '/usr/local/nutanix', 'HISTSIZE': '1000', 'GEVENT_RESOLVER': 'ares', 'ZOOKEEPER_HOST_PORT_LIST': 'zk1:9876', 'NUTANIX_LOG_DIR': '/home/nutanix/data/logs', 'MY_EXTERNAL_IP': '10.42.12.39', 'LIBVIRT_DEFAULT_URI': 
'qemu+ssh://root@192.168.5.1/system?no_verify=1', 'JAVA_HOME': '/usr/lib/jvm/jre-1.8.0', 'CASSANDRA_INCLUDE': '/home/nutanix/config/cassandra/cassandra.in.sh', 'XDG_RUNTIME_DIR': '/run/user/1000', 'CORE_PATTERN': '|/home/nutanix/serviceability/bin/coredump.py %p %s %e', 'PERL_LWP_SSL_VERIFY_HOSTNAME': '0', 'SSH_KEY': '/home/nutanix/.ssh/id_rsa', 'TOMCAT_HOME': '/usr/local/nutanix/apache-tomcat', 'LC_ALL': 'en_US.UTF-8', 'XDG_SESSION_ID': 'c1', '_': '/home/nutanix/cluster/bin/genesis', 'ZOOCFGDIR': '/home/nutanix/config/zookeeper', 'GLOG_max_log_size': '100', 'CASSANDRA_MAX_HEAP_SIZE': '2560M', 'HOSTNAME': 'ntnx-10-42-12-39-a-pcvm', 'CASSANDRA_CONF': '/home/nutanix/config/cassandra', 'HISTCONTROL': 'ignoredups', 'PWD': '/home/nutanix', 'MAIL': '/var/spool/mail/nutanix'}}
+2019-03-27 08:38:08 INFO repository_image_utils.py:421 Getting Version List for release.epsilon.repository_image repository_image_module
+2019-03-27 08:38:08 INFO catalog_staging_utils.py:105 Staging module release.epsilon.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:38:08 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:38:08 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:08 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging
+2019-03-27 08:38:09 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0
+2019-03-27 08:38:09 DEBUG ssh_client.py:188 Executing mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:09 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 mkdir -p /home/nutanix/tmp/lcm_staging
+2019-03-27 08:38:09 DEBUG ssh_client.py:203 Executed mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0
+2019-03-27 08:38:09 DEBUG cpdb_utils.py:267 Module list is
+2019-03-27 08:38:09 DEBUG cpdb_utils.py:820 Found modules: [{'location': u'8d8e6715-cd27-495f-b7ba-9c5570f34669', 'digest': u'1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f'}, {'location': u'd75633ce-c0ba-40a2-a447-b907a6d9fdc3', 'digest': u'4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70'}, {'location': u'd7cfc33d-2f6d-408e-af8d-f67c9b8ffb67', 'digest': u'48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c'}, {'location': u'60dd8442-1902-4b0a-a16a-1d3248069c6c', 'digest': u'f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e'}, {'location': u'1ff9f0ce-da75-43b7-91d4-f92865b99c5c', 'digest': u'57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9'}]
+2019-03-27 08:38:09 DEBUG lcm_catalog.py:197 Found catalog items: @zFM>U7N, ?HGhj, ꊝaEP'_,
+&(LrdCF, NpO|b/(
+2019-03-27 08:38:09 DEBUG catalog_utils.py:262 Getting file from uuid: "\021\332\320@\217zFM\274>U\3267\211\223N"
+name: "release.epsilon.update_tools"
+annotation: "57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9"
+item_type: kLCM
+version: 0
+opaque: "\n@57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\333\367\214\363X\325D\223\215\346J\370.\372cT"
+ source_list {
+ file_uuid: "e\355\027Z\217GG\223\247y\234\007\002`\237\350"
+ }
+}
+global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\"
+ version: 0
+}
+
+2019-03-27 08:38:09 DEBUG lcm_catalog.py:283 Found file uuids ['65ed175a-8f47-4793-a779-9c0702609fe8']
+2019-03-27 08:38:09 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:10 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg'}
+2019-03-27 08:38:10 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\021\332\320@\217zFM\274>U\3267\211\223N"
+name: "release.epsilon.update_tools"
+annotation: "57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9"
+item_type: kLCM
+version: 0
+opaque: "\n@57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\333\367\214\363X\325D\223\215\346J\370.\372cT"
+ source_list {
+ file_uuid: "e\355\027Z\217GG\223\247y\234\007\002`\237\350"
+ }
+}
+global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8'
+cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg'
+file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:10 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8'
+cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg'
+file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:10 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8'
+cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg'
+file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:11 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8'
+cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg'
+file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:11 DEBUG catalog_utils.py:262 Getting file from uuid: "?\276\262\375\345HGh\271\020j\231\004\312\031\265"
+name: "release.epsilon.update"
+annotation: "1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f"
+item_type: kLCM
+version: 0
+opaque: "\n@1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "%\276:{\n\270I?\235\215\207\351\276\320\3654"
+ source_list {
+ file_uuid: "U\000\237\001\'\303@\016\251\367QEv\243\027q"
+ }
+}
+global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi"
+ version: 0
+}
+
+2019-03-27 08:38:11 DEBUG lcm_catalog.py:283 Found file uuids ['55009f01-27c3-400e-a9f7-514576a31771']
+2019-03-27 08:38:11 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:11 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg'}
+2019-03-27 08:38:11 DEBUG catalog_utils.py:278 Getting catalog item uuid: "?\276\262\375\345HGh\271\020j\231\004\312\031\265"
+name: "release.epsilon.update"
+annotation: "1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f"
+item_type: kLCM
+version: 0
+opaque: "\n@1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "%\276:{\n\270I?\235\215\207\351\276\320\3654"
+ source_list {
+ file_uuid: "U\000\237\001\'\303@\016\251\367QEv\243\027q"
+ }
+}
+global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg'
+file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:11 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg'
+file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:11 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg'
+file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:12 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg'
+file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:12 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275"
+name: "nutanix.tools"
+annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c"
+item_type: kLCM
+version: 0
+opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t"
+ source_list {
+ file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+"
+ }
+}
+global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+ version: 0
+}
+
+2019-03-27 08:38:12 DEBUG lcm_catalog.py:283 Found file uuids ['48310501-3d7a-4be1-a3ed-9d990e39222b']
+2019-03-27 08:38:12 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:13 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ'}
+2019-03-27 08:38:13 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275"
+name: "nutanix.tools"
+annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c"
+item_type: kLCM
+version: 0
+opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t"
+ source_list {
+ file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+"
+ }
+}
+global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:13 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:13 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:14 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:14 DEBUG catalog_utils.py:262 Getting file from uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F"
+name: "release.linux.tools"
+annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e"
+item_type: kLCM
+version: 0
+opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "wl\207P\271OK8\263\327\214ox\207[\236"
+ source_list {
+ file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214"
+ }
+}
+global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+ version: 0
+}
+
+2019-03-27 08:38:14 DEBUG lcm_catalog.py:283 Found file uuids ['9fa74857-28e6-4f6e-9c6f-fa4aa379a38c']
+2019-03-27 08:38:14 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:15 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw'}
+2019-03-27 08:38:15 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F"
+name: "release.linux.tools"
+annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e"
+item_type: kLCM
+version: 0
+opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "wl\207P\271OK8\263\327\214ox\207[\236"
+ source_list {
+ file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214"
+ }
+}
+global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:15 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:15 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:16 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:16 DEBUG catalog_utils.py:262 Getting file from uuid: "\342\341N\300p\177O\270\250\264|b\035/\001("
+name: "release.epsilon.repository_image"
+annotation: "4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70"
+item_type: kLCM
+version: 0
+opaque: "\n@4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "1\t5\240\177\361A\243\226\364\274\325b\005\233F"
+ source_list {
+ file_uuid: "\354\263\026\353|\325E`\214.\001\343\212E2\330"
+ }
+}
+global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303"
+ version: 0
+}
+
+2019-03-27 08:38:16 DEBUG lcm_catalog.py:283 Found file uuids ['ecb316eb-7cd5-4560-8c2e-01e38a4532d8']
+2019-03-27 08:38:16 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:16 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA'}
+2019-03-27 08:38:16 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\342\341N\300p\177O\270\250\264|b\035/\001("
+name: "release.epsilon.repository_image"
+annotation: "4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70"
+item_type: kLCM
+version: 0
+opaque: "\n@4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "1\t5\240\177\361A\243\226\364\274\325b\005\233F"
+ source_list {
+ file_uuid: "\354\263\026\353|\325E`\214.\001\343\212E2\330"
+ }
+}
+global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA'
+file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:16 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA'
+file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:16 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA'
+file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:17 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA'
+file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:17 DEBUG ssh_client.py:188 Executing ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:17 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 ls "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:18 DEBUG ssh_client.py:203 Executed ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:18 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771', '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8']
+2019-03-27 08:38:18 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:18 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:18 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:18 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:18 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:18 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:18 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:19 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39: rv 0
+2019-03-27 08:38:19 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771
+2019-03-27 08:38:19 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:19 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:19 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:19 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771
+2019-03-27 08:38:19 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:19 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771
+2019-03-27 08:38:20 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 on 10.42.12.39: rv 0
+2019-03-27 08:38:20 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8
+2019-03-27 08:38:20 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:20 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:20 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:20 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8
+2019-03-27 08:38:20 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:20 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8
+2019-03-27 08:38:21 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 on 10.42.12.39: rv 0
+2019-03-27 08:38:21 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:21 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:21 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:21 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:21 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:21 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:21 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:22 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39: rv 0
+2019-03-27 08:38:22 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8
+2019-03-27 08:38:22 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:22 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:22 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:22 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8
+2019-03-27 08:38:22 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:22 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8
+2019-03-27 08:38:23 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 on 10.42.12.39: rv 0
+2019-03-27 08:38:23 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:38:23 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:38:23 INFO repository_image_utils.py:182 Repository Image Path is as follows: release.epsilon.repository_image
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:108 Repository Image Module Path: release.epsilon.repository_image
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:113 Version List returned is: [Status: available Image: epsilon.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.4.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []]
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:429 Versions List is: [Status: available Image: epsilon.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.4.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []]
+2019-03-27 08:38:23 INFO repository_image_utils.py:441 Managed Entity with entity_class: PC and entity_model: Epsilon is a pc entity
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:572 [(UUID('c9ee6d12-7141-453f-a345-115e392e27e1'), ), (UUID('d6edff2c-f59f-4754-978b-06b6237796b4'), ), (UUID('758942f1-d42d-4d49-99fc-b73e2f2dca30'), ), (UUID('03a6e4a2-fa0e-4698-b0c0-e142820a2e94'), )]
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:573 location_id of entity: pc:e636212f-3d79-4a4b-8e78-afa94c05cb66
+2019-03-27 08:38:23 INFO repository_image_utils.py:257 Updating the managed entity with versions
+2019-03-27 08:38:23 INFO repository_image_utils.py:269 Updating the available versions table with versions from repository
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1201 Saved index 4
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID a9c18b96-3655-42f4-ac91-f91f53261c5c, object:
+uuid: "a9c18b96-3655-42f4-ac91-f91f53261c5c"
+entity_uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30"
+version: "2.6.0.1"
+status: "available"
+dependencies: "[]"
+order: 1
+entity_class: "PC"
+
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 851481ff-7807-4a94-be60-22b6c97aeac2, object:
+uuid: "851481ff-7807-4a94-be60-22b6c97aeac2"
+entity_uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30"
+version: "2.6.0.2"
+status: "available"
+dependencies: "[]"
+order: 2
+entity_class: "PC"
+
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 61337a17-132c-45ba-8fe0-f441ad922dd9, object:
+uuid: "61337a17-132c-45ba-8fe0-f441ad922dd9"
+entity_uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30"
+version: "2.6.0.3"
+status: "available"
+dependencies: "[]"
+order: 3
+entity_class: "PC"
+
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:387 Managed Entity with entity class: PC and entity model: Calm supports repository image module
+2019-03-27 08:38:23 INFO repository_image_utils.py:1086 Retrieving module with reference name release.calm.repository_image
+2019-03-27 08:38:23 INFO repository_image_utils.py:779 Getting tag list for: release.calm.repository_image
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:783 Tag List for release.calm.repository_image is []
+2019-03-27 08:38:23 INFO repository_image_utils.py:756 Getting flag list for: release.calm.repository_image
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:760 Flag List for release.calm.repository_image is []
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:893 Building repository_image_base_url
+2019-03-27 08:38:23 DEBUG configuration.py:158 Found config version: "2.1.5579"
+url: "http://download.nutanix.com/lcm/2.0"
+auto_update_enabled: false
+lcm_standalone_ui_enabled: false
+lcm_pc_enabled: false
+deprecated_software_entities: "Firmware"
+
+2019-03-27 08:38:23 INFO configuration.py:137 Read config: {'url': u'http://download.nutanix.com/lcm/2.0', 'deprecated_software_entities': [u'Firmware'], 'lcm_standalone_ui_enabled': False, 'lcm_pc_enabled': False, 'version': u'2.1.5579', 'auto_update_enabled': False}
+2019-03-27 08:38:23 DEBUG repository_image_utils.py:896 URL present in config is: http://download.nutanix.com/lcm/2.0
+2019-03-27 08:38:23 INFO repository_image_utils.py:900 Repository Image Base URL built by framework: http://download.nutanix.com/lcm/2.0/builds/
+2019-03-27 08:38:23 INFO repository_image_utils.py:84 Repository Image Verification as a feature is disabled or not supported in the current AOS
+2019-03-27 08:38:23 INFO repository_image_utils.py:831 Parameters created for inventory by repository image module:{'public_key_location': '/home/nutanix/cluster/config/lcm/lcm_public.pem', 'flag_list': [], 'base_url': u'http://download.nutanix.com/lcm/2.0/builds/', 'entity_model': u'Calm', 'rim_verification_support': False, 'tag_list': [], 'proxy_env': {'JETTY_HOME': '/usr/local/nutanix/jetty', 'MY_SERVICE_NAME': 'genesis', 'ZOO_LOG_DIR': '/home/nutanix/data/logs', 'ZOO_START_LOG_FILE': '/home/nutanix/data/logs/zookeeper.log', 'CATALINA_OUT': '/home/nutanix/data/logs/catalina.out', 'LESSOPEN': '||/usr/bin/lesspipe.sh %s', 'LOGNAME': 'nutanix', 'USER': 'nutanix', 'HOME': '/home/nutanix', 'NUTANIX_BINARY_LOG_DIR': '/home/nutanix/data/binary_logs', 'PATH': '/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin', 'CASSANDRA_HOME': '/usr/local/nutanix/apache-cassandra', 'LANG': 'en_US.UTF-8', 'CATALINA_BASE': '/home/nutanix/prism', 'SHELL': '/bin/bash', 'NUTANIX_START_LOG_FILE': '/home/nutanix/data/logs/startup.log', 'JDK_HOME': '/usr/lib/jvm/jre-1.8.0', 'SHLVL': '2', 'NUTANIX_BASE_DIR': '/usr/local/nutanix', 'HISTSIZE': '1000', 'GEVENT_RESOLVER': 'ares', 'ZOOKEEPER_HOST_PORT_LIST': 'zk1:9876', 'NUTANIX_LOG_DIR': '/home/nutanix/data/logs', 'MY_EXTERNAL_IP': '10.42.12.39', 'LIBVIRT_DEFAULT_URI': 
'qemu+ssh://root@192.168.5.1/system?no_verify=1', 'JAVA_HOME': '/usr/lib/jvm/jre-1.8.0', 'CASSANDRA_INCLUDE': '/home/nutanix/config/cassandra/cassandra.in.sh', 'XDG_RUNTIME_DIR': '/run/user/1000', 'CORE_PATTERN': '|/home/nutanix/serviceability/bin/coredump.py %p %s %e', 'PERL_LWP_SSL_VERIFY_HOSTNAME': '0', 'SSH_KEY': '/home/nutanix/.ssh/id_rsa', 'TOMCAT_HOME': '/usr/local/nutanix/apache-tomcat', 'LC_ALL': 'en_US.UTF-8', 'XDG_SESSION_ID': 'c1', '_': '/home/nutanix/cluster/bin/genesis', 'ZOOCFGDIR': '/home/nutanix/config/zookeeper', 'GLOG_max_log_size': '100', 'CASSANDRA_MAX_HEAP_SIZE': '2560M', 'HOSTNAME': 'ntnx-10-42-12-39-a-pcvm', 'CASSANDRA_CONF': '/home/nutanix/config/cassandra', 'HISTCONTROL': 'ignoredups', 'PWD': '/home/nutanix', 'MAIL': '/var/spool/mail/nutanix'}}
+2019-03-27 08:38:23 INFO repository_image_utils.py:421 Getting Version List for release.calm.repository_image repository_image_module
+2019-03-27 08:38:23 INFO catalog_staging_utils.py:105 Staging module release.calm.update and dependancies to cvm 10.42.12.39
+2019-03-27 08:38:23 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging
+2019-03-27 08:38:23 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:23 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging
+2019-03-27 08:38:23 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0
+2019-03-27 08:38:23 DEBUG ssh_client.py:188 Executing mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:23 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 mkdir -p /home/nutanix/tmp/lcm_staging
+2019-03-27 08:38:24 DEBUG ssh_client.py:203 Executed mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0
+2019-03-27 08:38:24 DEBUG cpdb_utils.py:267 Module list is
+2019-03-27 08:38:24 DEBUG cpdb_utils.py:820 Found modules: [{'location': u'1ed8d805-f2d8-4ad8-8f4c-d41623b986d3', 'digest': u'6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5'}, {'location': u'd7cfc33d-2f6d-408e-af8d-f67c9b8ffb67', 'digest': u'48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c'}, {'location': u'b12cfd60-e347-4261-8f33-9a54c6cd0fdd', 'digest': u'c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba'}, {'location': u'60dd8442-1902-4b0a-a16a-1d3248069c6c', 'digest': u'f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e'}, {'location': u'fc61a6d3-ca50-4f6e-a966-cb87a2c12339', 'digest': u'c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d'}]
+2019-03-27 08:38:24 DEBUG lcm_catalog.py:197 Found catalog items: ꊝaEP'_, FUaKZy4<, Rx=K8]x, V~HϩV ,
+&(LrdCF
+2019-03-27 08:38:24 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275"
+name: "nutanix.tools"
+annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c"
+item_type: kLCM
+version: 0
+opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t"
+ source_list {
+ file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+"
+ }
+}
+global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+ version: 0
+}
+
+2019-03-27 08:38:24 DEBUG lcm_catalog.py:283 Found file uuids ['48310501-3d7a-4be1-a3ed-9d990e39222b']
+2019-03-27 08:38:24 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:24 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ'}
+2019-03-27 08:38:24 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275"
+name: "nutanix.tools"
+annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c"
+item_type: kLCM
+version: 0
+opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t"
+ source_list {
+ file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+"
+ }
+}
+global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:24 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:24 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:26 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b'
+cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ'
+file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:26 DEBUG catalog_utils.py:262 Getting file from uuid: "\357FU\361\032aK\225\231\251\027Zy4<\177"
+name: "release.calm.update_tools"
+annotation: "c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba"
+item_type: kLCM
+version: 0
+opaque: "\n@c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\230U\203\350\213\254HI\236*d\351\240u\020s"
+ source_list {
+ file_uuid: "\n\347^\264\230RE\314\256\013\257B\345\362\347\254"
+ }
+}
+global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335"
+ version: 0
+}
+
+2019-03-27 08:38:26 DEBUG lcm_catalog.py:283 Found file uuids ['0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac']
+2019-03-27 08:38:26 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:26 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw'}
+2019-03-27 08:38:26 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\357FU\361\032aK\225\231\251\027Zy4<\177"
+name: "release.calm.update_tools"
+annotation: "c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba"
+item_type: kLCM
+version: 0
+opaque: "\n@c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\230U\203\350\213\254HI\236*d\351\240u\020s"
+ source_list {
+ file_uuid: "\n\347^\264\230RE\314\256\013\257B\345\362\347\254"
+ }
+}
+global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw'
+file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:26 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw'
+file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:26 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw'
+file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:27 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw'
+file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:27 DEBUG catalog_utils.py:262 Getting file from uuid: "Rx=\364\010\366K8\207\014\232]\376x\271\024"
+name: "release.calm.update"
+annotation: "6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5"
+item_type: kLCM
+version: 0
+opaque: "\n@6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "j\301\335\225\005\010I\351\247@\331\334j\327\3376"
+ source_list {
+ file_uuid: "\035L\215\207|\276B\243\225\004P\243\0273(\374"
+ }
+}
+global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323"
+ version: 0
+}
+
+2019-03-27 08:38:27 DEBUG lcm_catalog.py:283 Found file uuids ['1d4c8d87-7cbe-42a3-9504-50a3173328fc']
+2019-03-27 08:38:27 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:28 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg'}
+2019-03-27 08:38:28 DEBUG catalog_utils.py:278 Getting catalog item uuid: "Rx=\364\010\366K8\207\014\232]\376x\271\024"
+name: "release.calm.update"
+annotation: "6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5"
+item_type: kLCM
+version: 0
+opaque: "\n@6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "j\301\335\225\005\010I\351\247@\331\334j\327\3376"
+ source_list {
+ file_uuid: "\035L\215\207|\276B\243\225\004P\243\0273(\374"
+ }
+}
+global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg'
+file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:28 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg'
+file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:28 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg'
+file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:29 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg'
+file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:29 DEBUG catalog_utils.py:262 Getting file from uuid: "V\027\014~\333\322H\317\251\330\032\265V\375\t\210"
+name: "release.calm.repository_image"
+annotation: "c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d"
+item_type: kLCM
+version: 0
+opaque: "\n@c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\244U\230\024`xJe\272\325\227\367l\004\362\236"
+ source_list {
+ file_uuid: "\025\343\303\326\206\035L\026\254M\375TDd\230g"
+ }
+}
+global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9"
+ version: 0
+}
+
+2019-03-27 08:38:29 DEBUG lcm_catalog.py:283 Found file uuids ['15e3c3d6-861d-4c16-ac4d-fd5444649867']
+2019-03-27 08:38:29 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:29 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow'}
+2019-03-27 08:38:29 DEBUG catalog_utils.py:278 Getting catalog item uuid: "V\027\014~\333\322H\317\251\330\032\265V\375\t\210"
+name: "release.calm.repository_image"
+annotation: "c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d"
+item_type: kLCM
+version: 0
+opaque: "\n@c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "\244U\230\024`xJe\272\325\227\367l\004\362\236"
+ source_list {
+ file_uuid: "\025\343\303\326\206\035L\026\254M\375TDd\230g"
+ }
+}
+global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow'
+file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:29 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow'
+file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:29 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow'
+file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:30 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow'
+file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:30 DEBUG catalog_utils.py:262 Getting file from uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F"
+name: "release.linux.tools"
+annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e"
+item_type: kLCM
+version: 0
+opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "wl\207P\271OK8\263\327\214ox\207[\236"
+ source_list {
+ file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214"
+ }
+}
+global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+ version: 0
+}
+
+2019-03-27 08:38:30 DEBUG lcm_catalog.py:283 Found file uuids ['9fa74857-28e6-4f6e-9c6f-fa4aa379a38c']
+2019-03-27 08:38:30 DEBUG catalog_utils.py:675 Prism is up and running
+2019-03-27 08:38:31 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA'}
+2019-03-27 08:38:31 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F"
+name: "release.linux.tools"
+annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e"
+item_type: kLCM
+version: 0
+opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f"
+source_group_list {
+ uuid: "wl\207P\271OK8\263\327\214ox\207[\236"
+ source_list {
+ file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214"
+ }
+}
+global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+location_list {
+ cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267"
+}
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f"
+source_catalog_item_id {
+ global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l"
+ version: 0
+}
+ with "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:31 DEBUG ssh_client.py:188 Executing "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:31 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" "
+2019-03-27 08:38:32 DEBUG ssh_client.py:203 Executed "python -c \"try:
+ from urllib import request
+except:
+ import urllib2 as request
+import ssl
+
+# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning.
+try:
+ import urllib3
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+except:
+ pass
+
+url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA'
+file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'
+
+download_out = '~/download.out'
+
+print('Performing the fetch request')
+req = request.Request(url, headers={'Cookie': cookie})
+try:
+ resp = request.urlopen(req, context=ssl._create_unverified_context())
+except (TypeError, AttributeError):
+ resp = request.urlopen(req)
+
+buf_size = 2**20
+print('Writing the catalog item to a file')
+
+with open(file_name, 'wb') as writer:
+ while True:
+ data = resp.read(buf_size)
+ if not data:
+ break
+ writer.write(data)\" " on 10.42.12.39: rv 0
+2019-03-27 08:38:32 DEBUG ssh_client.py:188 Executing ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:32 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 ls "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:32 DEBUG ssh_client.py:203 Executed ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:32 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac', '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867', '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc', '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c']
+2019-03-27 08:38:32 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac
+2019-03-27 08:38:32 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:32 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:33 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:33 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac
+2019-03-27 08:38:33 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:33 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac
+2019-03-27 08:38:33 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac on 10.42.12.39: rv 0
+2019-03-27 08:38:33 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867
+2019-03-27 08:38:33 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:33 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:34 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:34 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867
+2019-03-27 08:38:34 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:34 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867
+2019-03-27 08:38:34 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 on 10.42.12.39: rv 0
+2019-03-27 08:38:34 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc
+2019-03-27 08:38:34 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:34 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:35 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:35 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc
+2019-03-27 08:38:35 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:35 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc
+2019-03-27 08:38:35 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc on 10.42.12.39: rv 0
+2019-03-27 08:38:35 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:35 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:35 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:36 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:36 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:36 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:36 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b
+2019-03-27 08:38:36 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39: rv 0
+2019-03-27 08:38:36 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:36 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:36 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging"
+2019-03-27 08:38:37 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0
+2019-03-27 08:38:37 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:37 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39
+2019-03-27 08:38:37 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c
+2019-03-27 08:38:37 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39: rv 0
+2019-03-27 08:38:37 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None'
+2019-03-27 08:38:37 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39
+2019-03-27 08:38:37 INFO repository_image_utils.py:182 Repository Image Path is as follows: release.calm.repository_image
+2019-03-27 08:38:37 DEBUG repository_image_utils.py:108 Repository Image Module Path: release.calm.repository_image
+2019-03-27 08:38:37 DEBUG repository_image_utils.py:113 Version List returned is: [Status: available Image: nucalm.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []]
+2019-03-27 08:38:37 DEBUG repository_image_utils.py:429 Versions List is: [Status: available Image: nucalm.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []]
+2019-03-27 08:38:37 INFO repository_image_utils.py:441 Managed Entity with entity_class: PC and entity_model: Calm is a pc entity
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:572 [(UUID('c9ee6d12-7141-453f-a345-115e392e27e1'), ), (UUID('d6edff2c-f59f-4754-978b-06b6237796b4'), ), (UUID('758942f1-d42d-4d49-99fc-b73e2f2dca30'), ), (UUID('03a6e4a2-fa0e-4698-b0c0-e142820a2e94'), )]
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:573 location_id of entity: pc:e636212f-3d79-4a4b-8e78-afa94c05cb66
+2019-03-27 08:38:37 INFO repository_image_utils.py:257 Updating the managed entity with versions
+2019-03-27 08:38:37 INFO repository_image_utils.py:269 Updating the available versions table with versions from repository
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1201 Saved index 1
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 36ef8785-241f-43eb-85db-b733a1a3d013, object:
+uuid: "36ef8785-241f-43eb-85db-b733a1a3d013"
+entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+version: "2.5.0"
+status: "available"
+dependencies: "[]"
+order: 1
+entity_class: "PC"
+
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 990eb0aa-e595-4e64-ac09-7602dc98e8fa, object:
+uuid: "990eb0aa-e595-4e64-ac09-7602dc98e8fa"
+entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+version: "2.5.0.1"
+status: "available"
+dependencies: "[]"
+order: 2
+entity_class: "PC"
+
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 476e0eb7-e40e-4886-84e4-a6988edfc79b, object:
+uuid: "476e0eb7-e40e-4886-84e4-a6988edfc79b"
+entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+version: "2.6.0"
+status: "available"
+dependencies: "[]"
+order: 3
+entity_class: "PC"
+
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 4b20d8e4-1d0c-4203-a3b6-fe30264f3115, object:
+uuid: "4b20d8e4-1d0c-4203-a3b6-fe30264f3115"
+entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+version: "2.6.0.1"
+status: "available"
+dependencies: "[]"
+order: 4
+entity_class: "PC"
+
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID e53f2a96-eb2b-4a05-83d5-2a70e95f6b4f, object:
+uuid: "e53f2a96-eb2b-4a05-83d5-2a70e95f6b4f"
+entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+version: "2.6.0.2"
+status: "available"
+dependencies: "[]"
+order: 5
+entity_class: "PC"
+
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: []
+2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID e6806517-0eb8-4d7d-b722-5764c532d3db, object:
+uuid: "e6806517-0eb8-4d7d-b722-5764c532d3db"
+entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94"
+version: "2.6.0.3"
+status: "available"
+dependencies: "[]"
+order: 6
+entity_class: "PC"
+
+2019-03-27 08:38:37 INFO repository_image_utils.py:479 Repository Image Inventory Done
+2019-03-27 08:38:37 INFO lcm_ops_for_rim:155 LCM operation 101 is successful
+DEBUG: Karbon rim url: http://download.nutanix.com/lcm/builds/karbon-builds/repository_metadata/metadata.json
diff --git a/test/LCM_Test/lcm_run.sh b/test/LCM_Test/lcm_run.sh
new file mode 100644
index 0000000..4a4494d
--- /dev/null
+++ b/test/LCM_Test/lcm_run.sh
@@ -0,0 +1,142 @@
+#!/bin/bash
+CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure --write-out %{http_code}' # --output /dev/null --silent --show-error
+PRISM_ADMIN='admin'
+PE_PASSWORD='techX2019!'
+_url_lcm='https://localhost:9440/PrismGateway/services/rest/v1/genesis'
+_url_progress='https://localhost:9440/PrismGateway/services/rest/v1/progress_monitors'
+_url_groups='https://localhost:9440/api/nutanix/v3/groups'
+
+###############################################################################################################################################################################
+# Routine to be run/loop till yes we are ok.
+###############################################################################################################################################################################
+#######################################
+# Poll the progress-monitor API until the monitored task reaches 100%, or give
+# up after _attempts polls.
+# Globals:   CURL_HTTP_OPTS, PRISM_ADMIN, PE_PASSWORD, _url_progress (read)
+#            _task_id (read; overwritten when $1 is supplied)
+# Arguments: $1 - (optional) parent task uuid to monitor; falls back to the
+#            global _task_id when omitted.
+# Returns:   0 when the task completed, 22 when we gave up waiting.
+#######################################
+function loop(){
+
+  local _attempts=40
+  local _error=22
+  local _loops=0
+  local _sleep=30
+
+  if [ -z "$1" ]; then
+    echo "No parameter"
+  else
+    _task_id=$1
+  fi
+
+  # What is the progress of the taskid??
+  while true; do
+    (( _loops++ ))
+    # Get the progress of the task; strip the surrounding quotes from jq output.
+    # BUG FIX: '2>nul' is a Windows-ism that creates a file literally named
+    # 'nul' — send stderr to /dev/null instead.
+    _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}?filterCriteria=parent_task_uuid%3D%3D${_task_id} | jq '.entities[0].percentageCompleted' 2>/dev/null | tr -d \")
+    # Default to 0 so the arithmetic test cannot fail on an empty curl reply.
+    _progress=${_progress:-0}
+    if (( ${_progress} == 100 )); then
+      echo "The step has been successfully run"
+      # BUG FIX: was 'set _error=0', which replaces the positional parameters
+      # and never assigns the variable.
+      _error=0
+      break;
+    elif (( ${_loops} > ${_attempts} )); then
+      # BUG FIX: was '${_loop}' (undefined variable).
+      echo "Warning ${_error} @${1}: Giving up after ${_loops} tries."
+      return ${_error}
+    else
+      echo "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds"
+      sleep ${_sleep}
+    fi
+  done
+  return ${_error}
+}
+
+# NOTE(review): this function appears truncated/corrupted in the committed
+# file — the command below reads from a file literally named 'nul' and there is
+# no closing brace before the top-level code that follows; presumably a heredoc
+# carrying the enable-Calm JSON payload was lost when this was pasted. Also
+# 'log' is not defined anywhere in this script. Reconstruct from
+# scripts/lib.pc.sh before relying on this function.
+function calm_enable() {
+  local _http_body
+  local _test
+
+  log "Enable Nutanix Calm..."
+  _http_body=$(cat <nul | cut -d "\\" -f 4 | tr -d \")
+
+# ---------------------------------------------------------------------------
+# Main flow: wait for the LCM inventory task, extract the newest entity uuids
+# and versions from lcm_ops.out, generate an upgrade plan, then run the upgrade
+# and wait for it. Relies on _task_id being set by the (currently corrupted)
+# inventory kick-off above.
+# ---------------------------------------------------------------------------
+# If there has been a reply (task_id) then the URL has accepted by PC
+if [ -z "$_task_id" ]; then
+  echo "LCM Inventory start has encountered an error..."
+else
+  echo "LCM Inventory started.."
+  # BUG FIX: was 'set _loops=0', which replaces the positional parameters and
+  # never assigns the variable.
+  _loops=0 # Reset the loop counter
+
+  # Run the progress checker, passing the inventory task uuid explicitly
+  loop "${_task_id}"
+
+  # We need to get the UUIDs and the versions to be used.. so we can start the update. They are in the /home/nutanix/data/logs/lcm_ops.out AFTER an inventory run!
+  _full_uuids=$(grep -A 1 entity_uuid /home/nutanix/data/logs/lcm_ops.out | grep -B 1 "2.6.0.3")
+  # As we need to have the latest version from the LCM we need to reverse the string so we get the last (rev) version
+  _first_uuid=$(echo $_full_uuids |rev|cut -d":" -f 4 |rev | cut -d "\"" -f2)
+  _first_version="2.6.0.3"
+  # BUG FIX: the first 'rev' was echoed as a literal word instead of being a
+  # pipeline stage ('echo $_full_uuids rev|rev').
+  _sec_uuid=$(echo $_full_uuids |rev| cut -d":" -f 2 |rev | cut -d "\"" -f2)
+  _sec_version=$_first_version
+
+  #echo "This values have been found:" $_first_uuid" and " $_first_version " and " $_sec_uuid " and " $_sec_version
+
+  # Set the parameter to create the ugrade plan
+  # Create the curl json string -d xyz
+  _json_data="-d "
+  _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\""
+  _json_data+=$_first_uuid
+  _json_data+="\\\",\\\""
+  _json_data+=$_first_version
+  _json_data+="\\\"],[\\\""
+  _json_data+=$_sec_uuid
+  _json_data+="\\\",\\\""
+  _json_data+=$_sec_version
+  _json_data+="\\\"]]]}}\"}"
+
+
+  # Run the generate plan task
+  _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm})
+
+  # Notify the log server that the LCM has been creating a plan
+  echo "LCM Inventory has created a plan"
+  # BUG FIX: was 'set _loops=0' (see above).
+  _loops=0 # Reset the loop counter
+
+  # Create new json data string
+  _json_data="-d "
+  _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"perform_update\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\""
+  _json_data+=$_first_uuid
+  _json_data+="\\\",\\\""
+  _json_data+=$_first_version
+  _json_data+="\\\"],[\\\""
+  _json_data+=$_sec_uuid
+  _json_data+="\\\",\\\""
+  _json_data+=$_sec_version
+  _json_data+="\\\"]]]}}\"}"
+
+
+  # Run the upgrade to have the latest versions
+  # BUG FIX: '2>nul' -> '2>/dev/null' (Windows-ism created a file named 'nul').
+  _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm} | jq '.value' 2>/dev/null | cut -d "\\" -f 4 | tr -d \")
+
+  # If there has been a reply task_id then the URL has accepted by PC
+  if [ -z "$_task_id" ]; then
+    # There has been an error!!!
+    echo "LCM Upgrade has encountered an error!!!!"
+  else
+    # Notify the logserver that we are starting the LCM Upgrade
+    echo "LCM Upgrade starting..."
+
+    # Run the progress checker on the upgrade task
+    loop "${_task_id}"
+  fi
+fi
\ No newline at end of file
diff --git a/test/LCM_Test/lcm_update.sh b/test/LCM_Test/lcm_update.sh
new file mode 100644
index 0000000..dc76458
--- /dev/null
+++ b/test/LCM_Test/lcm_update.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+#################################################################
+# Grab the json from the possible to be updated UUIDs and versions and save local in reply_json.json
+#################################################################
+_url_groups='https://10.42.8.39:9440/api/nutanix/v3/groups'
+CURL_HTTP_OPTS=' --silent --max-time 25 --header Content-Type:application/json --header Accept:application/json --insecure '
+PRISM_ADMIN="admin"
+PE_PASSWORD="techX2019!"
+
+# Run the Curl command and save the output in a temp file
+curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json
+
+# Fill the uuid array with the correct values
+my_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " "))
+
+# Grabbing the versions of the UUID and put them in a versions array
+for uuid in "${my_arr[@]}"
+do
+  # BUG FIX: read from reply_json.json (the file fetched above) instead of the
+  # stale checked-in sample reply-inventory.json, so the versions actually
+  # correspond to the uuids just queried.
+  version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1))
+done
+
+# Combine the two values to the full needed data
+count=0
+while [ $count -lt ${#my_arr[@]} ]
+do
+  echo "$count: UUID is ${my_arr[$count]} and the version is ${version_ar[$count]}"
+  count=$((count+1))
+done
diff --git a/test/LCM_Test/reply-inventory.json b/test/LCM_Test/reply-inventory.json
new file mode 100644
index 0000000..1a5ee6c
--- /dev/null
+++ b/test/LCM_Test/reply-inventory.json
@@ -0,0 +1,495 @@
+{
+ "entity_type": "lcm_available_version",
+ "filtered_group_count": 3,
+ "total_entity_count": 5,
+ "filtered_entity_count": 5,
+ "group_results": [
+ {
+ "entity_results": [
+ {
+ "entity_id": "1d18b99f-12e8-42f5-a4b5-b41dcc803ef3",
+ "data": [
+ {
+ "values": [
+ {
+ "values": [
+ "1d18b99f-12e8-42f5-a4b5-b41dcc803ef3"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "41520d2e-76ab-4cbc-88ca-42bb484ba69f"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "entity_uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "PC"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "entity_class"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "available"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "status"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1.0.0"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "version"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "[]"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "dependencies"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "order"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1554372384133118"
+ ],
+ "time": 1554372384133118
+ }
+ ],
+ "name": "_created_timestamp_usecs_"
+ }
+ ]
+ }
+ ],
+ "group_by_column_value": "41520d2e-76ab-4cbc-88ca-42bb484ba69f",
+ "total_entity_count": 1,
+ "group_summaries": {}
+ },
+ {
+ "entity_results": [
+ {
+ "entity_id": "bcab72a9-d2f9-4327-a882-da4d0675eeea",
+ "data": [
+ {
+ "values": [
+ {
+ "values": [
+ "bcab72a9-d2f9-4327-a882-da4d0675eeea"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "7dbb76af-09d5-4b83-9cdb-8a3c0a63f23e"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "entity_uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "PC"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "entity_class"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "available"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "status"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "2.6.0.3"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "version"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "[]"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "dependencies"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "order"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1554372397678814"
+ ],
+ "time": 1554372397678814
+ }
+ ],
+ "name": "_created_timestamp_usecs_"
+ }
+ ]
+ },
+ {
+ "entity_id": "8e3b6079-d474-4e3a-bae5-558a2c0d5bb9",
+ "data": [
+ {
+ "values": [
+ {
+ "values": [
+ "8e3b6079-d474-4e3a-bae5-558a2c0d5bb9"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "7dbb76af-09d5-4b83-9cdb-8a3c0a63f23e"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "entity_uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "PC"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "entity_class"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "available"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "status"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "2.6.0.4"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "version"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "[]"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "dependencies"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "2"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "order"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1554372397685975"
+ ],
+ "time": 1554372397685975
+ }
+ ],
+ "name": "_created_timestamp_usecs_"
+ }
+ ]
+ }
+ ],
+ "group_by_column_value": "7dbb76af-09d5-4b83-9cdb-8a3c0a63f23e",
+ "total_entity_count": 2,
+ "group_summaries": {}
+ },
+ {
+ "entity_results": [
+ {
+ "entity_id": "18aa787f-4694-487f-8278-e95eb78da3be",
+ "data": [
+ {
+ "values": [
+ {
+ "values": [
+ "18aa787f-4694-487f-8278-e95eb78da3be"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "acd9117e-d0c7-4d22-a2ac-7468dfdf07ec"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "entity_uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "PC"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "entity_class"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "available"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "status"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "2.6.0.3"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "version"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "[{\"entity_class\": \"PC\", \"version\": \"2.6.0.3\", \"exact\": \"true\", \"entity_model\": \"Epsilon\"}]"
+ ],
+ "time": 1554372414999630
+ }
+ ],
+ "name": "dependencies"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "order"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1554372411718254"
+ ],
+ "time": 1554372411718254
+ }
+ ],
+ "name": "_created_timestamp_usecs_"
+ }
+ ]
+ },
+ {
+ "entity_id": "00a06ebf-0ba1-403c-ac0f-8ae3d5a8c341",
+ "data": [
+ {
+ "values": [
+ {
+ "values": [
+ "00a06ebf-0ba1-403c-ac0f-8ae3d5a8c341"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "acd9117e-d0c7-4d22-a2ac-7468dfdf07ec"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "entity_uuid"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "PC"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "entity_class"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "available"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "status"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "2.6.0.4"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "version"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "[{\"entity_class\": \"PC\", \"version\": \"2.6.0.4\", \"exact\": \"true\", \"entity_model\": \"Epsilon\"}]"
+ ],
+ "time": 1554372415349026
+ }
+ ],
+ "name": "dependencies"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "2"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "order"
+ },
+ {
+ "values": [
+ {
+ "values": [
+ "1554372411724529"
+ ],
+ "time": 1554372411724529
+ }
+ ],
+ "name": "_created_timestamp_usecs_"
+ }
+ ]
+ }
+ ],
+ "group_by_column_value": "acd9117e-d0c7-4d22-a2ac-7468dfdf07ec",
+ "total_entity_count": 2,
+ "group_summaries": {}
+ }
+ ],
+ "total_group_count": 3
+}
\ No newline at end of file
diff --git a/test/LCM_Test/reply.json b/test/LCM_Test/reply.json
new file mode 100644
index 0000000..9a53f6d
--- /dev/null
+++ b/test/LCM_Test/reply.json
@@ -0,0 +1,3 @@
+{
+ "service_enablement_status": "ENABLED"
+}
\ No newline at end of file
diff --git a/test/Run PC local installation b/test/Run PC local installation
new file mode 100644
index 0000000..29873bd
--- /dev/null
+++ b/test/Run PC local installation
@@ -0,0 +1 @@
+EMAIL=nathan.cox@nutanix.com PC_HOST=10.42.41.39 PE_HOST=10.42.41.37 PE_PASSWORD=techX2019! PC_LAUNCH=ts2019.sh PC_VERSION=5.10.2 bash -x /home/nutanix/ts2019.sh PC
\ No newline at end of file
diff --git a/test/data.json b/test/data.json
new file mode 100644
index 0000000..3151fd9
--- /dev/null
+++ b/test/data.json
@@ -0,0 +1 @@
+{"api_version":"3.1","metadata":{"total_matches": 2, "kind": "subnet", "length": 2, "offset": 0},"entities":[{"status": {"state": "COMPLETE", "name": "Primary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.1", "dhcp_server_address": {"ip": "10.42.3.126"}, "pool_list": [{"range": "10.42.3.50 10.42.3.125"}], "prefix_length": 25, "subnet_ip": "10.42.3.0", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 0}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "spec": {"name": "Primary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.1", "dhcp_server_address": {"ip": "10.42.3.126"}, "pool_list": [{"range": "10.42.3.50 10.42.3.125"}], "prefix_length": 25, "subnet_ip": "10.42.3.0", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 0}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "metadata": {"last_update_time": "2019-08-21T23:48:27Z", "kind": "subnet", "uuid": "5709b4a1-f481-43f3-9b2e-2bf8e3a855f6", "spec_version": 0, "creation_time": "2019-08-21T23:48:27Z", "categories_mapping": {}, "categories": {}}},{"status": {"state": "COMPLETE", "name": "Secondary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.129", "dhcp_server_address": {"ip": "10.42.3.254"}, "pool_list": [{"range": "10.42.3.132 10.42.3.253"}], "prefix_length": 25, "subnet_ip": "10.42.3.128", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 31}, "cluster_reference": 
{"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "spec": {"name": "Secondary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.129", "dhcp_server_address": {"ip": "10.42.3.254"}, "pool_list": [{"range": "10.42.3.132 10.42.3.253"}], "prefix_length": 25, "subnet_ip": "10.42.3.128", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 31}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "metadata": {"last_update_time": "2019-08-21T23:48:27Z", "kind": "subnet", "uuid": "a6bb09cf-bc81-43f0-a4f1-1941a9c6486a", "spec_version": 0, "creation_time": "2019-08-21T23:48:27Z", "categories_mapping": {}, "categories": {}}}]}
\ No newline at end of file
diff --git a/test/images.json b/test/images.json
new file mode 100644
index 0000000..b8fe3e2
--- /dev/null
+++ b/test/images.json
@@ -0,0 +1,35 @@
+{
+ "api_response_list": [
+ {
+ "status": "202",
+ "api_response": {
+ "status": {
+ "state": "PENDING",
+ "execution_context": {
+ "task_uuid": "6c6a2bfa-c74d-4bbe-a572-5aecae2aca42"
+ }
+ },
+ "spec": {
+ "name": "Nutanix-VirtIO-1.1.3",
+ "resources": {
+ "image_type": "ISO_IMAGE",
+ "source_uri": "http://10.42.194.11/workshop_staging/Nutanix-VirtIO-1.1.3.iso"
+ }
+ },
+ "api_version": "3.1",
+ "metadata": {
+ "owner_reference": {
+ "kind": "user",
+ "uuid": "00000000-0000-0000-0000-000000000000",
+ "name": "admin"
+ },
+ "use_categories_mapping": false,
+ "kind": "image",
+ "spec_version": 0,
+ "uuid": "b959c4a4-c155-4e77-b10d-ee4ec38b753d"
+ }
+ },
+ "path_and_params": "/api/nutanix/v3/images"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/test/logserver/curl_sim.sh b/test/logserver/curl_sim.sh
new file mode 100644
index 0000000..09cb05e
--- /dev/null
+++ b/test/logserver/curl_sim.sh
@@ -0,0 +1,233 @@
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.100.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.100.37%7cdependencies%7cInstall%20sshpass... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.100.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.100.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.100.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.100.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.100.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.100.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.100.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.100.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.100.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.100.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.100.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.100.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.100.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.100.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.100.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.100.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.100.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.100.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.100.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.100.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.100.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.100.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.100.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.100.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.100.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.100.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.100.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.100.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.100.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.100.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.100.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.100.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.100.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.100.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.100.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.100.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.100.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.100.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.100.37%7cremote_exec%7cOptional%3a%20giving%20up. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.100.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.100.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.100.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cdependencies%7cSuccess%3a%20found%20jq. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.100.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.100.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.100.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.100.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.100.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.100.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.100.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.100.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
\ No newline at end of file
diff --git a/test/logserver/curl_sim1.sh b/test/logserver/curl_sim1.sh
new file mode 100644
index 0000000..138c1d9
--- /dev/null
+++ b/test/logserver/curl_sim1.sh
@@ -0,0 +1,233 @@
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.21.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.21.37%7cdependencies%7cInstall%20sshpass... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.21.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.21.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.21.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.21.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.21.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.21.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.21.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.21.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.21.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.21.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.21.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.21.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.21.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.21.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.21.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.21.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.21.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.21.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.21.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.21.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.21.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.21.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.21.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.21.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.21.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.21.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.21.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.21.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.21.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.21.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.21.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.21.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.21.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.21.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.21.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.21.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.21.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.21.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.21.37%7cremote_exec%7cOptional%3a%20giving%20up. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.21.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.21.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.21.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cdependencies%7cSuccess%3a%20found%20jq. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.21.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.21.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.21.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.21.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.21.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.21.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.21.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.21.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
\ No newline at end of file
diff --git a/test/logserver/curl_sim2.sh b/test/logserver/curl_sim2.sh
new file mode 100644
index 0000000..e835977
--- /dev/null
+++ b/test/logserver/curl_sim2.sh
@@ -0,0 +1,233 @@
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.2.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.2.37%7cdependencies%7cInstall%20sshpass... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.2.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.2.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.2.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.2.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.2.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.2.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.2.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.2.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.2.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.2.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.2.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.2.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.2.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.2.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.2.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.2.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.2.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.2.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.2.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.2.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.2.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.2.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.2.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.2.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.2.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.2.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.2.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.2.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.2.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.2.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.2.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.2.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.2.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.2.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.2.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.2.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.2.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.2.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.2.37%7cremote_exec%7cOptional%3a%20giving%20up. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.2.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.2.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.2.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cdependencies%7cSuccess%3a%20found%20jq. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.2.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.2.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.2.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.2.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.2.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.2.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.2.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.2.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
\ No newline at end of file
diff --git a/test/logserver/curl_sim3.sh b/test/logserver/curl_sim3.sh
new file mode 100644
index 0000000..2b91ec8
--- /dev/null
+++ b/test/logserver/curl_sim3.sh
@@ -0,0 +1,233 @@
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.31.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.31.37%7cdependencies%7cInstall%20sshpass... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.31.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.31.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.31.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.31.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.31.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.31.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.31.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.31.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.31.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.31.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.31.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.31.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.31.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.31.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.31.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.31.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.31.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.31.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.31.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.31.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.31.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.31.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.31.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.31.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.31.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.31.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.31.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.31.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.31.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.31.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.31.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.31.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.31.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.31.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.31.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.31.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.31.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.31.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.31.37%7cremote_exec%7cOptional%3a%20giving%20up. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.31.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.31.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.31.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cdependencies%7cSuccess%3a%20found%20jq. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.31.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.31.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.31.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.31.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.31.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.31.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.31.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0'
+sleep 10
+curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.31.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0'
\ No newline at end of file
diff --git a/test/logserver/hpoc_deploy.sql b/test/logserver/hpoc_deploy.sql
new file mode 100644
index 0000000..67c31d9
--- /dev/null
+++ b/test/logserver/hpoc_deploy.sql
@@ -0,0 +1,61 @@
+-- MySQL dump 10.14 Distrib 5.5.60-MariaDB, for Linux (x86_64)
+--
+-- Host: localhost Database: hpoc_deploy
+-- ------------------------------------------------------
+-- Server version 5.5.60-MariaDB
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Current Database: `hpoc_deploy`
+--
+
+CREATE DATABASE /*!32312 IF NOT EXISTS*/ `hpoc_deploy` /*!40100 DEFAULT CHARACTER SET latin1 */;
+
+USE `hpoc_deploy`;
+
+--
+-- Table structure for table `deploy_status`
+--
+
+DROP TABLE IF EXISTS `deploy_status`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `deploy_status` (
+ `id` int(6) NOT NULL AUTO_INCREMENT,
+ `hostname` varchar(255) DEFAULT NULL,
+ `module` varchar(255) DEFAULT NULL,
+ `replycode` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=129 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `deploy_status`
+--
+
+LOCK TABLES `deploy_status` WRITE;
+/*!40000 ALTER TABLE `deploy_status` DISABLE KEYS */;
+INSERT INTO `deploy_status` VALUES (125,'10.42.100.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'),(126,'10.42.21.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'),(127,'10.42.2.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'),(128,'10.42.31.37','dependencies','Warning: assuming on PC or PE VM, removing jq...');
+/*!40000 ALTER TABLE `deploy_status` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2019-03-13 3:49:55
diff --git a/test/logserver/logserver.py b/test/logserver/logserver.py
new file mode 100644
index 0000000..f60a01d
--- /dev/null
+++ b/test/logserver/logserver.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+
+# Callback log server: receives POST requests from the staging scripts and
+# records per-host deployment status in the hpoc_deploy MariaDB database.
+
+from http.server import BaseHTTPRequestHandler, HTTPServer
+import logging
+import urllib
+import mysql.connector as mariadb
+
+class S(BaseHTTPRequestHandler):
+    def _set_response(self):
+        self.send_response(200)
+        self.send_header('Content-type', 'text/html')
+        self.end_headers()
+
+    def do_POST(self):
+        content_length = int(self.headers['Content-Length'])  # size of the posted body
+        self.rfile.read(content_length)  # drain the body; the payload travels in the URL path
+        # Path format: /<date>|<unused>|<host_ip>|<module>|<message>
+        message = urllib.parse.unquote(self.path)
+        message_list = message[1:].split("|")
+        if len(message_list) >= 5:
+            db_actions(message_list[0], message_list[2], message_list[3], message_list[4], 'insert')
+        else:
+            logging.warning("Malformed POST path, expected 5 '|'-separated fields: %s", message)
+
+        self._set_response()
+
+# MariaDB related stuff. get the query and what is the module to run (create,insert, update, etc)
+def db_actions(date, host_ip, module, module_msg, action):
+    # open the mariadb connection
+    mariadb_connection = mariadb.connect(user='webusr', password='webusr', database='hpoc_deploy', host='127.0.0.1', port='3306')
+    try:
+        cursor = mariadb_connection.cursor()
+
+        # check if the host_ip exists in the database; parameterized queries
+        # keep request-path data from being injected into the SQL.
+        cursor.execute('select count(*) from deploy_status where hostname=%s', (host_ip,))
+        row = cursor.fetchone()
+        if row[0] == 0:
+            # if the hostname does not exist in the table, add it to the table and move forward
+            cursor.execute('insert into deploy_status(hostname) values(%s)', (host_ip,))
+        else:
+            # update the existing row with the latest module / reply message
+            cursor.execute('update deploy_status set replycode=%s, module=%s where hostname=%s',
+                           (module_msg, module, host_ip))
+        mariadb_connection.commit()
+    finally:
+        # always close the mariadb connection, even if a query failed
+        mariadb_connection.close()
+    return
+
+# Function for running the HTTP server
+def run(server_class=HTTPServer, handler_class=S, port=8080):
+    logging.basicConfig(level=logging.INFO)
+    server_address = ('', port)
+    httpd = server_class(server_address, handler_class)
+    logging.info('Starting httpd...\n')
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        pass
+    httpd.server_close()
+    logging.info('Stopping httpd...\n')
+
+if __name__ == '__main__':
+    from sys import argv
+
+    if len(argv) == 2:
+        run(port=int(argv[1]))
+    else:
+        run()
diff --git a/test/logserver/nagios_setup.sh b/test/logserver/nagios_setup.sh
new file mode 100644
index 0000000..24885e2
--- /dev/null
+++ b/test/logserver/nagios_setup.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# Script for setting up the Nagios configuration files for the PE clusters
+# during the TechSummit 2019.  Reads "name;ip" pairs from pelist.txt and
+# generates one Nagios host config per cluster from the yourhost.cfg template.
+
+CFG_DIR=/usr/local/nagios/etc/servers
+
+# Get the list of the clusters to be created; each line is "<name>;<ip>"
+while IFS=';' read -r pe_name pe_ip
+do
+    [ -z "$pe_name" ] && continue   # skip blank lines
+
+    # Copy the template to a config file named after the cluster
+    cp "$CFG_DIR/yourhost.cfg" "$CFG_DIR/$pe_name.cfg"
+
+    # Set the right server name
+    sed -i "s/CLUSTER_NAME/$pe_name/g" "$CFG_DIR/$pe_name.cfg"
+
+    # Set the right IP address
+    sed -i "s/CLUSTER_IP/$pe_ip/g" "$CFG_DIR/$pe_name.cfg"
+done < /root/scripts/pelist.txt
+
+# Rename the template so Nagios does not load it as a host config
+mv "$CFG_DIR/yourhost.cfg" "$CFG_DIR/yourhost.cfg.tmp"
+
+# Restart Nagios so it can start monitoring:
+systemctl reload nagios.service
diff --git a/test/logserver/pelist.txt b/test/logserver/pelist.txt
new file mode 100644
index 0000000..cecd599
--- /dev/null
+++ b/test/logserver/pelist.txt
@@ -0,0 +1,84 @@
+PHX-POC001;10.42.1.37
+PHX-POC002;10.42.2.37
+PHX-POC003;10.42.3.37
+PHX-POC004;10.42.4.37
+PHX-POC005;10.42.5.37
+PHX-POC006;10.42.6.37
+PHX-POC007;10.42.7.37
+PHX-POC010;10.42.10.37
+PHX-POC011;10.42.11.37
+PHX-POC012;10.42.12.37
+PHX-POC013;10.42.13.37
+PHX-POC014;10.42.14.37
+PHX-POC015;10.42.15.37
+PHX-POC016;10.42.16.37
+PHX-POC019;10.42.19.37
+PHX-POC020;10.42.20.37
+PHX-POC021;10.42.21.37
+PHX-POC022;10.42.22.37
+PHX-POC024;10.42.24.37
+PHX-POC025;10.42.25.37
+PHX-POC026;10.42.26.37
+PHX-POC027;10.42.27.37
+PHX-POC028;10.42.28.37
+PHX-POC029;10.42.29.37
+PHX-POC030;10.42.30.37
+PHX-POC031;10.42.31.37
+PHX-POC032;10.42.32.37
+PHX-POC033;10.42.33.37
+PHX-POC034;10.42.34.37
+PHX-POC037;10.42.37.37
+PHX-POC038;10.42.38.37
+PHX-POC042;10.42.42.37
+PHX-POC044;10.42.44.37
+PHX-POC045;10.42.45.37
+PHX-POC046;10.42.46.37
+PHX-POC047;10.42.47.37
+PHX-POC048;10.42.48.37
+PHX-POC049;10.42.49.37
+PHX-POC050;10.42.50.37
+PHX-POC051;10.42.51.37
+PHX-POC052;10.42.52.37
+PHX-POC053;10.42.53.37
+PHX-POC054;10.42.54.37
+PHX-POC055;10.42.55.37
+PHX-POC056;10.42.56.37
+PHX-POC057;10.42.57.37
+PHX-POC061;10.42.61.37
+PHX-POC062;10.42.62.37
+PHX-POC065;10.42.65.37
+PHX-POC066;10.42.66.37
+PHX-POC067;10.42.67.37
+PHX-POC068;10.42.68.37
+PHX-POC069;10.42.69.37
+PHX-POC070;10.42.70.37
+PHX-POC072;10.42.72.37
+PHX-POC073;10.42.73.37
+PHX-POC074;10.42.74.37
+PHX-POC079;10.42.79.37
+PHX-POC080;10.42.80.37
+PHX-POC081;10.42.81.37
+PHX-POC082;10.42.82.37
+PHX-POC083;10.42.83.37
+PHX-POC086;10.42.86.37
+PHX-POC087;10.42.87.37
+PHX-POC088;10.42.88.37
+PHX-POC090;10.42.90.37
+PHX-POC091;10.42.91.37
+PHX-POC092;10.42.92.37
+PHX-POC093;10.42.93.37
+PHX-POC094;10.42.94.37
+PHX-POC095;10.42.95.37
+PHX-POC096;10.42.96.37
+PHX-POC097;10.42.97.37
+PHX-POC098;10.42.98.37
+PHX-POC099;10.42.99.37
+PHX-POC101;10.42.101.37
+PHX-POC102;10.42.102.37
+PHX-POC104;10.42.104.37
+PHX-POC106;10.42.106.37
+PHX-POC107;10.42.107.37
+PHX-POC108;10.42.108.37
+PHX-POC109;10.42.109.37
+PHX-POC110;10.42.110.37
+PHX-POC113;10.42.113.37
\ No newline at end of file
diff --git a/test/logserver/templates/index.html b/test/logserver/templates/index.html
new file mode 100644
index 0000000..eae0996
--- /dev/null
+++ b/test/logserver/templates/index.html
@@ -0,0 +1,50 @@
+
+ Stageworkshop Summary
+
+
+
+
+
+Employees
+
+
+
+ | Hostname |
+ Module |
+ Log message |
+
+
+{% if result %}
+
+{% for row in result %}
+
+
+ | {{ row[0] }} |
+ {{ row[1] }} |
+ {{ row[2] }} |
+
+
+{% endfor %}
+
+{% endif %}
+
+
+
+
+
+
+
diff --git a/test/logserver/webserver.py b/test/logserver/webserver.py
new file mode 100644
index 0000000..5426fcd
--- /dev/null
+++ b/test/logserver/webserver.py
@@ -0,0 +1,40 @@
+from flask import Flask, render_template
+import mysql.connector as mysqldb
+
+app = Flask(__name__)
+
+class Database:
+    # Thin wrapper around the hpoc_deploy MariaDB database.
+    def __init__(self):
+        host = "127.0.0.1"
+        user = "webusr"
+        password = "webusr"
+        db = "hpoc_deploy"
+        port = "3306"
+        self.con = mysqldb.connect(host=host, user=user, password=password, database=db, port=port)
+        self.cur = self.con.cursor()
+
+    def list_messages(self):
+        # Return all (hostname, module, replycode) rows from deploy_status.
+        self.cur.execute("SELECT hostname,module,replycode FROM deploy_status")
+        result = self.cur.fetchall()
+        return result
+
+    def close(self):
+        # Release the cursor and the connection.
+        self.cur.close()
+        self.con.close()
+
+@app.route('/')
+def employees():
+    # Render the deployment-status table.  A fresh DB connection is opened
+    # and closed per request so connections are not leaked.
+    db = Database()
+    try:
+        res = db.list_messages()
+    finally:
+        db.close()
+    return render_template('index.html', result=res)
+
+if __name__ == "__main__":
+    app.run(host='0.0.0.0')
diff --git a/test/minigts_ce.sh b/test/minigts_ce.sh
new file mode 100644
index 0000000..f5b604f
--- /dev/null
+++ b/test/minigts_ce.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Register the MiniGTS web/DB VMs in DNS on both the primary and the DR
+# Samba domain controllers, then dump each DC zone for verification.
+# Input: minigts_ce_list.txt, comma-separated per line:
+#   <webname>,<web_ip>,<dbname>,<db_ip>,<dr_web_ip>,<dr_db_ip>,
+BIN=/usr/bin
+DOMAIN=ntnxlab.local
+PASSWD="nutanix/4u"
+
+while IFS="," read -r Webname IP1 DBName IP2 DRIP1 DRIP2
+do
+    echo "Adding: $Webname with IP $IP1 with DRIP $DRIP1 and $DBName with IP $IP2 and DRIP $DRIP2"
+    # The domain controller lives at host .41 of each VM's /24 network
+    OCTETDC1=(${IP1//./ })
+    OCTETDC2=(${DRIP1//./ })
+    DC1="${OCTETDC1[0]}.${OCTETDC1[1]}.${OCTETDC1[2]}.41"
+    DC2="${OCTETDC2[0]}.${OCTETDC2[1]}.${OCTETDC2[2]}.41"
+    echo "Using Domain controlers: DC1: $DC1 and DC2: $DC2"
+    SSHPASS=$PASSWD sshpass -e ssh "root@$DC1" samba-tool dns add "$DC1" "$DOMAIN" "$Webname" A "$IP1" -U administrator --password "$PASSWD"
+    SSHPASS=$PASSWD sshpass -e ssh "root@$DC1" samba-tool dns add "$DC1" "$DOMAIN" "$DBName" A "$IP2" -U administrator --password "$PASSWD"
+    echo "Updating the DR side......"
+    SSHPASS=$PASSWD sshpass -e ssh "root@$DC2" samba-tool dns add "$DC2" "$DOMAIN" "$Webname" A "$DRIP1" -U administrator --password "$PASSWD"
+    SSHPASS=$PASSWD sshpass -e ssh "root@$DC2" samba-tool dns add "$DC2" "$DOMAIN" "$DBName" A "$DRIP2" -U administrator --password "$PASSWD"
+    echo "--------------------------------------------------------------------------------------------------------------------------------"
+    echo ""
+done < minigts_ce_list.txt
+
+# Sanity check: dump the full DNS zone from every DC we touched.
+# NOTE(review): some entries in the list use 10.38.184.x networks, but this
+# loop assumes 10.42.$i.41 for every DC -- verify the 184 entry is correct.
+for i in 104 111 184 110 99 4 96 95 69 86 81 61
+do
+    DC="10.42.$i.41"
+    SSHPASS=$PASSWD sshpass -e ssh "root@$DC" "samba-tool dns query $DC $DOMAIN @ ALL -U administrator --password $PASSWD"
+    echo "--------------------------------------------------------"
+done
diff --git a/test/minigts_ce_list.txt b/test/minigts_ce_list.txt
new file mode 100644
index 0000000..a1eb21c
--- /dev/null
+++ b/test/minigts_ce_list.txt
@@ -0,0 +1,50 @@
+drweb1,10.42.4.230,drdb1,10.42.4.239,10.42.86.240,10.42.86.249,
+drweb1,10.42.69.230,drdb1,10.42.69.233,10.42.111.240,10.42.111.243,
+drweb1,10.42.86.230,drdb1,10.42.86.231,10.42.4.240,10.42.4.241,
+drweb1,10.42.96.230,drdb1,10.42.96.237,10.42.81.240,10.42.81.247,
+drweb1,10.42.110.230,drdb1,10.42.110.231,10.42.61.240,10.42.61.241,
+drweb1,10.38.184.230,drdb1,10.38.184.235,10.42.99.240,10.42.99.245,
+drweb10,10.42.61.238,drdb10,10.42.61.239,10.38.184.248,10.38.184.249,
+drweb2,10.42.4.232,drdb2,10.42.4.237,10.42.86.242,10.42.86.247,
+drweb2,10.42.69.232,drdb2,10.42.69.231,10.42.111.242,10.42.111.241,
+drweb2,10.42.86.232,drdb2,10.42.86.237,10.42.4.242,10.42.4.247,
+drweb2,10.42.96.232,drdb2,10.42.96.235,10.42.81.242,10.42.81.245,
+drweb2,10.42.110.232,drdb2,10.42.110.233,10.42.61.242,10.42.61.243,
+drweb2,10.38.184.232,drdb2,10.38.184.237,10.42.99.242,10.42.99.247,
+drweb3,10.42.4.234,drdb3,10.42.4.235,10.42.86.244,10.42.86.245,
+drweb3,10.42.69.234,drdb3,10.42.69.235,10.42.111.244,10.42.111.245,
+drweb3,10.42.86.234,drdb3,10.42.86.235,10.42.4.244,10.42.4.245,
+drweb3,10.42.96.234,drdb3,10.42.96.231,10.42.81.244,10.42.81.241,
+drweb3,10.42.110.234,drdb3,10.42.110.237,10.42.61.244,10.42.61.247,
+drweb3,10.38.184.234,drdb3,10.38.184.231,10.42.99.244,10.42.99.241,
+drweb4,10.42.4.236,drdb4,10.42.4.231,10.42.86.246,10.42.86.241,
+drweb4,10.42.69.236,drdb4,10.42.69.237,10.42.111.246,10.42.111.247,
+drweb4,10.42.86.236,drdb4,10.42.86.233,10.42.4.246,10.42.4.243,
+drweb4,10.42.96.236,drdb4,10.42.96.233,10.42.81.246,10.42.81.243,
+drweb4,10.42.110.236,drdb4,10.42.110.235,10.42.61.246,10.42.61.245,
+drweb4,10.38.184.236,drdb4,10.38.184.233,10.42.99.246,10.42.99.243,
+drweb5,10.42.95.230,drdb5,10.42.95.231,10.42.104.240,10.42.104.241,
+drweb5,10.42.81.230,drdb5,10.42.81.235,10.42.95.240,10.42.95.245,
+drweb5,10.42.99.230,drdb5,10.42.99.237,10.42.96.240,10.42.96.247,
+drweb5,10.42.104.230,drdb5,10.42.104.235,10.42.69.240,10.42.69.245,
+drweb5,10.42.111.230,drdb5,10.42.111.233,10.42.110.240,10.42.110.243,
+drweb5,10.42.4.238,drdb5,10.42.4.233,10.42.86.248,10.42.86.243,
+drweb6,10.42.81.232,drdb6,10.42.81.237,10.42.95.242,10.42.95.247,
+drweb6,10.42.95.232,drdb6,10.42.95.233,10.42.104.242,10.42.104.243,
+drweb6,10.42.99.232,drdb6,10.42.99.231,10.42.96.242,10.42.96.241,
+drweb6,10.42.104.232,drdb6,10.42.104.233,10.42.69.242,10.42.69.243,
+drweb6,10.42.111.232,drdb6,10.42.111.231,10.42.110.242,10.42.110.241,
+drweb6,10.42.61.230,drdb6,10.42.61.231,10.38.184.240,10.38.184.241,
+drweb7,10.42.81.234,drdb7,10.42.81.233,10.42.95.244,10.42.95.243,
+drweb7,10.42.99.234,drdb7,10.42.99.233,10.42.96.244,10.42.96.243,
+drweb7,10.42.104.234,drdb7,10.42.104.231,10.42.69.244,10.42.69.241,
+drweb7,10.42.111.234,drdb7,10.42.111.235,10.42.110.244,10.42.110.245,
+drweb7,10.42.61.232,drdb7,10.42.61.233,10.38.184.242,10.38.184.243,
+drweb7,10.42.95.234,drdb7,10.42.95.237,10.42.104.244,10.42.104.247,
+drweb8,10.42.61.234,drdb8,10.42.61.235,10.38.184.244,10.38.184.245,
+drweb8,10.42.81.236,drdb8,10.42.81.231,10.42.95.246,10.42.95.241,
+drweb8,10.42.95.236,drdb8,10.42.95.235,10.42.104.246,10.42.104.245,
+drweb8,10.42.99.236,drdb8,10.42.99.235,10.42.96.246,10.42.96.245,
+drweb8,10.42.104.236,drdb8,10.42.104.237,10.42.69.246,10.42.69.247,
+drweb8,10.42.111.236,drdb8,10.42.111.237,10.42.110.246,10.42.110.247,
+drweb9,10.42.61.236,drdb9,10.42.61.237,10.38.184.246,10.38.184.247,
diff --git a/test/nht_prep.sh b/test/nht_prep.sh
new file mode 100644
index 0000000..2e14c2b
--- /dev/null
+++ b/test/nht_prep.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+# Script to stage the NHT clusters for the fourth node:
+# create a single-node cluster, reset the admin password, prepare storage,
+# accept the EULA, disable Pulse and upload the lab images.
+
+# Create single node cluster using this CVM's eth0 address
+yes | cluster --cluster_name=NHTLab --dns_servers=10.42.196.10 --ntp_servers=10.42.196.10 --svm_ips=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') create
+
+# Give the cluster some time to settle
+sleep 60
+
+# Reset the admin password
+ncli user reset-password user-name='admin' password='nht2EMEA!'
+
+# Rename the default SP to SP1
+default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g)
+ncli sp edit name="${default_sp}" new-name="SP1"
+
+# Create an Images container if it doesn't exist
+(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^Images" 2>&1 > /dev/null) \
+  && echo "Container Images already exists" \
+  || ncli container create name="Images" sp-name="SP1"
+
+# Accept the EULA
+curl -u admin:'nht2EMEA!' -k -H 'Content-Type: application/json' -X POST \
+ https://127.0.0.1:9440/PrismGateway/services/rest/v1/eulas/accept \
+ -d '{
+ "username": "SE",
+ "companyName": "NTNX",
+ "jobTitle": "SE"
+}'
+
+# Disable Pulse in PE
+curl -u admin:'nht2EMEA!' -k -H 'Content-Type: application/json' -X PUT \
+ https://127.0.0.1:9440/PrismGateway/services/rest/v1/pulse \
+ -d '{
+ "defaultNutanixEmail": null,
+ "emailContactList": null,
+ "enable": false,
+ "enableDefaultNutanixEmail": false,
+ "isPulsePromptNeeded": false,
+ "nosVersion": null,
+ "remindLater": null,
+ "verbosityType": null
+}'
+
+
+# Upload the images
+curl -X POST \
+ https://127.0.0.1:9440/api/nutanix/v3/batch \
+ -H 'Content-Type: application/json' \
+ --insecure --user admin:'nht2EMEA!' \
+ -d '{
+ "action_on_failure":"CONTINUE",
+ "execution_order":"SEQUENTIAL",
+ "api_request_list":[
+ {
+ "operation":"POST",
+ "path_and_params":"/api/nutanix/v3/images",
+ "body":{
+ "spec":{
+ "name":"X-Ray.qcow2",
+ "resources":{
+ "image_type":"DISK_IMAGE",
+ "source_uri":"http://download.nutanix.com/xray/3.5.0/xray.qcow2"
+ }
+ },
+ "metadata":{
+ "kind":"image"
+ },
+ "api_version":"3.1.0"
+ }
+ },
+ {
+ "operation":"POST",
+ "path_and_params":"/api/nutanix/v3/images",
+ "body":{
+ "spec":{
+ "name":"Foundation.qcow2",
+ "resources":{
+ "image_type":"DISK_IMAGE",
+ "source_uri":"http://download.nutanix.com/foundation/foundation-4.4.3/Foundation_VM-4.4.3-disk-0.qcow2"
+ }
+ },
+ "metadata":{
+ "kind":"image"
+ },
+ "api_version":"3.1.0"
+ }
+ }
+ ],
+ "api_version":"3.0"
+}'
diff --git a/test/objects/cluster.txt b/test/objects/cluster.txt
new file mode 100644
index 0000000..181f98a
--- /dev/null
+++ b/test/objects/cluster.txt
@@ -0,0 +1,77 @@
+# Using the # symbol you can have the line skipped by the logic of the checking script.
+10.42.3.39|techX2020!|nathan@nutanix.com
+10.42.5.39|techX2020!|nathan@nutanix.com
+10.42.6.39|techX2020!|nathan@nutanix.com
+10.42.10.39|techX2020!|nathan@nutanix.com
+10.42.11.39|techX2020!|nathan@nutanix.com
+10.42.12.39|techX2020!|nathan@nutanix.com
+10.42.14.39|techX2020!|nathan@nutanix.com
+10.42.16.39|techX2020!|nathan@nutanix.com
+10.42.18.39|techX2020!|nathan@nutanix.com
+10.42.19.39|techX2020!|nathan@nutanix.com
+10.42.20.39|techX2020!|nathan@nutanix.com
+10.42.23.39|techX2020!|nathan@nutanix.com
+10.42.25.39|techX2020!|nathan@nutanix.com
+10.42.27.39|techX2020!|nathan@nutanix.com
+10.42.28.39|techX2020!|nathan@nutanix.com
+10.42.29.39|techX2020!|nathan@nutanix.com
+10.42.30.39|techX2020!|nathan@nutanix.com
+10.42.31.39|techX2020!|nathan@nutanix.com
+10.42.32.39|techX2020!|nathan@nutanix.com
+10.42.34.39|techX2020!|nathan@nutanix.com
+10.42.35.39|techX2020!|nathan@nutanix.com
+10.42.42.39|techX2020!|nathan@nutanix.com
+10.42.56.39|techX2020!|nathan@nutanix.com
+10.42.61.39|techX2020!|nathan@nutanix.com
+10.42.62.39|techX2020!|nathan@nutanix.com
+10.42.66.39|techX2020!|nathan@nutanix.com
+10.42.67.39|techX2020!|nathan@nutanix.com
+10.42.69.39|techX2020!|nathan@nutanix.com
+10.42.71.39|techX2020!|nathan@nutanix.com
+10.42.74.39|techX2020!|nathan@nutanix.com
+10.42.79.39|techX2020!|nathan@nutanix.com
+10.42.81.39|techX2020!|nathan@nutanix.com
+10.42.84.39|techX2020!|nathan@nutanix.com
+10.42.86.39|techX2020!|nathan@nutanix.com
+10.42.94.39|techX2020!|nathan@nutanix.com
+#10.42.95.39|techX2020!|nathan@nutanix.com
+10.42.99.39|techX2020!|nathan@nutanix.com
+10.42.104.39|techX2020!|nathan@nutanix.com
+10.42.108.39|techX2020!|nathan@nutanix.com
+10.42.110.39|techX2020!|nathan@nutanix.com
+10.38.203.39|techX2020!|nathan@nutanix.com
+10.38.204.39|techX2020!|nathan@nutanix.com
+10.38.205.39|techX2020!|nathan@nutanix.com
+10.38.206.39|techX2020!|nathan@nutanix.com
+10.38.207.39|techX2020!|nathan@nutanix.com
+10.38.208.39|techX2020!|nathan@nutanix.com
+10.38.209.39|techX2020!|nathan@nutanix.com
+10.38.210.39|techX2020!|nathan@nutanix.com
+10.38.211.39|techX2020!|nathan@nutanix.com
+10.38.212.39|techX2020!|nathan@nutanix.com
+10.38.213.39|techX2020!|nathan@nutanix.com
+10.38.214.39|techX2020!|nathan@nutanix.com
+10.38.215.39|techX2020!|nathan@nutanix.com
+10.38.216.39|techX2020!|nathan@nutanix.com
+10.38.217.39|techX2020!|nathan@nutanix.com
+10.38.218.39|techX2020!|nathan@nutanix.com
+10.38.221.39|techX2020!|nathan@nutanix.com
+#10.42.111.39|techX2020!|nathan@nutanix.com
+#10.42.112.39|techX2020!|nathan@nutanix.com
+10.55.1.39|techX2020!|nathan@nutanix.com
+10.55.2.39|techX2020!|nathan@nutanix.com
+10.55.4.39|techX2020!|nathan@nutanix.com
+10.55.9.39|techX2020!|nathan@nutanix.com
+10.55.10.39|techX2020!|nathan@nutanix.com
+10.55.30.39|techX2020!|nathan@nutanix.com
+10.55.31.39|techX2020!|nathan@nutanix.com
+10.55.33.39|techX2020!|nathan@nutanix.com
+10.55.35.39|techX2020!|nathan@nutanix.com
+10.55.39.39|techX2020!|nathan@nutanix.com
+10.55.40.39|techX2020!|nathan@nutanix.com
+10.55.41.39|techX2020!|nathan@nutanix.com
+10.55.48.39|techX2020!|nathan@nutanix.com
+10.55.49.39|techX2020!|nathan@nutanix.com
+10.55.55.39|techX2020!|nathan@nutanix.com
+10.55.57.39|techX2020!|nathan@nutanix.com
+10.55.60.39|techX2020!|nathan@nutanix.com
\ No newline at end of file
diff --git a/test/objects/object_airgap.sh b/test/objects/object_airgap.sh
new file mode 100644
index 0000000..a8b9e5e
--- /dev/null
+++ b/test/objects/object_airgap.sh
@@ -0,0 +1,112 @@
+#!/bin/bash
+
+# Script to set the airgap for Objects to our TS filesservers.
+#
+# For every cluster in cluster.txt (format "<pc_ip>|<password>|<owner>"):
+#   1. point the MSP airgap / LCM server at the local dark-site web server,
+#   2. check the state of the deployed objectstore,
+#   3. if it is in an error state, delete and re-create it on the cluster's
+#      Primary network.
+# NOTE(review): sshpass -e expects the SSHPASS variable to be exported by
+# the caller; this script never sets it.
+
+for _cluster in $(grep -v '^#' cluster.txt)
+  do
+    set -f   # disable globbing so the '|' split below cannot glob-expand
+    _fields=(${_cluster//|/ })
+    PE_HOST=${_fields[0]}
+    PE_PASSWORD=${_fields[1]}
+    OCTET=(${PE_HOST//./ })
+    # NOTE(review): "${PE_HOST:-2}" means "PE_HOST, or 2 when unset", so
+    # PC_HOST always equals PE_HOST -- cluster.txt already lists the PC IPs.
+    PC_HOST=${PE_HOST:-2}
+    CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
+    _url_network="https://${PC_HOST}:9440/api/nutanix/v3/subnets/list"
+
+    # Pick the dark-site image source based on the cluster's second octet
+    if [[ ${OCTET[1]} == "42" || ${OCTET[1]} == "38" ]]; then
+      source_ip="http://10.42.38.10/images"
+    else
+      source_ip="http://10.55.76.10"
+    fi
+
+    # Build the remote command: enable the airgap against the dark-site
+    # server, then report whether it ended up enabled.
+    cmd='/usr/local/nutanix/cluster/bin/mspctl airgap --enable --lcm-server='
+    cmd+=$source_ip
+    cmd+=';sleep 3;/usr/local/nutanix/cluster/bin/mspctl airgap --status | grep "\"enable\":true" | wc -l'
+
+    # Fire the command on the PC of the cluster so we have the right Dark Site image pull for Objects
+    sshpass -e ssh nutanix@${PE_HOST} -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null $cmd
+
+    # See if we have an error on the cluster for the objects
+    url="https://${PC_HOST}:9440/oss/api/nutanix/v3/groups"
+    payload='{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"},{"attribute":"total_memory_size_mib"},{"attribute":"total_vcpu_count"},{"attribute":"num_vcpu_per_msp_worker"}]}'
+    _response_json=$(curl ${CURL_HTTP_OPTS} -d "${payload}" ${url} --user admin:${PE_PASSWORD} | jq '.group_results[0].entity_results[0].data[] | select (.name=="state") .values[0].values[0]' | tr -d \")
+
+    if [[ ${_response_json} != "COMPLETE" ]]; then
+      if [[ ${_response_json} == "PENDING" ]]; then
+        echo "Status for ${PC_HOST} is pending.... Skipping"
+      else
+        echo "Found an error at PC ${PC_HOST}.. Starting counter measurements...."
+        # Look up the objectstore UUID so we can delete it
+        _response_json=$(curl ${CURL_HTTP_OPTS} -d "${payload}" ${url} --user admin:${PE_PASSWORD} | jq '.group_results[0].entity_results[0].entity_id' | tr -d \")
+        uuid_objects_store=${_response_json}
+        url_delete="https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores/${uuid_objects_store}"
+        del_oss_response=$(curl ${CURL_HTTP_OPTS} -X DELETE ${url_delete} -w "%{http_code}\n" --user admin:${PE_PASSWORD})
+
+        # Has the deletion been accepted?
+        if [[ ${del_oss_response} == "202" ]]; then
+          echo "Objectstore is to be deleted... Checking before moving on..."
+          payload='{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"}]}'
+          _response_json=$(curl ${CURL_HTTP_OPTS} -d "${payload}" ${url} --user admin:${PE_PASSWORD} | jq '.filtered_entity_count' | tr -d \")
+          # Wait until the objectstore is gone before creating a new one
+          while [[ ${_response_json} != 0 ]]
+          do
+            echo "Objectstore still found... Waiting 10 seconds.."
+            sleep 10
+            _response_json=$(curl ${CURL_HTTP_OPTS} -d "${payload}" ${url} --user admin:${PE_PASSWORD} | jq '.filtered_entity_count' | tr -d \")
+          done
+
+          # Gather the cluster and Primary-network UUIDs for the new objectstore
+          _json_data='{"kind":"subnet"}'
+          CLUSTER_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user admin:${PE_PASSWORD} $_url_network | jq '.entities[].spec | select (.name=="Primary") | .cluster_reference.uuid' | tr -d \")
+          PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user admin:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \")
+
+          # Objects service IPs are derived from the cluster /24
+          BUCKETS_VIP="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.17"
+          BUCKETS_DNS_IP="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.16"
+          OBJECTS_NW_START="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.18"
+          OBJECTS_NW_END="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.21"
+
+          # Create the payload URL
+          _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx-objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"'
+          _json_data_oss+=${CLUSTER_UUID}
+          _json_data_oss+='"},"buckets_infra_network_dns":"'
+          _json_data_oss+=${BUCKETS_DNS_IP}
+          _json_data_oss+='","buckets_infra_network_vip":"'
+          _json_data_oss+=${BUCKETS_VIP}
+          _json_data_oss+='","buckets_infra_network_reference":{"kind":"subnet","uuid":"'
+          _json_data_oss+=${PRIM_NETWORK_UUID}
+          _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"'
+          _json_data_oss+=${PRIM_NETWORK_UUID}
+          _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"'
+          _json_data_oss+=${OBJECTS_NW_START}
+          _json_data_oss+='","ipv4_end":"'
+          _json_data_oss+=${OBJECTS_NW_END}
+          _json_data_oss+='"}}}}'
+
+          # Now we have the correct data in the payload, fire it at the cluster
+          oss_create="https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores"
+          _response_oss_create=$(curl ${CURL_HTTP_OPTS} -X POST --user admin:${PE_PASSWORD} -d "${_json_data_oss}" ${oss_create} | jq '.metadata.uuid' | tr -d \")
+          if [[ -z ${_response_oss_create} ]]; then
+            echo "Failed to fire the script. Please check the cluster.."
+          else
+            echo "Create Objectstore has been fired...."
+          fi
+        fi
+      fi
+    else
+      echo "All good at PC ${PC_HOST}..."
+    fi
+  done
diff --git a/test/objects/test.json b/test/objects/test.json
new file mode 100644
index 0000000..c3925e2
--- /dev/null
+++ b/test/objects/test.json
@@ -0,0 +1 @@
+{"api_version":"3.0","metadata":{"creation_time":"2020-03-16T05:25:52.000-07:00","kind":"objectstore","last_update_time":"2020-03-16T05:25:52.000-07:00","uuid":"c8fbbddc-b580-4a2d-736c-2de01a3a67b5"},"spec":{"deployment_version":"2.0","description":"NTNXLAB","name":"ntnx-objects","resources":{"aggregate_resources":{"total_capacity_gib":51200,"total_memory_size_mib":32768,"total_vcpu_count":10},"buckets_infra_network_dns":"10.55.10.16","buckets_infra_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"buckets_infra_network_vip":"10.55.10.17","client_access_network_ip_list":null,"client_access_network_ipv4_range":{"ipv4_end":"10.55.10.21","ipv4_start":"10.55.10.18"},"client_access_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"cluster_reference":{"kind":"cluster","uuid":"0005a0d3-1163-df13-0000-00000001957f"},"domain":"ntnxlab.local"}},"status":{"description":"NTNXLAB","name":"ntnx-objects","resources":{"aggregate_resources":{"total_capacity_gib":51200,"total_memory_size_mib":32768,"total_vcpu_count":10},"buckets_infra_network_dns":"10.55.10.16","buckets_infra_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"buckets_infra_network_vip":"10.55.10.17","client_access_network_ip_list":null,"client_access_network_ip_used_list":null,"client_access_network_ipv4_range":{"ipv4_end":"10.55.10.21","ipv4_start":"10.55.10.18"},"client_access_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"cluster_reference":{"kind":"cluster","uuid":"0005a0d3-1163-df13-0000-00000001957f"},"domain":"ntnxlab.local"}}}
diff --git a/test/repo_source.sh b/test/repo_source.sh
old mode 100755
new mode 100644
index db96ec1..379b998
--- a/test/repo_source.sh
+++ b/test/repo_source.sh
@@ -10,22 +10,22 @@
# echo AUTH_HOST=${AUTH_HOST}
# exit
-log "__AutoDC__"
-unset SOURCE_URL
-repo_source AUTODC_REPOS[@]
-log "SOURCE_URL=${SOURCE_URL}"
+#log "__AutoDC__"
+#unset SOURCE_URL
+#repo_source AUTODC_REPOS[@]
+#log "SOURCE_URL=${SOURCE_URL}"
-log "__SSHPass__"
-unset SOURCE_URL
-_sshpass_pkg=${SSHPASS_REPOS[0]##*/}
-repo_source SSHPASS_REPOS[@] ${_sshpass_pkg}
-log "SOURCE_URL=${SOURCE_URL}"
-
-log "__jq__"
-unset SOURCE_URL
-_jq_pkg=${JQ_REPOS[0]##*/}
-repo_source JQ_REPOS[@] ${_jq_pkg}
-log "SOURCE_URL=${SOURCE_URL}"
+#log "__SSHPass__"
+#unset SOURCE_URL
+#_sshpass_pkg=${SSHPASS_REPOS[0]##*/}
+#repo_source SSHPASS_REPOS[@] ${_sshpass_pkg}
+#log "SOURCE_URL=${SOURCE_URL}"
+#
+#log "__jq__"
+#unset SOURCE_URL
+#_jq_pkg=${JQ_REPOS[0]##*/}
+#repo_source JQ_REPOS[@] ${_jq_pkg}
+#log "SOURCE_URL=${SOURCE_URL}"
log "__qcow2 Images__"
for _image in "${QCOW2_IMAGES[@]}" ; do
diff --git a/test/restapi.txt b/test/restapi.txt
new file mode 100644
index 0000000..79db559
--- /dev/null
+++ b/test/restapi.txt
@@ -0,0 +1,129 @@
+#######################################################################################################
+# EULA acceptance
+#######################################################################################################
+ curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{
+ "username": "SE with $(basename ${0})",
+ "companyName": "Nutanix",
+ "jobTitle": "SE"
+ }' https://localhost:9440/PrismGateway/services/rest/v1/eulas/accept
+
+
+#######################################################################################################
+# Disable Pulse
+#######################################################################################################
+curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data '{
+ "defaultNutanixEmail": null,
+ "emailContactList": null,
+ "enable": false,
+ "enableDefaultNutanixEmail": false,
+ "isPulsePromptNeeded": false,
+ "nosVersion": null,
+ "remindLater": null,
+ "verbosityType": null
+ }' https://localhost:9440/PrismGateway/services/rest/v1/pulse
+
+
+
+
+#######################################################################################################
+# Set Dataservices - PE
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Set NTP servers - PE
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Rename the default storage pool to SP01
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Create a Storage Container Images if not exist
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Create two networks (Primary and Secondary)
+#######################################################################################################
+curl -X POST \
+ https://10.42.9.37:9440/api/nutanix/v0.8/networks \
+ -H 'Content-Type: application/json' \
+ -H 'Postman-Token: 836ee60b-0398-456b-967b-7221c3355545' \
+ -H 'cache-control: no-cache' \
+ -d '{"name":"VLAN30","vlanId":"30","ipConfig":{"dhcpOptions":{"domainNameServers":"8.8.8.8","domainSearch":"ntnx-demo.local","domainName":"ntnx-demo.local"},"networkAddress":"10.10.30.0","prefixLength":"24","defaultGateway":"10.10.30.254","pool":[{"range":"10.10.30.100 10.10.30.200"}]}}
+'
+
+
+#######################################################################################################
+# Upload the right AutoDC version
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Install, Configure and start the AutoDC
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Configure the AutoDC's DNS server
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Authentication PE to the installed DC
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Create role mapping
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Upload PC files to be used - PE
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Upload Nutanix Files files
+#######################################################################################################
+
+
+
+#######################################################################################################
+# Deploy an Objectstore
+#######################################################################################################
+curl -X POST https://10.42.87.39:9440/oss/api/nutanix/v3/objectstores -d '{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"","description":"","resources":{"domain":"","cluster_reference":{"kind":"cluster","uuid":""},"buckets_infra_network_dns":"10.42..16","buckets_infra_network_vip":"10.42..17","buckets_infra_network_reference":{"kind":"subnet","uuid":""},"client_access_network_reference":{"kind":"subnet","uuid":""},"client_access_network_ipv4_range":{"ipv4_start":"10.42..18","ipv4_end":"10.42..21"}}}}' --insecure --silent -H 'Content-Type: application/json'
+
+We need a number of UUIDs before this API will work:
+1. Subnet for the client_access_network; maybe the secondary network? Where is the Windows VM running?
+2. Subnet for the infrastructure; the containers need to be able to talk to the CVMs
+3. Cluster UUID
+
+These UUIDs must be discovered dynamically, as they differ PER installation/cluster — and note that the required cluster UUID is the PE UUID, NOT the PC UUID!
+
+Using: curl -X POST --user <user>:<password> --insecure --silent -d '{"kind":"subnet"}' https://<PE-address>:9440/api/nutanix/v3/subnets/list
+we can grab the JSON that contains the corresponding:
+- UUID of the cluster
+- UUID of the network we need to have to get the command running. As we need to have them run in the primary network, we can use that UUID in the command
+
+#######################################################################################################
+# Progress creation of an object store
+#######################################################################################################
+curl https://10.42.87.39:9440/oss/api/nutanix/v3/groups -d '{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"}]}' --compressed --insecure
diff --git a/test/test_oss.sh b/test/test_oss.sh
new file mode 100644
index 0000000..a5a27d7
--- /dev/null
+++ b/test/test_oss.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+
+# Run the needed variables
+. global.vars.sh
+
+# Test script to get the objects store creation
+# Get the UUIDs of:
+# - UUID of the cluster
+# - UUID of the network we need to have to get the command running. As we need to have them run in the primary network, we can use that UUID in the command
+
#######################################
# Create an object store ("TEST") on the cluster reachable via localhost:9440.
# Looks up the cluster UUID and the "Primary" subnet UUID from the v3 subnets
# API, substitutes them (plus the VLAN third octet) into the objectstores
# payload, then POSTs it to the OSS endpoint.
# Globals:
#   PRISM_ADMIN (read) - Prism user name
#   PE_PASSWORD (read) - Prism password
#   VLAN        (read) - VLAN id substituted into the 10.42.VLANX.* addresses
# Outputs:
#   Echoes the cluster UUID, the Primary network UUID, and the API response.
#######################################
function object_store() {
  local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure '
  local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list'
  local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores'
  local _json_data _subnet_list CLUSTER_UUID PRIM_NETWORK_UUID _json_data_oss

  # Payload for the subnets/list call
  _json_data='{"kind":"subnet"}'

  # Fetch the subnet list ONCE and derive both UUIDs from the same response
  # (the original issued two identical POSTs). CURL_HTTP_OPTS is deliberately
  # left unquoted so it word-splits into separate curl options.
  _subnet_list=$(curl -X POST -d "${_json_data}" ${CURL_HTTP_OPTS} --user "${PRISM_ADMIN}:${PE_PASSWORD}" "${_url_network}")

  # jq -r emits raw strings, so no tr -d '"' post-processing is needed
  CLUSTER_UUID=$(echo "${_subnet_list}" | jq -r '.entities[].spec | select (.name=="Primary") | .cluster_reference.uuid')
  echo "${CLUSTER_UUID}"

  PRIM_NETWORK_UUID=$(echo "${_subnet_list}" | jq -r '.entities[] | select (.spec.name=="Primary") | .metadata.uuid')
  echo "${PRIM_NETWORK_UUID}"

  # Build the objectstore payload; the cluster UUID is the PE cluster UUID
  _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"TEST","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"'
  _json_data_oss+=${CLUSTER_UUID}
  _json_data_oss+='"},"buckets_infra_network_dns":"10.42.VLANX.16","buckets_infra_network_vip":"10.42.VLANX.17","buckets_infra_network_reference":{"kind":"subnet","uuid":"'
  _json_data_oss+=${PRIM_NETWORK_UUID}
  _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"'
  _json_data_oss+=${PRIM_NETWORK_UUID}
  _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.VLANX.18","ipv4_end":"10.42.VLANX.21"}}}}'

  # Set the right VLAN dynamically so we are configuring in the right network
  _json_data_oss=${_json_data_oss//VLANX/${VLAN}}

  # Payload is quoted: after UUID substitution it must stay a single argument
  curl -X POST -d "${_json_data_oss}" ${CURL_HTTP_OPTS} --user "${PRISM_ADMIN}:${PE_PASSWORD}" "${_url_oss}"
}
+
+object_store
diff --git a/we_push_centos_cl_disk.sh b/we_push_centos_cl_disk.sh
new file mode 100755
index 0000000..b6c44d7
--- /dev/null
+++ b/we_push_centos_cl_disk.sh
@@ -0,0 +1,12 @@
#!/bin/bash
#
# Push the CentOS_7_Cloud disk image to every cluster listed in the
# GTS2019 APAC group-2 cluster list (pipe-delimited; field 1 is the target).

# Set SSHPASS so 'sshpass -e' can authenticate non-interactively.
# NOTE(review): hardcoded credential — prefer sourcing this from the
# environment or a secrets file so it is not committed to the repository.
export SSHPASS='techX2019!'

# cut extracts the cluster/CVM address directly (no 'cat | cut', no backticks)
for i in $(cut -d'|' -f1 /root/GTS2019-APAC/gts2019_cluster_list_group2.txt)
do
  sshpass -e ssh -x -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -q "nutanix@${i}" '/usr/local/nutanix/bin/acli image.create CentOS_Cloud_7 image_type=kDiskImage wait=true container=Images source_url=http://10.42.8.50/images/CentOS-7-x86_64-GenericCloud.qcow2'
done
+
+
diff --git a/we_stage_workshop.sh b/we_stage_workshop.sh
new file mode 100755
index 0000000..1d75c68
--- /dev/null
+++ b/we_stage_workshop.sh
@@ -0,0 +1,380 @@
#!/usr/bin/env bash
# use bash -x to debug command substitution and evaluation instead of echo.
DEBUG=

# For WORKSHOPS keyword mappings to scripts and variables, please use:
# - Calm || Citrix || Summit
# - PC #.#
# Each entry is one selectable menu item; order matters for dispatch below.
WORKSHOPS=(
  "Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)"
  "Calm Workshop (AOS 5.8.x/AHV PC 5.10.x) = Stable (AutoDC2)"
  "Calm Workshop (AOS 5.9+/AHV PC 5.10.x) = Development"
  "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development"
  "Citrix Desktop on AHV Workshop (AOS/AHV 5.6)"
) # Adjust function stage_clusters, below, for file/script mappings as needed
+
+function stage_clusters() {
+ # Adjust map below as needed with $WORKSHOPS
+ local _cluster
+ local _container
+ local _dependency
+ local _fields
+ local _libraries='global.vars.sh we-lib.common.sh '
+ local _pe_launch # will be transferred and executed on PE
+ local _pc_launch # will be transferred and executed on PC
+ local _sshkey=${SSH_PUBKEY}
+ local _wc_arg='--lines'
+ local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]}
+
+ # Map to latest and greatest of each point release
+ # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download
+ # TODO: make WORKSHOPS and map a JSON configuration file?
+ if (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then
+ export PC_VERSION="${PC_DEV_VERSION}"
+ elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then
+ export PC_VERSION="${PC_STABLE_VERSION}"
+ elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then
+ export PC_VERSION=5.9.2
+ elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then
+ export PC_VERSION=5.7.1.1
+ elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then
+ export PC_VERSION=5.6.2
+ fi
+
+ # Map workshop to staging script(s) and libraries,
+ # _pe_launch will be executed on PE
+ if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='calm.sh'
+ _pc_launch=${_pe_launch}
+ fi
+ if (( $(echo ${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then
+ _pe_launch='stage_citrixhow.sh'
+ _pc_launch='stage_citrixhow_pc.sh'
+ fi
+ if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then
+ _libraries+='lib.pe.sh lib.pc.sh'
+ _pe_launch='we-ts2019.sh'
+ _pc_launch=${_pe_launch}
+ fi
+
+ dependencies 'install' 'sshpass'
+
+ if [[ -z ${PC_VERSION} ]]; then
+ log "WORKSHOP #${WORKSHOP_NUM} = ${_workshop} with PC-${PC_VERSION}"
+ fi
+
+ # Send configuration scripts to remote clusters and execute Prism Element script
+ if [[ ${CLUSTER_LIST} == '-' ]]; then
+ echo "Login to see tasks in flight via https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440"
+ pe_configuration_args "${_pc_launch}"
+
+ pushd scripts || true
+ eval "${PE_CONFIGURATION} ./${_pe_launch} 'PE'" >> ${HOME}/${_pe_launch%%.sh}.log 2>&1 &
+ unset PE_CONFIGURATION
+ popd || true
+ else
+ for _cluster in $(cat ${CLUSTER_LIST} | grep -v ^#)
+ do
+ set -f
+ # shellcheck disable=2206
+ _fields=(${_cluster//|/ })
+ PE_HOST=${_fields[0]}
+ PE_PASSWORD=${_fields[1]}
+ EMAIL=${_fields[2]}
+
+ pe_configuration_args "${_pc_launch}"
+
+ . scripts/global.vars.sh # re-import for relative settings
+
+ cat <> ${_pe_launch%%.sh}.log 2>&1 &"
+ unset PE_CONFIGURATION
+
+ # shellcheck disable=SC2153
+ cat < Gear > Cluster Lockdown,
+ the following will fail silently, use ssh nutanix@{PE|PC} instead.
+
+ $ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\
+ ${SSH_OPTS} \\
+ nutanix@${PE_HOST} 'date; tail -f ${_pe_launch%%.sh}.log'
+ You can login to PE to see tasks in flight and eventual PC registration:
+ https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440/
+
+EOM
+
+ if (( "$(echo ${_libraries} | grep -i lib.pc | wc ${_wc_arg})" > 0 )); then
+ # shellcheck disable=2153
+ cat < 0 )) \
+ && (( $(($OPTARG)) < $((${#WORKSHOPS[@]}-${NONWORKSHOPS}+1)) )); then
+ WORKSHOP_NUM=${OPTARG}
+ else
+ echo "Error: workshop not found = ${OPTARG}"
+ script_usage
+ fi
+ ;;
+ \? )
+ script_usage
+ ;;
+ esac
+done
shift $((OPTIND - 1))

# Prompt interactively for anything not supplied as a command-line argument
if [[ -z "${CLUSTER_LIST}" ]]; then
  get_file
fi
if [[ -z "${WORKSHOP_NUM}" ]]; then
  log "Warning: missing workshop number argument."
  select_workshop
fi

# Dispatch on the selected workshop entry:
# - an entry matching ${_VALIDATE} (set elsewhere) only validates clusters
# - the last entry echoes itself and runs finish
# - the second-to-last entry only echoes itself
# - 1..(count-3) are real workshops and get staged
# NOTE(review): the lookup here is WORKSHOPS[WORKSHOP_NUM] while
# stage_clusters uses WORKSHOPS[WORKSHOP_NUM-1] — confirm which index base
# is intended; one of the two looks off by one.
if [[ "${WORKSHOPS[${WORKSHOP_NUM}]}" == "${_VALIDATE}" ]]; then
  validate_clusters
elif (( WORKSHOP_NUM == ${#WORKSHOPS[@]} - 1 )); then
  echo "${WORKSHOPS[${WORKSHOP_NUM}]}"
  finish
elif (( WORKSHOP_NUM == ${#WORKSHOPS[@]} - 2 )); then
  echo "${WORKSHOPS[${WORKSHOP_NUM}]}"
elif (( WORKSHOP_NUM > 0 && WORKSHOP_NUM <= ${#WORKSHOPS[@]} - 3 )); then
  stage_clusters
else
  #log "DEBUG: WORKSHOP_NUM=${WORKSHOP_NUM}"
  script_usage
fi