From 8b618624f3f849a3a0c5e43c9071ad9859bbc482 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 15 Nov 2018 18:08:27 -0500 Subject: [PATCH 001/691] Commented out Files/AFS options in common.lib --- scripts/common.lib.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/common.lib.sh b/scripts/common.lib.sh index c6efd2f..90f5c5b 100755 --- a/scripts/common.lib.sh +++ b/scripts/common.lib.sh @@ -128,19 +128,19 @@ function NTNX_Download() { esac # When adding a new AFS version, update BOTH case stanzas below... - elif [[ ${1} == 'AFS']]; then - CheckArgsExist 'AFS_VERSION' + #elif [[ ${1} == 'AFS']]; then + # CheckArgsExist 'AFS_VERSION' #case ${AFS_VERSION} in # 3.1.0.1 ) #esac #_meta_url+="afs/${AFS_VERSION}/nutanix-afs-el7.3-release-afs-${AFS_VERSION}-stable.qcow2" - case ${AFS_VERSION} in - 3.1.0.1 ) - esac + # case ${AFS_VERSION} in + # 3.1.0.1 ) + # esac - _meta_url+="afs/${AFS_VERSION}/afs-${AFS_VERSION}.json" + # _meta_url+="afs/${AFS_VERSION}/afs-${AFS_VERSION}.json" else CheckArgsExist 'AOS_VERSION AOS_UPGRADE' From 8d82b61fb05f56603314cd3c34ac3a675d9c951f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 15 Nov 2018 22:04:19 -0500 Subject: [PATCH 002/691] updates to AFS downloads --- scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d90aad3..22b50b3 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -18,8 +18,8 @@ MY_IMG_CONTAINER_NAME='Images' HTTP_CACHE_HOST=localhost HTTP_CACHE_PORT=8181 -NTNX_FILES_SOURCE_URL="http://download.nutanix.com/downloads/afs/${AFS_VERSION}/nutanix-afs-el7.3-release-afs-${AFS_VERSION}-stable.qcow2" -NTNX_FILES_META_URL="http://download.nutanix.com/afs/${AFS_VERSION}/afs-${AFS_VERSION}.json" +NTNX_FILES_SOURCE_URL="https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2" 
+NTNX_FILES_META_URL="https://s3.amazonaws.com/get-ahv-images/afs-3.1.0.1.json" # Conventions for *_REPOS arrays, the URL must end with: # - trailing slash (which imples _IMAGES argument to repo_source) From 72ef66a211debe13c4ba30a90efb18e5a474735f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 15 Nov 2018 23:47:02 -0500 Subject: [PATCH 003/691] updated AFS download links --- scripts/global.vars.sh | 4 ++-- scripts/stage_calmhow.sh | 10 ++++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 22b50b3..7924533 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -18,8 +18,8 @@ MY_IMG_CONTAINER_NAME='Images' HTTP_CACHE_HOST=localhost HTTP_CACHE_PORT=8181 -NTNX_FILES_SOURCE_URL="https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2" -NTNX_FILES_META_URL="https://s3.amazonaws.com/get-ahv-images/afs-3.1.0.1.json" +NTNX_FILES_SOURCE="nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2" +NTNX_FILES_META="afs-3.1.0.1.json" # Conventions for *_REPOS arrays, the URL must end with: # - trailing slash (which imples _IMAGES argument to repo_source) diff --git a/scripts/stage_calmhow.sh b/scripts/stage_calmhow.sh index bc01808..fb7ab03 100644 --- a/scripts/stage_calmhow.sh +++ b/scripts/stage_calmhow.sh @@ -415,17 +415,15 @@ function files_upload() { #NTNX_Download 'AFS' - wget -nv ${NTNX_FILES_SOURCE_URL} - wget -nv ${NTNX_FILES_META_URL} + wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_SOURCE} + wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_META} log "Files upload..." 
#ncli software upload file-path=/home/nutanix/${MY_AFS_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_AFS_META_URL##*/} software-type=FILE_SERVER - ncli software upload software-type=afs \ - file-path="`pwd`/${NTNX_FILES_SOURCE_URL}" \ - meta-file-path="`pwd`/${NTNX_FILES_META_URL}" + ncli software upload software-type=afs file-path="`pwd`/${NTNX_FILES_SOURCE}" meta-file-path="`pwd`/${NTNX_FILES_META}" log "Delete Files sources to free CVM space..." - rm -f ${NTNX_FILES_SOURCE_URL} ${NTNX_FILES_META_URL} + rm -f ${NTNX_FILES_SOURCE} ${NTNX_FILES_META} } function nos_upgrade() { From f539904d161b87af62d1c75ac09a338c6bce12da Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 16 Nov 2018 11:54:11 -0500 Subject: [PATCH 004/691] updates --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7924533..67357ae 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -19,7 +19,7 @@ HTTP_CACHE_HOST=localhost HTTP_CACHE_PORT=8181 NTNX_FILES_SOURCE="nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2" -NTNX_FILES_META="afs-3.1.0.1.json" + NTNX_FILES_META="afs-3.1.0.1.json" # Conventions for *_REPOS arrays, the URL must end with: # - trailing slash (which imples _IMAGES argument to repo_source) From 11425126a65c8fd15bb4c4cb90363604f638adb9 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 16 Nov 2018 11:56:05 -0500 Subject: [PATCH 005/691] Updates --- scripts/stage_calmhow.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/stage_calmhow.sh b/scripts/stage_calmhow.sh index fb7ab03..fb03341 100644 --- a/scripts/stage_calmhow.sh +++ b/scripts/stage_calmhow.sh @@ -452,10 +452,10 @@ log "Adding key to PE/CVMs..." 
&& SSH_PubKey || true & # non-blocking, parallel Dependencies 'install' 'sshpass' && Dependencies 'install' 'jq' \ && pe_license \ && pe_init \ +&& files_upload \ && network_configure \ && authentication_source \ && pe_auth \ -&& files_upload \ && pc_init \ && Check_Prism_API_Up 'PC' From 7aff8bfb52136235d9a417b247dc30236c85523e Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 17 Nov 2018 12:28:06 -0800 Subject: [PATCH 006/691] Updates --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index ac94fb3..ef76652 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=nutanixworkshops + ORGANIZATION=jncox REPOSITORY=stageworkshop BRANCH=master else From 4b469c88bc26b400214981ba4609ed75d709c5b2 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 17 Nov 2018 15:02:54 -0800 Subject: [PATCH 007/691] update --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index ef76652..ac94fb3 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=jncox + ORGANIZATION=nutanixworkshops REPOSITORY=stageworkshop BRANCH=master else From f392b8b39fc02193219b8032961b8ef0edb28110 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 17 Nov 2018 15:08:38 -0800 Subject: [PATCH 008/691] updates --- scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 67357ae..ecc1ff8 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -18,8 +18,8 @@ MY_IMG_CONTAINER_NAME='Images' HTTP_CACHE_HOST=localhost HTTP_CACHE_PORT=8181 
-NTNX_FILES_SOURCE="nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2" - NTNX_FILES_META="afs-3.1.0.1.json" +NTNX_FILES_SOURCE='nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2' + NTNX_FILES_META='afs-3.1.0.1.json' # Conventions for *_REPOS arrays, the URL must end with: # - trailing slash (which imples _IMAGES argument to repo_source) From 55c9a58dfb867ed1673af68871fba80a8b18cdc3 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 17 Nov 2018 20:19:16 -0800 Subject: [PATCH 009/691] Update Primary Network Domain and Search Domain --- scripts/stage_calmhow.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/stage_calmhow.sh b/scripts/stage_calmhow.sh index fb03341..aa9310e 100644 --- a/scripts/stage_calmhow.sh +++ b/scripts/stage_calmhow.sh @@ -92,15 +92,15 @@ function network_configure() { log "Remove Rx-Automation-Network if it exists..." acli "-y net.delete Rx-Automation-Network" - log "Create primary network: Name: ${MY_PRIMARY_NET_NAME}, VLAN: ${MY_PRIMARY_NET_VLAN}, Subnet: ${HPOC_PREFIX}.1/25, Domain: ${MY_DOMAIN_NAME}, Pool: ${HPOC_PREFIX}.50 to ${HPOC_PREFIX}.125" + log "Create primary network: Name: ${MY_PRIMARY_NET_NAME}, VLAN: ${MY_PRIMARY_NET_VLAN}, Subnet: ${HPOC_PREFIX}.1/25, Domain: ${MY_DOMAIN_FQDN}, Pool: ${HPOC_PREFIX}.50 to ${HPOC_PREFIX}.125" acli "net.create ${MY_PRIMARY_NET_NAME} vlan=${MY_PRIMARY_NET_VLAN} ip_config=${HPOC_PREFIX}.1/25" - acli "net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=${AUTH_HOST},10.21.253.10 domains=${MY_DOMAIN_NAME}" + acli "net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=${AUTH_HOST},10.21.253.10 domains=${MY_DOMAIN_FQDN}" acli "net.add_dhcp_pool ${MY_PRIMARY_NET_NAME} start=${HPOC_PREFIX}.50 end=${HPOC_PREFIX}.125" if [[ ${MY_SECONDARY_NET_NAME} ]]; then log "Create secondary network: Name: ${MY_SECONDARY_NET_NAME}, VLAN: ${MY_SECONDARY_NET_VLAN}, Subnet: ${HPOC_PREFIX}.129/25, Pool: ${HPOC_PREFIX}.132 to ${HPOC_PREFIX}.253" acli "net.create ${MY_SECONDARY_NET_NAME} 
vlan=${MY_SECONDARY_NET_VLAN} ip_config=${HPOC_PREFIX}.129/25" - acli "net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=${AUTH_HOST},10.21.253.10 domains=${MY_DOMAIN_NAME}" + acli "net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=${AUTH_HOST},10.21.253.10 domains=${MY_DOMAIN_FQDN}" acli "net.add_dhcp_pool ${MY_SECONDARY_NET_NAME} start=${HPOC_PREFIX}.132 end=${HPOC_PREFIX}.253" fi fi From 9a7fcde76a3b62edaa5833ca7abfa3c2960c8f00 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 18 Nov 2018 15:03:41 -0800 Subject: [PATCH 010/691] update --- scripts/stage_calmhow.sh | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/scripts/stage_calmhow.sh b/scripts/stage_calmhow.sh index aa9310e..63c83a3 100644 --- a/scripts/stage_calmhow.sh +++ b/scripts/stage_calmhow.sh @@ -413,17 +413,11 @@ function files_upload() { log "Files download..." - #NTNX_Download 'AFS' - - wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_SOURCE} wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_META} - - log "Files upload..." - #ncli software upload file-path=/home/nutanix/${MY_AFS_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_AFS_META_URL##*/} software-type=FILE_SERVER + wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_SOURCE} ncli software upload software-type=afs file-path="`pwd`/${NTNX_FILES_SOURCE}" meta-file-path="`pwd`/${NTNX_FILES_META}" - - log "Delete Files sources to free CVM space..." rm -f ${NTNX_FILES_SOURCE} ${NTNX_FILES_META} + } function nos_upgrade() { @@ -452,13 +446,17 @@ log "Adding key to PE/CVMs..." 
&& SSH_PubKey || true & # non-blocking, parallel Dependencies 'install' 'sshpass' && Dependencies 'install' 'jq' \ && pe_license \ && pe_init \ -&& files_upload \ && network_configure \ && authentication_source \ && pe_auth \ && pc_init \ && Check_Prism_API_Up 'PC' +wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_META} +wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_SOURCE} +ncli software upload software-type=afs file-path="`pwd`/${NTNX_FILES_SOURCE}" meta-file-path="`pwd`/${NTNX_FILES_META}" +rm -f ${NTNX_FILES_SOURCE} ${NTNX_FILES_META} + if (( $? == 0 )) ; then pc_configure && Dependencies 'remove' 'sshpass' && Dependencies 'remove' 'jq'; log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" From d2f0d4317a791926e58dc89059969b15c80d3e94 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 15 Feb 2019 10:25:42 -0800 Subject: [PATCH 011/691] Updates --- scripts/calm.sh | 122 +++ scripts/citrix.sh | 173 +++++ scripts/common.lib.sh | 622 ---------------- scripts/global.vars.sh | 288 ++++++-- scripts/lib.common.sh | 817 +++++++++++++++++++++ scripts/{stage_calmhow_pc.sh => lib.pc.sh} | 565 +++++++------- scripts/lib.pe.sh | 478 ++++++++++++ scripts/lib.shell-convenience.sh | 222 ++++++ scripts/pc_destroy.sh | 49 -- scripts/stage_calmhow.sh | 469 ------------ scripts/stage_citrixhow.sh | 43 +- scripts/stage_citrixhow_pc.sh | 4 +- scripts/stageworkshop.lib.sh | 194 ----- scripts/vmdisk2image-pc.sh | 25 +- 14 files changed, 2353 insertions(+), 1718 deletions(-) create mode 100755 scripts/calm.sh create mode 100644 scripts/citrix.sh delete mode 100755 scripts/common.lib.sh create mode 100755 scripts/lib.common.sh rename scripts/{stage_calmhow_pc.sh => lib.pc.sh} (63%) mode change 100644 => 100755 create mode 100644 scripts/lib.pe.sh create mode 100755 scripts/lib.shell-convenience.sh delete mode 100755 scripts/pc_destroy.sh delete mode 100644 scripts/stage_calmhow.sh delete mode 100755 scripts/stageworkshop.lib.sh 
diff --git a/scripts/calm.sh b/scripts/calm.sh new file mode 100755 index 0000000..d42588e --- /dev/null +++ b/scripts/calm.sh @@ -0,0 +1,122 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + # _pc_version=(${PC_VERSION//./ }) + # + # if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then + # pc_install "${NW1_NAME}" \ + # && prism_check 'PC' + # pc_configure #proceed anyway via SSH, because API+UI password == ? + # else + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + && pc_configure \ + && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' + + log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + # fi + + files_install & # parallel, optional. Versus: $0 'files' & + + finish + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && images \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + flow_enable + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/citrix.sh b/scripts/citrix.sh new file mode 100644 index 0000000..f837313 --- /dev/null +++ b/scripts/citrix.sh @@ -0,0 +1,173 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization + +log "Adding key to ${1} VMs..." +ssh_pubkey & # non-blocking, parallel suitable + +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + log "Configure PE role mapping" + ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${DOMAIN_NAME}" entity-values="${DOMAIN_ADMIN_GROUP}" + + log "Creating Reverse Lookup Zone on DC VM" + remote_exec 'ssh' 'AUTH_SERVER' "samba-tool dns zonecreate dc1 ${HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart" + log 'Create custom OUs...' + remote_exec 'ssh' 'AUTH_SERVER' "apt install ldb-tools -y -q" + remote_exec 'ssh' 'AUTH_SERVER' "cat << EOF > ous.ldif +dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local +changetype: add +objectClass: top +objectClass: organizationalunit +description: Non-Persistent Desktop OU + +dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local +changetype: add +objectClass: top +objectClass: organizationalunit +description: Persistent Desktop OU + +dn: OU=XenAppServer,DC=NTNXLAB,DC=local +changetype: add +objectClass: top +objectClass: organizationalunit +description: XenApp Server OU + +EOF" + remote_exec 'ssh' 'AUTH_SERVER' "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart" + + log "Create PE user account XD for MCS Plugin" + ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com + ncli user grant-cluster-admin-role user-name=xd + + log "Get UUIDs from cluster:" + NET_UUID=$(acli net.get ${NW1_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs) + log "${NW1_NAME} UUID is ${NET_UUID}" + CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) + log "${STORAGE_DEFAULT} UUID is ${CONTAINER_UUID}" + + log "Download AFS image from ${AFS_SRC_URL}" + wget -nv ${AFS_SRC_URL} + log "Download AFS metadata JSON from ${AFS_META_URL}" + wget -nv ${AFS_META_URL} + log "Stage AFS" + ncli software upload file-path=/home/nutanix/${AFS_SRC_URL##*/} meta-file-path=/home/nutanix/${AFS_META_URL##*/} software-type=FILE_SERVER + log "Delete AFS sources to free some space" 
+ rm ${AFS_SRC_URL##*/} ${AFS_META_URL##*/} + + curl -u admin:${PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST https://127.0.0.1:9440/api/nutanix/v3/prism_central -d "${DEPLOY_BODY}" + log "Waiting for PC deployment to complete (Sleeping 15m)" + sleep 900 + log "Sending PC configuration script" + pc_send_file stage_citrixhow_pc.sh + + # Execute that file asynchroneously remotely (script keeps running on CVM in the background) + log "Launching PC configuration script" + pc_remote_exec "PE_PASSWORD=${PE_PASSWORD} nohup bash /home/nutanix/stage_citrixhow_pc.sh >> pcconfig.log 2>&1 &" + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && pc_install "${NW1_NAME}" \ + && prism_check 'PC' + + if (( $? == 0 )) ; then + pc_configure \ + && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' + + log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + finish + else + finish + log "Error 18: in main functional chain, exit!" + exit 18 + fi + ;; + PC | pc ) + . lib.pc.sh + + #PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz' + + # Set Prism Central Password to Prism Element Password + # log "Setting PC password to PE password" + # ncli user reset-password user-name="admin" password="${PE_PASSWORD}" + + # Prism Central upgrade + #log "Download PC upgrade image: ${PC_UPGRADE_URL##*/}" + #wget -nv ${PC_UPGRADE_URL} + + #log "Prepare PC upgrade image" + #tar -xzf ${PC_UPGRADE_URL##*/} + #rm ${PC_UPGRADE_URL##*/} + + #log "Upgrade PC" + #cd /home/nutanix/install ; ./bin/cluster -i . 
-p upgrade + + log "PC Configuration complete on $(date)" + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' || exit 13 + + pc_passwd + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password nx2Tech704\! vm.list + + ntnx_cmd # check cli services available? + + if [[ ! -z "${2}" ]]; then + # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && images \ + && flow_enable \ + && prism_check 'PC' + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; +esac diff --git a/scripts/common.lib.sh b/scripts/common.lib.sh deleted file mode 100755 index 90f5c5b..0000000 --- a/scripts/common.lib.sh +++ /dev/null @@ -1,622 +0,0 @@ -#!/usr/bin/env bash - -function fileserver() { - local _action=${1} # REQUIRED - local _host=${2} # REQUIRED, TODO: default to PE? - local _port=${3} # OPTIONAL - local _directory=${4} # OPTIONAL - - if [[ -z ${1} ]]; then - _error=38 - log "Error ${_error}: start or stop action required!" - exit ${_error} - fi - if [[ -z ${2} ]]; then - _error=39 - log "Error ${_error}: host required!" 
- exit ${_error} - fi - if [[ -z ${3} ]]; then - _port=8181 - fi - if [[ -z ${4} ]]; then - _directory=cache - fi - - # Determine if on PE or PC with _host PE or PC, then _host=localhost - # ssh -nNT -R 8181:localhost:8181 nutanix@10.21.31.31 - - remote_exec 'ssh' ${_host} \ - "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" - -# acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ -# source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 -#AutoDC2: pending -#AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable - remote_exec 'ssh' ${_host} \ - "kill -9 $(pgrep python -a | grep ${_port} | awk '{ print $1 }')" 'OPTIONAL' -} - -function begin() { - local _release - - if [[ -e ${RELEASE} ]]; then - _release=" release: $(grep FullSemVer ${RELEASE} | awk -F\" '{print $4}')" - fi - - log "$(basename ${0})${_release} start._____________________" -} - -function finish() { - log "${0} ran for ${SECONDS} seconds._____________________" - echo -} - -function NTNX_cmd() { - local _attempts=25 - local _error=10 - local _hold - local _loop=0 - local _sleep=10 - local _status - - while [[ true ]]; do - (( _loop++ )) - _hold=$(nuclei cluster.list 2>&1) - _status=$? - - if (( $(echo "${_hold}" | grep websocket | wc --lines) > 0 )); then - log "Warning: Zookeeper isn't up yet." - elif (( ${_status} > 0 )); then - log "${_status} = ${_hold}, uh oh!" - else - log "Cluster info via nuceli seems good: ${_status}, moving on!" - break - fi - - if (( ${_loop} == ${_attempts} )); then - log "Error ${_error}: couldn't determine cluster information, giving up after ${_loop} tries." - exit ${_error} - else - log "${_loop}/${_attempts}: hold=${_hold} sleep ${_sleep}..." 
- sleep ${_sleep} - fi - done -} - -function NTNX_Download() { - local _checksum - local _meta_url='http://download.nutanix.com/' - local _source_url= - local _version=0 - - # When adding a new PC version, update BOTH case stanzas below... - if [[ ${1} == 'PC' ]]; then - CheckArgsExist 'PC_VERSION' - case ${PC_VERSION} in - 5.9 | 5.6.2 | 5.8.0.1 ) - _version=2 - ;; - * ) - _version=1 - ;; - esac - - _meta_url+="pc/one-click-pc-deployment/${PC_VERSION}/v${_version}/" - - case ${PC_VERSION} in - 5.9 ) - _meta_url+="euphrates-${PC_VERSION}-stable-prism_central_one_click_deployment_metadata.json" - ;; - 5.6.1 | 5.6.2 | 5.9.0.1 | 5.9.1 ) - _meta_url+="euphrates-${PC_VERSION}-stable-prism_central_metadata.json" - ;; - 5.7.0.1 | 5.7.1 | 5.7.1.1 ) - _meta_url+="pc-${PC_VERSION}-stable-prism_central_metadata.json" - ;; - 5.8.0.1 | 5.8.1 | 5.8.2 | 5.10 | 5.11 ) - _meta_url+="pc_deploy-${PC_VERSION}.json" - ;; - * ) - _error=22 - log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!" - log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails' - log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side" - log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.' - exit ${_error} - ;; - esac - - # When adding a new AFS version, update BOTH case stanzas below... - #elif [[ ${1} == 'AFS']]; then - # CheckArgsExist 'AFS_VERSION' - #case ${AFS_VERSION} in - # 3.1.0.1 ) - #esac - - #_meta_url+="afs/${AFS_VERSION}/nutanix-afs-el7.3-release-afs-${AFS_VERSION}-stable.qcow2" - - # case ${AFS_VERSION} in - # 3.1.0.1 ) - # esac - - # _meta_url+="afs/${AFS_VERSION}/afs-${AFS_VERSION}.json" - - else - CheckArgsExist 'AOS_VERSION AOS_UPGRADE' - - # When adding a new AOS version, update BOTH case stanzas below... 
- case ${AOS_UPGRADE} in - 5.8.0.1 ) - _version=2 - ;; - esac - - _meta_url+="/releases/euphrates-${AOS_UPGRADE}-metadata/" - - if (( ${_version} > 0 )); then - _meta_url+="v${_version}/" - fi - - case ${AOS_UPGRADE} in - 5.8.0.1 | 5.9 ) - _meta_url+="euphrates-${AOS_UPGRADE}-metadata.json" - ;; - * ) - _error=23 - log "Error ${_error}: unsupported AOS_UPGRADE=${AOS_UPGRADE}!" - # TODO: correct AOS_UPGRADE URL - log 'Browse to https://portal.nutanix.com/#/page/releases/nosDetails' - log " - Find ${AOS_UPGRADE} in the Additional Releases section on the lower right side" - log ' - Provide the Upgrade metadata URL to this function for both case stanzas.' - exit ${_error} - ;; - esac - fi - - if [[ ! -e ${_meta_url##*/} ]]; then - log "Retrieving download metadata ${_meta_url##*/} ..." - Download "${_meta_url}" - else - log "Warning: using cached download ${_meta_url##*/}" - fi - - _source_url=$(cat ${_meta_url##*/} | jq -r .download_url_cdn) - - if (( `pgrep curl | wc --lines | tr -d '[:space:]'` > 0 )); then - pkill curl - fi - log "Retrieving Nutanix ${1} bits..." - Download "${_source_url}" - - _checksum=$(md5sum ${_source_url##*/} | awk '{print $1}') - if [[ `cat ${_meta_url##*/} | jq -r .hex_md5` != "${_checksum}" ]]; then - log "Error: md5sum ${_checksum} doesn't match on: ${_source_url##*/} removing and exit!" - rm -f ${_source_url##*/} - exit 2 - else - log "Success: ${1} bits downloaded and passed MD5 checksum!" 
- fi - - # Set globals for next step handoff - export NTNX_META_URL=${_meta_url} - export NTNX_SOURCE_URL=${_source_url} -} - -function log() { - local _caller - - _caller=$(echo -n "`caller 0 | awk '{print $2}'`") - echo "`date '+%Y-%m-%d %H:%M:%S'`|$$|${_caller}|${1}" -} - -function repo_source() { - # https://stackoverflow.com/questions/1063347/passing-arrays-as-parameters-in-bash#4017175 - local _candidates=("${!1}") # REQUIRED - local _package="${2}" # OPTIONAL - local _error=29 - local _http_code - local _index=0 - local _suffix - local _url - - if (( ${#_candidates[@]} == 0 )); then - log "Error ${_error}: Missing array!" - exit ${_error} - # else - # log "DEBUG: _candidates count is ${#_candidates[@]}" - fi - - if [[ -z ${_package} ]]; then - _suffix=${_candidates[0]##*/} - if (( $(echo "${_suffix}" | grep . | wc --lines) > 0)); then - log "Convenience: omitted package argument, added package=${_package}" - _package="${_suffix}" - fi - fi - # Prepend your local HTTP cache... - _candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" ) - - while (( ${_index} < ${#_candidates[@]} )) - do - unset SOURCE_URL - - # log "DEBUG: ${_index} ${_candidates[${_index}]}, OPTIONAL: _package=${_package}" - _url=${_candidates[${_index}]} - - if [[ -z ${_package} ]]; then - if (( $(echo "${_url}" | grep '/$' | wc --lines) == 0 )); then - log "error ${_error}: ${_url} doesn't end in trailing slash, please correct." - exit ${_error} - fi - elif (( $(echo "${_url}" | grep '/$' | wc --lines) == 1 )); then - _url+="${_package}" - fi - - if (( $(echo "${_url}" | grep '^nfs' | wc --lines) == 1 )); then - log "warning: TODO: cURL can't test nfs URLs...assuming a pass!" 
- export SOURCE_URL="${_url}" - break - fi - - _http_code=$(curl ${CURL_OPTS} --max-time 5 --write-out '%{http_code}' --head ${_url} | tail -n1) - - if [[ (( ${_http_code} == 200 )) || (( ${_http_code} == 302 )) ]]; then - export SOURCE_URL="${_url}" - log "Found, HTTP:${_http_code} = ${SOURCE_URL}" - break - fi - log " Lost, HTTP:${_http_code} = ${_url}" - ((_index++)) - done - - if [[ -z "${SOURCE_URL}" ]]; then - _error=30 - log "Error ${_error}: didn't find any sources, last try was ${_url} with HTTP ${_http_code}." - exit ${_error} - fi -} - -function CheckArgsExist() { - local _argument - local _error=88 - - for _argument in ${1}; do - if [[ ${DEBUG} ]]; then - log "DEBUG: Checking ${_argument}..." - fi - _RESULT=$(eval "echo \$${_argument}") - if [[ -z ${_RESULT} ]]; then - log "Error ${_error}: ${_argument} not provided!" - exit ${_error} - elif [[ ${DEBUG} ]]; then - log "Non-error: ${_argument} for ${_RESULT}" - fi - done - - if [[ ${DEBUG} ]]; then - log 'Success: required arguments provided.' - fi -} - -function SSH_PubKey() { - local _name=${MY_EMAIL//\./_DOT_} - local _sshkey=${HOME}/id_rsa.pub - - _name=${_name/@/_AT_} - if [[ -e ${_sshkey} ]]; then - log "Note that a period and other symbols aren't allowed to be a key name." - log "Locally adding ${_sshkey} under ${_name} label..." - ncli cluster add-public-key name=${_name} file-path=${_sshkey} - fi -} - -function Determine_PE() { - local _attempts=5 - local _error=10 - local _hold - local _loop=0 - local _sleep=2 - - log 'Warning: expect errors on lines 1-2, due to non-JSON outputs by nuclei...' - _hold=$(nuclei cluster.list format=json 2>/dev/null \ - | jq '.entities[] | select(.status.state == "COMPLETE")' \ - | jq '. | select(.status.resources.network.external_ip != null)') - - if (( $? > 0 )); then - _error=12 - log "Error ${_error}: couldn't resolve clusters $?" 
- exit ${_error} - else - CLUSTER_NAME=$(echo ${_hold} | jq .status.name | tr -d \") - MY_PE_HOST=$(echo ${_hold} | jq .status.resources.network.external_ip | tr -d \") - - export CLUSTER_NAME MY_PE_HOST - log "Success: ${CLUSTER_NAME} PE external IP=${MY_PE_HOST}" - fi -} - -function Download() { - local _attempts=5 - local _error=0 - local _http_range_enabled # TODO disabled '--continue-at -' - local _loop=0 - local _output - local _sleep=2 - - if [[ -z ${1} ]]; then - _error=33 - log "Error ${_error}: no URL to download!" - exit ${_error} - fi - - while true ; do - (( _loop++ )) - log "${1}..." - _output='' - curl ${CURL_OPTS} ${_http_range_enabled} --remote-name --location ${1} - _output=$? - #DEBUG=1; if [[ ${DEBUG} ]]; then log "DEBUG: curl exited ${_output}."; fi - - if (( ${_output} == 0 )); then - log "Success: ${1##*/}" - break - fi - - if (( ${_loop} == ${_attempts} )); then - _error=11 - log "Error ${_error}: couldn't download from: ${1}, giving up after ${_loop} tries." - exit ${_error} - elif (( ${_output} == 33 )); then - log "Web server doesn't support HTTP range command, purging and falling back." - _http_range_enabled='' - rm -f ${1##*/} - else - log "${_loop}/${_attempts}: curl=${_output} ${1##*/} sleep ${_sleep}..." 
- sleep ${_sleep} - fi - done -} - -function remote_exec() { -# Argument ${1} = REQUIRED: ssh or scp -# Argument ${2} = REQUIRED: PE, PC, or AUTH_SERVER -# Argument ${3} = REQUIRED: command configuration -# Argument ${4} = OPTIONAL: populated with anything = allowed to fail - - local _account='nutanix' - local _attempts=3 - local _error=99 - local _host - local _loop=0 - local _password="${MY_PE_PASSWORD}" - local _pw_init='nutanix/4u' # TODO:140 hardcoded p/w - local _sleep=${SLEEP} - local _test=0 - - # shellcheck disable=SC2153 - case ${2} in - 'PE' ) - _host=${MY_PE_HOST} - ;; - 'PC' ) - _host=${MY_PC_HOST} - _password=${_pw_init} - ;; - 'AUTH_SERVER' ) - _account='root' - _host=${AUTH_HOST} - _password=${_pw_init} - _sleep=7 - ;; - esac - - if [[ -z ${3} ]]; then - log 'Error ${_error}: missing third argument.' - exit ${_error} - fi - - if [[ ! -z ${4} ]]; then - _attempts=1 - _sleep=0 - fi - - while true ; do - (( _loop++ )) - case "${1}" in - 'SSH' | 'ssh') - #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform ${_account}@${_host} ${3}..."; fi - SSHPASS="${_password}" sshpass -e ssh -x ${SSH_OPTS} ${_account}@${_host} "${3}" - _test=$? - ;; - 'SCP' | 'scp') - #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform scp ${3} ${_account}@${_host}:"; fi - SSHPASS="${_password}" sshpass -e scp ${SSH_OPTS} ${3} ${_account}@${_host}: - _test=$? - ;; - *) - log "Error ${_error}: improper first argument, should be ssh or scp." - exit ${_error} - ;; - esac - - if (( ${_test} > 0 )) && [[ -z ${4} ]]; then - _error=22 - log "Error ${_error}: pwd=`pwd`, _test=${_test}, _host=${_host}" - exit ${_error} - fi - - if (( ${_test} == 0 )); then - if [[ ${DEBUG} ]]; then log "${3} executed properly."; fi - return 0 - elif (( ${_loop} == ${_attempts} )); then - if [[ -z ${4} ]]; then - _error=11 - log "Error ${_error}: giving up after ${_loop} tries." - exit ${_error} - else - log "Optional: giving up." 
- break - fi - else - log "${_loop}/${_attempts}: _test=$?|${_test}| ${FILENAME} SLEEP ${_sleep}..." - sleep ${_sleep} - fi - done -} - -function Dependencies { - local _argument - local _error - local _index - local _cpe=/etc/os-release # CPE = https://www.freedesktop.org/software/systemd/man/os-release.html - local _lsb=/etc/lsb-release # Linux Standards Base - local _os_found - local _jq_pkg=${JQ_REPOS[0]##*/} - local _sshpass_pkg=${SSHPASS_REPOS[0]##*/} - - if [[ -z ${1} ]]; then - _error=20 - log "Error ${_error}: missing install or remove verb." - exit ${_error} - elif [[ -z ${2} ]]; then - _error=21 - log "Error ${_error}: missing package name." - exit ${_error} - fi - - if [[ -e ${_lsb} ]]; then - _os_found="$(grep DISTRIB_ID ${_lsb} | awk -F= '{print $2}')" - elif [[ -e ${_cpe} ]]; then - _os_found="$(grep '^ID=' ${_cpe} | awk -F= '{print $2}')" - fi - - case "${1}" in - 'install') - log "Install ${2}..." - export PATH=${PATH}:${HOME} - if [[ -z `which ${2}` ]]; then - case "${2}" in - sshpass | ${_sshpass_pkg}) - if [[ ( ${_os_found} == 'Ubuntu' || ${_os_found} == 'LinuxMint' ) ]]; then - sudo apt-get install --yes sshpass - elif [[ ${_os_found} == '"centos"' ]]; then - # TOFIX: assumption, probably on NTNX CVM or PCVM = CentOS7 - if [[ ! -e ${_sshpass_pkg} ]]; then - repo_source SSHPASS_REPOS[@] ${_sshpass_pkg} - Download ${SOURCE_URL} - fi - sudo rpm -ivh ${_sshpass_pkg} - if (( $? > 0 )); then - _error=31 - log "Error ${_error}: cannot install ${2}." - exit ${_error} - fi - elif [[ `uname -s` == "Darwin" ]]; then - brew install https://raw.githubusercontent.com/kadwanev/bigboybrew/master/Library/Formula/sshpass.rb - fi - ;; - jq | ${_jq_pkg} ) - if [[ ( ${_os_found} == 'Ubuntu' || ${_os_found} == 'LinuxMint' ) ]]; then - if [[ ! -e ${_jq_pkg} ]]; then - sudo apt-get install --yes jq - fi - elif [[ ${_os_found} == '"centos"' ]]; then - if [[ ! 
-e ${_jq_pkg} ]]; then - repo_source JQ_REPOS[@] ${_jq_pkg} - Download ${SOURCE_URL} - fi - chmod u+x ${_jq_pkg} && ln -s ${_jq_pkg} jq - PATH+=:`pwd` - export PATH - elif [[ `uname -s` == "Darwin" ]]; then - brew install jq - fi - ;; - esac - - if (( $? > 0 )); then - _error=98 - log "Error ${_error}: can't install ${2}." - exit ${_error} - fi - else - log "Success: found ${2}." - fi - ;; - 'remove') - log "Removing ${2}..." - if [[ ${_os_found} == '"centos"' ]]; then - #TODO:30 assuming we're on PC or PE VM. - case "${2}" in - sshpass | ${_sshpass_pkg}) - sudo rpm -e sshpass - ;; - jq | ${_jq_pkg} ) - rm -f jq ${_jq_pkg} - ;; - esac - else - log "Feature: don't remove Dependencies on Mac OS Darwin, Ubuntu, or LinuxMint." - fi - ;; - esac -} - -function Check_Prism_API_Up { -# Argument ${1} = REQUIRED: PE or PC -# Argument ${2} = OPTIONAL: number of attempts -# Argument ${3} = OPTIONAL: number of seconds per cycle - local _attempts=${ATTEMPTS} - local _error=77 - local _host - local _loop=0 - local _password="${MY_PE_PASSWORD}" - local _pw_init='Nutanix/4u' - local _sleep=${SLEEP} - local _test=0 - - CheckArgsExist 'ATTEMPTS MY_PE_PASSWORD SLEEP' - - if [[ ${1} == 'PC' ]]; then - _host=${MY_PC_HOST} - else - _host=${MY_PE_HOST} - fi - if [[ ! -z ${2} ]]; then - _attempts=${2} - fi - - while true ; do - (( _loop++ )) - _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${_password} \ - -X POST --data '{ "kind": "cluster" }' \ - https://${_host}:9440/api/nutanix/v3/clusters/list \ - | tr -d \") # wonderful addition of "" around HTTP status code by cURL - - if [[ ! -z ${3} ]]; then - _sleep=${3} - fi - - if (( ${_test} == 401 )); then - log "Warning: unauthorized ${1} user or password." - fi - - if (( ${_test} == 401 )) && [[ ${1} == 'PC' ]] && [[ ${_password} != "${_pw_init}" ]]; then - _password=${_pw_init} - log "Warning @${1}: Fallback on ${_host}: try initial password next cycle..." 
- _sleep=0 #break - fi - - if (( ${_test} == 200 )); then - log "@${1}: successful." - return 0 - elif (( ${_loop} > ${_attempts} )); then - log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - return ${_error} - else - log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." - sleep ${_sleep} - fi - done -} diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ecc1ff8..f65e9e4 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -1,89 +1,235 @@ #!/usr/bin/env bash -# shellcheck disable=SC2034 - RELEASE=release.json - PC_VERSION_DEV=5.9.1 -PC_VERSION_STABLE=5.8.2 - PRISM_ADMIN=admin - AFS_VERSION=3.1.0.1 - - OCTET=(${MY_PE_HOST//./ }) # zero index - HPOC_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} -DATA_SERVICE_IP=${HPOC_PREFIX}.$((${OCTET[3]} + 1)) - MY_PC_HOST=${HPOC_PREFIX}.$((${OCTET[3]} + 2)) - - MY_SP_NAME='SP01' - MY_CONTAINER_NAME='Default' -MY_IMG_CONTAINER_NAME='Images' - -HTTP_CACHE_HOST=localhost -HTTP_CACHE_PORT=8181 -NTNX_FILES_SOURCE='nutanix-afs-el7.3-release-afs-3.1.0.1-stable.qcow2' - NTNX_FILES_META='afs-3.1.0.1.json' +# shellcheck disable=SC2034 + RELEASE='release.json' +# Sync the following to lib.common.sh::ntnx_download-Case=PC +# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails +# - Find ${PC_VERSION} in the Additional Releases section on the lower right side +# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL + PC_DEV_VERSION='5.10.1.1' + PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.1.1/pcdeploy-5.10.1.1.json' +PC_STABLE_VERSION='5.8.2' +PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json' +# Sync the following to lib.common.sh::ntnx_download-Case=FILES +# Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA +# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side +# - Provide "Upgrade Metadata File" 
URL to FILES_METAURL + FILES_VERSION='3.2.0.1' + FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + # 2019-02-15: override until metadata URL fixed + # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' -# Conventions for *_REPOS arrays, the URL must end with: -# - trailing slash (which imples _IMAGES argument to repo_source) -# - or full package filename. +# CentOS7 - https://s3.amazonaws.com/get-ahv-images/CentOS7.qcow2 +# Windows10 - https://s3.amazonaws.com/get-ahv-images/Windows10-1709.qcow2 +# Windows2012R2 - https://s3.amazonaws.com/get-ahv-images/Windows2012R2.qcow2 +# Windows2016 - https://s3.amazonaws.com/get-ahv-images/Windows2016.qcow2 +# Nutanix VirtIO 1.1.3 - https://s3.amazonaws.com/get-ahv-images/Nutanix-VirtIO-1.1.3.iso +# ToolsVM - https://s3.amazonaws.com/get-ahv-images/ToolsVM.qcow2 +# Karbon OS Images: +# ACS CentOS7 - https://s3.amazonaws.com/get-ahv-images/acs-centos7.qcow2 +# ACS Ubuntu1604 - https://s3.amazonaws.com/get-ahv-images/acs-ubuntu1604.qcow2 +# +# Nutanix Software: +# Xtract - https://s3.amazonaws.com/get-ahv-images/xtract-vm-2.0.3.qcow2 +# Era - https://s3.amazonaws.com/get-ahv-images/ERA-Server-build-1.0.1.qcow2 +# Sherlock/ XiIoT - https://s3.amazonaws.com/get-ahv-images/sherlock-k8s-base-image_320.qcow2 +# AFS 3.2.0.1 - https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 +# AFS 3.2.0.1 - https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json +# +# EUC Infra: +# MSSql - https://s3.amazonaws.com/get-ahv-images/SQLServer2014SP3.iso +# +# Partner Software: +# HYCU - https://s3.amazonaws.com/get-ahv-images/hycu-3.5.0-6138.qcow2 +# Veeam - https://s3.amazonaws.com/get-ahv-images/VeeamAvailability_1.0.457.vmdk +# Veeam - 
https://s3.amazonaws.com/get-ahv-images/VeeamBR_9.5.4.2615.Update4.iso -# https://stedolan.github.io/jq/download/#checksums_and_signatures - JQ_REPOS=(\ - 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ -) - QCOW2_REPOS=(\ - 'http://10.21.250.221/images/ahv/techsummit/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ -) - QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - CentOS7.iso \ - Windows2016.iso \ - Windows2012R2.iso \ - Windows10.iso \ - Nutanix-VirtIO-1.1.3.iso \ -) +NTNX_INIT_PASSWORD='nutanix/4u' + PRISM_ADMIN='admin' + SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" + STORAGE_POOL='SP01' + STORAGE_DEFAULT='Default' + STORAGE_IMAGES='Images' -# https://pkgs.org/download/sshpass -# https://sourceforge.net/projects/sshpass/files/sshpass/ - SSHPASS_REPOS=(\ - 'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ -) + # Conventions for *_REPOS arrays -- the URL must end with either: + # - trailing slash, which imples _IMAGES argument to function repo_source() + # - or full package filename. - AUTH_SERVER='AutoDC' # TODO:160 refactor AUTH_SERVER choice to input file, set default here. 
- AUTH_HOST=${HPOC_PREFIX}.$((${OCTET[3]} + 3)) - LDAP_PORT=389 - MY_DOMAIN_URL="ldaps://${AUTH_HOST}/" -MY_DOMAIN_FQDN='ntnxlab.local' -MY_DOMAIN_NAME='NTNXLAB' -MY_DOMAIN_USER='administrator@'${MY_DOMAIN_FQDN} -MY_DOMAIN_PASS='nutanix/4u' -MY_DOMAIN_ADMIN_GROUP='SSP Admins' - AUTODC_REPOS=(\ - 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ -) + # https://stedolan.github.io/jq/download/#checksums_and_signatures + JQ_REPOS=(\ + 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + QCOW2_REPOS=(\ + 'http://10.21.250.221/images/tech-enablement/' \ + 'http://10.21.250.221/images/ahv/techsummit/' \ + 'http://10.132.128.50:81/share/saved-images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + CentOS7.iso \ + Windows2016.iso \ + Windows2012R2.iso \ + Windows10.iso \ + Nutanix-VirtIO-1.1.3.iso \ + 'https://s3.amazonaws.com/technology-boot-camp/ERA-Server-build-1.0.0-21edfbc990a8f3424fed146d837483cb1a00d56d.qcow2' \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso + # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 - MY_PRIMARY_NET_NAME='Primary' - MY_PRIMARY_NET_VLAN='0' -MY_SECONDARY_NET_NAME='Secondary' -MY_SECONDARY_NET_VLAN="${OCTET[2]}1" # TODO:100 check this: what did Global Enablement mean? 
+ # https://pkgs.org/download/sshpass + # https://sourceforge.net/projects/sshpass/files/sshpass/ + SSHPASS_REPOS=(\ + 'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) +# shellcheck disable=2206 + OCTET=(${PE_HOST//./ }) # zero index + IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} +DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) + PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) + DNS_SERVERS='8.8.8.8' + NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + NW1_NAME='Primary' + NW1_VLAN=0 +# Assuming HPOC defaults + NW1_SUBNET="${IPV4_PREFIX}.1/25" + NW1_DHCP_START="${IPV4_PREFIX}.50" + NW1_DHCP_END="${IPV4_PREFIX}.125" # https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F -SMTP_SERVER_ADDRESS=nutanix-com.mail.protection.outlook.com - SMTP_SERVER_FROM=NutanixHostedPOC@nutanix.com +SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' + SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' SMTP_SERVER_PORT=25 + AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file + AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" + LDAP_PORT=389 + AUTH_FQDN='ntnxlab.local' + AUTH_DOMAIN='NTNXLAB' +AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} +AUTH_ADMIN_PASS='nutanix/4u' +AUTH_ADMIN_GROUP='SSP Admins' + AUTODC_REPOS=(\ + 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC-04282018.qcow2' \ + 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ +) + +# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) +# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema +case "${OCTET[0]}.${OCTET[1]}" in + 10.20 ) #Marketing: us-west = SV + DNS_SERVERS='10.21.253.10' + ;; + 10.21 ) 
#HPOC: us-west = SV + if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then + log 'GPU cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' + exit 0 + fi + + # backup cluster; override relative IP addressing + if (( ${OCTET[2]} == 249 )); then + AUTH_HOST="${IPV4_PREFIX}.118" + PC_HOST="${IPV4_PREFIX}.119" + fi + + DNS_SERVERS='10.21.253.10,10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.55 ) # HPOC us-east = DUR + DNS_SERVERS='10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.42 ) # HPOC us-west = PHX + DNS_SERVERS='10.42.196.10' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.254" + + QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + CentOS7.iso \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + acs-centos7.qcow2 \ + acs-ubuntu1604.qcow2 \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6138.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + ) + ;; + 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + NW1_DHCP_START="${IPV4_PREFIX}.100" + NW1_DHCP_END="${IPV4_PREFIX}.250" + # PC deploy file local override, TODO:30 make an PC_URL array and eliminate + PC_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + 
PC_DEV_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_DEV_METAURL} + + QCOW2_IMAGES=(\ + Centos7-Base.qcow2 \ + Centos7-Update.qcow2 \ + Windows2012R2.qcow2 \ + panlm-img-52.qcow2 \ + kx_k8s_01.qcow2 \ + kx_k8s_02.qcow2 \ + kx_k8s_03.qcow2 \ + ) + ;; +esac + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + ATTEMPTS=40 - SLEEP=60 + SLEEP=60 # pause (in seconds) between ATTEMPTS CURL_OPTS='--insecure --silent --show-error' # --verbose' CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null" CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' SSH_OPTS+=' -q' # -v' + +# Find operating system and set dependencies +if [[ -e /etc/lsb-release ]]; then + # Linux Standards Base + OS_NAME="$(grep DISTRIB_ID /etc/lsb-release | awk -F= '{print $2}')" +elif [[ -e /etc/os-release ]]; then + # CPE = https://www.freedesktop.org/software/systemd/man/os-release.html + OS_NAME="$(grep '^ID=' /etc/os-release | awk -F= '{print $2}')" +elif [[ $(uname -s) == 'Darwin' ]]; then + OS_NAME='Darwin' +fi + +WC_ARG='--lines' +if [[ ${OS_NAME} == 'Darwin' ]]; then + WC_ARG='-l' +fi diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh new file mode 100755 index 0000000..8765fae --- /dev/null +++ b/scripts/lib.common.sh @@ -0,0 +1,817 @@ +#!/usr/bin/env bash +# dependencies: dig + +function args_required() { + local _argument + local _error=88 + + for _argument in ${1}; do + if [[ ${DEBUG} ]]; then + log "DEBUG: Checking ${_argument}..." + fi + _RESULT=$(eval "echo \$${_argument}") + if [[ -z ${_RESULT} ]]; then + log "Error ${_error}: ${_argument} not provided!" 
+ exit ${_error} + elif [[ ${DEBUG} ]]; then + log "Non-error: ${_argument} for ${_RESULT}" + fi + done + + if [[ ${DEBUG} ]]; then + log 'Success: required arguments provided.' + fi +} + +function begin() { + local _release + + if [[ -e ${RELEASE} ]]; then + _release=" release: $(grep FullSemVer ${RELEASE} | awk -F\" '{print $4}')" + fi + + log "$(basename ${0})${_release} start._____________________" +} + +function dependencies { + local _argument + local _error + local _index + local _jq_pkg=${JQ_REPOS[0]##*/} + local _sshpass_pkg=${SSHPASS_REPOS[0]##*/} + + if [[ -z ${1} ]]; then + _error=20 + log "Error ${_error}: missing install or remove verb." + exit ${_error} + elif [[ -z ${2} ]]; then + _error=21 + log "Error ${_error}: missing package name." + exit ${_error} + elif [[ "${1}" != 'install' ]] && [[ "${1}" != 'remove' ]]; then + _error=20 + log "Error ${_error}: wrong install or remove verb (case sensitive)." + exit ${_error} + fi + + case "${1}" in + 'install') + + if [[ -z $(which ${2}) ]]; then + log "Install ${2}..." + case "${2}" in + sshpass | ${_sshpass_pkg}) + if [[ ( ${OS_NAME} == 'Ubuntu' || ${OS_NAME} == 'LinuxMint' ) ]]; then + sudo apt-get install --yes sshpass + elif [[ ${OS_NAME} == '"centos"' ]]; then + # TOFIX: assumption, probably on NTNX CVM or PCVM = CentOS7 + if [[ ! -e ${_sshpass_pkg} ]]; then + repo_source SSHPASS_REPOS[@] ${_sshpass_pkg} + download ${SOURCE_URL} + fi + sudo rpm -ivh ${_sshpass_pkg} + if (( $? > 0 )); then + _error=31 + log "Error ${_error}: cannot install ${2}." + exit ${_error} + fi + elif [[ ${OS_NAME} == 'Darwin' ]]; then + brew install https://raw.githubusercontent.com/kadwanev/bigboybrew/master/Library/Formula/sshpass.rb + fi + ;; + jq | ${_jq_pkg} ) + if [[ ( ${OS_NAME} == 'Ubuntu' || ${OS_NAME} == 'LinuxMint' ) ]]; then + if [[ ! -e ${_jq_pkg} ]]; then + sudo apt-get install --yes jq + fi + elif [[ ${OS_NAME} == '"centos"' ]]; then + if [[ ! 
-e ${_jq_pkg} ]]; then + repo_source JQ_REPOS[@] ${_jq_pkg} + download ${SOURCE_URL} + fi + chmod u+x ${_jq_pkg} && ln -s ${_jq_pkg} jq + + if [[ -d ${HOME}/bin ]]; then + mv jq* ${HOME}/bin/ + else + PATH+=:$(pwd) + export PATH + fi + elif [[ ${OS_NAME} == 'Darwin' ]]; then + brew install jq + fi + ;; + esac + + if (( $? > 0 )); then + _error=98 + log "Error ${_error}: can't install ${2}." + exit ${_error} + fi + else + log "Success: found ${2}." + fi + ;; + 'remove') + if [[ ${OS_NAME} == '"centos"' ]]; then + log "Warning: assuming on PC or PE VM, removing ${2}..." + case "${2}" in + sshpass | ${_sshpass_pkg}) + sudo rpm -e sshpass + ;; + jq | ${_jq_pkg} ) + if [[ -d ${HOME}/bin ]]; then + pushd bin || true + rm -f jq ${_jq_pkg} + popd || true + else + rm -f jq ${_jq_pkg} + fi + ;; + esac + else + log "Feature: don't remove dependencies on Mac OS Darwin, Ubuntu, or LinuxMint." + fi + ;; + esac +} + +function dns_check() { + local _dns + local _error + local _lookup=${1} # REQUIRED + local _test + + if [[ -z ${_lookup} ]]; then + _error=43 + log "Error ${_error}: missing lookup record!" + exit ${_error} + fi + + _dns=$(dig +retry=0 +time=2 +short @${AUTH_HOST} ${_lookup}) + _test=$? + + if [[ ${_dns} != "${AUTH_HOST}" ]]; then + _error=44 + log "Error ${_error}: result was ${_test}: ${_dns}" + return ${_error} + fi +} + +function download() { + local _attempts=5 + local _error=0 + local _http_range_enabled # TODO:40 OPTIMIZATION: disabled '--continue-at -' + local _loop=0 + local _output + local _sleep=2 + + if [[ -z ${1} ]]; then + _error=33 + log "Error ${_error}: no URL to download!" + exit ${_error} + fi + + while true ; do + (( _loop++ )) + log "${1}..." + _output='' + curl ${CURL_OPTS} ${_http_range_enabled} --remote-name --location ${1} + _output=$? 
+ #DEBUG=1; if [[ ${DEBUG} ]]; then log "DEBUG: curl exited ${_output}."; fi + + if (( ${_output} == 0 )); then + log "Success: ${1##*/}" + break + fi + + if (( ${_loop} == ${_attempts} )); then + _error=11 + log "Error ${_error}: couldn't download from: ${1}, giving up after ${_loop} tries." + exit ${_error} + elif (( ${_output} == 33 )); then + log "Web server doesn't support HTTP range command, purging and falling back." + _http_range_enabled='' + rm -f ${1##*/} + else + log "${_loop}/${_attempts}: curl=${_output} ${1##*/} sleep ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function fileserver() { + local _action=${1} # REQUIRED + local _host=${2} # REQUIRED, TODO: default to PE? + local _port=${3} # OPTIONAL + local _directory=${4} # OPTIONAL + + if [[ -z ${1} ]]; then + _error=38 + log "Error ${_error}: start or stop action required!" + exit ${_error} + fi + if [[ -z ${2} ]]; then + _error=39 + log "Error ${_error}: host required!" + exit ${_error} + fi + if [[ -z ${3} ]]; then + _port=8181 + fi + if [[ -z ${4} ]]; then + _directory=cache + fi + + case ${_action} in + 'start' ) + # Determine if on PE or PC with _host PE or PC, then _host=localhost + # ssh -nNT -R 8181:localhost:8181 nutanix@10.21.31.31 + pushd ${_directory} || exit + + remote_exec 'ssh' ${_host} \ + "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" + + # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ + # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 + #AutoDC2: pending + #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable + popd || exit + ;; + 'stop' ) + remote_exec 'ssh' ${_host} \ + "kill -9 $(pgrep python -a | grep ${_port} | awk '{ print $1 }')" 'OPTIONAL' + ;; + esac +} + +function finish() { + log "${0} ran for ${SECONDS} seconds._____________________" + echo +} + +function images() { + # 
https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html + local _cli='acli' + local _command + local _http_body + local _image + local _image_type + local _name + local _source='source_url' + local _test + + which "$_cli" + if (( $? > 0 )); then + _cli='nuclei' + _source='source_uri' + fi + + for _image in "${QCOW2_IMAGES[@]}" ; do + + # log "DEBUG: ${_image} image.create..." + if [[ ${_cli} == 'nuclei' ]]; then + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep -i complete \ + | grep "${_image}") + else + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep "${_image}") + fi + + if [[ ! -z ${_test} ]]; then + log "Skip: ${_image} already complete on cluster." + else + _command='' + _name="${_image}" + + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then + log 'Bypass multiple repo source checks...' + SOURCE_URL="${_image}" + else + repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! + fi + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." + # exit ${_error} + fi + + # TODO:0 TOFIX: acs-centos ugly override for today... + if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then + _name=acs-centos + fi + + if [[ ${_cli} == 'acli' ]]; then + _image_type='kDiskImage' + if (( $(echo "${SOURCE_URL}" | grep -i -e 'iso$' | wc --lines ) > 0 )); then + _image_type='kIsoImage' + fi + + _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ + container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" + else + _command+=" name=${_name} description=\"${_image}\"" + fi + + if [[ ${_cli} == 'nuclei' ]]; then + _http_body=$(cat <&1 & + if (( $? != 0 )); then + log "Warning: Image submission: $?. Continuing..." 
+ #exit 10 + fi + + if [[ ${_cli} == 'nuclei' ]]; then + log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" + log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" + fi + fi + fi + + done +} + +function log() { + local _caller + + _caller=$(echo -n "$(caller 0 | awk '{print $2}')") + echo "$(date '+%Y-%m-%d %H:%M:%S')|$$|${_caller}|${1}" +} + +function ntnx_cmd() { + local _attempts=25 + local _error=10 + local _hold + local _loop=0 + local _sleep=10 + local _status + + while [[ true ]]; do + (( _loop++ )) + _hold=$(source /etc/profile ; nuclei cluster.list 2>&1) + _status=$? + + if (( $(echo "${_hold}" | grep websocket | wc --lines) > 0 )); then + log "Warning: Zookeeper isn't up yet." + elif (( ${_status} > 0 )); then + log "${_status} = ${_hold}, uh oh!" + else + log "Cluster info via nuclei seems good: ${_status}, moving on!" + break + fi + + if (( ${_loop} == ${_attempts} )); then + log "Error ${_error}: couldn't determine cluster information, giving up after ${_loop} tries." + exit ${_error} + else + log "${_loop}/${_attempts}: hold=${_hold} sleep ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function ntnx_download() { + local _checksum + local _error + local _meta_url + local _ncli_softwaretype="${1}" + local _source_url + + case "${_ncli_softwaretype}" in + PC | pc | PRISM_CENTRAL_DEPLOY ) + args_required 'PC_VERSION' + + if [[ "${PC_VERSION}" == "${PC_DEV_VERSION}" ]]; then + _meta_url="${PC_DEV_METAURL}" + else + _meta_url="${PC_STABLE_METAURL}" + fi + + if [[ -z ${_meta_url} ]]; then + _error=22 + log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!" + log 'Sync the following to global.var.sh...' + log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails' + log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side" + log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.' + exit ${_error} + fi + + if [[ ! 
-z ${PC_URL} ]]; then + _source_url="${PC_URL}" + fi + ;; + 'NOS' | 'nos' | 'AOS' | 'aos') + # TODO:70 nos is a prototype + args_required 'AOS_VERSION AOS_UPGRADE' + _meta_url="${AOS_METAURL}" + + if [[ -z ${_meta_url} ]]; then + _error=23 + log "Error ${_error}: unsupported AOS_UPGRADE=${AOS_UPGRADE}!" + log 'Browse to https://portal.nutanix.com/#/page/releases/nosDetails' + log " - Find ${AOS_UPGRADE} in the Additional Releases section on the lower right side" + log ' - Provide the Upgrade metadata URL to this function for both case stanzas.' + exit ${_error} + fi + + if [[ ! -z ${AOS_URL} ]]; then + _source_url="${AOS_URL}" + fi + ;; + FILES | files | AFS | afs ) + args_required 'FILES_VERSION' + _meta_url="${FILES_METAURL}" + + if [[ -z ${_meta_url} ]]; then + _error=22 + log "Error ${_error}: unsupported FILES_VERSION=${FILES_VERSION}!" + log 'Sync the following to global.var.sh...' + log 'Browse to https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA' + log " - Find ${FILES_VERSION} in the Additional Releases section on the lower right side" + log ' - Provide the metadata URL option to this function, both case stanzas.' + exit ${_error} + fi + + if [[ ! -z ${FILES_URL} ]]; then + _source_url="${FILES_URL}" + fi + ;; + * ) + _error=88 + log "Error ${_error}:: couldn't determine software-type ${_ncli_softwaretype}!" + exit ${_error} + ;; + esac + + if [[ ! -e ${_meta_url##*/} ]]; then + log "Retrieving download metadata ${_meta_url##*/} ..." + download "${_meta_url}" + else + log "Warning: using cached download ${_meta_url##*/}" + fi + + if [[ -z ${_source_url} ]]; then + dependencies 'install' 'jq' || exit 13 + _source_url=$(cat ${_meta_url##*/} | jq -r .download_url_cdn) + fi + + if (( $(pgrep curl | wc --lines | tr -d '[:space:]') > 0 )); then + pkill curl + fi + log "Retrieving Nutanix ${_ncli_softwaretype} bits..." 
+ download "${_source_url}" + + _checksum=$(md5sum ${_source_url##*/} | awk '{print $1}') + if [[ $(cat ${_meta_url##*/} | jq -r .hex_md5) != "${_checksum}" ]]; then + + _error=2 + log "Error ${_error}: md5sum ${_checksum} doesn't match on: ${_source_url##*/} removing and exit!" + rm -f ${_source_url##*/} + exit ${_error} + else + log "Success: ${_ncli_softwaretype} bits downloaded and passed MD5 checksum!" + fi + + ncli software upload software-type=${_ncli_softwaretype} \ + file-path="$(pwd)/${_source_url##*/}" \ + meta-file-path="$(pwd)/${_meta_url##*/}" + + if (( $? == 0 )) ; then + log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..." + rm -f ${_source_url##*/} ${_meta_url##*/} + else + _error=3 + log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}." + exit ${_error} + fi +} + +function pe_determine() { + # ${1} REQUIRED: run on 'PE' or 'PC' + local _error + local _hold + + dependencies 'install' 'jq' + + # ncli @PE and @PC yeild different info! So PC uses nuclei. + case ${1} in + PE | pe ) + _hold=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true cluster info) + ;; + PC | Pc | pc ) + # WORKAROUND: Entities non-JSON outputs by nuclei on lines 1-2... + _hold=$(source /etc/profile.d/nutanix_env.sh \ + && export NUCLEI_SERVER='localhost' \ + && export NUCLEI_USERNAME="${PRISM_ADMIN}" \ + && export NUCLEI_PASSWORD="${PE_PASSWORD}" \ + && nuclei cluster.list format=json 2>/dev/null \ + | grep -v 'Entities :' \ + | jq \ + '.entities[].status | select(.state == "COMPLETE") | select(.resources.network.external_ip != null)' + ) + ;; + *) + log 'Error: invoke with PC or PE argument.' + ;; + esac + + #log "DEBUG: cluster info on ${1}. |${_hold}|" + + if [[ -z "${_hold}" ]]; then + _error=12 + log "Error ${_error}: couldn't resolve cluster info on ${1}. 
|${_hold}|" + args_required 'PE_HOST' + exit ${_error} + else + case ${1} in + PE | pe ) + CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name) + PE_HOST=$(echo ${_hold} | jq -r .data.clusterExternalIPAddress) + ;; + PC | Pc | pc ) + CLUSTER_NAME=$(echo ${_hold} | jq -r .name) + PE_HOST=$(echo ${_hold} | jq -r .resources.network.external_ip) + ;; + esac + + export CLUSTER_NAME PE_HOST + log "Success: Cluster name=${CLUSTER_NAME}, PE external IP=${PE_HOST}" + fi +} + +function prism_check { + # Argument ${1} = REQUIRED: PE or PC + # Argument ${2} = OPTIONAL: number of attempts + # Argument ${3} = OPTIONAL: number of seconds per cycle + + args_required 'ATTEMPTS PE_PASSWORD SLEEP' + + local _attempts=${ATTEMPTS} + local _error=77 + local _host + local _loop=0 + local _password="${PE_PASSWORD}" + local _pw_init='Nutanix/4u' + local _sleep=${SLEEP} + local _test=0 + + #shellcheck disable=2153 + if [[ ${1} == 'PC' ]]; then + _host=${PC_HOST} + else + _host=${PE_HOST} + fi + if [[ ! -z ${2} ]]; then + _attempts=${2} + fi + + while true ; do + (( _loop++ )) + _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${_password} \ + -X POST --data '{ "kind": "cluster" }' \ + https://${_host}:9440/api/nutanix/v3/clusters/list \ + | tr -d \") # wonderful addition of "" around HTTP status code by cURL + + if [[ ! -z ${3} ]]; then + _sleep=${3} + fi + + if (( ${_test} == 401 )); then + log "Warning: unauthorized ${1} user or password on ${_host}." + + if [[ ${1} == 'PC' && ${_password} != "${_pw_init}" ]]; then + _password=${_pw_init} + log "Warning @${1}: Fallback on ${_host}: try initial password next cycle..." + _sleep=0 #break + elif [[ ${1} == 'PC' && ${_password} == "${_pw_init}" && ${PC_VERSION} == "${PC_DEV_VERSION}" ]]; then + _password=${PE_PASSWORD} + log "Warning @${1}-dev: Fallback on ${_host}: try PE cluster password next cycle..." + _sleep=0 #break + fi + + fi + + if (( ${_test} == 200 )); then + log "@${1}: successful." 
+ return 0 + elif (( ${_loop} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." + sleep ${_sleep} + fi + done +} + +function remote_exec() { +# Argument ${1} = REQUIRED: ssh or scp +# Argument ${2} = REQUIRED: PE, PC, or AUTH_SERVER +# Argument ${3} = REQUIRED: command configuration +# Argument ${4} = OPTIONAL: populated with anything = allowed to fail + + local _account='nutanix' + local _attempts=3 + local _error=99 + local _host + local _loop=0 + local _password="${PE_PASSWORD}" + local _pw_init="${NTNX_INIT_PASSWORD}" + local _sleep=${SLEEP} + local _test=0 + + args_required 'SSH_OPTS' + + # shellcheck disable=SC2153 + case ${2} in + 'PE' ) + _host=${PE_HOST} + ;; + 'PC' ) + _host=${PC_HOST} + _password=${_pw_init} + ;; + 'AUTH_SERVER' ) + _account='root' + _host=${AUTH_HOST} + _password=${_pw_init} + _sleep=7 + ;; + esac + + if [[ -z ${3} ]]; then + log 'Error ${_error}: missing third argument.' + exit ${_error} + fi + + if [[ ! -z ${4} ]]; then + _attempts=1 + _sleep=0 + fi + + while true ; do + (( _loop++ )) + case "${1}" in + 'SSH' | 'ssh') + #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform ${_account}@${_host} ${3}..."; fi + SSHPASS="${_password}" sshpass -e ssh -x ${SSH_OPTS} ${_account}@${_host} "${3}" + _test=$? + ;; + 'SCP' | 'scp') + #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform scp ${3} ${_account}@${_host}:"; fi + SSHPASS="${_password}" sshpass -e scp ${SSH_OPTS} ${3} ${_account}@${_host}: + _test=$? + ;; + *) + log "Error ${_error}: improper first argument, should be ssh or scp." 
+ exit ${_error} + ;; + esac + + if (( ${_test} > 0 )) && [[ -z ${4} ]]; then + _error=22 + log "Error ${_error}: pwd=$(pwd), _test=${_test}, _host=${_host}" + exit ${_error} + fi + + if (( ${_test} == 0 )); then + if [[ ${DEBUG} ]]; then log "${3} executed properly."; fi + return 0 + elif (( ${_loop} == ${_attempts} )); then + if [[ -z ${4} ]]; then + _error=11 + log "Error ${_error}: giving up after ${_loop} tries." + exit ${_error} + else + log "Optional: giving up." + break + fi + else + log "${_loop}/${_attempts}: _test=$?|${_test}| SLEEP ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function repo_source() { + # https://stackoverflow.com/questions/1063347/passing-arrays-as-parameters-in-bash#4017175 + local _candidates=("${!1}") # REQUIRED + local _package="${2}" # OPTIONAL + local _error=29 + local _http_code + local _index=0 + local _suffix + local _url + + if (( ${#_candidates[@]} == 0 )); then + log "Error ${_error}: Missing array!" + exit ${_error} + # else + # log "DEBUG: _candidates count is ${#_candidates[@]}" + fi + + if [[ -z ${_package} ]]; then + _suffix=${_candidates[0]##*/} + if (( $(echo "${_suffix}" | grep . | wc --lines) > 0)); then + log "Convenience: omitted package argument, added package=${_package}" + _package="${_suffix}" + fi + fi + # Prepend your local HTTP cache... + _candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" ) + + while (( ${_index} < ${#_candidates[@]} )) + do + unset SOURCE_URL + + # log "DEBUG: ${_index} ${_candidates[${_index}]}, OPTIONAL: _package=${_package}" + _url=${_candidates[${_index}]} + + if [[ -z ${_package} ]]; then + if (( $(echo "${_url}" | grep '/$' | wc --lines) == 0 )); then + log "error ${_error}: ${_url} doesn't end in trailing slash, please correct." 
+ exit ${_error} + fi + elif (( $(echo "${_url}" | grep '/$' | wc --lines) == 1 )); then + _url+="${_package}" + fi + + if (( $(echo "${_url}" | grep '^nfs' | wc --lines) == 1 )); then + log "warning: TODO: cURL can't test nfs URLs...assuming a pass!" + export SOURCE_URL="${_url}" + break + fi + + _http_code=$(curl ${CURL_OPTS} --max-time 5 --write-out '%{http_code}' --head ${_url} | tail -n1) + + if [[ (( ${_http_code} == 200 )) || (( ${_http_code} == 302 )) ]]; then + export SOURCE_URL="${_url}" + log "Found, HTTP:${_http_code} = ${SOURCE_URL}" + break + fi + log " Lost, HTTP:${_http_code} = ${_url}" + ((_index++)) + done + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Error ${_error}: didn't find any sources, last try was ${_url} with HTTP ${_http_code}." + exit ${_error} + fi +} + +function ssh_pubkey() { + local _dir + local _directories=(\ + "${HOME}" \ + "${HOME}/ssh_keys" \ + "${HOME}/cache" \ + ) + local _name + local _test + + args_required 'EMAIL SSH_PUBKEY' + + _name=${EMAIL//\./_DOT_} + _name=${_name/@/_AT_} + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ncli cluster list-public-keys name=${_name}) + + if (( $(echo ${_test} | grep -i "Failed" | wc ${WC_ARG}) > 0 )); then + for _dir in "${_directories[@]}"; do + if [[ -e ${_dir}/${SSH_PUBKEY##*/} ]]; then + log "Note that a period and other symbols aren't allowed to be a key name." + + log "Locally adding ${_dir}/${SSH_PUBKEY##*/} under ${_name} label..." 
+ ncli cluster add-public-key name=${_name} file-path=${_dir}/${SSH_PUBKEY##*/} || true + + break + fi + done + else + log "IDEMPOTENCY: found pubkey ${_name}" + fi +} diff --git a/scripts/stage_calmhow_pc.sh b/scripts/lib.pc.sh old mode 100644 new mode 100755 similarity index 63% rename from scripts/stage_calmhow_pc.sh rename to scripts/lib.pc.sh index 699156c..b368308 --- a/scripts/stage_calmhow_pc.sh +++ b/scripts/lib.pc.sh @@ -2,75 +2,161 @@ # -x # Dependencies: curl, ncli, nuclei, jq -function pc_passwd() { - CheckArgsExist 'MY_PE_PASSWORD' +function calm_update() { + local _attempts=12 + local _calm_bin=/usr/local/nutanix/epsilon + local _container + local _error=19 + local _loop=0 + local _sleep=10 + local _url=http://${AUTH_HOST}:8080 - log "Reset PC password to PE password, must be done by ncli@PC, not API or on PE" - ncli user reset-password user-name=${PRISM_ADMIN} password=${MY_PE_PASSWORD} - if (( $? > 0 )); then - log "Warning: password not reset: $?."# exit 10 + if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then + log "Bypassing download of updated containers." + else + dependencies 'install' 'sshpass' || exit 13 + remote_exec 'ssh' 'AUTH_SERVER' \ + 'if [[ ! -e nucalm.tar ]]; then smbclient -I 10.21.249.12 \\\\pocfs\\images --user ${1} --command "prompt ; cd /Calm-EA/pc-'${PC_VERSION}'/ ; mget *tar"; echo; ls -lH *tar ; fi' \ + 'OPTIONAL' + + while true ; do + (( _loop++ )) + _test=$(curl ${CURL_HTTP_OPTS} ${_url} \ + | tr -d \") # wonderful addition of "" around HTTP status code by cURL + + if (( ${_test} == 200 )); then + log "Success reaching ${_url}" + break; + elif (( ${_loop} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." + sleep ${_sleep} + fi + done + + download ${_url}/epsilon.tar + download ${_url}/nucacallm.tar fi - # TOFIX: nutanix@PC Linux account password change as well? 
- # local _old_pw='nutanix/4u' - # local _http_body=$(cat </dev/null + nuclei microseg.get_status 2>/dev/null +} + +function lcm() { + local _http_body + local _pc_version + local _test + + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + log "PC_VERSION ${PC_VERSION} >= 5.9, starting LCM inventory..." + + _http_body='value: "{".oid":"LifeCycleManager",".method":"lcm_framework_rpc",".kwargs":{"method_class":"LcmFramework","method":"perform_inventory","args":["http://download.nutanix.com/lcm/2.0"]}}"' + + _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ + https://localhost:9440/PrismGateway/services/rest/v1/genesis) + log "inventory _test=|${_test}|" + fi +} + +function pc_admin() { + local _http_body + local _test + local _admin_user='marklavi' + + _http_body=$(cat <= 5.9" - if (( $(echo "${_pc_version} >= 5.9" | bc -l) )); then - _http_body+=' "groupSearchType":"RECURSIVE", ' - _http_body+=" \"directoryUrl\":\"ldap://${AUTH_HOST}:${LDAP_PORT}/\", " + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + log "Checking if PC_VERSION ${PC_VERSION} >= 5.9" + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + _http_body+=$(cat < 0 )); then + log "Warning: password not reset: $?."# exit 10 + fi + # TOFIX: nutanix@PC Linux account password change as well? + + # local _old_pw='nutanix/4u' + # local _http_body=$(cat <&1 | grep -i complete | grep "${_image}") ]]; then - log "Skip: ${_image} already complete on cluster." - else - repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! 
- - # if [[ -z "${SOURCE_URL}" ]]; then - # _error=30 - # log "Error ${_error}: didn't find any sources for ${_image}" - # exit ${_error} - # fi - - nuclei image.create name=${_image} \ - description="${0} via stage_calmhow_pc for ${_image}" \ - source_uri=${SOURCE_URL} 2>&1 - log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" - log "TODO: nuclei image.list, state = COMPLETE; image.list Name UUID State" - if (( $? != 0 )); then - log "Warning: Image submission: $?." - #exit 10 - fi - fi - - done -} -function pc_smtp() { - log "Configure SMTP@PC" - local _sleep=5 + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) - CheckArgsExist 'SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT' - ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ - address=${SMTP_SERVER_ADDRESS} from-email-address=${SMTP_SERVER_FROM} - #log "sleep ${_sleep}..."; sleep ${_sleep} - #log $(ncli cluster get-smtp-server | grep Status | grep success) - ncli cluster send-test-email recipient="${MY_EMAIL}" \ - subject="pc_smtp https://${PRISM_ADMIN}:${MY_PE_PASSWORD}@${MY_PC_HOST}:9440 Testing." - # local _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${MY_PE_PASSWORD} -X POST -d '{ - # "address":"${SMTP_SERVER_ADDRESS}","port":"${SMTP_SERVER_PORT}","username":null,"password":null,"secureMode":"NONE","fromEmailAddress":"${SMTP_SERVER_FROM}","emailStatus":null}' \ - # https://localhost:9440/PrismGateway/services/rest/v1/cluster/smtp) - # log "_test=|${_test}|" -} + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 && ${_test} != 500 )); then + log "PC_VERSION ${PC_VERSION} >= 5.10, setting favorites..." -function flow_enable() { - ## (API; Didn't work. 
Used nuclei instead) - ## https://localhost:9440/api/nutanix/v3/services/microseg - ## {"state":"ENABLE"} - # To disable flow run the following on PC: nuclei microseg.disable + _json=$(cat </dev/null - nuclei microseg.get_status 2>/dev/null + for _http_body in ${_json}; do + _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ + https://localhost:9440/api/nutanix/v3/search/favorites) + log "favs _test=|${_test}|${_http_body}" + done + fi } function pc_project() { @@ -373,7 +502,7 @@ function pc_project() { local _count local _uuid - _name=${MY_EMAIL%%@nutanix.com}.test + _name=${EMAIL%%@nutanix.com}.test _count=$(. /etc/profile.d/nutanix_env.sh \ && nuclei project.list 2>/dev/null | grep ${_name} | wc --lines) if (( ${_count} > 0 )); then @@ -386,7 +515,7 @@ function pc_project() { nuclei project.create name=${_name} description='test from NuCLeI!' 2>/dev/null _uuid=$(. /etc/profile.d/nutanix_env.sh \ && nuclei project.get ${_name} format=json 2>/dev/null \ - | ${HOME}/jq .metadata.project_reference.uuid | tr -d '"') + | jq .metadata.project_reference.uuid | tr -d '"') log "${_name}.uuid = ${_uuid}" # - project.get mark.lavi.test @@ -400,123 +529,3 @@ function pc_project() { # {"spec":{"access_control_policy_list":[],"project_detail":{"name":"mark.lavi.test1","resources":{"external_user_group_reference_list":[],"user_reference_list":[],"environment_reference_list":[],"account_reference_list":[],"subnet_reference_list":[{"kind":"subnet","name":"Primary","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"},{"kind":"subnet","name":"Secondary","uuid":"4689bc7f-61dd-4527-bc7a-9d737ae61322"}],"default_subnet_reference":{"kind":"subnet","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"}},"description":"test from 
NuCLeI!"},"user_list":[],"user_group_list":[]},"api_version":"3.1","metadata":{"creation_time":"2018-06-22T03:54:59Z","spec_version":0,"kind":"project","last_update_time":"2018-06-22T03:55:00Z","uuid":"1be7f66a-5006-4061-b9d2-76caefedd298","categories":{},"owner_reference":{"kind":"user","name":"admin","uuid":"00000000-0000-0000-0000-000000000000"}}} } - -function pc_update() { - log "This function not implemented yet." - log "Download PC upgrade image: ${MY_PC_UPGRADE_URL##*/}" - cd /home/nutanix/install && ./bin/cluster -i . -p upgrade -} - -# shellcheck disable=SC2120 -function calm_update() { - local _attempts=12 - local _calm_bin=/usr/local/nutanix/epsilon - local _container - local _error=19 - local _loop=0 - local _sleep=10 - local _url=http://${AUTH_HOST}:8080 - - if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then - log "Bypassing download of updated containers." - else - remote_exec 'ssh' 'AUTH_SERVER' \ - 'if [[ ! -e nucalm.tar ]]; then smbclient -I 10.21.249.12 \\\\pocfs\\images --user ${1} --command "prompt ; cd /Calm-EA/pc-'${PC_VERSION}'/ ; mget *tar"; echo; ls -lH *tar ; fi' \ - 'OPTIONAL' - - while true ; do - (( _loop++ )) - _test=$(curl ${CURL_HTTP_OPTS} ${_url} \ - | tr -d \") # wonderful addition of "" around HTTP status code by cURL - - if (( ${_test} == 200 )); then - log "Success reaching ${_url}" - break; - elif (( ${_loop} > ${_attempts} )); then - log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - return ${_error} - else - log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." 
- sleep ${_sleep} - fi - done - - Download ${_url}/epsilon.tar - Download ${_url}/nucalm.tar - fi - - if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then - ls -lh ${HOME}/*tar - mkdir ${HOME}/calm.backup || true - cp ${_calm_bin}/*tar ${HOME}/calm.backup/ \ - && genesis stop nucalm epsilon \ - && docker rm -f "$(docker ps -aq)" || true \ - && docker rmi -f "$(docker images -q)" || true \ - && cp ${HOME}/*tar ${_calm_bin}/ \ - && cluster start # ~75 seconds to start both containers - - for _container in epsilon nucalm ; do - local _test=0 - while (( ${_test} < 1 )); do - _test=$(docker ps -a | grep ${_container} | grep -i healthy | wc --lines) - done - done - fi -} - -#__main()____________ - -# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables -. /etc/profile.d/nutanix_env.sh -. common.lib.sh -. global.vars.sh -begin - -Dependencies 'install' 'sshpass' && Dependencies 'install' 'jq' || exit 13 - -pc_passwd -NTNX_cmd # check cli services available? - -if [[ -z "${MY_PE_HOST}" ]]; then - log "MY_PE_HOST unset, determining..." - Determine_PE - . global.vars.sh -fi - -if [[ ! -z "${1}" ]]; then - # hidden bonus - log "Don't forget: $0 first.last@nutanixdc.local%password" - calm_update && exit 0 -fi - -CheckArgsExist 'MY_EMAIL MY_PC_HOST MY_PE_PASSWORD PC_VERSION' - -export ATTEMPTS=2 -export SLEEP=10 - -log "Adding key to PC VMs..." && SSH_PubKey || true & # non-blocking, parallel suitable - -pc_init \ -&& pc_ui \ -&& pc_auth \ -&& pc_smtp - -ssp_auth \ -&& calm_enable \ -&& images \ -&& flow_enable \ -&& Check_Prism_API_Up 'PC' - -pc_project # TODO:50 pc_project is a new function, non-blocking at end. -# NTNX_Upload 'AOS' # function in common.lib.sh - -if (( $? == 0 )); then - #Dependencies 'remove' 'sshpass' && Dependencies 'remove' 'jq' \ - #&& - log "PC = https://${MY_PC_HOST}:9440" - finish -else - _error=19 - log "Error ${_error}: failed to reach PC!" 
- exit ${_error} -fi diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh new file mode 100644 index 0000000..c684c55 --- /dev/null +++ b/scripts/lib.pe.sh @@ -0,0 +1,478 @@ +#!/usr/bin/env bash +# -x +# Dependencies: acli, ncli, jq, sshpass, curl, md5sum, pgrep, wc, tr, pkill + +function acli() { + local _cmd + + _cmd=$* + /usr/local/nutanix/bin/acli ${_cmd} + # DEBUG=1 && if [[ ${DEBUG} ]]; then log "$@"; fi +} + +function authentication_source() { + local _attempts + local _error=13 + local _loop + local _pc_version + local _result + local _sleep + local _test=0 + local _vm + + args_required 'AUTH_SERVER AUTH_FQDN SLEEP STORAGE_IMAGES PC_VERSION' + + if [[ -z ${AUTH_SERVER} ]]; then + log "Error ${_error}: please provide a choice for authentication server." + exit ${_error} + fi + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + case "${AUTH_SERVER}" in + 'ActiveDirectory') + log "Manual setup = https://github.com/nutanixworkshops/labs/blob/master/setup/active_directory/active_directory_setup.rst" + ;; + 'AutoDC') + local _autodc_auth + local _autodc_index=1 + local _autodc_release=1 + local _autodc_service='samba-ad-dc' + local _autodc_restart="service ${_autodc_service} restart" + local _autodc_status="systemctl show ${_autodc_service} --property=SubState" + local _autodc_success='SubState=running' + + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC-2.0..." 
+ _autodc_auth=" --username=${AUTH_ADMIN_USER} --password=${AUTH_ADMIN_PASS}" + _autodc_index='' + _autodc_release=2 + _autodc_service=samba + _autodc_restart="sleep 2 && service ${_autodc_service} stop && sleep 5 && service ${_autodc_service} start" + _autodc_status="service ${_autodc_service} status" + _autodc_success=' * status: started' + + # REVIEW: override global.vars + export AUTODC_REPOS=(\ + 'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \ + 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + ) + fi + + dns_check "dc${_autodc_index}.${AUTH_FQDN}" + _result=$? + + if (( ${_result} == 0 )); then + log "${AUTH_SERVER}${_autodc_release}.IDEMPOTENCY: dc${_autodc_index}.${AUTH_FQDN} set, skip. ${_result}" + else + log "${AUTH_SERVER}${_autodc_release}.IDEMPOTENCY failed, no DNS record dc${_autodc_index}.${AUTH_FQDN}" + + _error=12 + _loop=0 + _sleep=${SLEEP} + + repo_source AUTODC_REPOS[@] + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${AUTH_SERVER}${_autodc_release} | wc --lines) == 0 )); then + log "Import ${AUTH_SERVER}${_autodc_release} image from ${SOURCE_URL}..." + acli image.create ${AUTH_SERVER}${_autodc_release} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url=${SOURCE_URL} + else + log "Image found, assuming ready. Skipping ${AUTH_SERVER}${_autodc_release} import." 
+ fi + + log "Create ${AUTH_SERVER}${_autodc_release} VM based on ${AUTH_SERVER}${_autodc_release} image" + acli "vm.create ${AUTH_SERVER}${_autodc_release} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" + # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G + #acli "vm.disk_create ${AUTH_SERVER}${_autodc_release} cdrom=true empty=true" + acli "vm.disk_create ${AUTH_SERVER}${_autodc_release} clone_from_image=${AUTH_SERVER}${_autodc_release}" + acli "vm.nic_create ${AUTH_SERVER}${_autodc_release} network=${NW1_NAME} ip=${AUTH_HOST}" + + log "Power on ${AUTH_SERVER}${_autodc_release} VM..." + acli "vm.on ${AUTH_SERVER}${_autodc_release}" + + _attempts=20 + _loop=0 + _sleep=10 + + while true ; do + (( _loop++ )) + + _test=$(remote_exec 'SSH' 'AUTH_SERVER' "${_autodc_status}") + if [[ "${_test}" == "${_autodc_success}" ]]; then + log "${AUTH_SERVER}${_autodc_release} is ready." + sleep ${_sleep} + break + elif (( ${_loop} > ${_attempts} )); then + log "Error ${_error}: ${AUTH_SERVER}${_autodc_release} VM running: giving up after ${_loop} tries." + _result=$(source /etc/profile.d/nutanix_env.sh \ + && for _vm in $(source /etc/profile.d/nutanix_env.sh && acli vm.list | grep ${AUTH_SERVER}${_autodc_release}) ; do acli -y vm.delete $_vm; done) + # acli image.delete ${AUTH_SERVER}${_autodc_release} + log "Remediate by deleting the ${AUTH_SERVER}${_autodc_release} VM from PE (just attempted by this script: ${_result}) and then running acli $_" + exit ${_error} + else + log "_test ${_loop}/${_attempts}=|${_test}|: sleep ${_sleep} seconds..." + sleep ${_sleep} + fi + done + + log "Create Reverse Lookup Zone on ${AUTH_SERVER}${_autodc_release} VM..." 
+ _attempts=3 + _loop=0 + + while true ; do + (( _loop++ )) + remote_exec 'SSH' 'AUTH_SERVER' \ + "samba-tool dns zonecreate dc${_autodc_index} ${OCTET[2]}.${OCTET[1]}.${OCTET[0]}.in-addr.arpa ${_autodc_auth} && ${_autodc_restart}" \ + 'OPTIONAL' + sleep ${_sleep} + + dns_check "dc${_autodc_index}.${AUTH_FQDN}" + _result=$? + + if (( ${_result} == 0 )); then + log "Success: DNS record dc${_autodc_index}.${AUTH_FQDN} set." + break + elif (( ${_loop} > ${_attempts} )); then + if (( ${_autodc_release} < 2 )); then + log "Error ${_error}: ${AUTH_SERVER}${_autodc_release}: giving up after ${_loop} tries; deleting VM..." + acli "-y vm.delete ${AUTH_SERVER}${_autodc_release}" + exit ${_error} + fi + else + log "dns_check ${_loop}/${_attempts}=|${_result}|: sleep ${_sleep} seconds..." + sleep ${_sleep} + fi + done + + fi + ;; + 'OpenLDAP') + log "To be documented, see https://drt-it-github-prod-1.eng.nutanix.com/mark-lavi/openldap" + ;; + esac +} + +function files_install() { + local _ncli_softwaretype='FILE_SERVER' + local _ncli_software_type='afs' + local _test + + dependencies 'install' 'jq' || exit 13 + + log "IDEMPOTENCY: checking for ${_ncli_software_type} completed..." + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true software list \ + | jq -r \ + '.data[] | select(.softwareType == "'${_ncli_softwaretype}'") | select(.status == "COMPLETED") | .version') + + if [[ ${_test} != "${FILES_VERSION}" ]]; then + log "Files ${FILES_VERSION} not completed. ${_test}" + ntnx_download "${_ncli_software_type}" + else + log "IDEMPOTENCY: Files ${FILES_VERSION} already completed." + fi +} + +function network_configure() { + + if [[ ! -z $(acli "net.list" | grep ${NW1_NAME}) ]]; then + log "IDEMPOTENCY: ${NW1_NAME} network set, skip." + else + args_required 'AUTH_DOMAIN IPV4_PREFIX AUTH_HOST' + + if [[ ! -z $(acli "net.list" | grep 'Rx-Automation-Network') ]]; then + log "Remove Rx-Automation-Network..." 
+ acli "-y net.delete Rx-Automation-Network" + fi + + log "Create primary network: Name: ${NW1_NAME}, VLAN: ${NW1_VLAN}, Subnet: ${NW1_SUBNET}, Domain: ${AUTH_DOMAIN}, Pool: ${NW1_DHCP_START} to ${NW1_DHCP_END}" + acli "net.create ${NW1_NAME} vlan=${NW1_VLAN} ip_config=${NW1_SUBNET}" + acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_DOMAIN}" + acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" + + if [[ ! -z "${NW2_NAME}" ]]; then + log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" + acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" + acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_DOMAIN}" + acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" + fi + fi +} + +function pc_configure() { + args_required 'PC_LAUNCH RELEASE' + local _command + local _container + local _dependencies="global.vars.sh lib.common.sh lib.pc.sh ${PC_LAUNCH}" + + if [[ -e ${RELEASE} ]]; then + _dependencies+=" ${RELEASE}" + else + log 'Warning: did NOT find '${RELEASE} + fi + log "Send configuration scripts to PC and remove: ${_dependencies}" + remote_exec 'scp' 'PC' "${_dependencies}" && rm -f ${_dependencies} lib.pe.sh + + _dependencies="bin/${JQ_REPOS[0]##*/} ${SSHPASS_REPOS[0]##*/} id_rsa.pub" + + log "OPTIONAL: Send binary dependencies to PC: ${_dependencies}" + remote_exec 'scp' 'PC' "${_dependencies}" 'OPTIONAL' + + for _container in epsilon nucalm ; do + if [[ -e ${_container}.tar ]]; then + log "Uploading Calm container updates in background..." + remote_exec 'SCP' 'PC' ${_container}.tar 'OPTIONAL' & + fi + done + + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + log "Remote asynchroneous launch PC configuration script... 
${_command}" + remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" + log "PC Configuration complete: try Validate Staged Clusters now." +} + +function pc_install() { + local _ncli_softwaretype='PRISM_CENTRAL_DEPLOY' + local _nw_name="${1}" + local _nw_uuid + local _storage_default_uuid + local _test + + log "IDEMPOTENCY: Checking PC API responds, curl failures are acceptable..." + prism_check 'PC' 2 0 + + if (( $? == 0 )) ; then + log "IDEMPOTENCY: PC API responds, skip." + else + log "Get cluster network and storage container UUIDs..." + _nw_uuid=$(acli "net.get ${_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ + | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) + log "${_nw_name} network UUID: ${_nw_uuid}" + log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" + + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true software list \ + | jq -r \ + '.data[] | select(.softwareType == "'${_ncli_softwaretype}'") | select(.status == "COMPLETED") | .version') + + if [[ ${_test} != "${PC_VERSION}" ]]; then + log "PC-${PC_VERSION} not completed. ${_test}" + ntnx_download "${_ncli_softwaretype}" + else + log "IDEMPOTENCY: PC-${PC_VERSION} upload already completed." + fi + + log "Deploy Prism Central (typically takes 17+ minutes)..." 
+ # TODO:160 make scale-out & dynamic, was: 4vCPU/16GB = 17179869184, 8vCPU/40GB = 42949672960 + # Sizing suggestions, certified configurations: + # https://portal.nutanix.com/#/page/docs/details?targetId=Release-Notes-Prism-Central-v591:sha-pc-scalability-r.html + + # TODO:10 network_configuration.{subnet_mask|default_gateway} + HTTP_BODY=$(cat <= 5 && ${_aos_version[1]} >= 9 )); then + _directory_url="ldap://${AUTH_HOST}:${LDAP_PORT}" + log "Adjusted directory-url=${_directory_url} because AOS-${_aos} >= 5.9" + fi + else + log "Error ${_error}: couldn't determine AOS version=${_aos}" + exit ${_error} + fi + + log "Configure PE external authentication" + ncli authconfig add-directory \ + directory-type=ACTIVE_DIRECTORY \ + connection-type=LDAP directory-url="${_directory_url}" \ + domain="${AUTH_FQDN}" \ + name="${AUTH_DOMAIN}" \ + service-account-username="${AUTH_ADMIN_USER}" \ + service-account-password="${AUTH_ADMIN_PASS}" + + log "Configure PE role map" + ncli authconfig add-role-mapping \ + role=ROLE_CLUSTER_ADMIN \ + entity-type=group name="${AUTH_DOMAIN}" \ + entity-values="${AUTH_ADMIN_GROUP}" + fi +} + +function pe_init() { + args_required 'DATA_SERVICE_IP EMAIL \ + SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \ + STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ + SLEEP ATTEMPTS' + + if [[ `ncli cluster get-params | grep 'External Data' | \ + awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then + log "IDEMPOTENCY: Data Services IP set, skip." 
+ else + log "Configure SMTP" + ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ + from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS} + ${HOME}/serviceability/bin/email-alerts --to_addresses="${EMAIL}" \ + --subject="[pe_init:Config SMTP:alert test] $(ncli cluster get-params)" \ + && ${HOME}/serviceability/bin/send-email + + log "Configure NTP" + ncli cluster add-to-ntp-servers servers=${NTP_SERVERS} + + log "Rename default container to ${STORAGE_DEFAULT}" + default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' \ + | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') + ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" + + log "Rename default storage pool to ${STORAGE_POOL}" + default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) + ncli sp edit name="${default_sp}" new-name="${STORAGE_POOL}" + + log "Check if there is a container named ${STORAGE_IMAGES}, if not create one" + (ncli container ls | grep -P '^(?!.*VStore Name).*Name' \ + | cut -d ':' -f 2 | sed s/' '//g | grep "^${STORAGE_IMAGES}" > /dev/null 2>&1) \ + && log "Container ${STORAGE_IMAGES} exists" \ + || ncli container create name="${STORAGE_IMAGES}" sp-name="${STORAGE_POOL}" + + # Set external IP address: + #ncli cluster edit-params external-ip-address=${PE_HOST} + + log "Set Data Services IP address to ${DATA_SERVICE_IP}" + ncli cluster edit-params external-data-services-ip-address=${DATA_SERVICE_IP} + fi +} + +function pe_license() { + local _test + args_required 'CURL_POST_OPTS PE_PASSWORD' + + log "IDEMPOTENCY: Checking PC API responds, curl failures are acceptable..." + prism_check 'PC' 2 0 + + if (( $? 
== 0 )) ; then + log "IDEMPOTENCY: PC API responds, skip" + else + _test=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{ + "username": "SE with $(basename ${0})", + "companyName": "Nutanix", + "jobTitle": "SE" + }' https://localhost:9440/PrismGateway/services/rest/v1/eulas/accept) + log "Validate EULA on PE: _test=|${_test}|" + + _test=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data '{ + "defaultNutanixEmail": null, + "emailContactList": null, + "enable": false, + "enableDefaultNutanixEmail": false, + "isPulsePromptNeeded": false, + "nosVersion": null, + "remindLater": null, + "verbosityType": null + }' https://localhost:9440/PrismGateway/services/rest/v1/pulse) + log "Disable Pulse in PE: _test=|${_test}|" + + #echo; log "Create PE Banner Login" # TODO: for PC, login banner + # https://portal.nutanix.com/#/page/docs/details?targetId=Prism-Central-Guide-Prism-v56:mul-welcome-banner-configure-pc-t.html + # curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data \ + # '{type: "welcome_banner", key: "welcome_banner_status", value: true}' \ + # https://localhost:9440/PrismGateway/services/rest/v1/application/system_data + #curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data + # '{type: "welcome_banner", key: "welcome_banner_content", value: "HPoC '${OCTET[2]}' password = '${PE_PASSWORD}'"}' \ + # https://localhost:9440/PrismGateway/services/rest/v1/application/system_data + fi +} + +function pc_unregister { + local _cluster_uuid + local _pc_uuid + # https://portal.nutanix.com/kb/4944 + + # PE: + cluster status # check + ncli -h true multicluster remove-from-multicluster \ + external-ip-address-or-svm-ips=${PC_HOST} \ + username=${PRISM_ADMIN} password=${PE_PASSWORD} force=true + # Error: This cluster was never added to Prism Central + ncli multicluster get-cluster-state # check for none + _cluster_uuid=$(ncli cluster info | grep -i uuid | awk -F: '{print $2}' | tr -d 
'[:space:]') + + exit 0 + # PC: remote_exec 'PC' + chmod u+x /home/nutanix/bin/unregistration_cleanup.py \ + && python /home/nutanix/bin/unregistration_cleanup.py ${_cluster_uuid} + # Uuid of current cluster cannot be passed to cleanup + _pc_uuid=$(cluster info) # no such command! + # PE: + chmod u+x /home/nutanix/bin/unregistration_cleanup.py \ + && python /home/nutanix/bin/unregistration_cleanup.py ${_pc_uuid} + + # Troubleshooting + cat ~/data/logs/unregistration_cleanup.log + + pc_destroy +} + +function pc_destroy() { + local _vm + + dependencies 'install' 'jq' || exit 13 + + for _vm in $(acli -o json vm.list | jq -r '.data[] | select(.name | contains("Prism Central")) | .uuid'); do + log "PC vm.uuid=${_vm}" + acli vm.off ${_vm} && acli -y vm.delete ${_vm} + done +} diff --git a/scripts/lib.shell-convenience.sh b/scripts/lib.shell-convenience.sh new file mode 100755 index 0000000..4ddc8aa --- /dev/null +++ b/scripts/lib.shell-convenience.sh @@ -0,0 +1,222 @@ +#!/usr/bin/env bash + +# stageworkshop_pe kill && stageworkshop_w 2 && stageworkshop_pe && stageworkshop_pe logs +# TODO:80 FUTURE: prompt for choice when more than one cluster + +. scripts/global.vars.sh + +if [[ -e ${RELEASE} && "${1}" != 'quiet' ]]; then + echo -e "Sourced lib.shell-convenience.sh, release: $(jq -r '.FullSemVer' ${RELEASE})\n \ + \tPrismCentralStable=${PC_STABLE_VERSION}\n \ + \t PrismCentralDev=${PC_DEV_VERSION}" + + if [[ -z ${PC_VERSION} ]]; then + export PC_VERSION="${PC_DEV_VERSION}" + fi +fi + +alias stageworkshop_pe='stageworkshop_ssh PE' +alias stageworkshop_pe_ssh='stageworkshop_ssh PE' +alias stageworkshop_pe_web='stageworkshop_web PE' +alias stageworkshop_pc='stageworkshop_ssh PC' +alias stageworkshop_pc_ssh='stageworkshop_ssh PC' +alias stageworkshop_pc_web='stageworkshop_web PC' +alias stageworkshop_w='./stage_workshop.sh -f example_pocs.txt -w ' + +function stageworkshop_auth() { + . 
scripts/global.vars.sh + stageworkshop_ssh 'AUTH' "${1}" +} + +function stageworkshop_cache_start() { + local _hold + local _host + local _hosts + local _file + local _bits=( \ + #https://github.com/mlavi/stageworkshop/archive/master.zip \ + #http://download.nutanix.com/downloads/pc/one-click-pc-deployment/5.10.0.1/euphrates-5.10.0.1-stable-prism_central.tar \ + # http://10.59.103.143:8000/autodc-2.0.qcow2 \ + # http://download.nutanix.com/calm/CentOS-7-x86_64-GenericCloud-1801-01.qcow2 \ + ) + + if [[ ! -d cache ]]; then + mkdir cache + fi + pushd cache || true + + stageworkshop_cache_stop + + echo "Setting up http://localhost:${HTTP_CACHE_PORT}/ on cache directory..." + python -m SimpleHTTPServer ${HTTP_CACHE_PORT} || python -m http.server ${HTTP_CACHE_PORT} & + + echo "Populate cache files..." + for _file in "${_bits[@]}"; do + if [[ -e ${_file##*/} ]]; then + echo "Cached: ${_file##*/}" + else + curl --remote-name --location --continue-at - ${_file} + fi + done + + stageworkshop_cluster '' + + echo "Setting up remote SSH tunnels on local and remote port ${HTTP_CACHE_PORT}..." 
+ #acli -o json host.list | jq -r .data[].hypervisorAddress + _hosts=$(SSHPASS=${PE_PASSWORD} \ + sshpass -e ssh ${SSH_OPTS} -n ${NTNX_USER}@${PE_HOST} \ + 'source /etc/profile.d/nutanix_env.sh ; ncli host list | grep Controller') + _hold=$(echo "${_hosts}" | awk -F': ' '{print $2}') + + # shellcheck disable=2206 + _hosts=(${_hold// / }) # zero index + + for _host in "${_hosts[@]}"; do + echo "SSH tunnel for _host=$_host" + #ServerAliveInterval 120 + SSHPASS=${PE_PASSWORD} sshpass -e ssh ${SSH_OPTS} -nNT \ + -R ${HTTP_CACHE_PORT}:localhost:${HTTP_CACHE_PORT} ${NTNX_USER}@${_host} & + done + + popd || true + echo -e "\nTo turn service and tunnel off: stageworkshop_cache_stop" + + ps -efww | grep ssh + unset NTNX_USER PE_HOST PE_PASSWORD SSHPASS + stageworkshop_web http://localhost:${HTTP_CACHE_PORT} +} + +function stageworkshop_cache_stop() { + echo "Killing service and tunnel:${HTTP_CACHE_PORT}..." + pkill -f ${HTTP_CACHE_PORT} +} + +function stageworkshop_web() { + local _url + + stageworkshop_cluster '' + + case "${1}" in + PC | pc) + # shellcheck disable=2153 + _url=https://${PC_HOST}:9440 + ;; + PE | pe) + _url=https://${PE_HOST}:9440 + ;; + esac + unset NTNX_USER PE_HOST PE_PASSWORD PC_HOST SSHPASS + + case "${OS_NAME}" in + Darwin) + open -a 'Google Chrome' ${_url} + ;; + LinuxMint | Ubuntu) + firefox ${_url} || chromium-browser ${_url} & + ;; + *) + echo "Undetected operating system OS_NAME=${OS_NAME}" + exit 10 + ;; + esac +} + +function stageworkshop_cluster() { + local _cluster + local _fields + local _filespec + export NTNX_USER=nutanix + + if [[ -n ${1} || ${1} == '' ]]; then + _filespec=~/Documents/github.com/mlavi/stageworkshop/example_pocs.txt + else + _filespec="${1}" + echo "INFO: Using cluster file: |${1}| ${_filespec}" + fi + + echo -e "\nAssumptions: + - Last uncommented cluster in: ${_filespec} + - ssh user authentication: ${NTNX_USER} + - Accept self-signed cert: *.nutanix.local\n" + + _cluster=$(grep --invert-match --regexp '^#' 
"${_filespec}" | tail --lines=1) + # shellcheck disable=2206 + _fields=(${_cluster//|/ }) + + export PE_HOST=${_fields[0]} + export PE_PASSWORD=${_fields[1]} + export EMAIL=${_fields[2]} + echo "INFO|stageworkshop_cluster|PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} NTNX_USER=${NTNX_USER}" + + . scripts/global.vars.sh +} + +function stageworkshop_ssh() { + stageworkshop_cluster '' + + local _command + local _host + local _password=${PE_PASSWORD} + local _user=${NTNX_USER} + + case "${1}" in + PC | pc) + echo "SSHPASS='nutanix/4u' sshpass -e ssh \\ + ${SSH_OPTS} \\ + nutanix@${PC_HOST}" + echo 'pkill -f calm ; tail -f calm*log' + echo "PC_VERSION=${PC_VERSION} EMAIL=${EMAIL} PE_PASSWORD='${_password}' ./calm.sh 'PC'" + _host=${PC_HOST} + _password='nutanix/4u' + ;; + PE | pe) + _host=${PE_HOST} + + cat << EOF +OPTIONAL: cd stageworkshop-master + CHECK: wget http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT} -q -O- + +SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\ + ${SSH_OPTS} \\ + nutanix@${PE_HOST} + +pkill -f calm ; tail -f calm*log +EOF + + echo 'rm -rf master.zip calm*.log stageworkshop-master/ && \' + echo ' curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh \' + echo ' && SOURCE=${_} 'EMAIL=${EMAIL} PE_PASSWORD=${_password}' sh ${_##*/} \' + echo ' && tail -f ~/calm*.log' + echo -e "cd stageworkshop-master/scripts/ && \ \n PE_HOST=${PE_HOST} PE_PASSWORD='${_password}' PC_VERSION=${PC_DEV_VERSION} EMAIL=${EMAIL} ./calm.sh 'PE'" + ;; + AUTH | auth | ldap) + _host=${AUTH_HOST} + _password='nutanix/4u' + _user=root + esac + #echo "INFO|stageworkshop_ssh|PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} NTNX_USER=${NTNX_USER}" + + case "${2}" in + log | logs) + _command='date; echo; tail -f calm*log' + ;; + calm | inflight) + _command='ps -efww | grep calm' + ;; + kill | stop) + _command='ps -efww | grep calm ; pkill -f calm; ps -efww | grep calm' + ;; + *) + _command="${2}" + ;; + esac + + echo -e "INFO: ${_host} $ 
${_command}\n" + SSHPASS="${_password}" sshpass -e ssh -q \ + -o StrictHostKeyChecking=no \ + -o GlobalKnownHostsFile=/dev/null \ + -o UserKnownHostsFile=/dev/null \ + ${_user}@"${_host}" "${_command}" + + unset NTNX_USER PE_HOST PE_PASSWORD PC_HOST SSHPASS +} diff --git a/scripts/pc_destroy.sh b/scripts/pc_destroy.sh deleted file mode 100755 index e0e5938..0000000 --- a/scripts/pc_destroy.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash - -function pe_unregister { - local _cluster_uuid - local _pc_uuid - local _vm - # https://portal.nutanix.com/kb/4944 - - # PE: - cluster status # check - ncli -h true multicluster remove-from-multicluster \ - external-ip-address-or-svm-ips=${MY_PC_HOST} \ - username=${PRISM_ADMIN} password=${MY_PE_PASSWORD} force=true - # Error: This cluster was never added to Prism Central - ncli multicluster get-cluster-state # check for none - _cluster_uuid=$(ncli cluster info | grep -i uuid | awk -F: '{print $2}' | tr -d '[:space:]') - - exit 0 - # PC: remote_exec 'PC' - chmod u+x /home/nutanix/bin/unregistration_cleanup.py \ - && python /home/nutanix/bin/unregistration_cleanup.py ${_cluster_uuid} - # Uuid of current cluster cannot be passed to cleanup - _pc_uuid=$(cluster info) # no such command! - # PE: - chmod u+x /home/nutanix/bin/unregistration_cleanup.py \ - && python /home/nutanix/bin/unregistration_cleanup.py ${_pc_uuid} - - # Troubleshooting - cat ~/data/logs/unregistration_cleanup.log - - for _vm in `acli -o json vm.list | ~/jq -r '.data[] | select(.name | contains("Prism Central")) | .uuid'`; do - log "PC vm.uuid=${_vm}" - acli vm.off ${_vm} && acli -y vm.delete ${_vm} - done -} - -# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables -. /etc/profile.d/nutanix_env.sh -. common.lib.sh -. global.vars.sh -begin - - MY_PC_HOST=10.21.43.37 -MY_PE_PASSWORD=nx2Tech381! 
- PRISM_ADMIN=admin - -pe_unregister - -finish diff --git a/scripts/stage_calmhow.sh b/scripts/stage_calmhow.sh deleted file mode 100644 index 63c83a3..0000000 --- a/scripts/stage_calmhow.sh +++ /dev/null @@ -1,469 +0,0 @@ -#!/usr/bin/env bash -# -x -# Dependencies: acli, ncli, dig, jq, sshpass, curl, md5sum, pgrep, wc, tr, pkill -# Please configure according to your needs - -function dns_check() { - local _dns - local _error - local _lookup=${1} # REQUIRED - local _test - - if [[ -z ${_lookup} ]]; then - _error=43 - log "Error ${_error}: missing lookup record!" - exit ${_error} - fi - - _dns=$(dig +retry=0 +time=2 +short @${AUTH_HOST} ${_lookup}) - _test=$? - - if [[ ${_dns} != "${AUTH_HOST}" ]]; then - _error=44 - log "Error ${_error}: result was ${_test}: ${_dns}" - return ${_error} - fi -} - -function acli() { - local _cmd - - _cmd=$* - /usr/local/nutanix/bin/acli ${_cmd} - # DEBUG=1 && if [[ ${DEBUG} ]]; then log "$@"; fi -} - -function pe_init() { - CheckArgsExist 'DATA_SERVICE_IP MY_EMAIL \ - SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \ - MY_CONTAINER_NAME MY_SP_NAME MY_IMG_CONTAINER_NAME \ - SLEEP ATTEMPTS' - - if [[ `ncli cluster get-params | grep 'External Data' | \ - awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then - log "IDEMPOTENCY: Data Services IP set, skip." 
- else - log "Configure SMTP: https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F" - ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ - from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS} - ${HOME}/serviceability/bin/email-alerts --to_addresses="${MY_EMAIL}" \ - --subject="[pe_init:Config SMTP:alert test] `ncli cluster get-params`" \ - && ${HOME}/serviceability/bin/send-email - - log "Configure NTP" - ncli cluster add-to-ntp-servers \ - servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org - - log "Rename default container to ${MY_CONTAINER_NAME}" - default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' \ - | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') - ncli container edit name="${default_container}" new-name="${MY_CONTAINER_NAME}" - - log "Rename default storage pool to ${MY_SP_NAME}" - default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) - ncli sp edit name="${default_sp}" new-name="${MY_SP_NAME}" - - log "Check if there is a container named ${MY_IMG_CONTAINER_NAME}, if not create one" - (ncli container ls | grep -P '^(?!.*VStore Name).*Name' \ - | cut -d ':' -f 2 | sed s/' '//g | grep "^${MY_IMG_CONTAINER_NAME}" > /dev/null 2>&1) \ - && log "Container ${MY_IMG_CONTAINER_NAME} exists" \ - || ncli container create name="${MY_IMG_CONTAINER_NAME}" sp-name="${MY_SP_NAME}" - - # Set external IP address: - #ncli cluster edit-params external-ip-address=${MY_PE_HOST} - - log "Set Data Services IP address to ${DATA_SERVICE_IP}" - ncli cluster edit-params external-data-services-ip-address=${DATA_SERVICE_IP} - fi -} - -function network_configure() { - # From this point, we assume according to SEWiki: - # IP Range: ${HPOC_PREFIX}.0/25 - # Gateway: ${HPOC_PREFIX}.1 - # DNS: 10.21.253.10,10.21.253.11 - # DHCP Pool: ${HPOC_PREFIX}.50 - ${HPOC_PREFIX}.120 - - 
CheckArgsExist 'MY_PRIMARY_NET_NAME MY_PRIMARY_NET_VLAN MY_SECONDARY_NET_NAME MY_SECONDARY_NET_VLAN MY_DOMAIN_NAME HPOC_PREFIX AUTH_HOST' - - if [[ ! -z `acli "net.list" | grep ${MY_SECONDARY_NET_NAME}` ]]; then - log "IDEMPOTENCY: ${MY_SECONDARY_NET_NAME} network set, skip" - else - log "Remove Rx-Automation-Network if it exists..." - acli "-y net.delete Rx-Automation-Network" - - log "Create primary network: Name: ${MY_PRIMARY_NET_NAME}, VLAN: ${MY_PRIMARY_NET_VLAN}, Subnet: ${HPOC_PREFIX}.1/25, Domain: ${MY_DOMAIN_FQDN}, Pool: ${HPOC_PREFIX}.50 to ${HPOC_PREFIX}.125" - acli "net.create ${MY_PRIMARY_NET_NAME} vlan=${MY_PRIMARY_NET_VLAN} ip_config=${HPOC_PREFIX}.1/25" - acli "net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=${AUTH_HOST},10.21.253.10 domains=${MY_DOMAIN_FQDN}" - acli "net.add_dhcp_pool ${MY_PRIMARY_NET_NAME} start=${HPOC_PREFIX}.50 end=${HPOC_PREFIX}.125" - - if [[ ${MY_SECONDARY_NET_NAME} ]]; then - log "Create secondary network: Name: ${MY_SECONDARY_NET_NAME}, VLAN: ${MY_SECONDARY_NET_VLAN}, Subnet: ${HPOC_PREFIX}.129/25, Pool: ${HPOC_PREFIX}.132 to ${HPOC_PREFIX}.253" - acli "net.create ${MY_SECONDARY_NET_NAME} vlan=${MY_SECONDARY_NET_VLAN} ip_config=${HPOC_PREFIX}.129/25" - acli "net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=${AUTH_HOST},10.21.253.10 domains=${MY_DOMAIN_FQDN}" - acli "net.add_dhcp_pool ${MY_SECONDARY_NET_NAME} start=${HPOC_PREFIX}.132 end=${HPOC_PREFIX}.253" - fi - fi -} - -function authentication_source() { - local _attempts - local _error=13 - local _loop - local _pc_version=$(echo ${PC_VERSION} | awk -F. '{ print $1 "." $2$3$4}') - local _result - local _sleep - local _test=0 - local _vm - - CheckArgsExist 'AUTH_SERVER MY_DOMAIN_FQDN SLEEP MY_IMG_CONTAINER_NAME PC_VERSION' - - if [[ -z ${AUTH_SERVER} ]]; then - log "Error ${_error}: please provide a choice for authentication server." 
- exit ${_error} - fi - - case "${AUTH_SERVER}" in - 'ActiveDirectory') - log "Manual setup = https://github.com/nutanixworkshops/labs/blob/master/setup/active_directory/active_directory_setup.rst" - ;; - 'AutoDC') - local _autodc_auth - local _autodc_index=1 - local _autodc_release=1 - local _autodc_service='samba-ad-dc' - local _autodc_restart="service ${_autodc_service} restart" - local _autodc_status="systemctl show ${_autodc_service} --property=SubState" - local _autodc_success='SubState=running' - local _pc_version=$(echo ${PC_VERSION} | awk -F. '{ print $1 "." $2$3$4}') - - if (( $(echo "${_pc_version} >= 5.9" | bc -l) )); then - log "PC_VERSION ${PC_VERSION}==${_pc_version} >= 5.9, setting AutoDC-2.0..." - _autodc_auth=" --username=${MY_DOMAIN_USER} --password=${MY_DOMAIN_PASS}" - _autodc_index='' - _autodc_release=2 - _autodc_service=samba - _autodc_restart="sleep 2 && service ${_autodc_service} stop && sleep 5 && service ${_autodc_service} start" - _autodc_status="service ${_autodc_service} status" - _autodc_success=' * status: started' - - export AUTODC_REPOS=(\ # override global.vars. - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ - ) - fi - - dns_check "dc${_autodc_index}.${MY_DOMAIN_FQDN}" - _result=$? - - if (( ${_result} == 0 )); then - log "${AUTH_SERVER}${_autodc_release}.IDEMPOTENCY: dc${_autodc_index}.${MY_DOMAIN_FQDN} set, skip. ${_result}" - else - log "${AUTH_SERVER}${_autodc_release}.IDEMPOTENCY failed, no DNS record dc${_autodc_index}.${MY_DOMAIN_FQDN}" - - _error=12 - _loop=0 - _sleep=${SLEEP} - - repo_source AUTODC_REPOS[@] - - if (( `source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${AUTH_SERVER}${_autodc_release} | wc --lines` == 0 )); then - log "Import ${AUTH_SERVER}${_autodc_release} image from ${SOURCE_URL}..." 
- acli image.create ${AUTH_SERVER}${_autodc_release} \ - image_type=kDiskImage wait=true \ - container=${MY_IMG_CONTAINER_NAME} source_url=${SOURCE_URL} - else - log "Image found, skipping ${AUTH_SERVER}${_autodc_release} import." - fi - - # TODO: detect AUTH image ready, else... - log "Create ${AUTH_SERVER}${_autodc_release} VM based on ${AUTH_SERVER}${_autodc_release} image" - acli "vm.create ${AUTH_SERVER}${_autodc_release} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" - # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G - #acli "vm.disk_create ${AUTH_SERVER}${_autodc_release} cdrom=true empty=true" - acli "vm.disk_create ${AUTH_SERVER}${_autodc_release} clone_from_image=${AUTH_SERVER}${_autodc_release}" - acli "vm.nic_create ${AUTH_SERVER}${_autodc_release} network=${MY_PRIMARY_NET_NAME} ip=${AUTH_HOST}" - - log "Power on ${AUTH_SERVER}${_autodc_release} VM..." - acli "vm.on ${AUTH_SERVER}${_autodc_release}" - - _attempts=20 - _loop=0 - _sleep=10 - - while true ; do - (( _loop++ )) - - _test=$(remote_exec 'SSH' 'AUTH_SERVER' "${_autodc_status}") - if [[ "${_test}" == "${_autodc_success}" ]]; then - log "${AUTH_SERVER}${_autodc_release} is ready." - sleep ${_sleep} - break - elif (( ${_loop} > ${_attempts} )); then - log "Error ${_error}: ${AUTH_SERVER}${_autodc_release} VM running: giving up after ${_loop} tries." - _result=$(source /etc/profile.d/nutanix_env.sh \ - && for _vm in $(source /etc/profile.d/nutanix_env.sh && acli vm.list | grep ${AUTH_SERVER}${_autodc_release}) ; do acli -y vm.delete $_vm; done) - # acli image.delete ${AUTH_SERVER}${_autodc_release} - log "Remediate by deleting the ${AUTH_SERVER}${_autodc_release} VM from PE (just attempted by this script: ${_result}) and then running acli $_" - exit ${_error} - else - log "_test ${_loop}/${_attempts}=|${_test}|: sleep ${_sleep} seconds..." - sleep ${_sleep} - fi - done - - log "Create Reverse Lookup Zone on ${AUTH_SERVER}${_autodc_release} VM..." 
- _attempts=3 - _loop=0 - - while true ; do - (( _loop++ )) - # TODO:130 Samba service reload better? vs. force-reload and restart - remote_exec 'SSH' 'AUTH_SERVER' \ - "samba-tool dns zonecreate dc${_autodc_index} ${OCTET[2]}.${OCTET[1]}.${OCTET[0]}.in-addr.arpa ${_autodc_auth} && ${_autodc_restart}" \ - 'OPTIONAL' - sleep ${_sleep} - - dns_check "dc${_autodc_index}.${MY_DOMAIN_FQDN}" - _result=$? - - if (( ${_result} == 0 )); then - log "Success: DNS record dc${_autodc_index}.${MY_DOMAIN_FQDN} set." - break - elif (( ${_loop} > ${_attempts} )); then - if (( ${_autodc_release} < 2 )); then - log "Error ${_error}: ${AUTH_SERVER}${_autodc_release}: giving up after ${_loop} tries; deleting VM..." - acli "-y vm.delete ${AUTH_SERVER}${_autodc_release}" - exit ${_error} - fi - else - log "dns_check ${_loop}/${_attempts}=|${_result}|: sleep ${_sleep} seconds..." - sleep ${_sleep} - fi - done - - fi - ;; - 'OpenLDAP') - log "To be documented, see https://drt-it-github-prod-1.eng.nutanix.com/mark-lavi/openldap" - ;; - esac -} - -function pe_auth() { - CheckArgsExist 'MY_DOMAIN_NAME MY_DOMAIN_FQDN MY_DOMAIN_URL MY_DOMAIN_USER MY_DOMAIN_PASS MY_DOMAIN_ADMIN_GROUP' - - if [[ -z `ncli authconfig list-directory name=${MY_DOMAIN_NAME} | grep Error` ]]; then - log "IDEMPOTENCY: ${MY_DOMAIN_NAME} directory set, skip." 
- else - log "Configure PE external authentication" - ncli authconfig add-directory \ - directory-type=ACTIVE_DIRECTORY \ - connection-type=LDAP directory-url="${MY_DOMAIN_URL}" \ - domain="${MY_DOMAIN_FQDN}" \ - name="${MY_DOMAIN_NAME}" \ - service-account-username="${MY_DOMAIN_USER}" \ - service-account-password="${MY_DOMAIN_PASS}" - - log "Configure PE role map" - ncli authconfig add-role-mapping \ - role=ROLE_CLUSTER_ADMIN \ - entity-type=group name="${MY_DOMAIN_NAME}" \ - entity-values="${MY_DOMAIN_ADMIN_GROUP}" - fi -} - -function pe_license() { - CheckArgsExist 'CURL_POST_OPTS MY_PE_PASSWORD' - - log "IDEMPOTENCY: Checking PC API responds, curl failures are acceptable..." - Check_Prism_API_Up 'PC' 2 0 - - if (( $? == 0 )) ; then - log "IDEMPOTENCY: PC API responds, skip" - else - log "Validate EULA on PE" - curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${MY_PE_PASSWORD} -X POST --data '{ - "username": "SE with stage_calmhow.sh", - "companyName": "Nutanix", - "jobTitle": "SE" - }' https://localhost:9440/PrismGateway/services/rest/v1/eulas/accept - - log "Disable Pulse in PE" - curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${MY_PE_PASSWORD} -X PUT --data '{ - "defaultNutanixEmail": null, - "emailContactList": null, - "enable": false, - "enableDefaultNutanixEmail": false, - "isPulsePromptNeeded": false, - "nosVersion": null, - "remindLater": null, - "verbosityType": null - }' https://localhost:9440/PrismGateway/services/rest/v1/pulse - - #echo; log "Create PE Banner Login" # TODO: for PC, login banner - # https://portal.nutanix.com/#/page/docs/details?targetId=Prism-Central-Guide-Prism-v56:mul-welcome-banner-configure-pc-t.html - # curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${MY_PE_PASSWORD} -X POST --data \ - # '{type: "welcome_banner", key: "welcome_banner_status", value: true}' \ - # https://localhost:9440/PrismGateway/services/rest/v1/application/system_data - #curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${MY_PE_PASSWORD} -X POST --data - # '{type: 
"welcome_banner", key: "welcome_banner_content", value: "HPoC '${OCTET[2]}' password = '${MY_PE_PASSWORD}'"}' \ - # https://localhost:9440/PrismGateway/services/rest/v1/application/system_data - fi -} - -function pc_init() { - local _version_id - - log "IDEMPOTENCY: Checking PC API responds, curl failures are acceptable..." - Check_Prism_API_Up 'PC' 2 0 - - if (( $? == 0 )) ; then - log "IDEMPOTENCY: PC API responds, skip." - else - log "Get NET_UUID,MY_CONTAINER_UUID from cluster: pc_init dependency." - MY_NET_UUID=$(acli "net.get ${MY_PRIMARY_NET_NAME}" | grep "uuid" | cut -f 2 -d ':' | xargs) - log "${MY_PRIMARY_NET_NAME} UUID is ${MY_NET_UUID}" - MY_CONTAINER_UUID=$(ncli container ls name=${MY_CONTAINER_NAME} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) - log "${MY_CONTAINER_NAME} UUID is ${MY_CONTAINER_UUID}" - - NTNX_Download 'PC' - - log "Prism Central upload..." - # TODO: Error: Software prism_central_deploy.5.9.0.1 already exists on the cluster - ncli software upload software-type=PRISM_CENTRAL_DEPLOY \ - file-path="`pwd`/${NTNX_SOURCE_URL##*/}" \ - meta-file-path="`pwd`/${NTNX_META_URL##*/}" - - _version_id=$(cat ${NTNX_META_URL##*/} | jq -r .version_id) - - log "Delete PC sources to free CVM space..." - rm -f ${NTNX_SOURCE_URL##*/} ${NTNX_META_URL##*/} - - log "Deploy Prism Central (typically takes 17+ minutes)..." - # TODO:150 Parameterize DNS Servers & add secondary - # TODO:120 make scale-out & dynamic, was: 4vCPU/16GB = 17179869184, 8vCPU/40GB = 42949672960 - - HTTP_BODY=$(cat <> stage_calmhow_pc.log 2>&1 &" - log "PC Configuration complete: try Validate Staged Clusters now." -} - -function files_upload() { - #local _version_id - - log "Files download..." 
- - wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_META} - wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_SOURCE} - ncli software upload software-type=afs file-path="`pwd`/${NTNX_FILES_SOURCE}" meta-file-path="`pwd`/${NTNX_FILES_META}" - rm -f ${NTNX_FILES_SOURCE} ${NTNX_FILES_META} - -} - -function nos_upgrade() { - #this is a prototype, untried - NTNX_Download - - ncli software upload software-type=nos \ - meta-file-path="`pwd`/${NTNX_META_URL##*/}" \ - file-path="`pwd`/${NTNX_SOURCE_URL##*/}" -} -#__main()__________ - -# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables -. /etc/profile.d/nutanix_env.sh -. common.lib.sh -. global.vars.sh -begin - -CheckArgsExist 'MY_EMAIL MY_PE_HOST MY_PE_PASSWORD PC_VERSION' - -#Dependencies 'install' 'jq' && NTNX_Download 'PC' & #attempt at parallelization - -log "Adding key to PE/CVMs..." && SSH_PubKey || true & # non-blocking, parallel suitable - -# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! -Dependencies 'install' 'sshpass' && Dependencies 'install' 'jq' \ -&& pe_license \ -&& pe_init \ -&& network_configure \ -&& authentication_source \ -&& pe_auth \ -&& pc_init \ -&& Check_Prism_API_Up 'PC' - -wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_META} -wget -nv https://s3.amazonaws.com/get-ahv-images/${NTNX_FILES_SOURCE} -ncli software upload software-type=afs file-path="`pwd`/${NTNX_FILES_SOURCE}" meta-file-path="`pwd`/${NTNX_FILES_META}" -rm -f ${NTNX_FILES_SOURCE} ${NTNX_FILES_META} - -if (( $? == 0 )) ; then - pc_configure && Dependencies 'remove' 'sshpass' && Dependencies 'remove' 'jq'; - log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${MY_PE_HOST}:9440" - log "PC = https://${MY_PC_HOST}:9440" - finish -else - log "Error 18: in main functional chain, exit!" 
- exit 18 -fi diff --git a/scripts/stage_citrixhow.sh b/scripts/stage_citrixhow.sh index d0f10fa..6ae85a5 100644 --- a/scripts/stage_citrixhow.sh +++ b/scripts/stage_citrixhow.sh @@ -21,9 +21,9 @@ array=(${MY_CVM_IP//./ }) MY_HPOC_NUMBER=${array[2]} # HPOC Password (if commented, we assume we get that from environment) #MY_PE_PASSWORD='nx2TechXXX!' -MY_SP_NAME='SP01' -MY_CONTAINER_NAME='Default' -MY_IMG_CONTAINER_NAME='Images' +STORAGE_POOL='SP01' +STORAGE_DEFAULT='Default' +STORAGE_IMAGES='Images' MY_DOMAIN_FQDN='ntnxlab.local' MY_DOMAIN_NAME='NTNXLAB' MY_DOMAIN_USER='administrator@ntnxlab.local' @@ -69,19 +69,19 @@ ncli cluster set-smtp-server address=nutanix-com.mail.protection.outlook.com fro # Configure NTP my_log "Configure NTP" ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org -# Rename default storage container to MY_CONTAINER_NAME -my_log "Rename default container to ${MY_CONTAINER_NAME}" +# Rename default storage container to STORAGE_DEFAULT +my_log "Rename default container to ${STORAGE_DEFAULT}" default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') -ncli container edit name="${default_container}" new-name="${MY_CONTAINER_NAME}" -# Rename default storage pool to MY_SP_NAME -my_log "Rename default storage pool to ${MY_SP_NAME}" +ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" +# Rename default storage pool to STORAGE_POOL +my_log "Rename default storage pool to ${STORAGE_POOL}" default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) -ncli sp edit name="${default_sp}" new-name="${MY_SP_NAME}" -# Check if there is a container named MY_IMG_CONTAINER_NAME, if not create one -my_log "Check if there is a container named ${MY_IMG_CONTAINER_NAME}, if not create one" -(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep 
"^${MY_IMG_CONTAINER_NAME}" 2>&1 > /dev/null) \ - && echo "Container ${MY_IMG_CONTAINER_NAME} already exists" \ - || ncli container create name="${MY_IMG_CONTAINER_NAME}" sp-name="${MY_SP_NAME}" +ncli sp edit name="${default_sp}" new-name="${STORAGE_POOL}" +# Check if there is a container named STORAGE_IMAGES, if not create one +my_log "Check if there is a container named ${STORAGE_IMAGES}, if not create one" +(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^${STORAGE_IMAGES}" 2>&1 > /dev/null) \ + && echo "Container ${STORAGE_IMAGES} already exists" \ + || ncli container create name="${STORAGE_IMAGES}" sp-name="${STORAGE_POOL}" # Set external IP address: #ncli cluster edit-params external-ip-address=10.21.${MY_HPOC_NUMBER}.37 # Set Data Services IP address: @@ -92,7 +92,7 @@ ncli cluster edit-params external-data-services-ip-address=10.21.${MY_HPOC_NUMBE MY_IMAGE="AutoDC" retries=1 my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${MY_IMG_CONTAINER_NAME}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2 wait=true) =~ "complete" ]]; do +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2 wait=true) =~ "complete" ]]; do let retries++ if [ $retries -gt 5 ]; then my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." 
@@ -106,7 +106,7 @@ done MY_IMAGE="CentOS" retries=1 my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${MY_IMG_CONTAINER_NAME}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/CentOS7-04282018.qcow2 wait=true) =~ "complete" ]]; do +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/CentOS7-04282018.qcow2 wait=true) =~ "complete" ]]; do let retries++ if [ $retries -gt 5 ]; then my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." @@ -120,7 +120,7 @@ done MY_IMAGE="Windows2012" retries=1 my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${MY_IMG_CONTAINER_NAME}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows2012R2-04282018.qcow2 wait=true) =~ "complete" ]]; do +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows2012R2-04282018.qcow2 wait=true) =~ "complete" ]]; do let retries++ if [ $retries -gt 5 ]; then my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." @@ -134,7 +134,7 @@ done MY_IMAGE="Windows10" retries=1 my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${MY_IMG_CONTAINER_NAME}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows10-1709-04282018.qcow2 wait=true) =~ "complete" ]]; do +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows10-1709-04282018.qcow2 wait=true) =~ "complete" ]]; do let retries++ if [ $retries -gt 5 ]; then my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." 
@@ -148,7 +148,7 @@ done MY_IMAGE="XenDesktop-7.15.iso" retries=1 my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${MY_IMG_CONTAINER_NAME}" image_type=kIsoImage source_url=http://10.21.250.221/images/ahv/techsummit/XD715.iso wait=true) =~ "complete" ]]; do +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kIsoImage source_url=http://10.21.250.221/images/ahv/techsummit/XD715.iso wait=true) =~ "complete" ]]; do let retries++ if [ $retries -gt 5 ]; then my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." @@ -258,8 +258,8 @@ ncli user grant-cluster-admin-role user-name=xd my_log "Get UUIDs from cluster:" MY_NET_UUID=$(acli net.get ${MY_PRIMARY_NET_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs) my_log "${MY_PRIMARY_NET_NAME} UUID is ${MY_NET_UUID}" -MY_CONTAINER_UUID=$(ncli container ls name=${MY_CONTAINER_NAME} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) -my_log "${MY_CONTAINER_NAME} UUID is ${MY_CONTAINER_UUID}" +MY_CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) +my_log "${STORAGE_DEFAULT} UUID is ${MY_CONTAINER_UUID}" # Validate EULA on PE my_log "Validate EULA on PE" @@ -316,7 +316,6 @@ rm ${MY_PC_SRC_URL##*/} ${MY_PC_META_URL##*/} # Deploy Prism Central my_log "Deploy Prism Central" -# TODO:110 Parameterize DNS Servers & add secondary MY_DEPLOY_BODY=$(cat < 0 )) || [[ -z "${VM_UUID}" ]]; then @@ -42,21 +42,21 @@ else log "VM_UUID: ${VM_UUID}" fi -VMDISK_NFS_PATH=$(acli -H ${MY_PE_HOST} -o json vm.get ${VM_NAME} include_vmdisk_paths=true \ +VMDISK_NFS_PATH=$(acli -H ${PE_HOST} -o json vm.get ${VM_NAME} include_vmdisk_paths=true \ | jq .data.\"${VM_UUID}\".config.disk_list[].vmdisk_nfs_path \ | grep -v null | tr -d \") # leading /, acli output example = logs/vm.list.pretty.json if (( $? 
> 0 )) || [[ -z "${VMDISK_NFS_PATH}" ]]; then log "Error: couldn't resolve VMDISK_NFS_PATH: $?" exit 12 else - echo "VMDISK_NFS_PATH: nfs://${MY_PE_HOST}${VMDISK_NFS_PATH}" + echo "VMDISK_NFS_PATH: nfs://${PE_HOST}${VMDISK_NFS_PATH}" fi IMG=${VM_NAME}_$(date +%Y%m%d-%H:%M) log "Image upload: ${IMG}..." nuclei image.create name=${IMG} \ description="${IMG} updated with centos password and cloud-init" \ - source_uri=nfs://${MY_PE_HOST}${VMDISK_NFS_PATH} + source_uri=nfs://${PE_HOST}${VMDISK_NFS_PATH} if (( $? != 0 )); then log "Warning: Image submission: $?." @@ -67,6 +67,7 @@ log "TODO: nuclei image.list, state = COMPLETE; image.list Name UUID State" exit 0 +cat < Date: Fri, 15 Feb 2019 10:28:15 -0800 Subject: [PATCH 012/691] Updated to Match MLavi --- README.md | 74 +-- bootstrap.sh | 41 +- bugs.md | 359 ------------- {autodc => documentation/autodc}/README.md | 44 +- .../autodc}/add_group_and_users.sh | 0 .../autodc}/autodc-v1-patch.sh | 8 + documentation/autodc/autodc.crt | 29 ++ documentation/autodc/autodc.csr | 26 + documentation/autodc/autodc.key | 51 ++ documentation/autodc/autodc.pem | 51 ++ .../autodc}/tls_cert.conf | 0 documentation/bugs.md | 486 ++++++++++++++++++ documentation/darksite.md | 27 + documentation/guidebook.md | 389 ++++++++++++++ documentation/sh-colo.md | 149 ++++++ guidebook.md | 159 ------ hooks/pre-commit/01-GitVersion | 1 - hooks/pre-commit/01-release | 1 + hooks/scripts/GitVersion.sh | 17 - hooks/scripts/semver_release.sh | 27 + release.json | 46 +- stage_workshop.sh | 235 +++++---- test/bats1.bats | 6 +- .../beachhead-centos7-calm5.7.0.1.json | 0 test/repo_source.sh | 9 +- test/url_hardcoded.sh | 9 + 26 files changed, 1514 insertions(+), 730 deletions(-) delete mode 100644 bugs.md rename {autodc => documentation/autodc}/README.md (77%) rename {autodc => documentation/autodc}/add_group_and_users.sh (100%) rename {autodc => documentation/autodc}/autodc-v1-patch.sh (73%) create mode 100644 documentation/autodc/autodc.crt create mode 
100644 documentation/autodc/autodc.csr create mode 100644 documentation/autodc/autodc.key create mode 100644 documentation/autodc/autodc.pem rename {autodc => documentation/autodc}/tls_cert.conf (100%) create mode 100644 documentation/bugs.md create mode 100644 documentation/darksite.md create mode 100644 documentation/guidebook.md create mode 100644 documentation/sh-colo.md delete mode 100644 guidebook.md delete mode 120000 hooks/pre-commit/01-GitVersion create mode 120000 hooks/pre-commit/01-release delete mode 100755 hooks/scripts/GitVersion.sh create mode 100755 hooks/scripts/semver_release.sh mode change 100644 => 100755 stage_workshop.sh rename beachhead-centos7-calm5.7.0.1.json => test/beachhead-centos7-calm5.7.0.1.json (100%) create mode 100644 test/url_hardcoded.sh diff --git a/README.md b/README.md index a42d878..4605b54 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,37 @@ -This script supports staging HPoC clusters for [Nutanix Workshops](http://www.nutanixworkshops.com). +This script supports staging HPoC clusters for [Nutanix Workshops](https://nutanix.handsonworkshops.com/). It automates the majority of the [Workshop Setup Guide](http://www.nutanixworkshops.com/en/latest/setup/). After HPoC Foundation, you can have push-button Calm in about half an hour! -# Table of Contents # - -1. [Available Workshops](#available-workshops) -2. [HPoC Cluster Reservation](#hpoc-cluster-reservation) -3. [Staging Your HPoC](#staging-your-hpoc) - 1. [Interactive Usage](#interactive-usage) - 2. [Non-interactive Usage](#non-interactive-usage) -4. [Validate Staged Clusters](#validate-staged-clusters) -5. 
[Authentication](#authentication) +--- +# Table of Contents # + + +- [Available Workshops](#available-workshops) +- [HPoC Cluster Reservation](#hpoc-cluster-reservation) +- [Staging Your HPoC](#staging-your-hpoc) + - [Interactive Usage](#interactive-usage) + - [Non-interactive Usage](#non-interactive-usage) + - [Validate Staged Clusters](#validate-staged-clusters) +- [Authentication](#authentication) + + +--- ## Available Workshops ## 1. Calm Introduction Workshop (AOS/AHV 5.5+) 2. Citrix Desktop on AHV Workshop (AOS/AHV 5.6) +See the WORKSHOPS list at the top of [stage_workshop.sh](blob/master/stage_workshop.sh#L8). + ## HPoC Cluster Reservation ## Make your new reservation on https://rx.corp.nutanix.com/ with: -- __Region:__ NX-US-West or US-East regions only -- __AOS + Hypevisor:__ proper versions for your workshop, specified above +- __AOS + Hypervisor:__ proper versions for your workshop, specified above - Recommended: AOS and AHV 5.8 - Older or newer versions may not function as expected -- __OS Images:__ *you do not* need to specify images (CentOS, Windows2012, etc.) for your reservation +- __VM Images:__ *you do not* need to specify images (CentOS, Windows2012, etc.) for your reservation ## Staging Your HPoC ## @@ -60,15 +66,13 @@ Finally, execute the script to stage the HPOC clusters defined in your text file ### Interactive Usage ### -````./stage_workshop.sh```` +`./stage_workshop.sh` -Running the script interactively - will prompt you to input the name of your text file containing your cluster IP and password details. - You will then be prompted to choose a Workshop to stage. +Running the script interactively will prompt you to input the name of your text file containing your cluster IP and password details. You will then be prompted to choose a Workshop to stage. 
### Non-interactive Usage ### -````./stage_workshop.sh -f [example_pocs.txt] -w [workshop number]```` +`./stage_workshop.sh -f [example_pocs.txt] -w [workshop number]` Each staging option will deploy: @@ -77,31 +81,27 @@ Each staging option will deploy: - Prism Central - configuring AHV networks for your Primary and Secondary VLANs. -If you encounter issues reach out to @matt on Slack. +Ask questions not covered here to the Global Sales Technical Enablement team via Slack, review the pinned items in each channel first: +- __#technology-bootcamps:__ for customer and prospect bootcamps +- __#hands-on-workshops:__ for Nutanix Partner and SE workshops -## Validate Staged Clusters ## +### Validate Staged Clusters ### After staging (~30m), you can re-run the stage_workshop script and select "Validate Staged Clusters" to perform a quick check to ensure all images were uploaded and that Prism Central was provisioned as expected. Example: -```` -./stage_workshop.sh -Cluster Input File: example_pocs.txt -1) Calm Introduction Workshop (AOS/AHV 5.6) -2) Citrix Desktop on AHV Workshop (AOS/AHV 5.6) -3) Change Cluster Input File -4) Validate Staged Clusters -5) Quit -Select an option: 4 -10.21.44.37 - Prism Central staging FAILED -10.21.44.37 - Review logs at 10.21.44.37:/home/nutanix/config.log and 10.21.44.39:/home/nutanix/pcconfig.log -```` + ./stage_workshop.sh + Cluster Input File: example_pocs.txt + 1) Calm Introduction Workshop (AOS/AHV 5.6) + 2) Citrix Desktop on AHV Workshop (AOS/AHV 5.6) + 3) Change Cluster Input File + 4) Validate Staged Clusters + 5) Quit + Select an option: 4 + 10.21.44.37 - Prism Central staging FAILED + 10.21.44.37 - Review logs at 10.21.44.37:/home/nutanix/config.log and 10.21.44.39:/home/nutanix/pcconfig.log ## Authentication ## -OpenLDAP works fine for authentication, but Prism Central has a problem with anything more than simple RBAC with it. 
-- https://jira.nutanix.com/browse/ENG-126217 openldap authentication difference in PC vs PE - - fixed with PC 5.7.1 - -In the meantime, one can use Windows Server: Active Directory, but for simpler and faster results, the automation leverages [AutoDC](autodc/README.md). +One can use Windows Server: Active Directory, but for simpler and faster results, the automation leverages [AutoDC](documentation/autodc/README.md). diff --git a/bootstrap.sh b/bootstrap.sh index ac94fb3..1d74e94 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -11,6 +11,7 @@ if [[ -z ${SOURCE} ]]; then REPOSITORY=stageworkshop BRANCH=master else + # shellcheck disable=2206 URL_SOURCE=(${SOURCE//\// }) # zero index ORGANIZATION=${URL_SOURCE[2]} REPOSITORY=${URL_SOURCE[3]} @@ -38,7 +39,7 @@ if [[ -f ${BRANCH}.zip ]]; then sh ${HOME}/${0} clean fi -echo -e "\nFor details, please see: ${BASE_URL}" +echo -e "\nFor details, please see: ${BASE_URL}/documentation/guidebook.md" _ERROR=0 @@ -55,8 +56,8 @@ CLUSTER_NAME+=$(ncli cluster get-params | grep 'Cluster Name' \ | awk -F: '{print $2}' | tr -d '[:space:]') EMAIL_DOMAIN=nutanix.com -if [[ -z ${MY_PE_PASSWORD} ]]; then - _PRISM_ADMIN=admin +if [[ -z ${PE_PASSWORD} ]]; then + _PRISM_ADMIN='admin' echo -e "\n Note: Hit [Return] to use the default answer inside brackets.\n" read -p "Optional: What is this cluster's admin username? [${_PRISM_ADMIN}] " PRISM_ADMIN if [[ -z ${PRISM_ADMIN} ]]; then @@ -72,22 +73,22 @@ if [[ -z ${MY_PE_PASSWORD} ]]; then echo "Error ${_ERROR}: passwords do not match." exit ${_ERROR} else - MY_PE_PASSWORD=${_PW1} + PE_PASSWORD=${_PW1} unset _PW1 _PW2 fi fi -if [[ -z ${MY_EMAIL} ]]; then +if [[ -z ${EMAIL} ]]; then echo -e "\n Note: @${EMAIL_DOMAIN} will be added if domain omitted." - read -p "REQUIRED: Email address for cluster admin? " MY_EMAIL + read -p "REQUIRED: Email address for cluster admin? 
" EMAIL fi _WC_ARG='--lines' -if [[ `uname -s` == "Darwin" ]]; then +if [[ $(uname -s) == 'Darwin' ]]; then _WC_ARG='-l' fi -if (( $(echo ${MY_EMAIL} | grep @ | wc ${_WC_ARG}) == 0 )); then - MY_EMAIL+=@${EMAIL_DOMAIN} +if (( $(echo ${EMAIL} | grep @ | wc ${_WC_ARG}) == 0 )); then + EMAIL+=@${EMAIL_DOMAIN} fi if [[ -d ../${REPOSITORY}-${BRANCH} ]]; then @@ -107,32 +108,30 @@ if [[ -e release.json ]]; then echo -e "\n${ARCHIVE}::$(basename $0) release: $(grep FullSemVer release.json | awk -F\" '{print $4}')" fi -MY_PE_HOST=$(ncli cluster get-params \ +PE_HOST=$(ncli cluster get-params \ | grep 'External IP' \ | awk -F: '{print $2}' \ | tr -d '[:space:]') -echo -e "\nStarting stage_workshop.sh for ${MY_EMAIL} with ${PRISM_ADMIN}:passwordNotShown@${MY_PE_HOST} ...\n" +echo -e "\nStarting stage_workshop.sh for ${EMAIL} with ${PRISM_ADMIN}:passwordNotShown@${PE_HOST} ...\n" if [[ ! -z ${WORKSHOP} ]]; then echo -e "\tAdding workshop: ${WORKSHOP}" MY_WORKSHOP=" -w ${WORKSHOP}" fi - MY_EMAIL=${MY_EMAIL} \ - MY_PE_HOST=${MY_PE_HOST} \ - PRISM_ADMIN=${PRISM_ADMIN} \ -MY_PE_PASSWORD=${MY_PE_PASSWORD} \ -./stage_workshop.sh -f - ${MY_WORKSHOP} \ - && popd + EMAIL=${EMAIL} \ + PE_HOST=${PE_HOST} \ +PRISM_ADMIN=${PRISM_ADMIN} \ +PE_PASSWORD=${PE_PASSWORD} \ +./stage_workshop.sh -f - ${MY_WORKSHOP} # \ +# && popd || exit echo -e "\n DONE: ${0} ran for ${SECONDS} seconds." cat < http.txt```` - - download 403 detection: authentication unauthorized - -# Backlog # - -- CI/CD pipeline demo -- LAMP v2 application improvements (reboot nice to have) -- Calm videos/spreadsheet -- Multi product demo -- Projects: update default or create new project -- PC_Init|Reset PC password to PE password, must be done by nci@PC, not API or on PE - Error: Password requirements: Should be at least 8 characters long. Should have at least 1 lowercase character(s). Should have at least 1 uppercase character(s). Should have at least 1 digit(s). Should have at least 1 special character(s). 
Should differ by at least 4 characters from previous password. Should not be from last 5 passwords. Should not have more than 2 same consecutive character(s). Should not be a dictionary word or too simplistic/systematic. Should should have at least one character belonging to 4 out of the 4 supported classes (lowercase, uppercase, digits, special characters). - 2018-10-02 10:56:27|92834|PC_Init|Warning: password not reset: 0.# -- Fix role mappings, logins on PE, PC - - PE, PC: use RBAC user for APIs, etc.: cluster Admin - - improve/run autodc/add_group_and_users.sh - - adminuser01@ntnxlab.local (password = nutanix/4u) can’t login to PE. - “You are not authorized to access Prism. Please contact the Nutanix administrator.†- add user01@ntnxlab.local to role mapping, same error as above. -- OpenLDAP is now supported for Self Service on Prism Central: ENG-126217 - -- TODO: Add link: https://drt-it-github-prod-1.eng.nutanix.com/akim-sissaoui/calm_aws_setup_blueprint/blob/master/Action%20Create%20Project/3-Create%20AWS%20Calm%20Entry -- TODO: check remote file for cache, containers, images before uploading and skip when OPTIONAL -- nuclei (run local from container?) - - version.get # gives API 3.1 and AOS 5.7.0.1 (bug!) - - vs: cat /etc/nutanix/release_version - - project.create name=mark.lavi.test \ - description='test_from NuClei!' - - project.get mark.lavi.test - - project.update mark.lavi.test - spec.resources.account_reference_list.kind= or .uuid - spec.resources.default_subnet_reference.kind= - spec.resources.environment_reference_list.kind= - spec.resources.external_user_group_reference_list.kind= - spec.resources.subnet_reference_list.kind= - spec.resources.user_reference_list.kind= - - resources: - account_reference_list: [] - environment_reference_list: [] - external_user_group_reference_list: [] - is_default: false - resource_domain: - resources: [] - subnet_reference_list: [] - user_reference_list: [] - - nuclei authconfig (run local from container?) 
See notes#nuceli section, below. -- TODO: (localize?) and upload blueprint via nuclei (see unit tests)? -- TODO: Default project environment set, enable marketplace item, launch! -- TODO: Enable multiple cloud account settings, then environments, then marketplace launch -- TODO: PE, PC: clear our warnings: resolve/ack issues for cleanliness? - -- TODO: Calm 5.8 bootcamp labs and 5.5-6 bugs - - https://github.com/nutanixworkshops/introcalm - vs. https://github.com/mlavi/calm_workshop - - file Calm bugs from guide -- Boxcutter for AHV: - - extend scripts/vmdisk2image-pc.sh to - - https://qemu.weilnetz.de/doc/qemu-doc.html#disk_005fimages_005fssh - qemu-system-x86_64 -drive file=ssh://[user@]server[:port]/path[?host_key_check=host_key_check] - - download (NFS?)/export image - - upload/import image - - drive into Jenkinsfile pipeline job - - periodic runs: weekly? - - Base images/boxes: https://github.com/chef/bento -- Refactor 10.21 out further: mostly done! move to bats? -- refactor out all passwords, hardcoded values to variables - - SSP Admins -- Create adminuser2, assign privs, use it instead of base admin user (drop privs/delete at end?) -- ncli rsyslog -- Add widget Deployed Applications to (default) dashboard - -- FEATURE: improved software engineering - - https://githooks.com/ - - https://github.com/nkantar/Autohook - - https://pre-commit.com/ - - brew install pre-commit - - https://github.com/rycus86/githooks - - Add (git)version/release to each script (assembly?) for github archive cache - - https://semver.org/ - - https://guides.github.com/introduction/flow/index.html - - https://github.com/GitTools/GitVersion - - https://gitversion.readthedocs.io/en/stable/usage/command-line/ - - brew install gitversion - - GitVersion /showConfig - - sudo apt-get install mono-complete - - do not: sudo apt-get install libcurl3 # removes curl libcurl4 - - Download dotnet4 zip archive - - put on mono-path? 
- - Investigate https://hub.docker.com/r/gittools/gitversion-fullfx/ - - docker pull gittools/gitversion-fullfx:linux - - docker run --rm -v "$(pwd):/repo" gittools/gitversion-fullfx:linux{-version} /repo - - gitversion | tee gitversion.json | jq -r .FullSemVer - - ````ls -l *json && echo _GV=${_GV}```` - - ````_GV=gitversion.json ; rm -f ${_GV} \ - && gitversion | tee ${_GV} | grep FullSemVer | awk -F\" '{print $4}' && unset _GV```` - - https://blog.ngeor.com/2017/12/19/semantic-versioning-with-gitversion.html - - versus https://github.com/markchalloner/git-semver - - ~/Documents/github.com/ideadevice/calm/src/calm/tests/qa/docs - = https://github.com/ideadevice/calm/tree/master/src/calm/tests/qa/docs - - start a feature branch - - syslog format: INFO|DEBUG|etc. - - https://en.wikipedia.org/wiki/Syslog#Severity_level - - Per Google shell style guide: - - refactor function names to lowercase: https://google.github.io/styleguide/shell.xml?showone=Function_Names#Function_Names - - http://jake.ginnivan.net/blog/2014/05/25/simple-versioning-and-release-notes/ - - https://github.com/GitTools/GitReleaseNotes -# Bash test framework for unit tests and on blueprints? - - https://kitchen.ci/ which can do spec, BATS, etc. = https://github.com/test-kitchen/test-kitchen - - https://kitchen.ci/docs/getting-started/writing-test - - https://serverspec.org/ DSL Spec TDD - - http://rspec.info/ Ruby TDD - - inspec - - more compliance from supermarket - - https://dev-sec.io/features.html#os-hardening - - https://www.cisecurity.org/cis-benchmarks/ - - https://en.wikipedia.org/wiki/ERuby - - https://www.engineyard.com/blog/bats-test-command-line-tools - - https://medium.com/@pimterry/testing-your-shell-scripts-with-bats-abfca9bdc5b9 - - http://ohmyz.sh/ - - https://github.com/jakubroztocil/httpie#scripting - - https://github.com/pimterry/git-confirm - - BATS https://github.com/bats-core/bats-core - - https://invent.life/project/bash-infinity-framework - - Runit/rundeck? 
http://bashdb.sourceforge.net/ - - Tests: - - external URLs working (PC x, sshpass, jq, autodc, etc.) - - userX login to PE, PC - - userX new project, upload, run blueprint - - GOOD: user01@ntnxlab.local auth test fine@PE, bats? - -# AutoDC: - - GOOD: - - NTNXLAB, ntnxlab.local, root:nutanix/4u - - samba --version Version 4.2.14-Debian - - https://wiki.archlinux.org/index.php/samba - - https://gitlab.com/mlavi/alpine-dc (fork) - - yum install samba-ldap - - https://help.ubuntu.com/lts/serverguide/samba-ldap.html.en - - Move AutoDC to DHCP? and adjust DNS for SRE HPOC subnets? - -# DOCUMENTATION: - - review, refactor & migrate to bugs.txt: TODO, TOFIX comments - - Insure exit codes unique/consistent, error messages consistent - -# OPTIMIZATION: - - Upload AutoDC image in parallel with PC.tar - - restore http_resume check/attempt - - create cache, use cache, propagate cache to PC, fall back to global - -# Notes # - -## Citations for other Calm automation ## - -- https://drt-it-github-prod-1.eng.nutanix.com/sylvain-huguet/auto-hpoc -- One more: @anthony.c? -- https://gitlab.com/Chandru.tkc/Serviceability_shared/ - - pc-automate/installpc.py - - 24: "heartbeat": "/PrismGateway/services/rest/v1/heartbeat", - - 326: def validate_cluster(entity): - - 500: def add_network_to_project(name,directory_uuid): - -## Push Button Calm # - -- https://github.com/mlavi/stageworkshop/blob/master/guidebook.md -- MP4 Video = 292MB: https://drive.google.com/open?id=1AfIWDff-mlvwth_lKv9DG4x-vi0ZsWij - ~11 minute screencast overview of the 70 minute journey from Foundation - to Calm running a blueprint: most of it is waiting for foundation and PC download/upload/deploy. -- Social coding: https://github.com/nutanixworkshops/stageworkshop/pull/1 -- Biggest pain: - - finding a HPOC - - second biggest pain: keeping it for more than a few hours except on the weekend. - - third biggest pain: coding in Bash :slightly_smiling_face: it makes you miss even script kiddie programming languages! 
- -## NuCLeI ## - -https://jira.nutanix.com/browse/ENG-78322 -````app_blueprint -availability_zone -available_extension -available_extension_images -catalog_item -category -certificate -changed_regions -client_auth -cloud_credentials -cluster -container -core CLI control. -diag Diagnostic tools. -directory_service -disk -docker_image -docker_registry -exit Exits the CLI. -extension -get Gets the current value of the given configuration options. -help Provides help text for the named object. -host -image -network_function_chain -network_security_rule -oauth_client -oauth_token -permission -project -protection_rule -quit Exits the CLI. -recovery_plan -recovery_plan_job -remote_connection -report_config -report_instance -role -set Sets the value of the given configuration options. -ssh_user -subnet -user -version NuCLEI Version Information. -virtual_network -vm -vm_backup -vm_snapshot -volume_group -volume_group_backup -volume_group_snapshot -webhook -```` - -### nuclei authconfig (run local from container?) #### - -````list | ls -edit | update -remove | rm -list-directory | ls-directory -create-directory | add-directory -edit-directory | update-directory -remove-directory | rm-directory -list-role-mappings | ls-role-mappings -delete-role-mapping -add-role-mapping -add-to-role-mapping-values -remove-from-role-mapping-values -get-directory-values-by-type -test-ldap-connection -```` - -## Image Uploading ## -TOFIX: -- https://jira.nutanix.com/browse/FEAT-7112 -- https://jira.nutanix.com/browse/ENG-115366 -once PC image service takes control, rejects PE image uploads. Move to PC, not critical path. - -KB 4892 = https://portal.nutanix.com/#/page/kbs/details?targetId=kA00e000000XePyCAK -v3 API = http://developer.nutanix.com/reference/prism_central/v3/#images two steps: - -1. POST /images to create image metadata and get UUID, see logs/spec-image.json -2. 
PUT images/uuid/file: upload uuid, body, checksum and checksum type: sha1, sha256 -or nuclei, only on PCVM or in container - -## File servers for container updates ## - -- https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#What_you_get_with_each_reservation -- https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#Lab_Resources -- https://sewiki.nutanix.com/index.php/HPOC_Access_Instructions#FTP - - \\lab-ftp\ftp - - smb://hpoc-ftp/ = \\hpoc-ftp\ftp - - ftp://nutanix:nutanix/4u@hostedpoc.nutanix.com/ - - smb://pocfs/ = \\pocfs\iso\ and \images\ - - smb://pocfs.nutanixdc.local use: auth - - WIN> nslookup pocfs.nutanixdc.local - - smbclient -I 10.21.249.12 \\\\pocfs\\images \ - --user mark.lavi@nutanixdc.local --command "prompt ; cd /Calm-EA/pc-5.7.1/ ; mget *tar" - - smb://hpoc-afs/ = \\hpoc-afs\se\ - - smbclient \\\\hpoc-afs\\se\\ --user mark.lavi@nutanixdc.local --debuglevel=10 - - WIN> nslookup hpoc-afs.nutanixdc.local - 10.21.249.41-3 - - smbclient -I 10.21.249.41 \\\\hpoc-afs\\se\\ --user mark.lavi@nutanixdc.local - - smb://NTNX-HPOC-AFS-3.NUTANIXDC.LOCAL - default password = welcome123 - - https://ubuntuswitch.wordpress.com/2010/02/05/nautilus-slow-network-or-network-does-not-work/ -- smb-client vs cifs? 
- - https://www.tldp.org/HOWTO/SMB-HOWTO-8.html - - https://www.samba.org/samba/docs/current/man-html/smbclient.1.html - - https://linux-cifs.samba.org/ - - https://pserver.samba.org/samba/ftp/cifs-cvs/linux-cifs-client-guide.pdf - - https://serverfault.com/questions/609365/cifs-mount-in-fstab-succeeds-on-ip-fails-on-hostname-written-in-etc-hosts - - sudo apt-get install cifs-utils - - yum install cifs-utils - man mount.cifs - USER=mark.lavi@nutanix.com PASSWD=secret mount -t cifs //hpoc-afs/se /mnt/se/ - - mac: sudo mount -v -r -t nfs -o resvport,nobrowse,nosuid,locallocks,nfc,actimeo=1 10.21.34.37:/SelfServiceContainer/ nfstest -- mount AFS and then put a web/S/FTP server on top -- python -m SimpleHTTPServer 8080 || python -m http.server 8080 - -# Git Notes # - -https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project - -``` -$ git remote show -origin - -# https://gitversion.readthedocs.io/en/stable/reference/git-setup/ -$ git remote add upstream https://github.com/nutanixworkshops/stageworkshop.git - -$ git remote show -upstream -origin - -$ git fetch upstream -$ git merge upstream/master - -$ git tags -$ git tag -a 2.0.1 [hash] -$ git push origin --tags -```` diff --git a/autodc/README.md b/documentation/autodc/README.md similarity index 77% rename from autodc/README.md rename to documentation/autodc/README.md index d4bbba8..e7c6268 100644 --- a/autodc/README.md +++ b/documentation/autodc/README.md @@ -1,11 +1,17 @@ # AutoDC # +--- + +- [Tips](#tips) +- [AutoDC2](#autodc2) + + +--- AutoDC (Auto Domain Controller) was created by @John.Walker, using Alpine (and Turnkey Linux?), to stand up a pre-configured Samba DC. https://gitlab.com/devnull-42/alpine-dc -The console runs an ncurses application which allows simple reconfiguration of domain, -administator password, and reboots. Console credentials are root:nutanix/4u +The console runs an ncurses application which allows simple reconfiguration of domain, administrator password, and reboots. 
Console credentials are root:nutanix/4u > The auto_dc qcow2 that Johnny “Blue Label†Walker has created can be found here: http://10.21.250.221/images/auto_dc.qcow2 > It creates with four groups and corresponding users. basic-users, power-users, developers, and ssp-admins. (so, good to go from that perspective for PC/SSP/Calm). It sets up DNS forwarding correctly for the HPOC environment so anything it can’t resolve will forward to 10.21.250.10 / 10.21.250.11 -- meaning that you’re safe to create an IPAM network that uses that as its DNS server to resolve both hostnames you’ve created as well as nutanixdc.local and everything else outside. And it creates the domain ... poclab? IIRC? @@ -26,42 +32,46 @@ Users and groups are imported as part of the initialization: |poweruser01-05|nutanix/4u|SSP Power Users| |basicuser01-05|nutanix/4u|SSP Basic Users| -### Tips ### +Via: ./add_group_and_users.sh + +|Username(s)|Password|Group| +|----|-----|-----| +|user01-25|nutanix/4u|? confirm first| +|user26-69|nutanix/4u|CalmAdmin| -When rebuilding a HPOC from rx, foundation automation takes: -- 4 nodes@NX-3060-G5: 30 minutes -- 4 nodes@NX-1050: 40 minutes. +## Tips ## -I believe you can [easily get away with 2GB RAM for AutoDC](https://github.com/mlavi/stageworkshop/blob/master/scripts/stage_calmhow.sh#L131), - so I use that. +Investigating memory use with `vmstat --wide --unit M --active`, +I believe you can [easily get away with 2GB RAM for AutoDC](https://github.com/mlavi/stageworkshop/blob/master/scripts/lib.pe.sh#L88). You may wish to use ````poc_samba_user.sh```` to populate AutoDC past the initial set of users, above: 1. Modify the hard-coded variables at the top if needed. 2. ````export DC_IP='10.21.example.40' && scp poc*sh root@${DC_IP}: && ssh root@${DC_IP} "chmod u+x poc*sh; ./poc_samba_users.sh"```` -## Next Gen AutoDC ## +## AutoDC2 ## @JohnWalker: The new one I have built has an image size of 85MB and is based on Alpine linux. 
I just need to finish the TUI interface for changing the config. Mark: "Is there a web GUI type tool that we could build in? I’m thinking CPanel might be able to tackle that?" -@JohnWalker: I'm building a console interface in Python that will handle that. +@JohnWalker: +>I'm building a console interface in Python that will handle that. I’m going to have a couple of ways. 1 would be to edit the users.csv and groups.csv. The other would be to add users and groups individually without re-initializing the dc. -## Version2 ## - PC 5.9.x Authentication was changed to add optional search recursion and strengthen security. This regressed the behavior of authentication configuration that worked in PC 5.8.x and works in PE 5.9.x. - https://jira.nutanix.com/browse/ENG-180716 "Invalid service account details" error message is incorrect Workaround: autodc-2.0.qcow2 release. -John Walker [14:13] -I figured it out. The default certs are fine +John Walker: +>I figured it out. The default certs are fine Need to add the following to the [global] section in /etc/samba/smb.conf - ldap server require strong auth = no -The connection from Prism wasn't strong enough so Samba was rejecting it. + + `ldap server require strong auth = no` + +>The connection from Prism wasn't strong enough so Samba was rejecting it. I added that, restarted samba and was able to connect. 
-Using ldap://ip:389 and the DOMAIN\username format for the user +Using `ldap://ip:389` and the `DOMAIN\username` format for the user diff --git a/autodc/add_group_and_users.sh b/documentation/autodc/add_group_and_users.sh similarity index 100% rename from autodc/add_group_and_users.sh rename to documentation/autodc/add_group_and_users.sh diff --git a/autodc/autodc-v1-patch.sh b/documentation/autodc/autodc-v1-patch.sh similarity index 73% rename from autodc/autodc-v1-patch.sh rename to documentation/autodc/autodc-v1-patch.sh index e2e0b54..c01da73 100644 --- a/autodc/autodc-v1-patch.sh +++ b/documentation/autodc/autodc-v1-patch.sh @@ -1,5 +1,13 @@ #!/usr/bin/env bash +# if [[ ${AUTH_SERVER} == 'AutoDCv1' ]]; then +# # local _autodc_conf='/etc/samba/smb.conf' +# # local _autodc_patch='ldap server require strong auth = no' +# remote_exec 'ssh' 'AUTH_SERVER' \ +# 'curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/documentation/autodc/autodc-v1-patch.sh && bash ${_##*/}' \ +# 'OPTIONAL' +# fi + if [[ ${USER} != 'root' ]]; then echo "Error in assumption: execute as user root." 
exit 1 diff --git a/documentation/autodc/autodc.crt b/documentation/autodc/autodc.crt new file mode 100644 index 0000000..b9ccea2 --- /dev/null +++ b/documentation/autodc/autodc.crt @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIE/zCCAuegAwIBAgIJAMvPij9P2HyqMA0GCSqGSIb3DQEBCwUAMA4xDDAKBgNV +BAMMA2RjMTAeFw0xODEwMzAwNjAxNDdaFw0yODEwMjcwNjAxNDdaMA4xDDAKBgNV +BAMMA2RjMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMfD0hV96GfR +Xd5/0dXIMJH+RpcYd5GmQc2af2IplZ+YFZFXgcNmZt9xLcJ/rOBGgW/GpSP6RsEp +fcOhLbN8UXbZRd65K8V34elmuDn60bITt/yZ4Jv9DsgQS3JdiZav5C+fh+EHipaT +O4HlbOQEOYL+/EjG9XS/xtEeQcDPfYipEnwqDT9enAX/iyGcfIEciXpr5wxnWw1z +646mt1KLMXnWlISe37/9TMTihchxDx79FcOs6meueYJsTxDo7hMo7mNV51rnr9tD +4hChvYJbPIZll+16eV5c8B9s5iy6Mtn10+Jl4rZ60n7MzpKe/HAXOFe7AOrTr3w+ +/ALhvcivlcK73CLa7vtQ5jfreKhr69deB40kLWLBD2/YwaXHvVKCBGRHSi3ZD2qs +I1bJgyrDWZwiYAAWD/zvzpM0e6IcGmnW49DzHN8zpexFCyIiN1VPFSycObl7S4bu +cfgnPgJNO3pYeNySFBKPzXpG8P7RyL4hi7PAjH2NJ+w3NJY+ZuZhr+50Y/pEFqDi +Np50HHRfUnekntgyZEAIeI2XJoRTqVI+vi8FvlEKgx3MSvVx23+J41P1GKBDcr34 +VOHlfWaKSPDILV7Xp9v5jtHbW4ZaY23/Aysvo8nhrKRlEFcW8kRKbAh22u4o/xCy +b5BUWO11xqA9iH9B5CiMjwe3hU2Z9jbhAgMBAAGjYDBeMAkGA1UdEwQCMAAwCwYD +VR0PBAQDAgXgMEQGA1UdEQQ9MDuCEmxkYXAubnRueGxhYi5sb2NhbIIUYXV0b2Rj +Lm50bnhsYWIubG9jYWyCDyoubnRueGxhYi5sb2NhbDANBgkqhkiG9w0BAQsFAAOC +AgEAb9jYv5uoMPmtE5GfjaYJ1ejT2FX+LhGNUaTdvGb/j7RMZNCMnc2QsYUa967D +sKqCmjudF2Q6+VSwqD4ZxQyLPii6FxrDYARF/l9/GadVgfmZFBEjCgRpxrm+9RqP +MZ0s0OHeSfFTxk59IzVHcjtsYyNYQ6JwUPDGaFqS3ubffZE5mWwcczwGa3oFNThe +P6sRy3A9IMAVnCISibx4nCsYzo3nYTX+UxPLJRwktAk5zT3DMvpLkpmFierwgR4n ++k5zTUUh7CPT3IF+qSW92YpLNiFMspIUeCL3+ZUWIhbjJHzWUntJCkwpfLPGXzn9 +FRXYQ2a1Zg0SDvBvQOekJJiADX603UlInXOIEfxnUW2X8DWLjl1eZEfXdYM8JjwZ +e0S+F8OOR6c/IX44FcQi30OiU8RGhcbADtj1428awtpdnr55dNOWY4RVG08Xc9xZ +gjFfwRPuP72Tem9uqJqUorQ8y2XdvrtjSYSVbiFXD/KpXJ+0b6XUTauDnST2Mfuc +/OcSv4acy7a7D3LQ9O7Om+ddloGdd0nORf80S+ZB1mQaXc9QkcSJen/1+Y5mz3/U +C7fpP2WsC9NJKKY31NRxEBp3dZe7kPz+/ySx3tQ7Iz3UaB4PCyq9jLTQ2VHA6cCW +9ZPLvB7LNM4oxygWtBgTVt8ytlDly29kfbutGKpnyiUHqWo= 
+-----END CERTIFICATE----- diff --git a/documentation/autodc/autodc.csr b/documentation/autodc/autodc.csr new file mode 100644 index 0000000..d48182f --- /dev/null +++ b/documentation/autodc/autodc.csr @@ -0,0 +1,26 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIIEUzCCAjsCAQAwDjEMMAoGA1UEAwwDZGMxMIICIjANBgkqhkiG9w0BAQEFAAOC +Ag8AMIICCgKCAgEAx8PSFX3oZ9Fd3n/R1cgwkf5Glxh3kaZBzZp/YimVn5gVkVeB +w2Zm33Etwn+s4EaBb8alI/pGwSl9w6Ets3xRdtlF3rkrxXfh6Wa4OfrRshO3/Jng +m/0OyBBLcl2Jlq/kL5+H4QeKlpM7geVs5AQ5gv78SMb1dL/G0R5BwM99iKkSfCoN +P16cBf+LIZx8gRyJemvnDGdbDXPrjqa3UosxedaUhJ7fv/1MxOKFyHEPHv0Vw6zq +Z655gmxPEOjuEyjuY1XnWuev20PiEKG9gls8hmWX7Xp5XlzwH2zmLLoy2fXT4mXi +tnrSfszOkp78cBc4V7sA6tOvfD78AuG9yK+VwrvcItru+1DmN+t4qGvr114HjSQt +YsEPb9jBpce9UoIEZEdKLdkPaqwjVsmDKsNZnCJgABYP/O/OkzR7ohwaadbj0PMc +3zOl7EULIiI3VU8VLJw5uXtLhu5x+Cc+Ak07elh43JIUEo/Nekbw/tHIviGLs8CM +fY0n7Dc0lj5m5mGv7nRj+kQWoOI2nnQcdF9Sd6Se2DJkQAh4jZcmhFOpUj6+LwW+ +UQqDHcxK9XHbf4njU/UYoENyvfhU4eV9ZopI8MgtXten2/mO0dtbhlpjbf8DKy+j +yeGspGUQVxbyREpsCHba7ij/ELJvkFRY7XXGoD2If0HkKIyPB7eFTZn2NuECAwEA +AaAAMA0GCSqGSIb3DQEBCwUAA4ICAQAM0NfjZBiz+L8FRFp2fnvlmsS9wWkrL9fj +5hIG0yhoyOJ8pOJnw8zjZEy8hzRYr27yu65FKgrxJR58sLkDoEDpvwP5tqpiNz8q +zR3o0EYcIyk+jnD4owgnocRwpMlJYJdSLbrlpMjGW3iYCr0kLeNh6QoXZf1XVKCH +thJ05Add9cHo1CMs7tTEd3CFyHdRUtGcYRowFhzE3RO/tgU52kb2WDcOpNpo9ijN +glhtWGgwjqw+V3gTCzKzHAKiABKdNh1rceXt1r1eHgxHLDaIcWK9d2OLwKnlH+9x +8gfCGRUOFdjbJikGCPqX6DeQ1vRg+fg0ObJhwgKekaiigchTbkf4CDkkFtN88vb7 +1IJi/tcFqCUFR675CyAapHsBUrjxy8Y0W/ihyBYHfku9EvJbvn7AmBKt78NSlOgq +Rip3OIQq9HgfANH5MOxq02nGAWCiEZmSaZ1F8re3j4iz8cxbIDXZaUiKKagkQSl4 +XfseTHEkINjtGMtPadUSlICCnIePrPUP5YjNJLQC2JNnWbvxzYRe9qN1NS1EBJ4Z +e6PEVJDZNvizTZkUaDd3puF+p1Xs6WXIuxGbDM0lEM46BCjLSxk4fQCa0EtM5Wqs +kCXnzPYV+ZpF1DsW7pJtiZWtSJZc93KVDuZbWSrV3seKrFm2vL0FN4SXd0fowg6s +kTOLnpbfLA== +-----END CERTIFICATE REQUEST----- diff --git a/documentation/autodc/autodc.key b/documentation/autodc/autodc.key new file mode 100644 index 0000000..b4a05d3 --- /dev/null +++ b/documentation/autodc/autodc.key 
@@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJKQIBAAKCAgEAx8PSFX3oZ9Fd3n/R1cgwkf5Glxh3kaZBzZp/YimVn5gVkVeB +w2Zm33Etwn+s4EaBb8alI/pGwSl9w6Ets3xRdtlF3rkrxXfh6Wa4OfrRshO3/Jng +m/0OyBBLcl2Jlq/kL5+H4QeKlpM7geVs5AQ5gv78SMb1dL/G0R5BwM99iKkSfCoN +P16cBf+LIZx8gRyJemvnDGdbDXPrjqa3UosxedaUhJ7fv/1MxOKFyHEPHv0Vw6zq +Z655gmxPEOjuEyjuY1XnWuev20PiEKG9gls8hmWX7Xp5XlzwH2zmLLoy2fXT4mXi +tnrSfszOkp78cBc4V7sA6tOvfD78AuG9yK+VwrvcItru+1DmN+t4qGvr114HjSQt +YsEPb9jBpce9UoIEZEdKLdkPaqwjVsmDKsNZnCJgABYP/O/OkzR7ohwaadbj0PMc +3zOl7EULIiI3VU8VLJw5uXtLhu5x+Cc+Ak07elh43JIUEo/Nekbw/tHIviGLs8CM +fY0n7Dc0lj5m5mGv7nRj+kQWoOI2nnQcdF9Sd6Se2DJkQAh4jZcmhFOpUj6+LwW+ +UQqDHcxK9XHbf4njU/UYoENyvfhU4eV9ZopI8MgtXten2/mO0dtbhlpjbf8DKy+j +yeGspGUQVxbyREpsCHba7ij/ELJvkFRY7XXGoD2If0HkKIyPB7eFTZn2NuECAwEA +AQKCAgBA+UwneWBSBCMH74MMiaRil9XV5ztcwDq+6PVttCDaW9kIAPJP38qt+l3r +2SMdf1EyTeYAobLJuPkS3CXFLzY/9cNoAcpe2/M6uxJIqW/y/77kBhisI87e+43O +kOXjgSoy4yyK4K0O9KxyP3D/aPBbzxu3gB7t7kJ4WeMBF3u5K2S7xeGxLtCIkulp +8T/9ro54pwfrXOKcqYuARnFyA5wACO8dg587DuQdVaA15bAJYD/n+SH0vyb+aMP+ +uARXbwS6goJRavIj2xx4VGMTNyGkhP65WxCGxKvFTOJAwf8wLWjRRlpLB//XBj5a +KiPpQWQuBMDzoA5/1uyY81uLHrESpeBJQHa9CNAnrLcRJkExcx9GmjPZA1lYqTDG +Ox8Sqlh4UneMeEM7OYBjOJft+9oG/uOzT2gFcolCuZaOmYCQFOi7UrL3vdh5mSAS +FlRz2RdXOuBbJIwz5yDGMb9/NeWvz8hhDjjfmz9RegxUQLFUhBBv59hPdcYBMlOq +5+9m9cbUWciSB6kJngX8EJcUJ9LL9fPdV4Jz9hIU22/UGNlYUvFOt0OpvKrq66LU +cmwvGE0ryJQE8AOUyoU1F5bsO8TeV/ftdeeK2S3pGkviY75ygGYJF7Zz4sNZ7KPh +XAcT2xzlxclDDz274IB8wm8ervrf+o72CRxfBpkjegdnLekFUQKCAQEA9xTQUQsP +FuhhHL2pxMqXD3rzR1/okC7Zh5rNHQOPcsC/T5Wymmfk+AmHxIHGVmxtzP+boD3i +iDMQgc9rkdE72grhZSp6RxNT4M9VsVqberrVYPY/A99c1b1y+PMMLLsC92xvNODW +iAiSRZLJg8bWqHyMh9/U661JPN+QcC0dhAUY/E9qy7nLSfmQDY7TStmATDhgEnMc +d5in6Uun+sE7u3vU617daxPESxt6Mu1dJyK7YkLaX5ySKt50FlHMxJqSBJUBU73h +E2Wap0ik5eqAzifLg2ZvAby33QgGNpnkfShvn1B+AVIGT9oI4cCHwcOSdzK3cl/1 +JqzHJAXZaherewKCAQEAzvnGHI25BbX3gvzuEEJaiEiOmocDcrXhXx0mQE2zYcH0 +fCZxWdkaztCvQQQyFU2b8pNudoyg4h9czxiu1Eapn1qK0s6sAxmwDjlDEYWzeEWV 
+gvFa/XWTHixo51GgmcojPr8TBPowsRJWJHZ9A8Sj0TMJ6ggucUc/SvqxykJAtYiC +KXo2mF4qArKhdxbFjgYbgnwMF66D4L0thvutTxbdJpKCWlUQLZMYeiaX3K7M924Q +XKY+g/4U9XSDFLsMHuymw7ZGxVTDBWCTxasPYn7niT4HKfaZKrHyAgeq2sR2pF+w +ceba5ESEqmXoQl2VhlsrfmKl2J1rbILerr/cFGN6UwKCAQEAjfSqXvWgnMO0D8Q0 +RrkyoxU7srU6s6QHHK7my5WhFAgDomDadXXdilhHZbI91qeDSjVNH3qOrTvRbxNR +tTxmS7bWmr4V9MH4/f+BN11cWN4A/C1eNQZemvq2lKtnQkfLw4XMOrA28X8jMs/L +8WcLK1ArmWETPriXjF7R6HEzNQeMvGGQdS3wyzaT+gYmBSoDWUdGgzMZlujhB9uM +zje47tWNlhEx23RG4fQOeDYBwBtnV2QbfLVDEVgvkCc9I3O/EtzdH1THfNJyUUwe +WW4qxMb+WSc//dqENHyIvVQbf4D86KZhTrbmo3gchD+Of9eyTcOYvCC/g5He9CZc +LGUfAwKCAQB1MVBPkp+U8De5X2WcpzWDvTGhpZj6NqnppSOnfVd4ns3EDjBYY2me +eOMCuVFyZoNKktgVIANohcGTMbKsV2Zr6GR6gLM8/iqFoz6qiT3Zd+z0O5z9rrki +eJrdf4akDaRvPtmSE+fGG0/TTZPzsj/fkDqo3QEO+vyHg/NvRpxFLYol2iFXnNnE +tUhfmfE6PZMOCxb9wb7CXduNdqvnAFcwI/zTER8b7xII+Bk8x20fHJ7/FtKHAjLz +2IjwEAkI6Y5zep4P1bKnNMv3PKh1aM3JE+oShKKtXhxdseG4BU34PIY3uDNw1KsZ +oKImEb88WBsEcO7e/CN7SY60+8WtdbYpAoIBAQDXsbMEd8gX8KZvoXCcZgo71RPu +NYwhkzuaCcU4el47gumRKMXIPmKbXij4zJev7w607enU6EkiqCg0SWwAqLZ29zdw +aIEFk+8NlO5+ryO+ld4nrK8Nwe+L02UNIQZmW1O3hZkSwV//s7d2Z/yXR+5GLuhe +omueQyRnN/x7rYMKefvPxrZ0xTq4unkwpoFyaIJdDYzEB8R8A1zJxDo8yRCJOASE +l+5Hx6SUW5fcrb6VWgI21sUULZSqWaa/w/h+fYDHT21r0Z6YekSjn5UawjoY6ggP +C0EGNG1IrESsFqj/qjQFU15JY/vGY8wBz4zghl1Kqb69lQGy0VF0iMnjensi +-----END RSA PRIVATE KEY----- diff --git a/documentation/autodc/autodc.pem b/documentation/autodc/autodc.pem new file mode 100644 index 0000000..b4a05d3 --- /dev/null +++ b/documentation/autodc/autodc.pem @@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJKQIBAAKCAgEAx8PSFX3oZ9Fd3n/R1cgwkf5Glxh3kaZBzZp/YimVn5gVkVeB +w2Zm33Etwn+s4EaBb8alI/pGwSl9w6Ets3xRdtlF3rkrxXfh6Wa4OfrRshO3/Jng +m/0OyBBLcl2Jlq/kL5+H4QeKlpM7geVs5AQ5gv78SMb1dL/G0R5BwM99iKkSfCoN +P16cBf+LIZx8gRyJemvnDGdbDXPrjqa3UosxedaUhJ7fv/1MxOKFyHEPHv0Vw6zq +Z655gmxPEOjuEyjuY1XnWuev20PiEKG9gls8hmWX7Xp5XlzwH2zmLLoy2fXT4mXi +tnrSfszOkp78cBc4V7sA6tOvfD78AuG9yK+VwrvcItru+1DmN+t4qGvr114HjSQt 
+YsEPb9jBpce9UoIEZEdKLdkPaqwjVsmDKsNZnCJgABYP/O/OkzR7ohwaadbj0PMc +3zOl7EULIiI3VU8VLJw5uXtLhu5x+Cc+Ak07elh43JIUEo/Nekbw/tHIviGLs8CM +fY0n7Dc0lj5m5mGv7nRj+kQWoOI2nnQcdF9Sd6Se2DJkQAh4jZcmhFOpUj6+LwW+ +UQqDHcxK9XHbf4njU/UYoENyvfhU4eV9ZopI8MgtXten2/mO0dtbhlpjbf8DKy+j +yeGspGUQVxbyREpsCHba7ij/ELJvkFRY7XXGoD2If0HkKIyPB7eFTZn2NuECAwEA +AQKCAgBA+UwneWBSBCMH74MMiaRil9XV5ztcwDq+6PVttCDaW9kIAPJP38qt+l3r +2SMdf1EyTeYAobLJuPkS3CXFLzY/9cNoAcpe2/M6uxJIqW/y/77kBhisI87e+43O +kOXjgSoy4yyK4K0O9KxyP3D/aPBbzxu3gB7t7kJ4WeMBF3u5K2S7xeGxLtCIkulp +8T/9ro54pwfrXOKcqYuARnFyA5wACO8dg587DuQdVaA15bAJYD/n+SH0vyb+aMP+ +uARXbwS6goJRavIj2xx4VGMTNyGkhP65WxCGxKvFTOJAwf8wLWjRRlpLB//XBj5a +KiPpQWQuBMDzoA5/1uyY81uLHrESpeBJQHa9CNAnrLcRJkExcx9GmjPZA1lYqTDG +Ox8Sqlh4UneMeEM7OYBjOJft+9oG/uOzT2gFcolCuZaOmYCQFOi7UrL3vdh5mSAS +FlRz2RdXOuBbJIwz5yDGMb9/NeWvz8hhDjjfmz9RegxUQLFUhBBv59hPdcYBMlOq +5+9m9cbUWciSB6kJngX8EJcUJ9LL9fPdV4Jz9hIU22/UGNlYUvFOt0OpvKrq66LU +cmwvGE0ryJQE8AOUyoU1F5bsO8TeV/ftdeeK2S3pGkviY75ygGYJF7Zz4sNZ7KPh +XAcT2xzlxclDDz274IB8wm8ervrf+o72CRxfBpkjegdnLekFUQKCAQEA9xTQUQsP +FuhhHL2pxMqXD3rzR1/okC7Zh5rNHQOPcsC/T5Wymmfk+AmHxIHGVmxtzP+boD3i +iDMQgc9rkdE72grhZSp6RxNT4M9VsVqberrVYPY/A99c1b1y+PMMLLsC92xvNODW +iAiSRZLJg8bWqHyMh9/U661JPN+QcC0dhAUY/E9qy7nLSfmQDY7TStmATDhgEnMc +d5in6Uun+sE7u3vU617daxPESxt6Mu1dJyK7YkLaX5ySKt50FlHMxJqSBJUBU73h +E2Wap0ik5eqAzifLg2ZvAby33QgGNpnkfShvn1B+AVIGT9oI4cCHwcOSdzK3cl/1 +JqzHJAXZaherewKCAQEAzvnGHI25BbX3gvzuEEJaiEiOmocDcrXhXx0mQE2zYcH0 +fCZxWdkaztCvQQQyFU2b8pNudoyg4h9czxiu1Eapn1qK0s6sAxmwDjlDEYWzeEWV +gvFa/XWTHixo51GgmcojPr8TBPowsRJWJHZ9A8Sj0TMJ6ggucUc/SvqxykJAtYiC +KXo2mF4qArKhdxbFjgYbgnwMF66D4L0thvutTxbdJpKCWlUQLZMYeiaX3K7M924Q +XKY+g/4U9XSDFLsMHuymw7ZGxVTDBWCTxasPYn7niT4HKfaZKrHyAgeq2sR2pF+w +ceba5ESEqmXoQl2VhlsrfmKl2J1rbILerr/cFGN6UwKCAQEAjfSqXvWgnMO0D8Q0 +RrkyoxU7srU6s6QHHK7my5WhFAgDomDadXXdilhHZbI91qeDSjVNH3qOrTvRbxNR +tTxmS7bWmr4V9MH4/f+BN11cWN4A/C1eNQZemvq2lKtnQkfLw4XMOrA28X8jMs/L +8WcLK1ArmWETPriXjF7R6HEzNQeMvGGQdS3wyzaT+gYmBSoDWUdGgzMZlujhB9uM 
+zje47tWNlhEx23RG4fQOeDYBwBtnV2QbfLVDEVgvkCc9I3O/EtzdH1THfNJyUUwe +WW4qxMb+WSc//dqENHyIvVQbf4D86KZhTrbmo3gchD+Of9eyTcOYvCC/g5He9CZc +LGUfAwKCAQB1MVBPkp+U8De5X2WcpzWDvTGhpZj6NqnppSOnfVd4ns3EDjBYY2me +eOMCuVFyZoNKktgVIANohcGTMbKsV2Zr6GR6gLM8/iqFoz6qiT3Zd+z0O5z9rrki +eJrdf4akDaRvPtmSE+fGG0/TTZPzsj/fkDqo3QEO+vyHg/NvRpxFLYol2iFXnNnE +tUhfmfE6PZMOCxb9wb7CXduNdqvnAFcwI/zTER8b7xII+Bk8x20fHJ7/FtKHAjLz +2IjwEAkI6Y5zep4P1bKnNMv3PKh1aM3JE+oShKKtXhxdseG4BU34PIY3uDNw1KsZ +oKImEb88WBsEcO7e/CN7SY60+8WtdbYpAoIBAQDXsbMEd8gX8KZvoXCcZgo71RPu +NYwhkzuaCcU4el47gumRKMXIPmKbXij4zJev7w607enU6EkiqCg0SWwAqLZ29zdw +aIEFk+8NlO5+ryO+ld4nrK8Nwe+L02UNIQZmW1O3hZkSwV//s7d2Z/yXR+5GLuhe +omueQyRnN/x7rYMKefvPxrZ0xTq4unkwpoFyaIJdDYzEB8R8A1zJxDo8yRCJOASE +l+5Hx6SUW5fcrb6VWgI21sUULZSqWaa/w/h+fYDHT21r0Z6YekSjn5UawjoY6ggP +C0EGNG1IrESsFqj/qjQFU15JY/vGY8wBz4zghl1Kqb69lQGy0VF0iMnjensi +-----END RSA PRIVATE KEY----- diff --git a/autodc/tls_cert.conf b/documentation/autodc/tls_cert.conf similarity index 100% rename from autodc/tls_cert.conf rename to documentation/autodc/tls_cert.conf diff --git a/documentation/bugs.md b/documentation/bugs.md new file mode 100644 index 0000000..bf712fe --- /dev/null +++ b/documentation/bugs.md @@ -0,0 +1,486 @@ +# Bugs, Priorities, and Notes # + +--- + + +- [Bugs](#bugs) +- [Semi-prioritized Backlog with Technical Debt](#semi-prioritized-backlog-with-technical-debt) + - [Improved Software Engineering](#improved-software-engineering) +- [Notes](#notes) + - [Push Button Calm](#push-button-calm) + - [Citations for other Calm automation](#citations-for-other-calm-automation) + - [AutoDC](#autodc) + - [NuCLeI](#nuclei) + - [nuclei authconfig (run local from container?)](#nuclei-authconfig-run-local-from-container) + - [Image Uploading](#image-uploading) + - [File servers for container updates](#file-servers-for-container-updates) + - [Git](#git) + + +--- + +# Bugs # + +- BUG = AOS 5.9, 5.10: all calm.sh PC service timeout detect/retry + - Notify Nathan and bart.grootzevert when 
fixed + - 2018-10-24 21:54:23|14165|Determine_PE|Warning: expect errors on lines 1-2, due to non-JSON outputs by nuclei... + E1024 21:54:24.142107 14369 jwt.go:35] ZK session is nil + 2018/10/24 21:54:24 Failed to connect to the server: websocket.Dial ws://127.0.0.1:9444/icli: bad status: 403 + - @Michael workaround: py-nuclei? + - ssh nutanix@10.21.78.39 'source /etc/profile; py-nuclei -u admin -p "password" image.list | grep acs' + - dev :: PC-5.10 bugs: activate Calm, auth, import images + - ```2018-12-26 16:05:25|96508|calm_enable|Enable Nutanix Calm... + 2018-12-26 16:05:26|96508|calm_enable|_test=|| + 2018-12-26 16:05:26|96508|lcm|PC_VERSION 5.10.0.1 >= 5.9, starting LCM inventory... + 2018-12-26 16:05:26|96508|lcm|inventory _test=|500|``` + - ? PE> ncli multicluster add-to-multicluster external-ip-address-or-svm-ips=$PC_HOST username=admin password=yaknow +- FIXED = PC 5.9 authentication regression + - https://jira.nutanix.com/browse/ENG-180716 = "Invalid service account details" error message is incorrect + - Fix scheduled for PC 5.10.1 + - Workaround = [AutoDC: Version2](autodc/README.md#Version2) + - deprecate AutoDC1 for 5.6-8? 
+ +# Semi-prioritized Backlog with Technical Debt # + +- Linux migration: + - https://hub.docker.com/u/gittools + - https://hub.docker.com/r/gittools/gitversion 2 years old: v4.0.0-beta.12 493 MB + - https://hub.docker.com/r/gittools/gitversion-fullfx + - latest=linux-4.0.1beta, linux-4.0.0 works on LinuxMint + - docker pull gittools/gitversion-fullfx:linux{-version} + - docker run --rm -v "$(pwd):/repo" gittools/gitversion-fullfx:linux{-version} /repo + + docker image inspect $(docker image ls | grep gitversion | awk '{print $3}') > documentation/container.gitversion.$(uname -s).txt + + - Last known good @MacOS from: image inspect (above): + "Created": "2018-10-24T11:46:33.952190652Z", + "Container": "404031c17f634908b685d0b1b5f7d015b9f23b6c018a5dc288983306338d8464", + - https://hub.docker.com/r/gittools/gitversion-dotnetcore + - https://hub.docker.com/r/asimonf/docker-gitversion-compose/dockerfile + - https://hub.docker.com/r/pblachut/gitversionindocker/tags + - How to: + - check for latest remote container tags + - How to find all container tags from a remote image registry: + - https://stackoverflow.com/questions/24481564/how-can-i-find-a-docker-image-with-a-specific-tag-in-docker-registry-on-the-dock + + curl -s -S "https://registry.hub.docker.com/v2/repositories/library/$@/tags/" | jq '."results"[]["name"]' |sort + - https://stackoverflow.com/questions/28320134/how-to-list-all-tags-for-a-docker-image-on-a-remote-registry + - purge unused container tags +- Small improvements/bugs: + - Banner: PC-X bug:,@HPOC # + - PE banner: PUT /PrismGateway/services/rest/v1/application/system_data + {"type":"WELCOME_BANNER","key":"welcome_banner_status","value":true,"username":"system_data","updatedTimeInUsecs":1550212264611000} + {"type":"WELCOME_BANNER","key":"welcome_banner_content","value":"Welcome!","username":"system_data","updatedTimeInUsecs":1550212264751000} + - Add AutoDC to PE DNS, like PC_DNS + - Duplicate images are already active/uploaded on PC: check on 
import/inactive? + - dependencies 'install' 'sshpass' && dependencies 'install' 'jq' || exit 13 everywhere for robustness/parallelization + - capture NFS URL timeout error message? + - stage-workshop: load into an array, Round-robin clusters + - shell-convenience: load cluster array, menu of array index selection + - tail -f $Branch/workshop.log? + - Email when PC is ready, point to next steps in guidebook + - Refactor PC_URL to be an array? + - LCM inventory (check AOS, PC, and LCM version) + - Calm 2.6 containers +- Auth + role mappings + - OpenLDAP is now supported for Self Service on Prism Central: ENG-126217 + - OpenLDAP works fine for authentication, but Prism Central has a problem with anything more than simple RBAC with it. + - https://jira.nutanix.com/browse/ENG-126217 openldap authentication difference in PC vs PE + - fixed with PC 5.7.1 + - SSP Admins + - PE, PC: clear our warnings: resolve/ack issues for cleanliness? + - Create adminuser2, assign privs, use it instead of base admin user (drop privs/delete at end?) + - Fix role mappings, logins on PE, PC + - PE, PC: use RBAC user for APIs, etc.: cluster Admin + - improve/run autodc/add_group_and_users.sh + - adminuser01@ntnxlab.local (password = nutanix/4u) can't login to PE. + "You are not authorized to access Prism. Please contact the Nutanix administrator." + add user01@ntnxlab.local to role mapping, same error as above. + - PC_Init|Reset PC password to PE password, must be done by nci@PC, not API or on PE + Error: Password requirements: Should be at least 8 characters long. Should have at least 1 lowercase character(s). Should have at least 1 uppercase character(s). Should have at least 1 digit(s). Should have at least 1 special character(s). Should differ by at least 4 characters from previous password. Should not be from last 5 passwords. Should not have more than 2 same consecutive character(s). Should not be a dictionary word or too simplistic/systematic. 
Should should have at least one character belonging to 4 out of the 4 supported classes (lowercase, uppercase, digits, special characters). + 2018-10-02 10:56:27|92834|PC_Init|Warning: password not reset: 0.# +- RFE: AOS 5.10.0.1 may need latest or have incompatible AHV release + - PE: ncli software ls software-type=Hypervisor + - cluster --version --md5sum= --bundle= -p host_upgrade +- RFE: refactor sshpass dependency + - Sylvain Huguet https://nutanix.slack.com/archives/C0JSE04TA/p1549918415017800?thread_ts=1549915109.010300&cid=C0JSE04TA + @mark.lavi jot down a note somewhere that I need to revisit that one with you, maybe providing an alternative version as a Docker container would help. Many people have Docker for Mac/Docker for Windows these days + Or we can replace that `sshpass` dependency with a Python script instead, might be another idea. + Or start with an API call to push an SSH key to the cluster... then ssh should work passwordless. + Chris Rasmussen + @shu API call or Python would be preferable, IMO. More likely that a Python binary already exists on the user's system than Docker. + Sylvain Huguet: Docker has other added benefits in terms of packaging sources/binaries with the script and using the docker hub as a CDN/delivery mechanism, especially with Big Bang happening. But we can at least provide alternative method to `sshpass` based on some logic to identify what's available on the machine. + Chris Rasmussen: Yeah, not saying Docker is a _bad_ idea. Just that in terms of the number of people that could use this script without any changes, Python/API is likely the best choice (on OS X). +- Test Calm 5.8 bootcamp labs and 5.5-6 bugs + - https://github.com/nutanixworkshops/introcalm + vs. https://github.com/mlavi/calm_workshop + - file Calm bugs from guide +- Calm configuration: + - Projects: + - update default or create new project + - nuclei (run local from container?) + - version.get # gives API 3.1 and AOS 5.7.0.1 (bug!) 
+ - vs: cat /etc/nutanix/release_version + - project.create name=mark.lavi.test \ + description='test_from NuClei!' + - project.get mark.lavi.test + - project.update mark.lavi.test + spec.resources.account_reference_list.kind= or .uuid + spec.resources.default_subnet_reference.kind= + spec.resources.environment_reference_list.kind= + spec.resources.external_user_group_reference_list.kind= + spec.resources.subnet_reference_list.kind= + spec.resources.user_reference_list.kind= + + resources: + account_reference_list: [] + environment_reference_list: [] + external_user_group_reference_list: [] + is_default: false + resource_domain: + resources: [] + subnet_reference_list: [] + user_reference_list: [] + - nuclei authconfig (run local from container?) See notes#nuclei section, below. + - (localize?) and upload blueprint via nuclei (see unit tests)? + - Default project environment set, enable marketplace item, launch! + - Enable multiple cloud account settings, then environments, then marketplace launch + - Add widget Deployed Applications to (default) dashboard +- SRE cluster automation + - Louie: https://confluence.eng.nutanix.com:8443/display/LABS/Internal+Networks +- Refactor URLs into global.vars.sh via: test/url_hardcoded.sh + - refactor out all passwords, hardcoded values to variables + - Remove backticks: scripts/lib.pe.sh:354 remains outside of *md + - ncli rsyslog + - Improve log output to be syslog compatible? + - syslog format: INFO|DEBUG|etc. + - https://en.wikipedia.org/wiki/Syslog#Severity_level + - Documentation: + - review, refactor & migrate to bugs.md: TODO, TOFIX comments + - Ensure exit codes unique/consistent, error messages consistent + - release notes/Changelog? + - JSON or YAML options? from bash/jq? 
+ - Create a data structure to specify an image name (or rename after uploading) + - Change global.vars.sh to .json for new data structures + - https://github.com/kristopolous/TickTick + - https://github.com/dominictarr/JSON.sh + - https://medv.io/json-in-bash/ + - https://github.com/sharkdp/hyperfine +- FEATURE = [Darksite](darksite.md): cache+ordering+detection + - Ping derik.davenport@ for testing + - Tasks: + - focus on dependencies first (check $HOME/bin), then images? + - local devstation (never purge), for each: + - check if in ./cache + - else, download to ./cache + - upload to PE:~/cache + - IMPROVEMENT: array of directories to check: + - $HOME + - $HOME/cache + - $HOME/stageworkshop*/cache + - PE:software_downloads + - PE+PC:ssh_keys + - PE CVM, for each: + - check if in ~/cache + - BENEFIT: reusable cache for any use case + - else, download to ~/cache + - install dependencies from ~/cache to ~ + - eventually, upload to PC:~/cache + - DEFER: purge + - PC VM: repeat above for images + - detect HPOC networks to favor local URLs? + - Check remote file for cache, containers, images before uploading and skip when OPTIONAL + - download 403 detection: authentication unauthorized + - restore http_resume check/attempt + - create,use cache, fall back to global, next: propagate cache to PC + - Refactor all functions to use ${HOME}/cache : ntnx_download, etc. + - PC import PE images + - Move images from PE to PC? Make Karbon and Era optional? 
+ - migrate/import image catalog on PC: + {"action_on_failure":"CONTINUE","execution_order":"SEQUENTIAL","api_request_list":[{"operation":"POST","path_and_params":"/api/nutanix/v3/images/migrate","body":{"image_reference_list":[],"cluster_reference":{"uuid":"00057b0a-2472-da09-0000-0000000086b7","kind":"cluster","name":"string"}}}],"api_version":"3.0"} + - Optimization: Upload AutoDC image in parallel with PC.tar +- Demos: + - Azure LAMP demo + - CI/CD pipeline demo + - LAMP v2 application improvements (reboot nice to have) + - Calm videos/spreadsheet + - Multi product demo + +## Improved Software Engineering ## +- I've been wrestling with how to best make my bash scripts test driven. There are TDD Bash frameworks, however most of the systems leveraged/orchestrated are external and would require mocking, something I'm not sure how to approach. + +What I have done, in most functions, is try to make them [idempotent](https://en.wiktionary.org/wiki/idempotent) by "testing" for the desired outcome and skipping if accomplished. Of course, most of these tests are cheats: they only check for the final stage of a function being accomplished. Usually, this is good enough, because the final configuration is predicated on all preceding stages in the function. It would be ideal to test every operation, but as you can imagine, that's quite a bit of work. + +This gives the ability to rerun the script from the beginning, skip all previously successful work, and rapidly begin work on the next, unaccomplished stage. + +I've looked into some server testing frameworks. 
+ +- https://githooks.com/ + - https://github.com/nkantar/Autohook (success) + - Also investigated: + - https://pre-commit.com/ + - brew install pre-commit + - https://github.com/rycus86/githooks + - Research https://medium.freecodecamp.org/improve-development-workflow-of-your-team-with-githooks-9cda15377c3b + - TODO via hook?: check if unpushed commits, then allow git commit --amend + - https://stackoverflow.com/questions/253055/how-do-i-push-amended-commit-to-the-remote-git-repository +- Add (git)version/release to each script (assembly?) for github archive cache + - https://semver.org/ + - https://github.com/GitTools/GitVersion + - https://gitversion.readthedocs.io/en/stable/usage/command-line/ + - brew install gitversion + - GitVersion /showConfig + - sudo apt-get install mono-complete + - do not: sudo apt-get install libcurl3 # removes curl libcurl4 + - Download dotnet4 zip archive and put on mono-path? + - gitversion | tee gitversion.json | jq -r .FullSemVer + - ````ls -l *json && echo _GV=${_GV}```` + - ````_GV=gitversion.json ; rm -f ${_GV} \ + && gitversion | tee ${_GV} | grep FullSemVer | awk -F\" '{print $4}' && unset _GV```` + - https://blog.ngeor.com/2017/12/19/semantic-versioning-with-gitversion.html + - versus https://github.com/markchalloner/git-semver +- ~/Documents/github.com/ideadevice/calm/src/calm/tests/qa/docs + = https://github.com/ideadevice/calm/tree/master/src/calm/tests/qa/docs +- https://stackoverflow.com/questions/14494747/add-images-to-readme-md-on-github +- https://guides.github.com/introduction/flow/index.html +- https://bors.tech/ "Bors is a GitHub bot that prevents merge skew / semantic merge conflicts, so when a developer checks out the main branch, they can expect all of the tests to pass out-of-the-box." 
+- Per Google shell style guide: + - refactor function names to lowercase: https://google.github.io/styleguide/shell.xml?showone=Function_Names#Function_Names +- http://jake.ginnivan.net/blog/2014/05/25/simple-versioning-and-release-notes/ + - https://github.com/GitTools/GitReleaseNotes +- Bash test framework for unit tests and on blueprints? + - https://kitchen.ci/ which can do spec, BATS, etc. = https://github.com/test-kitchen/test-kitchen + - https://kitchen.ci/docs/getting-started/writing-test + - https://serverspec.org/ DSL Spec TDD + - http://rspec.info/ Ruby TDD + - inspec + - more compliance from supermarket + - https://dev-sec.io/features.html#os-hardening + - https://www.cisecurity.org/cis-benchmarks/ + - https://en.wikipedia.org/wiki/ERuby + - https://www.engineyard.com/blog/bats-test-command-line-tools + - https://medium.com/@pimterry/testing-your-shell-scripts-with-bats-abfca9bdc5b9 + - http://ohmyz.sh/ + - https://github.com/jakubroztocil/httpie#scripting + - https://github.com/pimterry/git-confirm + - BATS https://github.com/bats-core/bats-core + - https://invent.life/project/bash-infinity-framework + - Runit/rundeck? http://bashdb.sourceforge.net/ + - Tests: + - external URLs working (PC x, sshpass, jq, autodc, etc.) + - userX login to PE, PC + - userX new project, upload, run blueprint + - GOOD: user01@ntnxlab.local auth test fine@PE, bats? + - Knowledge base/articles/documentation: + - https://github.com/orientation/orientation + - https://shields.io/ + - https://github.com/badges/shields + - Changelog: + - https://github.com/olivierlacan/keep-a-changelog + - https://keepachangelog.com/en/1.0.0/ + - Good discussions in the issues, tags such as: breaking, internal, etc. 
+ - http://krlmlr.github.io/using-gitattributes-to-avoid-merge-conflicts/ + - Boxcutter for AHV: + - extend scripts/vmdisk2image-pc.sh to + - https://qemu.weilnetz.de/doc/qemu-doc.html#disk_005fimages_005fssh + qemu-system-x86_64 -drive file=ssh://[user@]server[:port]/path[?host_key_check=host_key_check] + - download (NFS?)/export image + - upload/import image + - drive into Jenkinsfile pipeline job + - periodic runs: weekly? + - Base images/boxes: https://github.com/chef/bento + +# Notes # + +## Push Button Calm # + +- https://github.com/mlavi/stageworkshop/blob/master/guidebook.md +- MP4 Video = 292MB: https://drive.google.com/open?id=1AfIWDff-mlvwth_lKv9DG4x-vi0ZsWij + ~11 minute screencast overview of the 70 minute journey from Foundation + to Calm running a blueprint: most of it is waiting for foundation and PC download/upload/deploy. +- Social coding: https://github.com/nutanixworkshops/stageworkshop/pull/1 +- Biggest pain: + - finding a HPOC + - second biggest pain: keeping it for more than a few hours except on the weekend. + - third biggest pain: coding in Bash :slightly_smiling_face: it makes you miss even script kiddie programming languages! + +## Citations for other Calm automation ## + +- Acknowledge https://drt-it-github-prod-1.eng.nutanix.com/sylvain-huguet/auto-hpoc + - "Drafted a first version. Then @Christophe Jauffret took over and polished it + Then we handed over the whole thing to Matt and Nathan during the prep for TS18" +- https://github.com/MMouse-23/FoundationDemoAddon in Powershell! +- One more: @anthony.c? 
+- Add links: https://drt-it-github-prod-1.eng.nutanix.com/akim-sissaoui/calm_aws_setup_blueprint/blob/master/Action%20Create%20Project/3-Create%20AWS%20Calm%20Entry +- https://gitlab.com/Chandru.tkc/Serviceability_shared/ + - pc-automate/installpc.py + - 24: "heartbeat": "/PrismGateway/services/rest/v1/heartbeat", + - 326: def validate_cluster(entity): + - 500: def add_network_to_project(name,directory_uuid): +- https://github.com/digitalformula/nutanix-cluster-setup + +## AutoDC ## + - See also: [AutoDC](autodc/README.md) + - GOOD: + - NTNXLAB, ntnxlab.local, root:nutanix/4u + - samba --version Version 4.2.14-Debian + - https://wiki.archlinux.org/index.php/samba + - https://gitlab.com/mlavi/alpine-dc (fork) + - yum install samba-ldap + - https://help.ubuntu.com/lts/serverguide/samba-ldap.html.en + - Move AutoDC to DHCP? + +## NuCLeI ## + +https://jira.nutanix.com/browse/ENG-78322 +````app_blueprint +availability_zone +available_extension +available_extension_images +catalog_item +category +certificate +changed_regions +client_auth +cloud_credentials +cluster +container +core CLI control. +diag Diagnostic tools. +directory_service +disk +docker_image +docker_registry +exit Exits the CLI. +extension +get Gets the current value of the given configuration options. +help Provides help text for the named object. +host +image +network_function_chain +network_security_rule +oauth_client +oauth_token +permission +project +protection_rule +quit Exits the CLI. +recovery_plan +recovery_plan_job +remote_connection +report_config +report_instance +role +set Sets the value of the given configuration options. +ssh_user +subnet +user +version NuCLEI Version Information. +virtual_network +vm +vm_backup +vm_snapshot +volume_group +volume_group_backup +volume_group_snapshot +webhook +```` + +### nuclei authconfig (run local from container?) 
#### + +````list | ls +edit | update +remove | rm +list-directory | ls-directory +create-directory | add-directory +edit-directory | update-directory +remove-directory | rm-directory +list-role-mappings | ls-role-mappings +delete-role-mapping +add-role-mapping +add-to-role-mapping-values +remove-from-role-mapping-values +get-directory-values-by-type +test-ldap-connection +```` + +## Image Uploading ## +TOFIX: +- https://jira.nutanix.com/browse/FEAT-7112 +- https://jira.nutanix.com/browse/ENG-115366 +once PC image service takes control, rejects PE image uploads. Move to PC, not critical path. + +KB 4892 = https://portal.nutanix.com/#/page/kbs/details?targetId=kA00e000000XePyCAK +v3 API = http://developer.nutanix.com/reference/prism_central/v3/#images two steps: + +1. POST /images to create image metadata and get UUID, see logs/spec-image.json +2. PUT images/uuid/file: upload uuid, body, checksum and checksum type: sha1, sha256 +or nuclei, only on PCVM or in container + +## File servers for container updates ## + +- https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#What_you_get_with_each_reservation +- https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#Lab_Resources +- https://sewiki.nutanix.com/index.php/HPOC_Access_Instructions#FTP + - \\lab-ftp\ftp + - smb://hpoc-ftp/ = \\hpoc-ftp\ftp + - ftp://nutanix:nutanix/4u@hostedpoc.nutanix.com/ + - \\pocfs.nutanixdc.local and \\hpoc-afs.nutanixdc.local + - smb://pocfs/ = \\pocfs\iso\ and \images\ + - nutanixdc\username + - smb://pocfs.nutanixdc.local use: auth + - WIN> nslookup pocfs.nutanixdc.local + - smbclient -I 10.21.249.12 \\\\pocfs\\images \ + --user mark.lavi@nutanixdc.local --command "prompt ; cd /Calm-EA/pc-5.7.1/ ; mget *tar" + - smb://hpoc-afs/ = \\hpoc-afs\se\ + - smbclient \\\\hpoc-afs\\se\\ --user mark.lavi@nutanixdc.local --debuglevel=10 + - WIN> nslookup hpoc-afs.nutanixdc.local + 10.21.249.41-3 + - smbclient -I 10.21.249.41 \\\\hpoc-afs\\se\\ --user 
mark.lavi@nutanixdc.local + - smb://NTNX-HPOC-AFS-3.NUTANIXDC.LOCAL + default password = welcome123 + - https://ubuntuswitch.wordpress.com/2010/02/05/nautilus-slow-network-or-network-does-not-work/ +- smb-client vs cifs? + - https://www.tldp.org/HOWTO/SMB-HOWTO-8.html + - https://www.samba.org/samba/docs/current/man-html/smbclient.1.html + - https://linux-cifs.samba.org/ + - https://pserver.samba.org/samba/ftp/cifs-cvs/linux-cifs-client-guide.pdf + - https://serverfault.com/questions/609365/cifs-mount-in-fstab-succeeds-on-ip-fails-on-hostname-written-in-etc-hosts + - sudo apt-get install cifs-utils + - yum install cifs-utils + man mount.cifs + USER=mark.lavi@nutanix.com PASSWD=secret mount -t cifs //hpoc-afs/se /mnt/se/ + - mac: sudo mount -v -r -t nfs -o resvport,nobrowse,nosuid,locallocks,nfc,actimeo=1 10.21.34.37:/SelfServiceContainer/ nfstest +- mount AFS and then put a web/S/FTP server on top +- python -m SimpleHTTPServer 8080 || python -m http.server 8080 + +## Git ## + +https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project + +``` +$ git remote show +origin + +# https://gitversion.readthedocs.io/en/stable/reference/git-setup/ +$ git remote add upstream https://github.com/nutanixworkshops/stageworkshop.git +$ git remote show +upstream +origin + +$ git fetch upstream +$ git merge upstream/master + +$ git tag +$ git tag -a 2.0.1 [optional_hash] +$ git push origin --tags + +git remote show origin +git checkout master && git merge [topic_branch] +git branch --delete [topic_branch] +git push origin --delete [topic_branch|tag] +git remote set-url origin git@github.com:mlavi/stageworkshop.git #change transport + +$ git stash list +git stage && git pull --rebase && git stash pop +```` diff --git a/documentation/darksite.md b/documentation/darksite.md new file mode 100644 index 0000000..84f3590 --- /dev/null +++ b/documentation/darksite.md @@ -0,0 +1,27 @@ +# Darksite # + +## Rationale ## + +Many Nutanix customers run their on-prem clusters with a 
requirement for network isolation, usually in a DMZ, private LAN, or otherwise air gapped environment. For security purposes, these environments are further restricted without Internet access. This causes the need to transport software binaries, updates, etc. via physical access to the cluster, *a.k.a.* sneaker net. + +This document outlines the procedure to bootstrap a new or established Nutanix AHV cluster with software from your laptop. + +## Overview ## + +Ideal to do this on a CVM, but you can prepare by downloading all of the bits in advance. The goal is to get everything onto the CVM if there's room. If not, get it onto a fileserver that the CVM can access, even via SCP/SSH or HTTP. + +- Download the push button Calm archive, unarchive, create a ````cache```` directory inside: + +````wget https://github.com/mlavi/stageworkshop/archive/master.zip && \ +unzip master.zip && pushd stageworkshop-master && mkdir cache && cd ${_} +```` +- Put everything else below in this cache directory and contact me. 
+ + - AutoDC: http://10.59.103.143:8000/autodc-2.0.qcow2 + - CentOS 7.4 image: http://download.nutanix.com/calm/CentOS-7-x86_64-GenericCloud-1801-01.qcow2 + - OPTIONAL rolling: http://cloud.centos.org/centos/7/images/CentOS-7-x86_64-GenericCloud.qcow2 + - PC-5.9.1 metadata and bits: + - http://download.nutanix.com/pc/one-click-pc-deployment/5.9.1/v1/euphrates-5.9.1-stable-prism_central_metadata.json + - http://download.nutanix.com/pc/one-click-pc-deployment/5.9.1/euphrates-5.9.1-stable-prism_central.tar + - jq-1.5: https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64 + - sshpass: http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm diff --git a/documentation/guidebook.md new file mode 100644 index 0000000..1c9e34c --- /dev/null +++ b/documentation/guidebook.md @@ -0,0 +1,389 @@ +# HPoC Automation: Push Button Calm + + +- [Caveat](#caveat) +- [Acknowledgements](#acknowledgements) +- [For the Impatient](#for-the-impatient) +- [How to Do a Calm Bootcamp](#how-to-do-a-calm-bootcamp) + - [Extended Enablement](#extended-enablement) +- [Bugs, Priorities, Notes](#bugs-priorities-notes) +- [Development](#development) + - [Feature Branches](#feature-branches) + - [Local Development Strategies and Tactics](#local-development-strategies-and-tactics) + - [How to Update or Add a Workshop](#how-to-update-or-add-a-workshop) + - [How to Update Nutanix Software Version Used in a Workshop](#how-to-update-nutanix-software-version-used-in-a-workshop) +- [Timing](#timing) +- [Procedure](#procedure) + + +--- +## Caveat ## + +This is a work in progress and your mileage may vary! + +## Acknowledgements ## + +The entire Global Technical Sales Enablement team has delivered an amazing amount of content and automation for Nutanix Tech Summits and Workshops alongside the Corporate SE team automation gurus. It has been a pleasure to work with all of them and this work stands on the shoulders of those giants. Thank you! 
+ +## For the Impatient ## + +You can get push button Calm in two ways. It is best to decide by answering this question:
*do you need to set up a single or multiple Nutanix AHV clusters?* + +- __Single:__ the easiest manner is to SSH to the cluster IP address and run the bootstrap script on a CVM: + + curl --remote-name --location https://raw.githubusercontent.com/nutanixworkshops/stageworkshop/master/bootstrap.sh && sh ${_##*/} + + answer a few questions* and watch the log progress. + - The `nutanixworkshops` repository places a premium on stability, not on the bleeding edge of the latest releases; use one of the forked repos for latest development. + +- __Multiple:__ or to use development branches, satisfy these requirements: + + 1. Tested on Ubuntu and Mac, but could work easily on other Linux distributions. + - Mac requires https://brew.sh installed, which first requires ``xcode-select --install`` + 2. Terminal with command line git. + + echo "Start Foundation on your HPoC now, script retries for 60 minutes..." + + export PE=10.21.X.37 && export PE_PASSWORD='nx2Tech###!' && EMAIL=first.last@nutanix.com \ + && cd $(git clone https://github.com/mlavi/stageworkshop.git 2>&1 | grep Cloning | awk -F\' '{print $2}') \ + && echo "${PE}|${PE_PASSWORD}|${EMAIL}" > clusters.txt \ + && ./stage_workshop.sh -f clusters.txt -w 1 #latest stable Calm workshop + + Cluster Foundation typically takes *~30 minutes,* in the meantime, the above will: + + 1. Set the Prism Element (PE) IP address, password, and your email address + 2. Change directory into the cloned git repository + 3. Put settings into a configuration file + 4. Retry for 60 minutes to stage the cluster with the configuration file and Workshop #1. + + Approximately 30 minutes later, you can login to PE to get to PC (or directly to PC) and follow step #7 below to finish push button Calm automation. + +## How to Do a Calm Bootcamp ## + +- __Planning:__ + - Work with your stakeholders at the partner or customer to plan the bootcamp/workshop agenda, facilities, and proposed schedule dates. 
Facility considerations for all attendees: + - Leader podium/projector + - WiFi access + - Seating with tables and power access + - Breakfast/Lunch/refreshments + - Local SEs to help moderate, assist, and troubleshoot each attendee's needs while the leader continues to drive the bootcamp. + - *Do not do a 15+ person workshop alone* unless you are comfortable dealing with adverse situations, have back up plans, and can easily adapt to make the most of attendee's valuable time (i.e. videos, presentation decks, and/or print outs). + - If appropriate, work with your local field marketing team to make a Bootcamp event landing and registration page, they can also help reserve a marketing cluster. + - You should drive attendees to register. + - More information and supporting materials are on GDrive in the [Technology Bootcamp](https://drive.google.com/drive/folders/0B_IfSpggJeFVfjdrVUxkZ25tQVRINHFVMkl4TFpWMG1GUUNhOVhaWnJOZ1gtSkJfa2QzSGc) folder. + - Estimate the count of your audience, reserve a HPOC cluster for every 20 attendees (due to VDI login constraints). + - You can workaround the 20 VDI user constraint per HPOC by using Juniper VPN. + - Budget for extra people to attend at the last moment and expect last minute attrition as well. + - Confirm your dates. + - Think about bringing Nutanix schwag: stickers, t-shirts, etc. for giveaways. + - Ask questions not covered here in Slack to the Global Sales Technical Enablement team, first see the pinned items in each channel: + - __#technology-bootcamps:__ for customer and prospect bootcamps + - __#hands-on-workshops:__ for Nutanix Partner and SE workshops + - Make a HPOC reservation(s) on https://rx.corp.nutanix.com/ with: + + - __AOS + Hypervisor:__ proper versions for your workshop, specified in the Workshop title. + - *Recommended:* AOS 5.8 + - Older or newer versions of AOS may not function as expected. + - Use for the Calm development workshop to test with the latest Prism Central and Calm versions. 
+ - Prism Central and Calm UIs are continually evolving, you may encounter variations that require different feature navigation from the step by step directions. + - __OS Images:__ *you do not* need to specify images (CentOS, Windows2012, etc.) for your reservation. + - __Start and End Times:__ It is nice to have the cluster extend to the end of the day in case anyone would like to save their blueprints or work after the bootcamp ends. + - The default HPOC reservation comes with 25 VDI user sessions, therefore it is recommended that you reserve a cluster for every 25 attendees. + - For attendees that can install and use Juniper Pulse VPN to access the HPOC, you can consolidate those users to access a single cluster in addition to the VDI users. + +3. __Once your HPOC reservation starts:__ + - Leverage Push button Calm automation to stage your cluster with the Calm workshop of your choice. + - *Optional:* + - For every attendee over 20 or a previously known/arranged audience, it is ideal to populate them by userXX or by email in the directory and then configure sign on into Prism Central. + - https://sewiki.nutanix.com/index.php/HPOC_Access_Instructions + - There is a printout available in GDrive? + - Populate the authentication directory with additional users and email addresses, add into groups if desired. + - Configure SSP role mappings if desired. + - Populate PC projects with groups and/or users with roles. + +5. __Before the Bootcamp:__ + - Survey the site and conference/classroom, correct any logistical problems, confirm driving, parking, sign-in procedures, room location/directions, and WiFi access. + - The day(s) before the Bootcamp or as soon as push button Calm completes with any optional configuration, send a reminder notice in email with logistical details, and *ask people to test their VDI and/or VPN access.* + - This reduces logistical problems encountered on the bootcamp day starting to access the cluster(s). 
+ - Send WiFi details for on-site network access. + - Run through the first lab to check everything works! + +6. __The Day of the Bootcamp__ + + - Arrive early, re-survey the site, send any updates to the e-mail attendee list, and include this link: + - [Nutanix Partner Workshop](https://nutanix.handsonworkshops.com/workshops/6070f10d-3aa0-4c7e-b727-dc554cbc2ddf/start/) + - Alternatively, have them access http://nutanix.handsonworkshops.com and register/log in. + - Project WiFi/logistical and Agenda info + - Once quorum is established, I like to begin with an introduction and review of the agenda: + - why are we here? + - what do you want to accomplish today? + - who is the Nutanix team in the room to ask for help? + - I take a roll call of who is in the room, writing down their first name and last initial to protect their privacy, and I ask: "what is your role and company, what do you want to accomplish or for us to address today?" + - Amend the agenda if needed based on feedback + - Review the agenda and begin the day, here is the typical agenda: + - Introductions and Logistics + - Calm overview and enablement: see the next section for detail + - Lab 1, etc. + +### Extended Enablement ### + +It is easy to do a full day of enablement on Calm. There are many topics which you can pick from for the best audience engagement; shed topics for the appropriate depth and time. + +1. Nutanix+Calm=DevOps + - DevOps enablement: What is DevOps? [Enablement deck](https://drive.google.com/open?id=1f0o9YSHy6BW_5cHS_7n716zNtKuPXTXu) [[article](https://mlavi.github.io/post/calm.io-recap/calm.io-i-dream-of-devops-but-what-is-devops/)] + 1. The Journey: move the mountain stone by stone + 2. Business outcome = __agility__: DevOps Definition + 3. Business outcome = __scalability__: Pets versus Cattle + 4. Plan, implement, measure, repeat. 
+ - [DevOps Maturity Diagram](https://mlavi.github.io/post/devops-maturity-diagram/): journey up and to the right, evaluate vendors who imprison you in the lower left quadrant. Then bring down the silos. + - The goal of DevOps is to become invisible, we cattle DevOps across the organization and we all become DevOps. + - Nutanix is on this journey, we are cattle architecture, infrastructure, and operations: this is how we disrupt and lead the industry by doing so for our customers. Calm accelerates and completes the customer journey: + + - Invisible infrastructure (HW), invisible hypervisors, invisible clouds, invisible DevOps = invisible silos (hyperconverged teams) and invisible/continuous ops. + + - The traditional Nutanix customer base is Ops, now we have a new audience: the Dev. What is a Developer and how does a developer look at the world? + + 1. [Build-Test-Deploy pattern](https://mlavi.github.io/post/devops-btd-pattern/) for a mature agile SDLC engineering organization leads to Continuous Integration, Delivery, and Deployment (CI/CD) + 4. [The DevOps Automation Diagram](http://mlavi.github.io/post/devops_automation.pu.svg) [[article](https://mlavi.github.io/post/devops-automation/)] illustrates CI/CD [[article](https://mlavi.github.io/post/calm.io-recap/calm.io-demystifying-continuous-integration-delivery-and-deployment/)] + 4. [Configuration Management](https://mlavi.github.io/post/calm.io-recap/calm.io-configuration-management-in-the-devops-world/) means Ops cares about infrastructure as code and becomes infrastructure developers. + - Proof that software eats the world and we have The New Kingmakers. + + 2. 
Calm Selling: + + - Typical customer use cases, success, and summary of business + - How to do the first customer Calm presentation + - Calm Customer Preso (Seismic) + - SKO2018 Selling the Enterprise Cloud with Calm: [Video](https://drive.google.com/open?id=0B05FlI1TwLEzTHFyUzdxUnp6a3M) [Slides](https://drive.google.com/a/nutanix.com/file/d/0B57gwWrKd9AVQjVaVFdRS1VnSEk/view?usp=sharing) + - Qualification questions + - Competitive analysis/Objection Handling + - How to demo Calm in 5, 10, 20 minutes + - Where: HPOC versus demo.nutanix.com versus expo.nutanix.com + - Be aware that I never do a Calm demo in less than 30 minutes because the platform can address so many use cases. It is better to do discovery, qualification of use cases, and plan a follow up engagement than it is to do short demo. + - 5 minute Calm demo outline: + - Just rip through the presentation slides! If there is interest, create a SFDC opp and schedule the follow up with Calm seller team. + - 10 minute Calm demo outline: + - Skip the slides and message: Calm delivers enterprise applications with a few clicks + - Show the marketplace, launch a LAMP blueprint, show application profiles with TCO, launch and audit the deployment, explain operations in flight + - 20 minute Calm demo outline: + + - [Calm History, OrgChart, Resources](https://sites.google.com/a/nutanix.com/corp/calm) + - Deeper dives and advanced features (later in the day, second day, etc.): + - Calm Releases and Roadmap + - Calm LCM 2.x feature releases + - [Service Now](https://github.com/nutanix/ServiceNow) bridge open sourced, productization in beta + - [CI/CD blueprint](https://next.nutanix.com/blog-40/automation-ci-cd-and-nutanix-calm-31147) + + - __Anatomy of a Calm Sale:__ with escalation points + - *This section is about to be updated with the new Calm go to market plan and selling team.* + - Platform, first customer pitch + - Discovery on qualification questions: + - Do you have automation, software devs, etc. 
 + - Map out how they deploy a change to production, + get the metrics/silos, time to deploy value. + - Find use case(s) for Calm/automation/platform, + create SFDC opportunity with Calm SKUs, + schedule platform follow ups and/or Calm deep dive. + - Offer a bootcamp for engagement. + - Define a PoC, escalate to solutions arch if needed. + - Implement a PoC, exit PoC successfully for a technical close. + - Sales close. + - __Continuous Selling:__ nurture Calm and upsell platform value, features, and product engagement. + - Typical use case is SSP-IaaS deployment, but the use case can be progressed over many stages to DevOps maturity: + 1. Add self-service and showback + 1. Add t-shirt sizes, multiple application profiles + 1. Add apps + 1. Add integrations + 1. Add multiple providers (active-active) + 1. Add app lifecycle operations + 1. Drive to CI/CD pipelines + 1. Drive to continuous operations + - Repeat for the next business initiative: + - Continuously prospect for other business teams, apps, integrations, marketplace blueprints, use cases which influence roadmap priority, and *for competition*: report back to the team in #calm. + - Profit! + + - __Group Exercise:__ + - Pitch a slide or answer a Calm objection question + - Entire team offers positive criticism to improve member response, typical sidebar discussion + - Make the pitch in your own voice, not by rote. + - Add your own story and experience to illustrate how we do it better, customer proof points, and lay traps. + - Improve your strategy/tactics using XCommand and the Calm battlecard. + - Avoid traps which isolate Calm from platform or lower value. + +## Bugs, Priorities, Notes ## + +See [the planning and working document](bugs.md). + +## Development ## + +Shell scripting is not a complete computer language, but despite its drawbacks, it is capable. In the limited environment of the CVM, it is the easiest manner to orchestrate other command line tools, including Nutanix CLIs. 
You will also see RESTful API calls to exercise platform functionality. + +I refer to the [Advanced Bash-Scripting Guide](http://tldp.org/LDP/abs/html/index.html) every time I forget something or see strange syntax, it is a great way to learn and understand shell scripting! + +### Feature Branches ### + +In order to keep the master branch stable, a developer should work on a feature branch when possible. The following shows how to change to a feature branch; it assumes you are working with the `mlavi` repository and want to change to a branch named *branch_name*: + + cd $(git clone https://github.com/mlavi/stageworkshop.git 2>&1 \ + | grep Cloning | awk -F\' '{print $2}') \ + && git checkout branch_name + + # Create your cluster file, e.g.: echo "${PE}|${PE_PASSWORD}|${EMAIL}" > pocs.txt + ./stage_workshop.sh -f pocs.txt + +### Local Development Strategies and Tactics ### + +- [Semantic Versioning](https://semver.org/) implemented via: + - /hooks/ = local git hooks + - [Autohook](https://github.com/nkantar/Autohook).sh + - /hooks/pre-commit/... = symbolic link to hooks/scripts/... + - /hooks/scripts/semver_release.sh + - [GitVersion](https://github.com/GitTools/GitVersion) container outputs to /release.json + - Setup: + - Docker + GitVersion (see: semver_release.sh::${CONTAINER_TAG}) + - cd hooks && ./autohook.sh install +- Shell Style Guide + - shfmt via IDE + - [Google Shell Script Guide](https://google.github.io/styleguide/shell.xml) + +### How to Update or Add a Workshop ### + +Everything takes place in `stage_workshop.sh`: +1. line 5: Update the `WORKSHOPS` array with the title of your new workshop. + - Ensure you use a keyword and a `MAJOR.MINOR` semantic version. + These will be used for switching between workshops and versions. + You should not need to use precision past the .MINOR point release. 
+ - *e.g.:* use "PC 5.9" (not "Prism Central 5.9.0.1") + - ````WORKSHOPS=(\ + "Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)" \ + "Calm Workshop (AOS 5.5+/AHV PC 5.10.x) = Development" \ + "Citrix Desktop on AHV Workshop (AOS/AHV 5.6)" \ + #"Tech Summit 2018" \ + )```` +2. Adjust/update function stage_clusters() (which immediately follows the `WORKSHOPS` array) for mappings to latest version number and staging scripts, as needed. + +### How to Update Nutanix Software Version Used in a Workshop ### + +1. See above, update `stage_workshop.sh` function stage_clusters() version number. +2. Adjust `global.vars.sh` if appropriate for: + - `PC_DEV_VERSION` and/or `PC_STABLE_VERSION` + - `FILES_VERSION` + and update the corresponding metadata URLs. + +## Timing ## + +We'll round up to the nearest half minute. + +1. 30 min = RX Foundation times to PE up (approximate) + +| Cluster | 5/24 (minutes) | 6/2 (min) | 6/10 (min) | +| :------------- | ------------- | --- | ---------- | +| NX-1060 | 30 | N/A | N/A | +| NX-3060-G5 | 25 | 35 | 33 | + + When rebuilding a HPOC from rx, Foundation automation takes: + - 4 nodes@NX-3060-G5: 30 minutes + - 4 nodes@NX-1050: 40 minutes. + +2. 0.5 min per cluster = ./stage_workshop.sh + +3. 28/26/20 min = calm.sh PE +Typical download and install of Prism Central is 17 minutes of waiting! + +| Function | Run1@5/24 (minutes) | 6/2 (min) | 6/10 (min) | +| :------------- | :------------- | --- | ---------- | +| __start__ | 11:26:53 | 09:07:55 | 03:15:35 | +| __end__ | 11:54:28 | 09:34:09 | 03:35:25 | + +4. 1.5 min = calm.sh PC + +| Function | Run1@5/24 (minutes) | 6/2 (min) | 6/10 (min) | +| :------------- | :------------- | --- | ---------- | +| __start (localtime)__ | 04:54:27 | 02:34:08 | 20:35:24 | +| __end (localtime)__ | 04:55:57 | 02:35:37 | 20:36:45 | + +5. 2 min: Login to PC, manual configuration of Calm default project (see step 7, below). + +## Procedure ## + +0. Crank some tunes and record the start time! +1. 
__Browse (tab1)__ to this page = https://github.com/mlavi/stageworkshop/blob/master/documentation/guidebook.md +2. __Browse (tab2)__ to review HPoC reservation details in https://rx.corp.nutanix.com:8443/ + + 1. Find the __Cluster External IP__ and the __PE admin password__: + we will use both of these in a moment. + 2. Memorize the HPOC number (third octet of the External IPv4) + and prepare to copy by highlighting the __PE admin password__ + or merely memorize the three digits of __PE admin password__. + 3. *Browse (tab3)* to the PE URL to show unavailable before or during foundation process. + 4. *Launch a new terminal*: + + 1. Change terminal font size for demo. + 2. Cut and paste the first line the follows to create, and change to the repository directory + - or cut and paste the entire code block if you're comfortable editing the command line, + - otherwise copy one line at a time and substitute __Cluster External IP__ + on the ````MY_HPOC```` assignment line or change that ````X```` you cleverly memorized + and paste the __PE admin password__ onto the ````PE_PASSWORD```` line + or change the ````###```` you cleverly memorized. + + git clone https://github.com/mlavi/stageworkshop.git && cd $_ + export MY_HPOC=10.21.X.37 \ + && export PE_PASSWORD='nx2Tech###!' \ + && echo "${MY_HPOC}|${PE_PASSWORD}" >> example_pocs.txt + + - *OPTIONAL:* Make a mistake with the HPoC octet to show a failure mode. + - That's it, you're done! Just sit back and wait, periodically + reload browser tab3, or follow the log output on PE and PC... + +1. Side by side: (screens because split desktop doesn't work well enough) + + 1. __Browser (tab 2):__ Open RX automation cluster foundation status detail page, it will be tab4. + 2. __Terminal:__ After the automation is uploaded to the cluster CVM, copy and paste the command to monitor the ````calm.sh```` progress. + + 3. __Browser (tab3):__ Reload the PE URL, accept security override, login as admin and password to PE EULA. + 4. 
__Terminal:__ Once PE UI configured, reload browser tab3 to show EULA bypassed or click on the decline EULA button to return to login prompt. + + - *BUG:* Once Authentication Server is up, you should be able to login as a SSP admin = adminuser05@ntnxlab.local + 5. __Browser:__ + + - Show PE Authentication: test above user with the default password. + - View All Tasks, wait until software is uploading + 6. __Terminal:__ Show that we're waiting...approximately 17 minutes (fast forward). Highlight automation scripts sent to PC. + 7. __Browser:__ from PE, show VM table, go to home and show PE registered to PC, launch PC and login as admin. + + * *BUG:* Can't login as a SSP admin = adminuser05@ntnxlab.local + * Show Authentication, show role mapping, show images. + +1. Push button Calm! + + 1. __PC> Apps (or PC-5.10+: Services > Calm):__ click lower left ? to show Calm version + 2. __Projects:__ Default: add the following: + + - Description: "Freedom to Cloud", + - Roles: assign and save, + - Local and Cloud, + - choose PoC AHV cluster, + - Network: enable VLANs, + - and Save. + 3. __Blueprints:__ Upload blueprint: ````test/beachhead-centos7-calm5.7.0.1.json```` in default project. + + - Resize icon + - Pull left tab open, note public key in AHVCluster application profile, zoom to show end of the value. + - __Credentials:__ upload private key, note user centos, save, back. + - __Service = Webtier:__ + + - Show VM name, zoom in to show macros. + - Choose local image uploaded to cluster to save time versus the dynamic imported image. + - Show user centos in cloud-init and @@{your_public_key}@@ macro. + - Show package install task: uncomment install work + - Show service tab: Deployment Config + + - *bug* service > service is redundant! + - Save, Launch! + 4. __Application Launch:__ + + - Name application deployment: marklavi-beachhead-took-X-minutes + - Terminal: find start time, find end time. + + - *BUG:* time zones of server, cloud-init? 
 + + - Show logical deployment, open terminal, audit logs diff --git a/documentation/sh-colo.md b/documentation/sh-colo.md new file mode 100644 index 0000000..3277e13 --- /dev/null +++ b/documentation/sh-colo.md @@ -0,0 +1,149 @@ +# SH-COLO Automation + +## Overview + +Rebuild AHV-2 cluster in no more than 1 hour. + + +## Foundation + +- ssh to foundation VM ``10.132.128.10`` + - double check the environment you will foundation + - confirm no one will use AHV-2 + - confirm ``~/foundation/config/ahv-2.config`` is correct. (please refer to the last chapter) + - run script to foundation your gears + ``` + CLUSTER_NAME=AHV-2 + CLUSTER_CONFIG=ahv-2.config + cd ~/foundation + service foundation_service stop + ./bin/foundation --nos=./nos/5.8.2.tar --cluster_name=${CLUSTER_NAME} config/${CLUSTER_CONFIG} + ``` + + - after foundation + ``` + CLUSTER_NAME=AHV-2 + CLUSTER_IP=10.132.129.37 + CLUSTER_DNS=10.132.129.40 + ncli user reset-password user-name=admin password='nx2Tech432!' + ncli user add last-name=nutanix first-name=nutanix user-name=nutanix user-password='nutanix/4u' email-id=nutanix@nutanix.sh + ncli user grant-cluster-admin-role user-name=nutanix + ncli user grant-user-admin-role user-name=nutanix + ncli cluster edit-info new-name=${CLUSTER_NAME} external-ip-address=${CLUSTER_IP} + ncli cluster set-timezone timezone=Asia/Shanghai + ncli cluster add-to-name-servers servers=${CLUSTER_DNS} + echo -e 'nx2Tech432!\nnx2Tech432!' 
| sudo passwd nutanix + ``` + +## Configuration + +- download automation script + ``` + wget -O stageworkshop-master.zip https://github.com/panlm/stageworkshop/archive/master.zip + unzip stageworkshop-master.zip + ``` + +- put default info into ``~/stageworkshop-master/clusters.txt`` + ``` + #cluster_ip|pe_password|mail_address + 10.132.129.37|nx2Tech432!|leiming.pan@nutanix.com + ``` + +- start to configure, using the 5th workshop + ``` + cd stageworkshop-master + ./stage_workshop.sh -f ./clusters.txt -w 5 + ``` + +- see log on ``pe`` and ``pc`` + ``` + tail -f ~/calm.log + ``` + +- final configuration + ``` + ncli user reset-password user-name=admin password='nx2Tech432!' + ncli user add last-name=nutanix first-name=nutanix user-name=nutanix user-password='nutanix/4u' email-id=nutanix@nutanix.sh + ncli user grant-cluster-admin-role user-name=nutanix + ncli user grant-user-admin-role user-name=nutanix + echo -e 'nx2Tech432!\nnx2Tech432!' | sudo passwd nutanix + ``` + +## Login + +- Login PE ``https://10.132.129.37:9440`` +- Login PC ``https://10.132.129.39:9440`` + + +# Reference + +## ahv-2.config + +- default ahv-2 configuration on foundation vm ``/home/nutanix/foundation/config/ahv-2.config`` + ``` + ipmi_user=ADMIN + ipmi_password=ADMIN + + hypervisor_netmask=255.255.128.0 + hypervisor_gateway=10.132.128.4 + hypervisor_nameserver=10.132.71.40 + hypervisor_password=nutanix/4u + svm_subnet_mask=255.255.128.0 + svm_default_gw=10.132.128.4 + + hyp_type=kvm + hyp_version=20170830.166 + + 10.132.129.33 + hypervisor_ip=10.132.129.25 + svm_ip=10.132.129.29 + node_position=A + + 10.132.129.34 + hypervisor_ip=10.132.129.26 + svm_ip=10.132.129.30 + node_position=A + + 10.132.129.35 + hypervisor_ip=10.132.129.27 + svm_ip=10.132.129.31 + node_position=B + ``` + +## ahv-3.config + +- default ahv-3 configuration on foundation vm ``/home/nutanix/foundation/config/ahv-3.config`` + ``` + ipmi_user=ADMIN + ipmi_password=ADMIN + + hypervisor_netmask=255.255.128.0 + 
hypervisor_gateway=10.132.128.4 + hypervisor_nameserver=10.132.71.40 + hypervisor_password=nutanix/4u + svm_subnet_mask=255.255.128.0 + svm_default_gw=10.132.128.4 + + hyp_type=kvm + hyp_version=20170830.166 + + 10.132.130.33 + hypervisor_ip=10.132.130.25 + svm_ip=10.132.130.29 + node_position=A + + 10.132.130.34 + hypervisor_ip=10.132.130.26 + svm_ip=10.132.130.30 + node_position=B + + 10.132.130.35 + hypervisor_ip=10.132.130.27 + svm_ip=10.132.130.31 + node_position=C + + 10.132.130.36 + hypervisor_ip=10.132.130.28 + svm_ip=10.132.130.32 + node_position=D + ``` diff --git a/guidebook.md b/guidebook.md deleted file mode 100644 index b5e057f..0000000 --- a/guidebook.md +++ /dev/null @@ -1,159 +0,0 @@ -# HPoC Automation: Push Button Calm - -## Caveat ## - -This is a work in progress and your milage may vary! - -## Prerequisites ## - -1. Tested on Ubuntu and Mac (Mac requires https://brew.sh installed). -2. A terminal with command line git. - -### Acknowledgements ### - -The entire Global Technical Sales Enablement team has delivered an amazing - amount of content and automation for Nutanix TechSummits and Workshops. Along with the Corporate SE team automation gurus, it has been a pleasure to work with all of them and this work stands on the shoulder of those giants. - Thank you! - -### For the Impatient ### - - echo "Start Foundation on your HPoC now, we'll wait 40 minutes..." - - export PE=10.21.X.37 && export PE_PASSWORD='nx2Tech###!' && EMAIL=first.last@nutanix.com \ - && cd $(git clone https://github.com/mlavi/stageworkshop.git 2>&1 | grep Cloning | awk -F\' '{print $2}') \ - && echo "${PE}|${PE_PASSWORD}|${EMAIL}" > clusters.txt \ - && ./stage_workshop.sh -f clusters.txt -w 1 #latest calm - - sleep 60*30 && lynx https://admin:${PE_PASSWORD}@${PE}:9440/ - -While Foundation typically takes ~30 minutes, we'll: - -1. Set the Prism Element (PE) IP address, password, and your email address, -2. Change directory into the cloned git repository, -3. 
Put settings into a configuration file, -4. Stage the cluster with the configuration file and the latest Calm workshop. - -Approximately 30 minutes later, you can login to PE to get to PC and follow step #7 below to finish push button Calm automation. - -## Bugs, Priorities, Notes ## - -See [the planning and working document](bugs.md). - -### Timing ### - -We'll round up to the nearest half minute. - -1. 30 min = RX Foundation times to PE up (approximate) - -| Cluster | 5/24 (minutes) | 6/2 (min) | 6/10 (min) | -| :------------- | ------------- | --- | ---------- | -| NX-1060 | 30 | N/A | N/A | -| NX-3060-G5 | 25 | 35 | 33 | - -2. 0.5 min per cluster = ./stage_workshop.sh - -3. 28/26/20 min = PE:stage_calmhow.sh -Typical download and install of Prism Central is 17 minutes of waiting! - -| Function | Run1@5/24 (minutes) | 6/2 (min) | 6/10 (min) | -| :------------- | :------------- | --- | ---------- | -| __start__ | 11:26:53 | 09:07:55 | 03:15:35 | -| __end__ | 11:54:28 | 09:34:09 | 03:35:25 | - -4. 1.5 min = PC:stage_calmhow_pc.sh - -| Function | Run1@5/24 (minutes) | 6/2 (min) | 6/10 (min) | -| :------------- | :------------- | --- | ---------- | -| __start (localtime)__ | 04:54:27 | 02:34:08 | 20:35:24 | -| __end (localtime)__ | 04:55:57 | 02:35:37 | 20:36:45 | - -5. 2 min: Login to PC, manual configuration of Calm default project (see step 7, below). - -## Procedure ## - -0. Crank some tunes and record the start time! -1. __Browse (tab1)__ to this page = https://github.com/mlavi/stageworkshop/blob/master/guidebook.md - - - I have submitted [a pull request](https://github.com/nutanixworkshops/stageworkshop/pull/1) to merge my work. -2. __Browse (tab2)__ to review HPoC reservation details in https://rx.corp.nutanix.com:8443/ - - 1. Find the __Cluster External IP__ and the __PE admin password__: - we will use both of these in a moment. - 2. 
Memorize the HPOC number (third octet of the External IPv4) - and prepare to copy by highlighting the __PE admin password__ - or merely memorize the three digits of __PE admin password__. - 3. *Browse (tab3)* to the PE URL to show unavailable before or during foundation process. - 4. *Launch a new terminal*: - - 1. Change terminal font size for demo. - 2. Cut and paste the first line the follows to create, and change to the repository directory - - or cut and paste the entire code block if you're comfortable editing the command line, - - otherwise copy one line at a time and substitute __Cluster External IP__ - on the ````MY_HPOC```` assignment line or change that ````X```` you cleverly memorized - and paste the __PE admin password__ onto the ````MY_PE_PASSWORD```` line - or change the ````###```` you cleverly memorized. - - git clone https://github.com/mlavi/stageworkshop.git && cd $_ - export MY_HPOC=10.21.X.37 \ - && export MY_PE_PASSWORD='nx2Tech###!' \ - && echo "${MY_HPOC}|${MY_PE_PASSWORD}" >> example_pocs.txt - - - *OPTIONAL:* Make a mistake with the HPoC octet to show a failure mode. - - That's it, you're done! Just sit back and wait, periodically - reload browser tab3, or follow the log output on PE and PC... - -1. Side by side: (screens because split desktop doesn't work well enough) - - 1. __Browser (tab 2):__ Open RX automation cluster foundation status detail page, it will be tab4. - 2. __Terminal:__ After the automation is uploaded to the cluster CVM, copy and paste the command to monitor the ````stage_calmhow.sh```` progress. - - 3. __Browser (tab3):__ Reload the PE URL, accept security override, login as admin and password to PE EULA. - 4. __Terminal:__ Once PE UI configured, reload browser tab3 to show EULA bypassed or click on the decline EULA button to return to login prompt. - - - *BUG:* Once Authentication Server is up, you should be able to login as a SSP admin = adminuser05@ntnxlab.local - 5. 
__Browser:__ - - - Show PE Authentication: test above user with the default password. - - View All Tasks, wait until software is uploading - 6. __Terminal:__ Show that we're waiting...approximately 17 minutes (fast forward). Highlight automation scripts sent to PC. - 7. __Browser:__ from PE, show VM table, go to home and show PE registered to PC, launch PC and login as admin. - - * *BUG:* Can't login as a SSP admin = adminuser05@ntnxlab.local - * Show Authentication, show role mapping, show images. - -1. Push button Calm! - - 1. __PC> Apps:__ click lower left ? to show Calm 5.7 - - * *BUG* why a ? in the UI? - 2. __Projects:__ Default: add the following: - - - Description: "Freedom to Cloud", - - Roles: assign and save, - - Local and Cloud, - - choose PoC AHV cluster, - - Network: enable VLANs, - - and Save. - 3. __Blueprints:__ Upload blueprint: ````beachhead-centos7-calm5.7.0.1.json```` in default project. - - - Resize icon - - Pull left tab open, note public key in AHVCluster application profile, zoom to show end of the value. - - __Credentials:__ upload private key, note user centos, save, back. - - __Service = Webtier:__ - - - Show VM name, zoom in to show macros. - - Choose local image uploaded to cluster to save time versus the dynamic imported image. - - Show user centos in cloud-init and @@{your_public_key}@@ macro. - - Show package install task: uncomment install work - - Show service tab: Deployment Config - - - *bug* service > service is redundant! - - Save, Launch! - 4. __Application Launch:__ - - - Name application deployment: marklavi-beachhead-took-X-minutes - - Terminal: find start time, find end time. - - - *BUG:* time zones of server, cloud-init? 
- - - Show logical deployment, open terminal, audit logs diff --git a/hooks/pre-commit/01-GitVersion b/hooks/pre-commit/01-GitVersion deleted file mode 120000 index 1d1e101..0000000 --- a/hooks/pre-commit/01-GitVersion +++ /dev/null @@ -1 +0,0 @@ -../scripts/GitVersion.sh \ No newline at end of file diff --git a/hooks/pre-commit/01-release b/hooks/pre-commit/01-release new file mode 120000 index 0000000..d5316c0 --- /dev/null +++ b/hooks/pre-commit/01-release @@ -0,0 +1 @@ +../scripts/semver_release.sh \ No newline at end of file diff --git a/hooks/scripts/GitVersion.sh b/hooks/scripts/GitVersion.sh deleted file mode 100755 index 7090571..0000000 --- a/hooks/scripts/GitVersion.sh +++ /dev/null @@ -1,17 +0,0 @@ -pushd ~/Documents/github.com/mlavi/stageworkshop/ \ -&& source scripts/stageworkshop.lib.sh 'quiet' - -if (( $(docker ps 2>&1 | grep Cannot | wc --lines) == 0 )); then - docker run --rm -v "$(pwd):/repo" gittools/gitversion-fullfx:linux /repo \ - > ${RELEASE} -elif [[ ! -z $(which gitversion) ]]; then - gitversion > ${RELEASE} -else - echo "Error: Docker engine down and no native binary available on PATH." -fi - -mv ${RELEASE} original.${RELEASE} && cat ${_} \ -| jq ". 
+ {\"PrismCentralStable\":\"${PC_VERSION_STABLE}\"} + {\"PrismCentralDev\":\"${PC_VERSION_DEV}\"}" \ -> ${RELEASE} && rm -f original.${RELEASE} - -git add ${RELEASE} diff --git a/hooks/scripts/semver_release.sh b/hooks/scripts/semver_release.sh new file mode 100755 index 0000000..22dd57d --- /dev/null +++ b/hooks/scripts/semver_release.sh @@ -0,0 +1,27 @@ +# shellcheck disable=SC2148 +# Note: hooks/pre-commit/01-release symlinks here +# Debug: bash -x hooks/scripts/semver_release.sh + +CONTAINER_TAG='gittools/gitversion-fullfx:linux-4.0.0' +# works on LinuxMint + +pushd ~/Documents/github.com/mlavi/stageworkshop/ \ +&& source scripts/lib.shell-convenience.sh 'quiet' || exit 1 + +if (( $(docker ps 2>&1 | grep Cannot | wc --lines) == 0 )); then + docker run --rm -v "$(pwd):/repo" ${CONTAINER_TAG} /repo \ + > ${RELEASE} +elif [[ ! -z $(which gitversion) ]]; then + gitversion > ${RELEASE} +else + ERROR=10 + echo "Error ${ERROR}: Docker engine down and no native binary available on PATH." + exit ${ERROR} +fi + +rm -f original.${RELEASE} || true +mv ${RELEASE} original.${RELEASE} && cat ${_} \ +| jq ". 
+ {\"PrismCentralStable\":\"${PC_STABLE_VERSION}\"} + {\"PrismCentralDev\":\"${PC_DEV_VERSION}\"}" \ +> ${RELEASE} && rm -f original.${RELEASE} + +git add ${RELEASE} diff --git a/release.json b/release.json index c9ddcbb..2c00186 100644 --- a/release.json +++ b/release.json @@ -1,32 +1,32 @@ { "Major": 2, "Minor": 0, - "Patch": 3, - "PreReleaseTag": "ci.53", - "PreReleaseTagWithDash": "-ci.53", + "Patch": 6, + "PreReleaseTag": "ci.7", + "PreReleaseTagWithDash": "-ci.7", "PreReleaseLabel": "ci", - "PreReleaseNumber": 53, + "PreReleaseNumber": 7, "BuildMetaData": "", "BuildMetaDataPadded": "", - "FullBuildMetaData": "Branch.master.Sha.0eb3eb05d43a1de2ef7783c2eeb052b95eec0155", - "MajorMinorPatch": "2.0.3", - "SemVer": "2.0.3-ci.53", - "LegacySemVer": "2.0.3-ci53", - "LegacySemVerPadded": "2.0.3-ci0053", - "AssemblySemVer": "2.0.3.0", - "AssemblySemFileVer": "2.0.3.0", - "FullSemVer": "2.0.3-ci.53", - "InformationalVersion": "2.0.3-ci.53+Branch.master.Sha.0eb3eb05d43a1de2ef7783c2eeb052b95eec0155", + "FullBuildMetaData": "Branch.master.Sha.dfb2d5e5e097bb27fbaf1623e3113851ea18f5cb", + "MajorMinorPatch": "2.0.6", + "SemVer": "2.0.6-ci.7", + "LegacySemVer": "2.0.6-ci7", + "LegacySemVerPadded": "2.0.6-ci0007", + "AssemblySemVer": "2.0.6.0", + "AssemblySemFileVer": "2.0.6.0", + "FullSemVer": "2.0.6-ci.7", + "InformationalVersion": "2.0.6-ci.7+Branch.master.Sha.dfb2d5e5e097bb27fbaf1623e3113851ea18f5cb", "BranchName": "master", - "Sha": "0eb3eb05d43a1de2ef7783c2eeb052b95eec0155", - "ShortSha": "0eb3eb0", - "NuGetVersionV2": "2.0.3-ci0053", - "NuGetVersion": "2.0.3-ci0053", - "NuGetPreReleaseTagV2": "ci0053", - "NuGetPreReleaseTag": "ci0053", - "CommitsSinceVersionSource": 53, - "CommitsSinceVersionSourcePadded": "0053", - "CommitDate": "2018-11-05", + "Sha": "dfb2d5e5e097bb27fbaf1623e3113851ea18f5cb", + "ShortSha": "dfb2d5e", + "NuGetVersionV2": "2.0.6-ci0007", + "NuGetVersion": "2.0.6-ci0007", + "NuGetPreReleaseTagV2": "ci0007", + "NuGetPreReleaseTag": "ci0007", + 
"CommitsSinceVersionSource": 7, + "CommitsSinceVersionSourcePadded": "0007", + "CommitDate": "2019-02-15", "PrismCentralStable": "5.8.2", - "PrismCentralDev": "5.9.1" + "PrismCentralDev": "5.10.1.1" } diff --git a/stage_workshop.sh b/stage_workshop.sh old mode 100644 new mode 100755 index 3c5db86..82e2b78 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -1,148 +1,180 @@ #!/usr/bin/env bash # use bash -x to debug command substitution and evaluation instead of echo. +DEBUG= # For WORKSHOPS keyword mappings to scripts and variables, please use: # - Calm || Citrix || Summit # - PC #.# WORKSHOPS=(\ "Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)" \ -"Calm Workshop (AOS 5.5+/AHV PC 5.9.x) = Development (AutoDC2)" \ -# "Calm Workshop (AOS 5.5+/AHV PC 5.7.x)" \ -# "Calm Workshop (AOS 5.5+/AHV PC 5.6.x)" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.10.x) = Stable (AutoDC2)" \ +"Calm Workshop (AOS 5.9+/AHV PC 5.10.x) = Development" \ "Citrix Desktop on AHV Workshop (AOS/AHV 5.6)" \ #"Tech Summit 2018" \ -) # Adjust function stage_clusters for mappings as needed +) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { # Adjust map below as needed with $WORKSHOPS local _cluster local _container - local _dependencies + local _dependency local _fields - local _pe_config - local _pc_config - local _release - local _sshkey + local _libraries='global.vars.sh lib.common.sh ' + local _pe_launch # will be transferred and executed on PE + local _pc_launch # will be transferred and executed on PC + local _sshkey=${SSH_PUBKEY} + local _wc_arg='--lines' local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]} # Map to latest and greatest of each point release - # Metadata URLs MUST be specified in common.lib.sh function: NTNX_Download - if (( $(echo ${_workshop} | grep -i "PC 5.9" | wc -l) > 0 )); then - export PC_VERSION=5.9.1 - elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc -l) > 0 )); then - export PC_VERSION=5.8.2 - elif (( 
$(echo ${_workshop} | grep -i "PC 5.7" | wc -l) > 0 )); then + # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download + # TODO: make WORKSHOPS and map a JSON configuration file? + if (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION="${PC_DEV_VERSION}" + elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION="${PC_STABLE_VERSION}" + elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.9.2 + elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then export PC_VERSION=5.7.1.1 - elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc -l) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then export PC_VERSION=5.6.2 fi - # Map to staging scripts - if (( $(echo ${_workshop} | grep -i Calm | wc -l) > 0 )); then - _pe_config=stage_calmhow.sh - _pc_config=stage_calmhow_pc.sh + # Map workshop to staging script(s) and libraries, + # _pe_launch will be executed on PE + if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='calm.sh' + _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Citrix | wc -l) > 0 )); then - _pe_config=stage_citrixhow.sh - _pc_config=stage_citrixhow_pc.sh + if (( $(echo ${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then + _pe_launch='stage_citrixhow.sh' + _pc_launch='stage_citrixhow_pc.sh' fi - if (( $(echo ${_workshop} | grep -i Summit | wc -l) > 0 )); then - _pe_config=stage_ts18.sh - _pc_config=stage_ts18_pc.sh + if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then + _pe_launch='stage_ts18.sh' + _pc_launch='stage_ts18_pc.sh' fi - Dependencies 'install' 'sshpass' + dependencies 'install' 'sshpass' - log "WORKSHOP #${WORKSHOP_NUM} = ${_workshop} with PC-${PC_VERSION}" - # Send configuration scripts to remote clusters and execute Prism Element script + if 
[[ -z ${PC_VERSION} ]]; then + log "WORKSHOP #${WORKSHOP_NUM} = ${_workshop} with PC-${PC_VERSION}" + fi + # Send configuration scripts to remote clusters and execute Prism Element script if [[ ${CLUSTER_LIST} == '-' ]]; then - echo "Login to see tasks in flight via https://${PRISM_ADMIN}:${MY_PE_PASSWORD}@${MY_PE_HOST}:9440" - get_configuration - cd scripts && eval "${CONFIGURATION} ./${_pe_config}" >> ${HOME}/${_pe_config%%.sh}.log 2>&1 & + echo "Login to see tasks in flight via https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440" + pe_configuration_args "${_pc_launch}" + + pushd scripts || true + eval "${PE_CONFIGURATION} ./${_pe_launch} 'PE'" >> ${HOME}/${_pe_launch%%.sh}.log 2>&1 & + unset PE_CONFIGURATION + popd || true else - for _cluster in `cat ${CLUSTER_LIST} | grep -v ^#` + for _cluster in $(cat ${CLUSTER_LIST} | grep -v ^#) do set -f - _fields=(${_cluster//|/ }) - MY_PE_HOST=${_fields[0]} - MY_PE_PASSWORD=${_fields[1]} - MY_EMAIL=${_fields[2]} + # shellcheck disable=2206 + _fields=(${_cluster//|/ }) + PE_HOST=${_fields[0]} + PE_PASSWORD=${_fields[1]} + EMAIL=${_fields[2]} - get_configuration + pe_configuration_args "${_pc_launch}" . scripts/global.vars.sh # re-import for relative settings - Check_Prism_API_Up 'PE' 60 + cat <> ${_pe_config%%.sh}.log 2>&1 &" + log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + unset PE_CONFIGURATION # shellcheck disable=SC2153 cat < Gear > Cluster Lockdown, the following will fail silently, use ssh nutanix@{PE|PC} instead. 
- $ SSHPASS='${MY_PE_PASSWORD}' sshpass -e ssh ${SSH_OPTS} \\ - nutanix@${MY_PE_HOST} 'date; tail -f ${_pe_config%%.sh}.log' + $ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\ + ${SSH_OPTS} \\ + nutanix@${PE_HOST} 'date; tail -f ${_pe_launch%%.sh}.log' You can login to PE to see tasks in flight and eventual PC registration: - https://${PRISM_ADMIN}:${MY_PE_PASSWORD}@${MY_PE_HOST}:9440/ + https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440/ + +EOM - $ SSHPASS='nutanix/4u' sshpass -e ssh ${SSH_OPTS} \\ - nutanix@${MY_PC_HOST} 'date; tail -f ${_pc_config%%.sh}.log' - https://${PRISM_ADMIN}@${MY_PC_HOST}:9440/ + if (( "$(echo ${_libraries} | grep -i lib.pc | wc ${_wc_arg})" > 0 )); then + # shellcheck disable=2153 + cat < 0 && ${WORKSHOP_NUM} <= ${#WORKSHOPS[@]} - 3 )); then + stage_clusters else - log "Warning: missing ${_CLUSTER_FILE} argument." - get_file # If no command line arguments, start interactive session + #log "DEBUG: WORKSHOP_NUM=${WORKSHOP_NUM}" + script_usage fi diff --git a/test/bats1.bats b/test/bats1.bats index c8d320d..dd3ad1f 100755 --- a/test/bats1.bats +++ b/test/bats1.bats @@ -5,9 +5,9 @@ [ "$result" -eq 4 ] } -# source scripts/common.lib.sh ; ATTEMPTS=2 SLEEP=2 TRIES=3 MY_PE_HOST=10.21.82.37 Check_Prism_API_Up 'PE' +# source scripts/lib.common.sh ; ATTEMPTS=2 SLEEP=2 TRIES=3 PE_HOST=10.21.82.37 prism_check 'PE' @test "Is PE up?" 
{ - result="$(source ./scripts/common.lib.sh ; \ - ATTEMPTS=2 SLEEP=2 TRIES=3 MY_PE_HOST=10.21.82.37 MY_PE_PASSWORD='tbd' Check_Prism_API_Up 'PE' )" + result="$(source ./scripts/lib.common.sh ; \ + ATTEMPTS=2 SLEEP=2 TRIES=3 PE_HOST=10.21.82.37 PE_PASSWORD='tbd' prism_check 'PE' )" [ "$result" -ne 0 ] } diff --git a/beachhead-centos7-calm5.7.0.1.json b/test/beachhead-centos7-calm5.7.0.1.json similarity index 100% rename from beachhead-centos7-calm5.7.0.1.json rename to test/beachhead-centos7-calm5.7.0.1.json diff --git a/test/repo_source.sh b/test/repo_source.sh index d90b8b6..db96ec1 100755 --- a/test/repo_source.sh +++ b/test/repo_source.sh @@ -1,8 +1,15 @@ #!/usr/bin/env bash +# ./repo_source.sh 2>&1 | grep -v 8181 | grep SOURCE_URL -. ../scripts/common.lib.sh +# PE_HOST='1.1.1.1' + +. ../scripts/lib.common.sh . ../scripts/global.vars.sh +# echo IPV4_PREFIX=${IPV4_PREFIX} +# echo AUTH_HOST=${AUTH_HOST} +# exit + log "__AutoDC__" unset SOURCE_URL repo_source AUTODC_REPOS[@] diff --git a/test/url_hardcoded.sh b/test/url_hardcoded.sh new file mode 100644 index 0000000..57bee46 --- /dev/null +++ b/test/url_hardcoded.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +# ./url_hardcoded.sh 2>&1 + +egrep http *sh */*sh \ + --exclude autodc*sh --exclude hooks*sh --exclude stage_citrixhow* \ + --exclude vmdisk2image-pc.sh --exclude global.vars.sh \ +| grep -v -i \ + -e localhost -e 127.0.0.1 -e _HOST -e _http_ \ + -e download.nutanix.com -e portal.nutanix.com -e python -e github -e '#' From eb43a09f148e03386d59e33207248c93a5f90867 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 16 Feb 2019 18:46:51 -0800 Subject: [PATCH 013/691] Update global.vars.sh Updated for GTS specific Image Repo and Images --- scripts/global.vars.sh | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f90d39f..e930380 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -140,7 +140,35 @@ case 
"${OCTET[0]}.${OCTET[1]}" in NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + #NW2_DHCP_END="${IPV4_PREFIX}.253" + NW2_DHCP_END="${IPV4_PREFIX}.229" + + QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) + # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + CentOS7.iso \ + Windows2016.iso \ + Windows2012R2.iso \ + Windows10.iso \ + Nutanix-VirtIO-1.1.3.iso \ + acs-centos7.qcow2 \ + acs-ubuntu1604.qcow2 \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + SQLServer2014SP3.iso \ + hycu-3.5.0-6138.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + ) ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR DNS_SERVERS='10.132.71.40' From 937d88fabef14720b791d93f422742af0b940ba1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 17 Feb 2019 14:57:06 -0800 Subject: [PATCH 014/691] Updates --- release.json | 36 +++++----- scripts/global.vars.sh | 3 +- scripts/ts2019.sh | 146 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 166 insertions(+), 19 deletions(-) create mode 100644 scripts/ts2019.sh diff --git a/release.json b/release.json index ade92c8..cc6b5d1 100644 --- a/release.json +++ b/release.json @@ -2,31 +2,31 @@ "Major": 2, "Minor": 0, "Patch": 6, - "PreReleaseTag": "ci.11", - "PreReleaseTagWithDash": "-ci.11", + "PreReleaseTag": "ci.13", + "PreReleaseTagWithDash": "-ci.13", "PreReleaseLabel": "ci", - "PreReleaseNumber": 11, + "PreReleaseNumber": 13, "BuildMetaData": "", "BuildMetaDataPadded": "", - "FullBuildMetaData": "Branch.master.Sha.b47788a3bdc14b8fd8852a1fefccf73a125d038c", + "FullBuildMetaData": "Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc", "MajorMinorPatch": 
"2.0.6", - "SemVer": "2.0.6-ci.11", - "LegacySemVer": "2.0.6-ci11", - "LegacySemVerPadded": "2.0.6-ci0011", + "SemVer": "2.0.6-ci.13", + "LegacySemVer": "2.0.6-ci13", + "LegacySemVerPadded": "2.0.6-ci0013", "AssemblySemVer": "2.0.6.0", "AssemblySemFileVer": "2.0.6.0", - "FullSemVer": "2.0.6-ci.11", - "InformationalVersion": "2.0.6-ci.11+Branch.master.Sha.b47788a3bdc14b8fd8852a1fefccf73a125d038c", + "FullSemVer": "2.0.6-ci.13", + "InformationalVersion": "2.0.6-ci.13+Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc", "BranchName": "master", - "Sha": "b47788a3bdc14b8fd8852a1fefccf73a125d038c", - "ShortSha": "b47788a", - "NuGetVersionV2": "2.0.6-ci0011", - "NuGetVersion": "2.0.6-ci0011", - "NuGetPreReleaseTagV2": "ci0011", - "NuGetPreReleaseTag": "ci0011", - "CommitsSinceVersionSource": 11, - "CommitsSinceVersionSourcePadded": "0011", - "CommitDate": "2019-02-15", + "Sha": "3d62e775126b97ddac481a1fcc81920d42d998fc", + "ShortSha": "3d62e77", + "NuGetVersionV2": "2.0.6-ci0013", + "NuGetVersion": "2.0.6-ci0013", + "NuGetPreReleaseTagV2": "ci0013", + "NuGetPreReleaseTag": "ci0013", + "CommitsSinceVersionSource": 13, + "CommitsSinceVersionSourcePadded": "0013", + "CommitDate": "2019-02-17", "PrismCentralStable": "5.8.2", "PrismCentralDev": "5.10.1.1" } diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e930380..9f8ef0a 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -39,6 +39,7 @@ NTNX_INIT_PASSWORD='nutanix/4u' 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ 'http://10.21.250.221/images/tech-enablement/' \ 'http://10.21.250.221/images/ahv/techsummit/' \ 'http://10.132.128.50:81/share/saved-images/' \ @@ -95,7 +96,7 @@ AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' AUTODC_REPOS=(\ 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC-04282018.qcow2' \ + 
'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh new file mode 100644 index 0000000..0ed1ae3 --- /dev/null +++ b/scripts/ts2019.sh @@ -0,0 +1,146 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export PC_DEV_VERSION='5.10.1.1' + export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.1.1.json' + export PC_URL='http://10.42.8.50/images/x.tar.gz' + export FILES_VERSION='3.2.0' + export FILES_METAURL='http://10.42.8.50/images/afs-3.2.0.json' + export FILES_URL='http://10.42.8.50/images/x.tar.qcow2' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + && pc_configure \ + && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' + + log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install & # parallel, optional. Versus: $0 'files' & + + finish + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . 
lib.pc.sh + + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + CentOS7.iso \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + acs-centos7.qcow2 \ + acs-ubuntu1604.qcow2 \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6138.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + ) + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && images \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + flow_enable + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac From 074756ac93131532e5d6e7605f0f6823ccd4333f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 17 Feb 2019 16:07:07 -0800 Subject: [PATCH 015/691] Update global.vars.sh --- scripts/global.vars.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 9f8ef0a..034283d 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -18,10 +18,11 @@ PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/ FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' # 2019-02-15: override until metadata URL fixed # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' # Revert by overriding again... 
- FILES_VERSION='3.2.0' - FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' + #FILES_VERSION='3.2.0' + #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' + #FILES_URL= NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' From 3c02a74535af9de473342c04306b48299b9f5edf Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Feb 2019 10:18:29 -0800 Subject: [PATCH 016/691] Updates for ts2019.sh --- scripts/global.vars.sh | 56 +++--------------------------------------- scripts/ts2019.sh | 17 ++++++++----- 2 files changed, 15 insertions(+), 58 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 034283d..52271c5 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -18,7 +18,7 @@ PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/ FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' # 2019-02-15: override until metadata URL fixed # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' # Revert by overriding again... 
#FILES_VERSION='3.2.0' #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' @@ -51,13 +51,13 @@ NTNX_INIT_PASSWORD='nutanix/4u' Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ CentOS7.iso \ Windows2016.iso \ Windows2012R2.iso \ Windows10.iso \ Nutanix-VirtIO-1.1.3.iso \ - 'https://s3.amazonaws.com/technology-boot-camp/ERA-Server-build-1.0.0-21edfbc990a8f3424fed146d837483cb1a00d56d.qcow2' \ + SQLServer2014SP3.iso \ + XenApp_and_XenDesktop_7_18.iso \ 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso @@ -142,35 +142,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" - #NW2_DHCP_END="${IPV4_PREFIX}.253" - NW2_DHCP_END="${IPV4_PREFIX}.229" - - QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) - # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share - QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ - CentOS7.iso \ - Windows2016.iso \ - Windows2012R2.iso \ - Windows10.iso \ - Nutanix-VirtIO-1.1.3.iso \ - acs-centos7.qcow2 \ - acs-ubuntu1604.qcow2 \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - SQLServer2014SP3.iso \ - hycu-3.5.0-6138.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - ) + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR DNS_SERVERS='10.132.71.40' @@ -253,26 +225,6 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.254" - - QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ - CentOS7.iso \ - 
Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - acs-centos7.qcow2 \ - acs-ubuntu1604.qcow2 \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6138.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - ) ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR DNS_SERVERS='10.132.71.40' diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 0ed1ae3..96b95d1 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -21,9 +21,15 @@ case ${1} in export PC_DEV_VERSION='5.10.1.1' export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.1.1.json' export PC_URL='http://10.42.8.50/images/x.tar.gz' - export FILES_VERSION='3.2.0' - export FILES_METAURL='http://10.42.8.50/images/afs-3.2.0.json' - export FILES_URL='http://10.42.8.50/images/x.tar.qcow2' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/x.tar.gz' + export FILES_VERSION='3.2.0.1' + export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.229" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -72,14 +78,13 @@ case ${1} in Windows2012R2.iso \ SQLServer2014SP3.iso \ Nutanix-VirtIO-1.1.3.iso \ - acs-centos7.qcow2 \ - acs-ubuntu1604.qcow2 \ xtract-vm-2.0.3.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6138.qcow2 \ + hycu-3.5.0-6253.qcow2 \ 
VeeamAvailability_1.0.457.vmdk \ VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) dependencies 'install' 'jq' || exit 13 From c9249b21d3176966ed4c780f69fbfe4025ab6645 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Feb 2019 10:26:26 -0800 Subject: [PATCH 017/691] Update ts2019.sh --- scripts/ts2019.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 96b95d1..21c5fbf 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -20,9 +20,9 @@ case ${1} in export PC_DEV_VERSION='5.10.1.1' export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.1.1.json' - export PC_URL='http://10.42.8.50/images/x.tar.gz' + export PC_URL='http://10.42.8.50/images/euphrates-5.10.1.1-stable-prism_central.tar' #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' - #export PC_URL='https://s3.amazonaws.com/get-ahv-images/x.tar.gz' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' export FILES_VERSION='3.2.0.1' export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' @@ -74,7 +74,6 @@ case ${1} in Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - CentOS7.iso \ Windows2012R2.iso \ SQLServer2014SP3.iso \ Nutanix-VirtIO-1.1.3.iso \ From 3db548abea903cd5d878291f929d898ef33ac915 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 22 Feb 2019 11:02:10 -0800 Subject: [PATCH 018/691] Brought in Marks latest commits --- documentation/bugs.md | 4 +- scripts/calm.sh | 12 ++++- scripts/lib.common.sh | 10 ++++ scripts/lib.pe.sh | 63 +++++++++++++++++----- scripts/lib.shell-convenience.sh | 7 +-- scripts/ts2019.sh | 89 +++++++++++++++++++++----------- 6 files changed, 138 insertions(+), 47 deletions(-) mode change 100755 => 100644 
scripts/lib.common.sh mode change 100755 => 100644 scripts/lib.shell-convenience.sh diff --git a/documentation/bugs.md b/documentation/bugs.md index 97985bb..c5c4fe5 100644 --- a/documentation/bugs.md +++ b/documentation/bugs.md @@ -22,7 +22,6 @@ # Bugs # - BUG = AOS 5.9, 5.10: all calm.sh PC service timeout detect/retry - - Notify Nathan and bart.grootzevert when fixed - 2018-10-24 21:54:23|14165|Determine_PE|Warning: expect errors on lines 1-2, due to non-JSON outputs by nuclei... E1024 21:54:24.142107 14369 jwt.go:35] ZK session is nil 2018/10/24 21:54:24 Failed to connect to the server: websocket.Dial ws://127.0.0.1:9444/icli: bad status: 403 @@ -34,6 +33,9 @@ 2018-12-26 16:05:26|96508|lcm|PC_VERSION 5.10.0.1 >= 5.9, starting LCM inventory... 2018-12-26 16:05:26|96508|lcm|inventory _test=|500|``` - PE> ncli multicluster add-to-multicluster external-ip-address-or-svm-ips=$PC_HOST username=admin password=yaknow + - Notify bart.grootzevert when fixed + - 2019-02-20 21:28:12|4424|pc_configure|PC>=5.10, manual join PE to PC = |Cluster registration is currently in progress. This operation may take a while. +Error: The username or password entered is incorrect.| - ADC2 wonky - 2019-02-15 16:12:08|20294|pe_auth|Adjusted directory-url=ldap://10.42.23.40:389 because AOS-5.10.0.1 >= 5.9 diff --git a/scripts/calm.sh b/scripts/calm.sh index 6222290..e8925e9 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -32,13 +32,14 @@ case ${1} in pc_install "${NW1_NAME}" \ && prism_check 'PC' \ && pc_configure \ + && pc_configure \ && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install & # parallel, optional. Versus: $0 'files' & + #files_install & # parallel, optional. Versus: $0 'files' & finish else @@ -50,6 +51,9 @@ case ${1} in ;; PC | pc ) . 
lib.pc.sh + + run_once + dependencies 'install' 'jq' || exit 13 ssh_pubkey & # non-blocking, parallel suitable @@ -66,6 +70,12 @@ case ${1} in log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" pe_determine ${1} . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi fi if [[ ! -z "${2}" ]]; then # hidden bonus diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh old mode 100755 new mode 100644 index c0393c8..35e41e4 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -783,6 +783,16 @@ function repo_source() { fi } +function run_once() { + # TODO: PC dependent + if [[ ! -z ${PC_LAUNCH} ]] && (( $(cat ${HOME}/${PC_LAUNCH%%.sh}.log | wc ${WC_ARG}) > 20 )); then + finish + _error=2 + log "Warning ${_error}: ${PC_LAUNCH} already ran, exit!" + exit ${_error} + fi +} + function ssh_pubkey() { local _dir local _directories=(\ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 28fc3a5..18814fc 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -210,23 +210,62 @@ function network_configure() { fi } -function pc_configure() { - args_required 'PC_LAUNCH RELEASE' - local _command - local _container - local _dependencies="global.vars.sh lib.common.sh lib.pc.sh ${PC_LAUNCH}" +function cluster_check() { + local _attempts=20 + local _loop=0 local _pc_version - local _test + local _sleep=60 + local _test=1 + local _test_exit # shellcheck disable=2206 _pc_version=(${PC_VERSION//./ }) + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then - _test=$(ncli multicluster add-to-multicluster \ - external-ip-address-or-svm-ips=${PC_HOST} \ - username=${PRISM_ADMIN} password=${PE_PASSWORD}) - log "PC>=5.10, manual join PE to PC = |${_test}|" + log "PC>=5.10, checking multicluster state..." 
+ + while true ; do + (( _loop++ )) + + _test=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.multicluster) + _test_exit=$? + log "Cluster status: |${_test}|, exit: ${_test_exit}." + + if [[ ${_test} != 'true' ]]; then + _test=$(ncli multicluster add-to-multicluster \ + external-ip-address-or-svm-ips=${PC_HOST} \ + username=${PRISM_ADMIN} password=${PE_PASSWORD}) + _test_exit=$? + log "Manual join PE to PC = |${_test}|, exit: ${_test_exit}." + fi + + _test=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.multicluster) + _test_exit=$? + log "Cluster status: |${_test}|, exit: ${_test_exit}." + + if [[ ${_test} == 'true' ]]; then + log "PE to PC = cluster registration: successful." + return 0 + elif (( ${_loop} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." + sleep ${_sleep} + fi + done fi +} + +function pc_configure() { + args_required 'PC_LAUNCH RELEASE' + local _command + local _container + local _dependencies="global.vars.sh lib.common.sh lib.pc.sh ${PC_LAUNCH}" + if [[ -e ${RELEASE} ]]; then _dependencies+=" ${RELEASE}" else @@ -249,7 +288,7 @@ function pc_configure() { _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
@@ -292,7 +331,7 @@ function pc_install() { # shellcheck disable=2206 _pc_version=(${PC_VERSION//./ }) - if (( ${_pc_version[0]} = 5 && ${_pc_version[1]} <= 6 )); then + if (( ${_pc_version[0]} == 5 && ${_pc_version[1]} <= 6 )); then _should_auto_register='"should_auto_register":true,' fi diff --git a/scripts/lib.shell-convenience.sh b/scripts/lib.shell-convenience.sh old mode 100755 new mode 100644 index 4ddc8aa..41f1806 --- a/scripts/lib.shell-convenience.sh +++ b/scripts/lib.shell-convenience.sh @@ -180,14 +180,15 @@ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\ ${SSH_OPTS} \\ nutanix@${PE_HOST} -pkill -f calm ; tail -f calm*log +pkill -f calm ; tail -f *log EOF echo 'rm -rf master.zip calm*.log stageworkshop-master/ && \' echo ' curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh \' echo ' && SOURCE=${_} 'EMAIL=${EMAIL} PE_PASSWORD=${_password}' sh ${_##*/} \' - echo ' && tail -f ~/calm*.log' + echo ' && tail -f ~/*log' echo -e "cd stageworkshop-master/scripts/ && \ \n PE_HOST=${PE_HOST} PE_PASSWORD='${_password}' PC_VERSION=${PC_DEV_VERSION} EMAIL=${EMAIL} ./calm.sh 'PE'" + echo "ncli multicluster add-to-multicluster external-ip-address-or-svm-ips=10.42.x.39 username=admin password='x'" ;; AUTH | auth | ldap) _host=${AUTH_HOST} @@ -198,7 +199,7 @@ EOF case "${2}" in log | logs) - _command='date; echo; tail -f calm*log' + _command='date; echo; tail -f *log' ;; calm | inflight) _command='ps -efww | grep calm' diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 21c5fbf..d8d6ca7 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -1,6 +1,30 @@ #!/usr/bin/env bash # -x - +function ts_images() { + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + 
Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + + images && pc_cluster_img_import +} #__main()__________ # Source Nutanix environment (PATH + aliases), then common routines + global variables @@ -44,16 +68,25 @@ case ${1} in if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ && prism_check 'PC' \ - && pc_configure \ - && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' - log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${PE_HOST}:9440" - log "PC = https://${PC_HOST}:9440" + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" - files_install & # parallel, optional. Versus: $0 'files' & + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & - finish + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi else finish _error=18 @@ -64,27 +97,7 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share - export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ - Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6253.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) + run_once dependencies 'install' 'jq' || exit 13 @@ -102,6 +115,12 @@ case ${1} in log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" pe_determine ${1} . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi fi if [[ ! -z "${2}" ]]; then # hidden bonus @@ -122,10 +141,16 @@ case ${1} in && calm_enable \ && lcm \ && images \ - && pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then + log "PC<=5.8, Image imports..." + ts_images + fi pc_project flow_enable pc_admin @@ -147,4 +172,8 @@ case ${1} in FILES | files | afs ) files_install ;; + IMAGES | images ) + . 
lib.pc.sh + ts_images + ;; esac From 77bf1c4eb6b3066618040c6836e1b555fa59a084 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 22 Feb 2019 11:15:08 -0800 Subject: [PATCH 019/691] Update stage_workshop.sh --- stage_workshop.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) mode change 100755 => 100644 stage_workshop.sh diff --git a/stage_workshop.sh b/stage_workshop.sh old mode 100755 new mode 100644 index 82e2b78..aa41a3a --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -9,8 +9,8 @@ WORKSHOPS=(\ "Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)" \ "Calm Workshop (AOS 5.8.x/AHV PC 5.10.x) = Stable (AutoDC2)" \ "Calm Workshop (AOS 5.9+/AHV PC 5.10.x) = Development" \ +"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ "Citrix Desktop on AHV Workshop (AOS/AHV 5.6)" \ -#"Tech Summit 2018" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -53,8 +53,9 @@ function stage_clusters() { _pc_launch='stage_citrixhow_pc.sh' fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then - _pe_launch='stage_ts18.sh' - _pc_launch='stage_ts18_pc.sh' + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='ts2019.sh' + _pc_launch=${_pe_launch} fi dependencies 'install' 'sshpass' From c4b3740f07a0570928c0e45d2fab9619e665292e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 22 Feb 2019 11:39:23 -0800 Subject: [PATCH 020/691] updates --- scripts/global.vars.sh | 3 ++- scripts/lib.pc.sh | 4 ++-- scripts/lib.pe.sh | 5 ++++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 52271c5..750f13b 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -83,7 +83,8 @@ DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) NW1_DHCP_START="${IPV4_PREFIX}.50" NW1_DHCP_END="${IPV4_PREFIX}.125" # 
https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F -SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' +#SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' +SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' SMTP_SERVER_PORT=25 diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b368308..b6b61c4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -446,14 +446,14 @@ function pc_ui() { local _json local _pc_version local _test - +#{"type":"WELCOME_BANNER","username":"system_data","key":"welcome_banner_content","value":"${PRISM_ADMIN}:${PE_PASSWORD}@${CLUSTER_NAME}"} \ _json=$(cat < Date: Fri, 22 Feb 2019 12:23:39 -0800 Subject: [PATCH 021/691] Update ts2019.sh --- scripts/ts2019.sh | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index d8d6ca7..8cbfa59 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -55,6 +55,28 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.229" + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From 213f9a821dff6492ba706dc240fc3d2023f627f8 Mon Sep 17 00:00:00 2001 From: Nathan Cox 
Date: Fri, 22 Feb 2019 16:23:22 -0800 Subject: [PATCH 022/691] Update ts2019.sh --- scripts/ts2019.sh | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 8cbfa59..d8d6ca7 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -55,28 +55,6 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.229" - export QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share - export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ - Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6253.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) - args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From 6b6fe5ee2b96b1ce5cbccd369603f1789ec91c79 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 23 Feb 2019 09:13:22 -0800 Subject: [PATCH 023/691] Updates for images --- scripts/ts2019.sh | 44 +++++----- scripts/ts2019_mlavi.sh | 179 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 202 insertions(+), 21 deletions(-) create mode 100644 scripts/ts2019_mlavi.sh diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index d8d6ca7..b62d1d1 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -1,27 +1,7 @@ #!/usr/bin/env bash # -x function ts_images() { - export QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share - export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - 
ToolsVM.qcow2 \ - Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6253.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) + images && pc_cluster_img_import } @@ -55,6 +35,28 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.229" + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/ts2019_mlavi.sh b/scripts/ts2019_mlavi.sh new file mode 100644 index 0000000..d8d6ca7 --- /dev/null +++ b/scripts/ts2019_mlavi.sh @@ -0,0 +1,179 @@ +#!/usr/bin/env bash +# -x +function ts_images() { + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk 
\ + VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + + images && pc_cluster_img_import +} +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export PC_DEV_VERSION='5.10.1.1' + export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.1.1.json' + export PC_URL='http://10.42.8.50/images/euphrates-5.10.1.1-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + export FILES_VERSION='3.2.0.1' + export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.229" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? 
== 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then + log "PC<=5.8, Image imports..." + ts_images + fi + pc_project + flow_enable + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; + IMAGES | images ) + . 
lib.pc.sh + ts_images + ;; +esac From 230a6098e26950064ffd3be1f70a8708ff949a49 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 23 Feb 2019 09:57:57 -0800 Subject: [PATCH 024/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index 1d74e94..9969368 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=nutanixworkshops + ORGANIZATION=jncox REPOSITORY=stageworkshop BRANCH=master else From fc41fa0074128382509205a56efdeab38bdf13e8 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 23 Feb 2019 10:19:57 -0800 Subject: [PATCH 025/691] Update ts2019.sh --- scripts/ts2019.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index b62d1d1..8c53ff3 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -1,10 +1,6 @@ #!/usr/bin/env bash # -x -function ts_images() { - - images && pc_cluster_img_import -} #__main()__________ # Source Nutanix environment (PATH + aliases), then common routines + global variables From 1991d4439a5f6c753378633d93cceeeb3026df78 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 23 Feb 2019 11:03:15 -0800 Subject: [PATCH 026/691] Update ts2019.sh --- scripts/ts2019.sh | 53 ++++++++++++++++++++++++----------------------- 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 8c53ff3..682fbdb 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -31,28 +31,6 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.229" - export QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share - export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - 
Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ - Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6253.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) - args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -95,6 +73,28 @@ case ${1} in PC | pc ) . lib.pc.sh + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_320.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + run_once dependencies 'install' 'jq' || exit 13 @@ -145,10 +145,11 @@ case ${1} in # shellcheck disable=2206 _pc_version=(${PC_VERSION//./ }) - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then - log "PC<=5.8, Image imports..." - ts_images - fi + #commenting out to take images back to prevuous update + #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then + # log "PC<=5.8, Image imports..." 
+ # ts_images + #fi pc_project flow_enable pc_admin From 4b016fc5a76be5befaba1d838c9f8694c4d8f572 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 24 Feb 2019 09:21:38 -0500 Subject: [PATCH 027/691] Update ts2019.sh --- scripts/ts2019.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 682fbdb..33a6759 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -173,6 +173,6 @@ case ${1} in ;; IMAGES | images ) . lib.pc.sh - ts_images + #ts_images ;; esac From e98aa9c00112706fb53dbc9e4a87580bfab2a1d9 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 24 Feb 2019 09:54:46 -0500 Subject: [PATCH 028/691] Update ts2019.sh --- scripts/ts2019.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 33a6759..5019a73 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -171,8 +171,8 @@ case ${1} in FILES | files | afs ) files_install ;; - IMAGES | images ) - . lib.pc.sh + #IMAGES | images ) + # . 
lib.pc.sh #ts_images - ;; + #;; esac From b0c2e8c3cb6dcd6f95687fcf2815a3f6a30468d4 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 24 Feb 2019 11:04:00 -0500 Subject: [PATCH 029/691] Update ts2019.sh --- scripts/ts2019.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 5019a73..ab37f65 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -95,8 +95,6 @@ case ${1} in 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) - run_once - dependencies 'install' 'jq' || exit 13 ssh_pubkey & # non-blocking, parallel suitable From ec0d0adbeb75cb3cc5292ee3b76d1fd54eccb016 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 25 Feb 2019 00:19:49 -0500 Subject: [PATCH 030/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 750f13b..b3a4f8d 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -130,7 +130,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.55 ) # HPOC us-east = DUR - DNS_SERVERS='10.21.253.11' + DNS_SERVERS='10.55.251.10' NW2_NAME='Secondary' NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" From 862a8aa3e2e04b524d64c153371503cca9ea63d0 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sat, 9 Mar 2019 07:08:58 +0800 Subject: [PATCH 031/691] Update ts2019.sh --- scripts/ts2019.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index ab37f65..51caa09 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -18,9 +18,9 @@ case ${1} in PE | pe ) . 
lib.pe.sh - export PC_DEV_VERSION='5.10.1.1' - export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.1.1.json' - export PC_URL='http://10.42.8.50/images/euphrates-5.10.1.1-stable-prism_central.tar' + export PC_DEV_VERSION='5.10.2' + export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' export FILES_VERSION='3.2.0.1' @@ -88,7 +88,7 @@ case ${1} in Nutanix-VirtIO-1.1.3.iso \ xtract-vm-2.0.3.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ + sherlock-k8s-base-image_403.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ VeeamBR_9.5.4.2615.Update4.iso \ From 217cee5419c9e4473b99194ec59bc058e714c00d Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 10 Mar 2019 08:43:29 +0700 Subject: [PATCH 032/691] NDogg Updates --- scripts/global.vars.sh | 28 ++++++++++++++++------------ scripts/lib.pc.sh | 2 +- scripts/lib.pe.sh | 6 +++--- 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index b3a4f8d..f3e57ac 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -6,8 +6,8 @@ # Browse to: https://portal.nutanix.com/#/page/releases/prismDetails # - Find ${PC_VERSION} in the Additional Releases section on the lower right side # - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL - PC_DEV_VERSION='5.10.1.1' - PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.1.1/pcdeploy-5.10.1.1.json' + PC_DEV_VERSION='5.10.2' + PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.1.1/pcdeploy-5.10.2.json' PC_STABLE_VERSION='5.8.2' 
PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json' # Sync the following to lib.common.sh::ntnx_download-Case=FILES @@ -41,10 +41,10 @@ NTNX_INIT_PASSWORD='nutanix/4u' ) QCOW2_REPOS=(\ 'http://10.42.8.50/images/' \ - 'http://10.21.250.221/images/tech-enablement/' \ - 'http://10.21.250.221/images/ahv/techsummit/' \ - 'http://10.132.128.50:81/share/saved-images/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ + #'http://10.21.250.221/images/tech-enablement/' \ + #'http://10.21.250.221/images/ahv/techsummit/' \ + #'http://10.132.128.50:81/share/saved-images/' \ ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share QCOW2_IMAGES=(\ CentOS7.qcow2 \ @@ -97,11 +97,13 @@ AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' AUTODC_REPOS=(\ - 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ ) # For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) @@ -179,11 +181,13 @@ AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' AUTODC_REPOS=(\ - 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC-04282018.qcow2' \ - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + 'http://10.42.8.50/images/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' 
\ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ ) # For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b6b61c4..fa7c36a 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +65#!/usr/bin/env bash # -x # Dependencies: curl, ncli, nuclei, jq diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 907794f..4fd3a7c 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -58,10 +58,10 @@ function authentication_source() { 'http://10.42.8.50/images/AutoDC.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - 'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \ - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + #'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \ + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ ) fi From 47652181724822b76cb518e152d81242969466f7 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 18 Mar 2019 14:19:20 -0700 Subject: [PATCH 033/691] Update lib.pc.sh LCM Update --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index fa7c36a..fadfe4a 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -81,7 +81,7 @@ function lcm() { if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then log "PC_VERSION ${PC_VERSION} >= 5.9, starting LCM inventory..." 
- _http_body='value: "{".oid":"LifeCycleManager",".method":"lcm_framework_rpc",".kwargs":{"method_class":"LcmFramework","method":"perform_inventory","args":["http://download.nutanix.com/lcm/2.0"]}}"' + _http_body='{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ https://localhost:9440/PrismGateway/services/rest/v1/genesis) From 401ff36391bbd89a9951db61250a16a97d4a1f0d Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 22 Mar 2019 17:50:57 +0100 Subject: [PATCH 034/691] Updates for EMEA GTS Brought in Willem's additions for LCM Updates. Added a second loop for ISOs --- scripts/lib.common.sh | 90 ++++++- scripts/lib.pc.org.sh | 531 ++++++++++++++++++++++++++++++++++++++++++ scripts/lib.pc.sh | 195 ++++++++++++++-- scripts/lib.pe.sh | 13 +- scripts/ts2019.sh | 12 +- 5 files changed, 816 insertions(+), 25 deletions(-) create mode 100644 scripts/lib.pc.org.sh diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 35e41e4..a09b5c9 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -267,6 +267,10 @@ function images() { _source='source_uri' fi +####################################### +# For doing Disk IMAGES +####################################### + for _image in "${QCOW2_IMAGES[@]}" ; do # log "DEBUG: ${_image} image.create..." 
@@ -307,10 +311,90 @@ function images() { if [[ ${_cli} == 'acli' ]]; then _image_type='kDiskImage' - if (( $(echo "${SOURCE_URL}" | grep -i -e 'iso$' | wc --lines ) > 0 )); then - _image_type='kIsoImage' + _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ + container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" + else + _command+=" name=${_name} description=\"${_image}\"" + fi + + if [[ ${_cli} == 'nuclei' ]]; then + _http_body=$(cat <&1 & + if (( $? != 0 )); then + log "Warning: Image submission: $?. Continuing..." + #exit 10 fi + if [[ ${_cli} == 'nuclei' ]]; then + log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" + log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" + fi + fi + fi + + done + + ####################################### + # For doing ISO IMAGES + ####################################### + + for _image in "${QCOW2_IMAGES[@]}" ; do + + # log "DEBUG: ${_image} image.create..." + if [[ ${_cli} == 'nuclei' ]]; then + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep -i complete \ + | grep "${_image}") + else + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep "${_image}") + fi + + if [[ ! -z ${_test} ]]; then + log "Skip: ${_image} already complete on cluster." + else + _command='' + _name="${_image}" + + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then + log 'Bypass multiple repo source checks...' + SOURCE_URL="${_image}" + else + repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! + fi + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." + # exit ${_error} + fi + + # TODO:0 TOFIX: acs-centos ugly override for today... 
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then + _name=acs-centos + fi + + if [[ ${_cli} == 'acli' ]]; then + _image_type='kIsoImage' _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" else @@ -326,7 +410,7 @@ function images() { "path_and_params":"/api/nutanix/v3/images", "body":{"spec": {"name":"${_name}","description":"${_image}","resources":{ - "image_type":"DISK_IMAGE", + "image_type":"ISO_IMAGE", "source_uri":"${SOURCE_URL}"}}, "metadata":{"kind":"image"},"api_version":"3.1.0"}}],"api_version":"3.0"} EOF diff --git a/scripts/lib.pc.org.sh b/scripts/lib.pc.org.sh new file mode 100644 index 0000000..fadfe4a --- /dev/null +++ b/scripts/lib.pc.org.sh @@ -0,0 +1,531 @@ +65#!/usr/bin/env bash +# -x +# Dependencies: curl, ncli, nuclei, jq + +function calm_update() { + local _attempts=12 + local _calm_bin=/usr/local/nutanix/epsilon + local _container + local _error=19 + local _loop=0 + local _sleep=10 + local _url=http://${AUTH_HOST}:8080 + + if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then + log "Bypassing download of updated containers." + else + dependencies 'install' 'sshpass' || exit 13 + remote_exec 'ssh' 'AUTH_SERVER' \ + 'if [[ ! -e nucalm.tar ]]; then smbclient -I 10.21.249.12 \\\\pocfs\\images --user ${1} --command "prompt ; cd /Calm-EA/pc-'${PC_VERSION}'/ ; mget *tar"; echo; ls -lH *tar ; fi' \ + 'OPTIONAL' + + while true ; do + (( _loop++ )) + _test=$(curl ${CURL_HTTP_OPTS} ${_url} \ + | tr -d \") # wonderful addition of "" around HTTP status code by cURL + + if (( ${_test} == 200 )); then + log "Success reaching ${_url}" + break; + elif (( ${_loop} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." 
+ sleep ${_sleep} + fi + done + + download ${_url}/epsilon.tar + download ${_url}/nucacallm.tar + fi + + if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then + ls -lh ${HOME}/*tar + mkdir ${HOME}/calm.backup || true + cp ${_calm_bin}/*tar ${HOME}/calm.backup/ \ + && genesis stop nucalm epsilon \ + && docker rm -f "$(docker ps -aq)" || true \ + && docker rmi -f "$(docker images -q)" || true \ + && cp ${HOME}/*tar ${_calm_bin}/ \ + && cluster start # ~75 seconds to start both containers + + for _container in epsilon nucalm ; do + local _test=0 + while (( ${_test} < 1 )); do + _test=$(docker ps -a | grep ${_container} | grep -i healthy | wc --lines) + done + done + fi +} + +function flow_enable() { + ## (API; Didn't work. Used nuclei instead) + ## https://localhost:9440/api/nutanix/v3/services/microseg + ## {"state":"ENABLE"} + # To disable flow run the following on PC: nuclei microseg.disable + + log "Enable Nutanix Flow..." + nuclei microseg.enable 2>/dev/null + nuclei microseg.get_status 2>/dev/null +} + +function lcm() { + local _http_body + local _pc_version + local _test + + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + log "PC_VERSION ${PC_VERSION} >= 5.9, starting LCM inventory..." 
+ + _http_body='{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' + + _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ + https://localhost:9440/PrismGateway/services/rest/v1/genesis) + log "inventory _test=|${_test}|" + fi +} + +function pc_admin() { + local _http_body + local _test + local _admin_user='marklavi' + + _http_body=$(cat <= 5.9" + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + _http_body+=$(cat < 0 )); then + log "Warning: password not reset: $?."# exit 10 + fi + # TOFIX: nutanix@PC Linux account password change as well? + + # local _old_pw='nutanix/4u' + # local _http_body=$(cat <= 5 && ${_pc_version[1]} >= 10 && ${_test} != 500 )); then + log "PC_VERSION ${PC_VERSION} >= 5.10, setting favorites..." + + _json=$(cat </dev/null | grep ${_name} | wc --lines) + if (( ${_count} > 0 )); then + nuclei project.delete ${_name} confirm=false 2>/dev/null + else + log "Warning: _count=${_count}" + fi + + log "Creating ${_name}..." + nuclei project.create name=${_name} description='test from NuCLeI!' 2>/dev/null + _uuid=$(. 
/etc/profile.d/nutanix_env.sh \ + && nuclei project.get ${_name} format=json 2>/dev/null \ + | jq .metadata.project_reference.uuid | tr -d '"') + log "${_name}.uuid = ${_uuid}" + + # - project.get mark.lavi.test + # - project.update mark.lavi.test + # spec.resources.account_reference_list.kind= or .uuid + # spec.resources.default_subnet_reference.kind= + # spec.resources.environment_reference_list.kind= + # spec.resources.external_user_group_reference_list.kind= + # spec.resources.subnet_reference_list.kind= + # spec.resources.user_reference_list.kind= + + # {"spec":{"access_control_policy_list":[],"project_detail":{"name":"mark.lavi.test1","resources":{"external_user_group_reference_list":[],"user_reference_list":[],"environment_reference_list":[],"account_reference_list":[],"subnet_reference_list":[{"kind":"subnet","name":"Primary","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"},{"kind":"subnet","name":"Secondary","uuid":"4689bc7f-61dd-4527-bc7a-9d737ae61322"}],"default_subnet_reference":{"kind":"subnet","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"}},"description":"test from NuCLeI!"},"user_list":[],"user_group_list":[]},"api_version":"3.1","metadata":{"creation_time":"2018-06-22T03:54:59Z","spec_version":0,"kind":"project","last_update_time":"2018-06-22T03:55:00Z","uuid":"1be7f66a-5006-4061-b9d2-76caefedd298","categories":{},"owner_reference":{"kind":"user","name":"admin","uuid":"00000000-0000-0000-0000-000000000000"}}} +} diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index fadfe4a..08d8b1b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1,7 +1,11 @@ -65#!/usr/bin/env bash +#!/usr/bin/env bash # -x # Dependencies: curl, ncli, nuclei, jq +############################################################################################################################################################################### +# Routine to update Calm, but can be done via the LCM!!!! 
+############################################################################################################################################################################### + function calm_update() { local _attempts=12 local _calm_bin=/usr/local/nutanix/epsilon @@ -59,6 +63,10 @@ function calm_update() { fi } +############################################################################################################################################################################### +# Routine to enable Flow +############################################################################################################################################################################### + function flow_enable() { ## (API; Didn't work. Used nuclei instead) ## https://localhost:9440/api/nutanix/v3/services/microseg @@ -70,24 +78,126 @@ function flow_enable() { nuclei microseg.get_status 2>/dev/null } -function lcm() { - local _http_body - local _pc_version - local _test - - # shellcheck disable=2206 - _pc_version=(${PC_VERSION//./ }) +############################################################################################################################################################################### +# Routine to be run/loop till yes we are ok. +############################################################################################################################################################################### +function loop(){ + + local _attempts=30 + local _loops=0 + local _sleep=60 + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + + # What is the progress of the taskid?? 
+ while true; do + (( _loops++ )) + # Get the progress of the task + _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}?filterCriteria=parent_task_uuid%3D%3D${_task_id} | jq '.entities[0].percentageCompleted' 2>nul | tr -d \") + if (( ${_progress} == 100 )); then + log "The step has been succesfuly run" + break; + elif (( ${_loops} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" + sleep ${_sleep} + fi + done +} - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then - log "PC_VERSION ${PC_VERSION} >= 5.9, starting LCM inventory..." +############################################################################################################################################################################### +# Routine to start the LCM Inventory and the update. +############################################################################################################################################################################### - _http_body='{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' +function lcm() { - _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ - https://localhost:9440/PrismGateway/services/rest/v1/genesis) - log "inventory _test=|${_test}|" + local _url_lcm='https://localhost:9440/PrismGateway/services/rest/v1/genesis' + local _url_progress='https://localhost:9440/PrismGateway/services/rest/v1/progress_monitors' + local _url_groups='https://localhost:9440/api/nutanix/v3/groups' + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + + # 
Inventory download/run + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") + + # If there has been a reply (task_id) then the URL has accepted by PC + # Changed (()) to [] so it works.... + if [ -z "$_task_id" ]; then + log "LCM Inventory start has encountered an eror..." + else + log "LCM Inventory started.." + set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + + # Run the progess checker + loop + + # We need to get the UUIDs and the versions to be used.. so we can start the update. They are in the /home/nutanix/data/logs/lcm_ops.out AFTER an inventory run! + # ******!!!!!!WE ARE USING A DEBUG LINE IN THE FILE MENTIONED!!! NEED TO KEEP TRACK OF THAT IF IT CHANGES!!!!!***** + _full_uuids=$(cat /home/nutanix/data/logs/lcm_ops.out | grep -A 1 entity_uuid | grep -B 1 "2.6.0.3") + + # As we need to have the latest version from the LCM we need to reverse the string so we get the last versions/UUIDS + _first_uuid=$(echo $_full_uuids |rev|cut -d":" -f 4 |rev | cut -d "\"" -f2) + _first_version="2.6.0.3" # Setting the version number hard coded!!! This is what has been tested for the workshops. 
+ _sec_uuid=$(echo $_full_uuids rev|rev | cut -d":" -f 2 |rev | cut -d "\"" -f2) + _sec_version=$_first_version + + # Set the parameter to create the ugrade plan + # Create the curl json string '-d blablablablabla' so we can call the string and not the full json data line + _json_data="-d " + _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"" + _json_data+=$_first_uuid + _json_data+="\\\",\\\"" + _json_data+=$_first_version + _json_data+="\\\"],[\\\"" + _json_data+=$_sec_uuid + _json_data+="\\\",\\\"" + _json_data+=$_sec_version + _json_data+="\\\"]]]}}\"}" + + + # Run the generate plan task + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) + + # Notify the log server that the LCM has been creating a plan + log "LCM Inventory has created a plan" + set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + + # As the new json only needs to have the generate_plan changed into "perform_update" we also migh tuse sed... 
+ _json_data=$(echo $_json_data | sed -e 's/generate_plan/perform_update/g') + + # Create new json data string + #_json_data="-d " + #_json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"perform_update\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"" + #_json_data+=$_first_uuid + #_json_data+="\\\",\\\"" + #_json_data+=$_first_version + #_json_data+="\\\"],[\\\"" + #_json_data+=$_sec_uuid + #_json_data+="\\\",\\\"" + #_json_data+=$_sec_version + #_json_data+="\\\"]]]}}\"}" + + + # Run the upgrade to have the latest versions + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") + + # If there has been a reply task_id then the URL has accepted by PC + if [ -z "$_task_id" ]; then + # There has been an error!!! + log "LCM Upgrade has encountered an error!!!!" + else + # Notify the logserver that we are starting the LCM Upgrade + log "LCM Upgrade starting..." 
+ + # Run the progess checker + loop + fi fi + } +############################################################################################################################################################################### +# Routine for PC_Admin +############################################################################################################################################################################### function pc_admin() { local _http_body @@ -116,6 +226,9 @@ EOF log "add.roles ${_http_body}=|${_test}|" } +############################################################################################################################################################################### +# Routine set PC authentication to use the AD as well +############################################################################################################################################################################### function pc_auth() { # TODO:190 configure case for each authentication server type? local _group @@ -174,6 +287,11 @@ EOF done } +############################################################################################################################################################################### +# Routine to import the images into PC +# TODO: As we test for acli, which is not avail on the PC, we use NUCLEI. Can we make a change???? 
+############################################################################################################################################################################### + function pc_cluster_img_import() { local _http_body local _test @@ -202,6 +320,10 @@ EOF log "batch _test=|${_test}|" } +############################################################################################################################################################################### +# Routine to add dns servers +############################################################################################################################################################################### + function pc_dns_add() { local _dns_server local _test @@ -213,6 +335,10 @@ function pc_dns_add() { done } +############################################################################################################################################################################### +# Routine to setup the initial steps for PC; NTP, EULA and Pulse +############################################################################################################################################################################### + function pc_init() { # TODO:130 pc_init: NCLI, type 'cluster get-smtp-server' config for idempotency? 
local _test @@ -243,6 +369,10 @@ function pc_init() { log "PULSE _test=|${_test}|" } +############################################################################################################################################################################### +# Routine to setup the SMTP server in PC +############################################################################################################################################################################### + function pc_smtp() { log "Configure SMTP@PC" local _sleep=5 @@ -262,6 +392,10 @@ function pc_smtp() { # log "_test=|${_test}|" } +############################################################################################################################################################################### +# Routine to change the PC admin password +############################################################################################################################################################################### + function pc_passwd() { args_required 'PRISM_ADMIN PE_PASSWORD' @@ -283,6 +417,10 @@ function pc_passwd() { # log "cURL reset password _test=${_test}" } +############################################################################################################################################################################### +# Routine to setp up the SSP authentication to use the AutoDC1 or 2 server +############################################################################################################################################################################### + function ssp_auth() { args_required 'AUTH_SERVER AUTH_HOST AUTH_ADMIN_USER AUTH_ADMIN_PASS' @@ -421,9 +559,15 @@ EOF } +############################################################################################################################################################################### +# Routine to enable Calm and proceed only if Calm is enabled 
+############################################################################################################################################################################### + function calm_enable() { local _http_body local _test + local _sleep=30 + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' log "Enable Nutanix Calm..." _http_body=$(cat <nul | tr -d \") + if [[ ${_progress} == "ENABLED" ]]; then + log "Calm has been Enabled..." + break; + else + log "Still enabling Calm.....Sleeping ${_sleep} seconds" + sleep ${_sleep} + fi + done } +############################################################################################################################################################################### +# Routine to make changes to the PC UI; Colors, naming and the Welcome Banner +############################################################################################################################################################################### + function pc_ui() { # http://vcdx56.com/2017/08/change-nutanix-prism-ui-login-screen/ local _http_body @@ -497,6 +658,10 @@ EOF fi } +############################################################################################################################################################################### +# Routine to Create a Project in the Calm part +############################################################################################################################################################################### + function pc_project() { local _name local _count diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4fd3a7c..7c1dc01 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -267,7 +267,14 @@ function pc_configure() { args_required 'PC_LAUNCH RELEASE' local _command local _container - local _dependencies="global.vars.sh lib.common.sh lib.pc.sh ${PC_LAUNCH}" + local 
_dependencies="global.vars.sh lib.pc.sh ${PC_LAUNCH}" + + # If we are being called via the we-ts2019.sh, we need to change the lib.common.sh to we-lib.common.sh + if [[ ${PC_LAUNCH} != *"we-"* ]]; then + _dependencies+=" lib.common.sh" + else + _dependencies+=" we-lib.common.sh" + fi if [[ -e ${RELEASE} ]]; then _dependencies+=" ${RELEASE}" @@ -288,7 +295,9 @@ function pc_configure() { remote_exec 'SCP' 'PC' ${_container}.tar 'OPTIONAL' & fi done - + ##################################################################################### + ### Handing of to the PC for rest of the installation + ##################################################################################### _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 51caa09..9e080ac 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -83,17 +83,19 @@ case ${1} in Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - xtract-vm-2.0.3.qcow2 \ + move-3.0.1.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ sherlock-k8s-base-image_403.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) + export ISO_IMAGES=(\ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + VeeamBR_9.5.4.2615.Update4.iso \ + ) dependencies 'install' 'jq' || exit 13 From 50c635c4255761b5a26d4163cbeea458cdd7327f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 22 Mar 2019 18:20:26 +0100 Subject: [PATCH 035/691] Update lib.common.sh --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index a09b5c9..c84268a 100644 --- a/scripts/lib.common.sh +++ 
b/scripts/lib.common.sh @@ -355,7 +355,7 @@ EOF # For doing ISO IMAGES ####################################### - for _image in "${QCOW2_IMAGES[@]}" ; do + for _image in "${ISO_IMAGES[@]}" ; do # log "DEBUG: ${_image} image.create..." if [[ ${_cli} == 'nuclei' ]]; then From 2542c3ebe33b5e49248b92f25bbba96e35b7a5cd Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 25 Mar 2019 12:20:10 +0100 Subject: [PATCH 036/691] Update global.vars.sh fixes for Staging JQ and fixing ISO in Globals. --- scripts/global.vars.sh | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f3e57ac..8ee4501 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -37,6 +37,8 @@ NTNX_INIT_PASSWORD='nutanix/4u' # https://stedolan.github.io/jq/download/#checksums_and_signatures JQ_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) QCOW2_REPOS=(\ @@ -51,15 +53,17 @@ NTNX_INIT_PASSWORD='nutanix/4u' Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ - CentOS7.iso \ - Windows2016.iso \ - Windows2012R2.iso \ - Windows10.iso \ - Nutanix-VirtIO-1.1.3.iso \ - SQLServer2014SP3.iso \ - XenApp_and_XenDesktop_7_18.iso \ 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) + ISO_IMAGES=(\ + CentOS7.iso \ + Windows2016.iso \ + Windows2012R2.iso \ + Windows10.iso \ + Nutanix-VirtIO-1.1.3.iso \ + SQLServer2014SP3.iso \ + XenApp_and_XenDesktop_7_18.iso \ +) # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 From 636fb2cce3e9aecbd793a9e9e3c823d806d50309 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 25 Mar 2019 13:05:29 +0100 Subject: [PATCH 037/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 8ee4501..54b0d22 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -43,7 +43,7 @@ NTNX_INIT_PASSWORD='nutanix/4u' ) QCOW2_REPOS=(\ 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'http://10.21.250.221/images/tech-enablement/' \ #'http://10.21.250.221/images/ahv/techsummit/' \ #'http://10.132.128.50:81/share/saved-images/' \ From 563e6dc467b86f98c64029dec8a34b80cecdac32 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 4 Apr 2019 14:43:27 -0700 Subject: [PATCH 038/691] Bringing in Willems changes to LCM --- CHANGELOG.MD | 68 + cluster.txt | 1 + .../__pycache__/webserver.cpython-34.pyc | Bin 0 -> 1390 bytes .../__pycache__/webserver.cpython-36.pyc | Bin 0 -> 1283 bytes logserver/curl_sim.sh | 233 ++ logserver/curl_sim1.sh | 233 ++ logserver/curl_sim2.sh | 233 ++ logserver/curl_sim3.sh | 233 ++ logserver/hpoc_deploy.sql | 61 + logserver/logserver.py | 71 + logserver/nagios_setup.sh | 30 + logserver/pelist.txt | 84 + logserver/templates/index.html | 50 + logserver/webserver.py | 32 + nodemanager/package.json | 11 + quick.txt | 1 + release.json | 30 +- scripts/lib.pc.sh | 113 +- scripts/we-lib.common.sh | 849 ++++ scripts/we-ts2019.sh | 176 + sync_upstream.sh | 3 + test/LCM_Test/# GRABBING THE UUIDS | 72 + test/LCM_Test/Run the LCM inventory | 47 + test/LCM_Test/lcm_ops.out | 3728 +++++++++++++++++ test/LCM_Test/lcm_run.sh | 142 + test/LCM_Test/lcm_update.sh | 27 + test/LCM_Test/reply-inventory.json | 495 +++ test/LCM_Test/reply.json | 3 + test/Run PC local installation | 1 + we_push_centos_cl_disk.sh | 12 + we_stage_workshop.sh | 380 ++ 31 files changed, 7367 insertions(+), 52 deletions(-) create mode 100644 CHANGELOG.MD create mode 100644 cluster.txt create mode 100644 logserver/__pycache__/webserver.cpython-34.pyc create mode 100644 logserver/__pycache__/webserver.cpython-36.pyc create mode 
100644 logserver/curl_sim.sh create mode 100644 logserver/curl_sim1.sh create mode 100644 logserver/curl_sim2.sh create mode 100644 logserver/curl_sim3.sh create mode 100644 logserver/hpoc_deploy.sql create mode 100644 logserver/logserver.py create mode 100644 logserver/nagios_setup.sh create mode 100644 logserver/pelist.txt create mode 100644 logserver/templates/index.html create mode 100644 logserver/webserver.py create mode 100644 nodemanager/package.json create mode 100644 quick.txt create mode 100644 scripts/we-lib.common.sh create mode 100644 scripts/we-ts2019.sh create mode 100755 sync_upstream.sh create mode 100644 test/LCM_Test/# GRABBING THE UUIDS create mode 100644 test/LCM_Test/Run the LCM inventory create mode 100644 test/LCM_Test/lcm_ops.out create mode 100644 test/LCM_Test/lcm_run.sh create mode 100644 test/LCM_Test/lcm_update.sh create mode 100644 test/LCM_Test/reply-inventory.json create mode 100644 test/LCM_Test/reply.json create mode 100644 test/Run PC local installation create mode 100644 we_push_centos_cl_disk.sh create mode 100644 we_stage_workshop.sh diff --git a/CHANGELOG.MD b/CHANGELOG.MD new file mode 100644 index 0000000..515e356 --- /dev/null +++ b/CHANGELOG.MD @@ -0,0 +1,68 @@ +
+# Changelog stageworkshop
+
+* Unreleased
+
+* Version 2.0.6-ci.14
+
+## Changes made
+
+
+* 2019-04-04 willem@nutanix.com
+
+### Stageworkshop part
+ * Changed the following files:
+
+   * scripts/lib.pc.sh; Added LCM upgrades using API calls and not files. Also added Karbon enable
+
+### Call back server part
+
+ * No changes
+
+
+## Changes made
+
+
+* 2019-03-21 willem@nutanix.com
+
+### Stageworkshop part
+ * Changed the following files:
+
+   * scripts/lib.pc.sh; Added a loop function for the waiting for Calm to be enabled before starting the LCM.
+
+### Call back server part
+
+ * No changes
+
+
+* 2019-03-20 willem@nutanix.com
+
+### Stageworkshop part
+ * Changed the following files:
+
+   * release.json; Changed all the 13 to 14 as a number
+   * scripts/lib.pc.sh; Added a loop function for the LCM part to follow the progress of the API call. Also added an extra wait of 5 minutes so that the "Enable Application" is done before we start the LCM! Otherwise it will crash.
+   * scripts/lib.pe.sh; Added a line that if it's being called by we- something, it changes lib.common.sh to we-lib.common.sh
+
+ * Copied for keeping the original the following files:
+
+   * scripts/lib.pc.org.sh
+
+ * Created extra files for a server reporting system (centralised logging using curl)
+
+   * we_stage_workshop.sh; the new version of the staging workshop script. This one holds the lines for the centralised feedback
+   * we_push_centos_cl_disk.sh; small script to push the CentOS Image that is needed in the TS2019 workshops
+   * script/we-ts2019.sh; has a more debug info fired for the scripts run at PE and PC timeframe (nohup bash -x)
+   * script/we-lib.common.sh; contains extra functions for the call back to the server
+
+

Call back server part

+ * Reason for this server is to have the possibility to send the logging/steps of the stageworkshop script to a centralised server. The "tool" is based on python. + + * Created files and their goal: + + * logserver/logserver.py; Listener for the messagees from the scripts. + * usage: python3 logserver.py 3000 to start the logserver on port 3000 (used by the scripts for now) + * logserver/webserver.py; Webserver to show the messages of the different running scripts + * usage: python3 webserver to start the webserver which will default to port 5000 by default. + + diff --git a/cluster.txt b/cluster.txt new file mode 100644 index 0000000..1c28991 --- /dev/null +++ b/cluster.txt @@ -0,0 +1 @@ +10.42.86.37|techX2019!|willem@nutanix.com diff --git a/logserver/__pycache__/webserver.cpython-34.pyc b/logserver/__pycache__/webserver.cpython-34.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59a0656f45cd875f1932073de40bb00136429059 GIT binary patch literal 1390 zcmZ8hO^+Kz5Url?@p|!wK=|0pN?aHv*zAJDC5j@2gbNr6a~dIy#xuRF&Db;P?jf;d zpOyoEg}>BSPF(l_Iq_;{*ICkbm#e#9SG|5+?SF^IgT?8eznr^7f76|YF+D}s1x$&r zpo(ZEC>6AFD0N8itV5McOOH~Yq#zm4H4q_vSGrVs^al_gT>~DG3~3ot=`*4Xs0?|P z(F2Y$OQL^}h!V&8FREPsW=Be{OQ|v=S51{0Ib|Y;=D>T3u0My7#H>IEtiTiD5IO~e zTN0_qiDyY*qi=JQ&vPvcfsNb(^dt0V=sJf%gnbuek7y3}LQFXbh&?`26CV@t63Vr@<>ZZ?#H~u9~{Y zO4(HPCiN~ZF21(jRjrNnS}m20nq2EwwJOu7yg7sQnp&B}x8Z7|UsmPZhDE(z%feU} zP8t-g(zU{%RXDq+Wp$~198vM1RS099a;_pce~}vD=gp@RRo7;s3$nLmTW+VolqE(38hc33Ou4|!*a zGu?hQd;b0NXFs2@66<^=AFb-Lt>hyGoi;^X%Ci?g{rJOKFI%QfZd#qVYKRPJC|}E> zH8PDZr75oRs!AMnj1?OwsoTnQNAArDPajtcZL*crI{#H_p3plUU^tg%XDEhoH}ghJ3XHqP>M^2I z@FAQ-sqZ^Ad)-iBM#fE7K@?Sp7Q&xH&W{HUy0BR!+I=IPWbjSsGh8TA2-b-|_6!uqO4QFY<&B=0V@tD@*}Uja{8 PWzCQ4ffzYM=lgyh literal 0 HcmV?d00001 diff --git a/logserver/__pycache__/webserver.cpython-36.pyc b/logserver/__pycache__/webserver.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6190576affa1452c52ab22921d1cc5c7308b7e55 GIT binary patch 
literal 1283 zcmZ9MOK%e~5P)s(Bb(6DmPbJ?hh8I92~ClZ5JE^mxj>L8Cktt{S-UjBiMO^FNR*t? z0~dY>zlOW7ocIepF|$jf(p}AD){mL-=b0R?ueW}^`~I;P5b~Q`dElEo2eC(>1QAq` zG39j3IHUN@R4@v8B&m#-ls8@z5s2`dh;YexA{(Qoh?Z2uqH#`Oz9rZ(X(xZc7ilve z9jVNI_8Ue{gfyv><3eRl_H&%Sd>9zdLF@xil5i>rXMzfLPIyp9pQPzrIRAVSw@i_W%Jhx%_>AGiY$OfF4*D^2SR!Cp(#vocfljM)N5<~AzidqrU!6N186 z7L9}^&1&R=c2~uV(D2SGa9F%`(JlZQ0&~zRb1I;8e*{%={kIY8_ZY<90_AW6vZgE2 zv=3-7bd@34c&R*u4zkxW^@$k3v80*aLw_}&m?m_#i~XjjQrgZ_rSnYLXPwKnv(^xL z@>}QBjn!;3D+)EtGdI+e?#Sv1#KXM&8WgLO6PLPaA=}ghFbH$^aV~%>yPisO74hyW z7nVdZHM_X;hL2A0AHg8H${F9N8l>~DJG9}=m1&x|>JBI$K!Ozw9~lh=RJpqnyt=@u WVv?uzZ9hZ67W-$DwphY$Zu|uSyDKdK literal 0 HcmV?d00001 diff --git a/logserver/curl_sim.sh b/logserver/curl_sim.sh new file mode 100644 index 0000000..09cb05e --- /dev/null +++ b/logserver/curl_sim.sh @@ -0,0 +1,233 @@ +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.100.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.100.37%7cdependencies%7cInstall%20sshpass... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.100.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.100.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.100.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.100.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.100.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.100.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.100.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.100.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.100.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.100.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.100.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.100.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.100.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.100.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.100.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST 
http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.100.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.100.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.100.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.100.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.100.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.100.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.100.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.100.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.100.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.100.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.100.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.100.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.100.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.100.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.100.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.100.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.100.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.100.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.100.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.100.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.100.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.100.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.100.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.100.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.100.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.100.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.100.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.100.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.100.37%7cremote_exec%7cOptional%3a%20giving%20up. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.100.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.100.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.100.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cdependencies%7cSuccess%3a%20found%20jq. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.100.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.100.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.100.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.100.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.100.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.100.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.100.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.100.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.100.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.100.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.100.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.100.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.100.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' \ No newline at end of file diff --git a/logserver/curl_sim1.sh b/logserver/curl_sim1.sh new file mode 100644 index 0000000..138c1d9 --- /dev/null +++ b/logserver/curl_sim1.sh @@ -0,0 +1,233 @@ +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.21.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.21.37%7cdependencies%7cInstall%20sshpass... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.21.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.21.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.21.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.21.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.21.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.21.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.21.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.21.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.21.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.21.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.21.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.21.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.21.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.21.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.21.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST 
http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.21.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.21.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.21.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.21.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.21.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.21.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.21.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.21.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.21.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.21.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.21.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.21.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.21.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.21.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.21.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.21.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.21.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.21.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.21.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.21.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.21.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.21.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.21.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.21.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.21.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.21.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.21.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.21.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.21.37%7cremote_exec%7cOptional%3a%20giving%20up. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.21.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.21.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.21.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cdependencies%7cSuccess%3a%20found%20jq. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.21.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.21.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.21.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.21.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.21.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.21.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.21.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.21.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.21.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.21.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.21.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.21.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.21.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' \ No newline at end of file diff --git a/logserver/curl_sim2.sh b/logserver/curl_sim2.sh new file mode 100644 index 0000000..e835977 --- /dev/null +++ b/logserver/curl_sim2.sh @@ -0,0 +1,233 @@ +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.2.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.2.37%7cdependencies%7cInstall%20sshpass... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.2.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.2.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.2.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.2.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.2.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.2.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.2.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.2.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.2.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.2.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.2.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.2.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.2.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.2.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.2.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST 
http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.2.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.2.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.2.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.2.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.2.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.2.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.2.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.2.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.2.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.2.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.2.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.2.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.2.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.2.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.2.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.2.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.2.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.2.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.2.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.2.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.2.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.2.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.2.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.2.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.2.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.2.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.2.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.2.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.2.37%7cremote_exec%7cOptional%3a%20giving%20up. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.2.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.2.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.2.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cdependencies%7cSuccess%3a%20found%20jq. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.2.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.2.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.2.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.2.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.2.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.2.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.2.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.2.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.2.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.2.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.2.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.2.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.2.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' \ No newline at end of file diff --git a/logserver/curl_sim3.sh b/logserver/curl_sim3.sh new file mode 100644 index 0000000..2b91ec8 --- /dev/null +++ b/logserver/curl_sim3.sh @@ -0,0 +1,233 @@ +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.31.37%7cbegin%7cwe-ts2019.sh%20release%3a%202.0.6-ci.13%20start._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.31.37%7cdependencies%7cInstall%20sshpass... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a45%7c11219%7c10.42.31.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2fmirror.centos.org%2fcentos%2f7%2fextras%2fx86_64%2fPackages%2fsshpass-1.06-2.el7.x86_64.rpm... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20sshpass-1.06-2.el7.x86_64.rpm -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7cdependencies%7cInstall%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a46%7c11219%7c10.42.31.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.31.37%7crepo_source%7cFound%2c%20HTTP%3a302%20%3d%20https%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a47%7c11219%7c10.42.31.37%7cdownload%7chttps%3a%2f%2fgithub.com%2fstedolan%2fjq%2freleases%2fdownload%2fjq-1.5%2fjq-linux64... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a48%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20jq-linux64 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a49%7c11219%7c10.42.31.37%7cpe_license%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a52%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.31.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a58%7c11219%7c10.42.31.37%7cpe_license%7cValidate%20EULA%20on%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a50%3a59%7c11219%7c10.42.31.37%7cpe_license%7cDisable%20Pulse%20in%20PE%3a%20_test%3d%7c%7c -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a04%7c11219%7c10.42.31.37%7cpe_init%7cConfigure%20SMTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a17%7c11219%7c10.42.31.37%7cpe_init%7cConfigure%20NTP -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a24%7c11219%7c10.42.31.37%7cpe_init%7cRename%20default%20container%20to%20Default -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a34%7c11219%7c10.42.31.37%7cpe_init%7cRename%20default%20storage%20pool%20to%20SP01 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a44%7c11219%7c10.42.31.37%7cpe_init%7cCheck%20if%20there%20is%20a%20container%20named%20Images%2c%20if%20not%20create%20one -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.31.37%7cpe_init%7cContainer%20Images%20exists -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a51%3a49%7c11219%7c10.42.31.37%7cpe_init%7cSet%20Data%20Services%20IP%20address%20to%2010.42.100.38 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST 
http://127.0.0.1:8080/2019-03-11%2017%3a51%3a56%7c11219%7c10.42.31.37%7cnetwork_configure%7cRemove%20Rx-Automation-Network... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a00%7c11219%7c10.42.31.37%7cnetwork_configure%7cCreate%20primary%20network%3a%20Name%3a%20Primary%2c%20VLAN%3a%200%2c%20Subnet%3a%2010.42.100.1%2f25%2c%20Domain%3a%20NTNXLAB%2c%20Pool%3a%2010.42.100.50%20to%2010.42.100.125 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a04%7c11219%7c10.42.31.37%7cnetwork_configure%7cCreate%20secondary%20network%3a%20Name%3a%20Secondary%2c%20VLAN%3a%201001%2c%20Subnet%3a%2010.42.100.129%2f25%2c%20Pool%3a%2010.42.100.132%20to%2010.42.100.229 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a08%7c11219%7c10.42.31.37%7cauthentication_source%7cPC_VERSION%205.10.2%20%3e%3d%205.9%2c%20setting%20AutoDC-2.0... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7cdns_check%7cError%2044%3a%20result%20was%209%3a%20%3b%3b%20connection%20timed%20out%3b%20no%20servers%20could%20be%20reached -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7cauthentication_source%7cAutoDC2.IDEMPOTENCY%20failed%2c%20no%20DNS%20record%20dc.ntnxlab.local -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7crepo_source%7cConvenience%3a%20omitted%20package%20argument%2c%20added%20package%3d -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7crepo_source%7c%20Lost%2c%20HTTP%3a000%20%3d%20http%3a%2f%2flocalhost%3a8181%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7crepo_source%7cFound%2c%20HTTP%3a200%20%3d%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a10%7c11219%7c10.42.31.37%7cauthentication_source%7cImport%20AutoDC2%20image%20from%20http%3a%2f%2f10.42.8.50%2fimages%2fAutoDC2.qcow2... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a20%7c11219%7c10.42.31.37%7cauthentication_source%7cCreate%20AutoDC2%20VM%20based%20on%20AutoDC2%20image -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a22%7c11219%7c10.42.31.37%7cauthentication_source%7cPower%20on%20AutoDC2%20VM... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a30%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%201%2f40%3d%7c2019-03-11%2017%3a52%3a30%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a43%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%202%2f40%3d%7c2019-03-11%2017%3a52%3a43%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a52%3a53%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%203%2f40%3d%7c2019-03-11%2017%3a52%3a53%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d255%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.31.37%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a03%7c11219%7c10.42.31.37%7cauthentication_source%7c_test%204%2f40%3d%7c%20%2a%20status%3a%20stopped%0a2019-03-11%2017%3a53%3a03%7c11219%7cremote_exec%7cError%2022%3a%20pwd%3d%2fhome%2fnutanix%2c%20_test%3d3%2c%20_host%3d10.42.100.40%7c%3a%20sleep%2010%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a14%7c11219%7c10.42.31.37%7cauthentication_source%7cAutoDC2%20is%20ready. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a24%7c11219%7c10.42.31.37%7cauthentication_source%7cCreate%20Reverse%20Lookup%20Zone%20on%20AutoDC2%20VM... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a41%7c11219%7c10.42.31.37%7cauthentication_source%7cSuccess%3a%20DNS%20record%20dc.ntnxlab.local%20set. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.31.37%7cpe_auth%7cAdjusted%20directory-url%3dldap%3a%2f%2f10.42.100.40%3a389%20because%20AOS-5.10.2%20%3e%3d%205.9 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a51%7c11219%7c10.42.31.37%7cpe_auth%7cConfigure%20PE%20external%20authentication -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a53%3a56%7c11219%7c10.42.31.37%7cpe_auth%7cConfigure%20PE%20role%20map -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a01%7c11219%7c10.42.31.37%7cpc_install%7cIDEMPOTENCY%3a%20Checking%20PC%20API%20responds%2c%20curl%20failures%20are%20acceptable... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a04%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%201%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a07%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%202%2f2%3d000%3a%20sleep%200%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.31.37%7cprism_check%7cWarning%2077%20%40PC%3a%20Giving%20up%20after%203%20tries. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a10%7c11219%7c10.42.31.37%7cpc_install%7cGet%20cluster%20network%20and%20storage%20container%20UUIDs... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.31.37%7cpc_install%7cPrimary%20network%20UUID%3a%2084f854ca-e65a-4ba4-8e84-7c3061c33c42 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a15%7c11219%7c10.42.31.37%7cpc_install%7cDefault%20storage%20container%20UUID%3a%20118bd727-366f-418f-8686-e3434b18e6bd -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cpc_install%7cPC-5.10.2%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20download%20metadata%20pcdeploy-5.10.2.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fpcdeploy-5.10.2.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a25%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20pcdeploy-5.10.2.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20Nutanix%20PRISM_CENTRAL_DEPLOY%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a26%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2feuphrates-5.10.2-stable-prism_central.tar... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a37%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20euphrates-5.10.2-stable-prism_central.tar -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a54%3a45%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%3a%20PRISM_CENTRAL_DEPLOY%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%21%20Delete%20PRISM_CENTRAL_DEPLOY%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a41%7c11219%7c10.42.31.37%7cpc_install%7cDeploy%20Prism%20Central%20%28typically%20takes%2017%2b%20minutes%29... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a58%3a45%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%201%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2017%3a59%3a48%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%202%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a00%3a51%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%203%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a01%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%204%2f40%3d000%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a02%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%205%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a03%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%206%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a04%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%207%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a05%3a54%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%208%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a06%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%209%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a07%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2010%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a08%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2011%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a09%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2012%2f40%3d000%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a10%3a55%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2013%2f40%3d403%3a%20sleep%2060%20seconds... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.31.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.31.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a11%3a59%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2014%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.31.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.31.37%7cprism_check%7cWarning%20%40PC-dev%3a%20Fallback%20on%2010.42.100.39%3a%20try%20PE%20cluster%20password%20next%20cycle... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a13%3a02%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2015%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.31.37%7cprism_check%7cWarning%3a%20unauthorized%20PC%20user%20or%20password%20on%2010.42.100.39. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.31.37%7cprism_check%7cWarning%20%40PC%3a%20Fallback%20on%2010.42.100.39%3a%20try%20initial%20password%20next%20cycle... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a14%3a04%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%2016%2f40%3d401%3a%20sleep%2060%20seconds... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7cprism_check%7c%40PC%3a%20successful. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7ccluster_check%7cPC%3e%3d5.10%2c%20checking%20multicluster%20state... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7cpc_configure%7cSend%20configuration%20scripts%20to%20PC%20and%20remove%3a%20global.vars.sh%20lib.pc.sh%20we-ts2019.sh%20we-lib.common.sh%20release.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a05%7c11219%7c10.42.31.37%7cpc_configure%7cOPTIONAL%3a%20Send%20binary%20dependencies%20to%20PC%3a%20bin%2fjq-linux64%20sshpass-1.06-2.el7.x86_64.rpm%20id_rsa.pub -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.31.37%7cremote_exec%7cOptional%3a%20giving%20up. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a06%7c11219%7c10.42.31.37%7cpc_configure%7cRemote%20asynchroneous%20launch%20PC%20configuration%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.31.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20PC -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cpc_configure%7cPC%20Configuration%20complete%3a%20try%20Validate%20Staged%20Clusters%20now. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cmain%7cPC%20Configuration%20complete%3a%20Waiting%20for%20PC%20deployment%20to%20complete%2c%20API%20is%20up%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cmain%7cPE%20%3d%20https%3a%2f%2f10.42.31.37%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cmain%7cPC%20%3d%20https%3a%2f%2f10.42.100.39%3a9440 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cfinish%7c%2fhome%2fnutanix%2fwe-ts2019.sh%20ran%20for%201462%20seconds._____________________ -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cdependencies%7cSuccess%3a%20found%20jq. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a07%7c11219%7c10.42.31.37%7cfiles_install%7cIDEMPOTENCY%3a%20checking%20for%20afs%20completed... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a10%7c11219%7c10.42.31.37%7ccluster_check%7cCluster%20status%3a%20%7c%7c%2c%20exit%3a%205. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.31.37%7cfiles_install%7cFiles%203.2.0.1%20not%20completed.%20 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20download%20metadata%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json%20... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a17%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.31.37%7cntnx_download%7cRetrieving%20Nutanix%20afs%20bits... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a18%7c11219%7c10.42.31.37%7cdownload%7chttp%3a%2f%2f10.42.8.50%2fimages%2fnutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2... 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a23%7c11219%7c10.42.31.37%7cdownload%7cSuccess%3a%20nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a25%7c11219%7c10.42.31.37%7ccluster_check%7cManual%20join%20PE%20to%20PC%20%3d%20%7cCluster%20registration%20is%20currently%20in%20progress.%20This%20operation%20may%20take%20a%20while.%0a%0a%20%20%20%20Status%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%3a%20true%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a27%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%3a%20afs%20bits%20downloaded%20and%20passed%20MD5%20checksum%21 -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.31.37%7ccluster_check%7cCluster%20status%3a%20%7ctrue%7c%2c%20exit%3a%200. -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.31.37%7ccluster_check%7cPE%20to%20PC%20%3d%20cluster%20registration%3a%20successful. 
-H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a15%3a30%7c11219%7c10.42.31.37%7cmain%7cRemote%20asynchroneous%20PC%20Image%20import%20script...%20EMAIL%3dnathan.cox%40nutanix.com%20%20%20%20%20%20%20%20%20%20%20%20PC_HOST%3d10.42.100.39%20PE_HOST%3d10.42.31.37%20PE_PASSWORD%3dtechX2019%21%20%20%20%20%20%20%20%20%20%20%20%20PC_LAUNCH%3dwe-ts2019.sh%20PC_VERSION%3d5.10.2%20nohup%20bash%20%2fhome%2fnutanix%2fwe-ts2019.sh%20IMAGES -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a26%7c11219%7c10.42.31.37%7cntnx_download%7cSuccess%21%20Delete%20afs%20sources%20to%20free%20CVM%20space... -H 'cache-control: no-cache' -H 'content-length: 0' +sleep 10 +curl -s -X POST http://127.0.0.1:8080/2019-03-11%2018%3a17%3a57%7c11219%7c10.42.31.37%7cdependencies%7cWarning%3a%20assuming%20on%20PC%20or%20PE%20VM%2c%20removing%20jq... -H 'cache-control: no-cache' -H 'content-length: 0' \ No newline at end of file diff --git a/logserver/hpoc_deploy.sql b/logserver/hpoc_deploy.sql new file mode 100644 index 0000000..67c31d9 --- /dev/null +++ b/logserver/hpoc_deploy.sql @@ -0,0 +1,61 @@ +-- MySQL dump 10.14 Distrib 5.5.60-MariaDB, for Linux (x86_64) +-- +-- Host: localhost Database: hpoc_deploy +-- ------------------------------------------------------ +-- Server version 5.5.60-MariaDB + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; +/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; +/*!40101 SET NAMES utf8 */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET 
@OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- +-- Current Database: `hpoc_deploy` +-- + +CREATE DATABASE /*!32312 IF NOT EXISTS*/ `hpoc_deploy` /*!40100 DEFAULT CHARACTER SET latin1 */; + +USE `hpoc_deploy`; + +-- +-- Table structure for table `deploy_status` +-- + +DROP TABLE IF EXISTS `deploy_status`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `deploy_status` ( + `id` int(6) NOT NULL AUTO_INCREMENT, + `hostname` varchar(255) DEFAULT NULL, + `module` varchar(255) DEFAULT NULL, + `replycode` varchar(255) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=129 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `deploy_status` +-- + +LOCK TABLES `deploy_status` WRITE; +/*!40000 ALTER TABLE `deploy_status` DISABLE KEYS */; +INSERT INTO `deploy_status` VALUES (125,'10.42.100.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'),(126,'10.42.21.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'),(127,'10.42.2.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'),(128,'10.42.31.37','dependencies','Warning: assuming on PC or PE VM, removing jq...'); +/*!40000 ALTER TABLE `deploy_status` ENABLE KEYS */; +UNLOCK TABLES; +/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; + +/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; +/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; +/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; +/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; +/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; + +-- Dump completed on 2019-03-13 3:49:55 diff --git a/logserver/logserver.py b/logserver/logserver.py new file mode 100644 index 0000000..f60a01d --- /dev/null +++ b/logserver/logserver.py @@ -0,0 +1,71 @@ 
+#!/usr/bin/env python3 + +from http.server import BaseHTTPRequestHandler, HTTPServer +import logging +import urllib +import mysql.connector as mariadb +from flask import Flask + +class S(BaseHTTPRequestHandler): + def _set_response(self): + self.send_response(200) + self.send_header('Content-type', 'text/html') + self.end_headers() + + def do_POST(self): + content_length = int(self.headers['Content-Length']) # <--- Gets the size of data + post_data = self.rfile.read(content_length) # <--- Gets the data itself + #logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n",str(self.path), str(self.headers), post_data.decode('utf-8')) + message=urllib.parse.unquote(self.path) + message_list=message[1:].split("|") + db_actions(message_list[0],message_list[2],message_list[3],message_list[4],'insert') + + self._set_response() + #self.wfile.write("POST request for {}".format(self.path).encode('utf-8')) + +# MariaDB related stuff. get the query and what is the module to run (create,insert, update, etc) +def db_actions(date,host_ip,module,module_msg,action): + # open the mariadb connection + mariadb_connection=mariadb.connect(user='webusr',password='webusr',database='hpoc_deploy',host='127.0.0.1',port='3306') + cursor=mariadb_connection.cursor() + + # check if the host_ip exists in the database + query='select count(*) from deploy_status where hostname=\'' + host_ip + '\'' + cursor.execute(query) + + row=cursor.fetchone() + if row[0]==0: + # if the hostname does not exist in the table, add it to the table and move forward + query="insert into deploy_status(hostname) values(\'" + host_ip +"\')" + cursor.execute(query) + mariadb_connection.commit() + else: + #update deploy_status set replycode='Cluster status: ', module='cluster_check' where hostname='10.42.100.37' + query="update deploy_status set replycode=\'" + module_msg +"\', module=\'" + module + "\' where hostname=\'" + host_ip +"\'" + cursor.execute(query) + mariadb_connection.commit() + + #close the mariadb 
connection + mariadb_connection.close() + return + +# Function for running the HTTP server +def run(server_class=HTTPServer, handler_class=S, port=8080): + logging.basicConfig(level=logging.INFO) + server_address = ('', port) + httpd = server_class(server_address, handler_class) + logging.info('Starting httpd...\n') + try: + httpd.serve_forever() + except KeyboardInterrupt: + pass + httpd.server_close() + logging.info('Stopping httpd...\n') + +if __name__ == '__main__': + from sys import argv + + if len(argv) == 2: + run(port=int(argv[1])) + else: + run() diff --git a/logserver/nagios_setup.sh b/logserver/nagios_setup.sh new file mode 100644 index 0000000..24885e2 --- /dev/null +++ b/logserver/nagios_setup.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Script for setting up the configuration files for the PE Cluster during the TechSummit 2019 + + +# Get the list of the clusetrs to be created +for ip_name in `cat /root/scripts/pelist.txt` +do + # Grab the IP address from the cluster + pe_ip=`echo $ip_name | cut -d";" -f 2` + + # Grab the name of the cluster + pe_name=`echo $ip_name | cut -d";" -f 1` + + # Copy the default file to the new, good hostname + cp /usr/local/nagios/etc/servers/yourhost.cfg /usr/local/nagios/etc/servers/$pe_name.cfg + + # Changing the original files to the right information + # Set the right Servername + sed -i "s/CLUSTER_NAME/$pe_name/g" /usr/local/nagios/etc/servers/$pe_name.cfg + + # Set the right IP address + sed -i "s/CLUSTER_IP/$pe_ip/g" /usr/local/nagios/etc/servers/$pe_name.cfg +done + +# Rename the temp file +mv /usr/local/nagios/etc/servers/yourhost.cfg /usr/local/nagios/etc/servers/yourhost.cfg.tmp + +# Restart Nagios so it can start monitoring: +systemctl reload nagios.service diff --git a/logserver/pelist.txt b/logserver/pelist.txt new file mode 100644 index 0000000..cecd599 --- /dev/null +++ b/logserver/pelist.txt @@ -0,0 +1,84 @@ +PHX-POC001;10.42.1.37 +PHX-POC002;10.42.2.37 +PHX-POC003;10.42.3.37 +PHX-POC004;10.42.4.37 
+PHX-POC005;10.42.5.37 +PHX-POC006;10.42.6.37 +PHX-POC007;10.42.7.37 +PHX-POC010;10.42.10.37 +PHX-POC011;10.42.11.37 +PHX-POC012;10.42.12.37 +PHX-POC013;10.42.13.37 +PHX-POC014;10.42.14.37 +PHX-POC015;10.42.15.37 +PHX-POC016;10.42.16.37 +PHX-POC019;10.42.19.37 +PHX-POC020;10.42.20.37 +PHX-POC021;10.42.21.37 +PHX-POC022;10.42.22.37 +PHX-POC024;10.42.24.37 +PHX-POC025;10.42.25.37 +PHX-POC026;10.42.26.37 +PHX-POC027;10.42.27.37 +PHX-POC028;10.42.28.37 +PHX-POC029;10.42.29.37 +PHX-POC030;10.42.30.37 +PHX-POC031;10.42.31.37 +PHX-POC032;10.42.32.37 +PHX-POC033;10.42.33.37 +PHX-POC034;10.42.34.37 +PHX-POC037;10.42.37.37 +PHX-POC038;10.42.38.37 +PHX-POC042;10.42.42.37 +PHX-POC044;10.42.44.37 +PHX-POC045;10.42.45.37 +PHX-POC046;10.42.46.37 +PHX-POC047;10.42.47.37 +PHX-POC048;10.42.48.37 +PHX-POC049;10.42.49.37 +PHX-POC050;10.42.50.37 +PHX-POC051;10.42.51.37 +PHX-POC052;10.42.52.37 +PHX-POC053;10.42.53.37 +PHX-POC054;10.42.54.37 +PHX-POC055;10.42.55.37 +PHX-POC056;10.42.56.37 +PHX-POC057;10.42.57.37 +PHX-POC061;10.42.61.37 +PHX-POC062;10.42.62.37 +PHX-POC065;10.42.65.37 +PHX-POC066;10.42.66.37 +PHX-POC067;10.42.67.37 +PHX-POC068;10.42.68.37 +PHX-POC069;10.42.69.37 +PHX-POC070;10.42.70.37 +PHX-POC072;10.42.72.37 +PHX-POC073;10.42.73.37 +PHX-POC074;10.42.74.37 +PHX-POC079;10.42.79.37 +PHX-POC080;10.42.80.37 +PHX-POC081;10.42.81.37 +PHX-POC082;10.42.82.37 +PHX-POC083;10.42.83.37 +PHX-POC086;10.42.86.37 +PHX-POC087;10.42.87.37 +PHX-POC088;10.42.88.37 +PHX-POC090;10.42.90.37 +PHX-POC091;10.42.91.37 +PHX-POC092;10.42.92.37 +PHX-POC093;10.42.93.37 +PHX-POC094;10.42.94.37 +PHX-POC095;10.42.95.37 +PHX-POC096;10.42.96.37 +PHX-POC097;10.42.97.37 +PHX-POC098;10.42.98.37 +PHX-POC099;10.42.99.37 +PHX-POC101;10.42.101.37 +PHX-POC102;10.42.102.37 +PHX-POC104;10.42.104.37 +PHX-POC106;10.42.106.37 +PHX-POC107;10.42.107.37 +PHX-POC108;10.42.108.37 +PHX-POC109;10.42.109.37 +PHX-POC110;10.42.110.37 +PHX-POC113;10.42.113.37 \ No newline at end of file diff --git a/logserver/templates/index.html 
b/logserver/templates/index.html new file mode 100644 index 0000000..eae0996 --- /dev/null +++ b/logserver/templates/index.html @@ -0,0 +1,50 @@ + + Stageworkshop Summary + + + + + +Employees + + + + + + + + +{% if result %} + +{% for row in result %} + + + + + + + +{% endfor %} + +{% endif %} + + + + +
HostnameModuleLog message
{{ row[0] }} {{ row[1] }} {{ row[2] }}
+ + diff --git a/logserver/webserver.py b/logserver/webserver.py new file mode 100644 index 0000000..5426fcd --- /dev/null +++ b/logserver/webserver.py @@ -0,0 +1,32 @@ +from flask import Flask, render_template +import mysql.connector as mysqldb + +app = Flask(__name__) + +class Database: + def __init__(self): + host = "127.0.0.1" + user = "webusr" + password = "webusr" + db = "hpoc_deploy" + port = "3306" + self.con = mysqldb.connect(host=host, user=user, password=password, database=db, port=port) + self.cur = self.con.cursor() + + def list_messages(self): + self.cur.execute("SELECT hostname,module,replycode FROM deploy_status") + result = self.cur.fetchall() + return result + +@app.route('/') +def employees(): + def db_query(): + db = Database() + emps = db.list_messages() + return emps + + res = db_query() + return render_template('index.html', result=res, content_type='application/json') + +if __name__ == "__main__": + app.run(host='0.0.0.0') diff --git a/nodemanager/package.json b/nodemanager/package.json new file mode 100644 index 0000000..92c5c4c --- /dev/null +++ b/nodemanager/package.json @@ -0,0 +1,11 @@ +{ + "name": "nodemanager", + "version": "1.0.0", + "description": "Central Log server", + "main": "app.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC" +} diff --git a/quick.txt b/quick.txt new file mode 100644 index 0000000..0b7dad2 --- /dev/null +++ b/quick.txt @@ -0,0 +1 @@ +10.42.41.37|techX2019!|nathan.cox@nutanix.com \ No newline at end of file diff --git a/release.json b/release.json index cc6b5d1..89d9750 100644 --- a/release.json +++ b/release.json @@ -2,31 +2,31 @@ "Major": 2, "Minor": 0, "Patch": 6, - "PreReleaseTag": "ci.13", - "PreReleaseTagWithDash": "-ci.13", + "PreReleaseTag": "ci.14", + "PreReleaseTagWithDash": "-ci.14", "PreReleaseLabel": "ci", - "PreReleaseNumber": 13, + "PreReleaseNumber": 14, "BuildMetaData": "", "BuildMetaDataPadded": "", "FullBuildMetaData": 
"Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc", "MajorMinorPatch": "2.0.6", - "SemVer": "2.0.6-ci.13", - "LegacySemVer": "2.0.6-ci13", - "LegacySemVerPadded": "2.0.6-ci0013", + "SemVer": "2.0.6-ci.14", + "LegacySemVer": "2.0.6-ci14", + "LegacySemVerPadded": "2.0.6-ci0014", "AssemblySemVer": "2.0.6.0", "AssemblySemFileVer": "2.0.6.0", - "FullSemVer": "2.0.6-ci.13", - "InformationalVersion": "2.0.6-ci.13+Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc", + "FullSemVer": "2.0.6-ci.14", + "InformationalVersion": "2.0.6-ci.14+Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc", "BranchName": "master", "Sha": "3d62e775126b97ddac481a1fcc81920d42d998fc", "ShortSha": "3d62e77", - "NuGetVersionV2": "2.0.6-ci0013", - "NuGetVersion": "2.0.6-ci0013", - "NuGetPreReleaseTagV2": "ci0013", + "NuGetVersionV2": "2.0.6-ci0014", + "NuGetVersion": "2.0.6-ci0014", + "NuGetPreReleaseTagV2": "ci0014", "NuGetPreReleaseTag": "ci0013", - "CommitsSinceVersionSource": 13, - "CommitsSinceVersionSourcePadded": "0013", - "CommitDate": "2019-02-17", + "CommitsSinceVersionSource": 14, + "CommitsSinceVersionSourcePadded": "0014", + "CommitDate": "2019-03-20", "PrismCentralStable": "5.8.2", - "PrismCentralDev": "5.10.1.1" + "PrismCentralDev": "5.10.2" } diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 08d8b1b..2aca9af 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -131,52 +131,59 @@ function lcm() { # Run the progess checker loop - # We need to get the UUIDs and the versions to be used.. so we can start the update. They are in the /home/nutanix/data/logs/lcm_ops.out AFTER an inventory run! - # ******!!!!!!WE ARE USING A DEBUG LINE IN THE FILE MENTIONED!!! 
NEED TO KEEP TRACK OF THAT IF IT CHANGES!!!!!***** - _full_uuids=$(cat /home/nutanix/data/logs/lcm_ops.out | grep -A 1 entity_uuid | grep -B 1 "2.6.0.3") + ################################################################# + # Grab the json from the possible to be updated UUIDs and versions and save local in reply_json.json + ################################################################# + + # Need loop so we can create the full json more dynamical + + # Run the Curl command and save the oputput in a temp file + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json + + # Fill the uuid array with the correct values + uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) + + # Grabbing the versions of the UUID and put them in a versions array + for uuid in "${uuid_arr[@]}" + do + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply-inventory.json | tail -4 | head -n 1 | tr -d \")) + done - # As we need to have the latest version from the LCM we need to reverse the string so we get the last versions/UUIDS - _first_uuid=$(echo $_full_uuids |rev|cut -d":" -f 4 |rev | cut -d "\"" -f2) - _first_version="2.6.0.3" # Setting the version number hard coded!!! This is what has been tested for the workshops. 
- _sec_uuid=$(echo $_full_uuids rev|rev | cut -d":" -f 2 |rev | cut -d "\"" -f2) - _sec_version=$_first_version - # Set the parameter to create the ugrade plan # Create the curl json string '-d blablablablabla' so we can call the string and not the full json data line + # Begin of the JSON data payload _json_data="-d " - _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"" - _json_data+=$_first_uuid - _json_data+="\\\",\\\"" - _json_data+=$_first_version - _json_data+="\\\"],[\\\"" - _json_data+=$_sec_uuid - _json_data+="\\\",\\\"" - _json_data+=$_sec_version - _json_data+="\\\"]]]}}\"}" - - + _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[" + + # Combine the two created UUID and Version arrays to the full needed data using a loop + count=0 + while [ $count -lt ${#uuid_arr[@]} ] + do + _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," + let count=count+1 + done + + # Remove the last "," as we don't need it. 
+ _json_data=${_json_data%?}; + + # Last part of the JSON data payload + _json_data+="]]}}\"}" + # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) - - # Notify the log server that the LCM has been creating a plan + + # Remove the temp json file as we don't need it anymore + rm -rf reply_json.json + + # Notify the log server that the LCM has created a plan log "LCM Inventory has created a plan" - set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + + # Reset the loop counter so we restart the amount of loops we need to run + set _loops=0 - # As the new json only needs to have the generate_plan changed into "perform_update" we also migh tuse sed... + # As the new json for the perform the upgrade only needs to have "generate_plan" changed into "perform_update" we use sed... _json_data=$(echo $_json_data | sed -e 's/generate_plan/perform_update/g') - # Create new json data string - #_json_data="-d " - #_json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"perform_update\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"" - #_json_data+=$_first_uuid - #_json_data+="\\\",\\\"" - #_json_data+=$_first_version - #_json_data+="\\\"],[\\\"" - #_json_data+=$_sec_uuid - #_json_data+="\\\",\\\"" - #_json_data+=$_sec_version - #_json_data+="\\\"]]]}}\"}" - # Run the upgrade to have the latest versions _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") @@ -195,6 +202,39 @@ function lcm() { fi } + +############################################################################################################################################################################### +# Routine to enable Karbon 
+############################################################################################################################################################################### + +function karbon_enable() { + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _loop=0 + local _json_data_enable="-d '{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}'" + local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" + + # Send the enable command to the PC IP using localhost + log "Enable the Karbon service on the PC..." + + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_enable ${_httpURL}| grep true | wc -l) + + if [[ $_response -le 0 ]]; then + log "Retrying to enable Karbon services one more time...." + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_enable ${_httpURL} | grep true | wc -l) + + if [[ $_response -le 0 ]]; then + log "Unable to enable Karbon. As there are more dependencies on Karbon we stop the script....." + exit 1 + else + log "Karbon has been enabled..." + fi + else + log "Karbon has been enabled..." 
+ fi +} + + + ############################################################################################################################################################################### # Routine for PC_Admin ############################################################################################################################################################################### @@ -289,7 +329,6 @@ EOF ############################################################################################################################################################################### # Routine to import the images into PC -# TODO: As we test for acli, which is not avail on the PC, we use NUCLEI. Can we make a change???? ############################################################################################################################################################################### function pc_cluster_img_import() { diff --git a/scripts/we-lib.common.sh b/scripts/we-lib.common.sh new file mode 100644 index 0000000..804f625 --- /dev/null +++ b/scripts/we-lib.common.sh @@ -0,0 +1,849 @@ +#!/usr/bin/env bash +# dependencies: dig + +function args_required() { + local _argument + local _error=88 + + for _argument in ${1}; do + if [[ ${DEBUG} ]]; then + log "DEBUG: Checking ${_argument}..." + fi + _RESULT=$(eval "echo \$${_argument}") + if [[ -z ${_RESULT} ]]; then + log "Error ${_error}: ${_argument} not provided!" + exit ${_error} + elif [[ ${DEBUG} ]]; then + log "Non-error: ${_argument} for ${_RESULT}" + fi + done + + if [[ ${DEBUG} ]]; then + log 'Success: required arguments provided.' 
+ fi +} + +function begin() { + local _release + + if [[ -e ${RELEASE} ]]; then + _release=" release: $(grep FullSemVer ${RELEASE} | awk -F\" '{print $4}')" + fi + + log "$(basename ${0})${_release} start._____________________" +} + +function dependencies { + local _argument + local _error + local _index + local _jq_pkg=${JQ_REPOS[0]##*/} + local _sshpass_pkg=${SSHPASS_REPOS[0]##*/} + + if [[ -z ${1} ]]; then + _error=20 + log "Error ${_error}: missing install or remove verb." + exit ${_error} + elif [[ -z ${2} ]]; then + _error=21 + log "Error ${_error}: missing package name." + exit ${_error} + elif [[ "${1}" != 'install' ]] && [[ "${1}" != 'remove' ]]; then + _error=20 + log "Error ${_error}: wrong install or remove verb (case sensitive)." + exit ${_error} + fi + + case "${1}" in + 'install') + + if [[ -z $(which ${2}) ]]; then + log "Install ${2}..." + case "${2}" in + sshpass | ${_sshpass_pkg}) + if [[ ( ${OS_NAME} == 'Ubuntu' || ${OS_NAME} == 'LinuxMint' ) ]]; then + sudo apt-get install --yes sshpass + elif [[ ${OS_NAME} == '"centos"' ]]; then + # TOFIX: assumption, probably on NTNX CVM or PCVM = CentOS7 + if [[ ! -e ${_sshpass_pkg} ]]; then + repo_source SSHPASS_REPOS[@] ${_sshpass_pkg} + download ${SOURCE_URL} + fi + sudo rpm -ivh ${_sshpass_pkg} + if (( $? > 0 )); then + _error=31 + log "Error ${_error}: cannot install ${2}." + exit ${_error} + fi + elif [[ ${OS_NAME} == 'Darwin' ]]; then + brew install https://raw.githubusercontent.com/kadwanev/bigboybrew/master/Library/Formula/sshpass.rb + fi + ;; + jq | ${_jq_pkg} ) + if [[ ( ${OS_NAME} == 'Ubuntu' || ${OS_NAME} == 'LinuxMint' ) ]]; then + if [[ ! -e ${_jq_pkg} ]]; then + sudo apt-get install --yes jq + fi + elif [[ ${OS_NAME} == '"centos"' ]]; then + if [[ ! 
-e ${_jq_pkg} ]]; then + repo_source JQ_REPOS[@] ${_jq_pkg} + download ${SOURCE_URL} + fi + chmod u+x ${_jq_pkg} && ln -s ${_jq_pkg} jq + + if [[ -d ${HOME}/bin ]]; then + mv jq* ${HOME}/bin/ + else + PATH+=:$(pwd) + export PATH + fi + elif [[ ${OS_NAME} == 'Darwin' ]]; then + brew install jq + fi + ;; + esac + + if (( $? > 0 )); then + _error=98 + log "Error ${_error}: can't install ${2}." + exit ${_error} + fi + else + log "Success: found ${2}." + fi + ;; + 'remove') + if [[ ${OS_NAME} == '"centos"' ]]; then + log "Warning: assuming on PC or PE VM, removing ${2}..." + case "${2}" in + sshpass | ${_sshpass_pkg}) + sudo rpm -e sshpass + ;; + jq | ${_jq_pkg} ) + if [[ -d ${HOME}/bin ]]; then + pushd bin || true + rm -f jq ${_jq_pkg} + popd || true + else + rm -f jq ${_jq_pkg} + fi + ;; + esac + else + log "Feature: don't remove dependencies on Mac OS Darwin, Ubuntu, or LinuxMint." + fi + ;; + esac +} + +function dns_check() { + local _dns + local _error + local _lookup=${1} # REQUIRED + local _test + + if [[ -z ${_lookup} ]]; then + _error=43 + log "Error ${_error}: missing lookup record!" + exit ${_error} + fi + + _dns=$(dig +retry=0 +time=2 +short @${AUTH_HOST} ${_lookup}) + _test=$? + + if [[ ${_dns} != "${AUTH_HOST}" ]]; then + _error=44 + log "Error ${_error}: result was ${_test}: ${_dns}" + return ${_error} + fi +} + +function download() { + local _attempts=5 + local _error=0 + local _http_range_enabled # TODO:40 OPTIMIZATION: disabled '--continue-at -' + local _loop=0 + local _output + local _sleep=2 + + if [[ -z ${1} ]]; then + _error=33 + log "Error ${_error}: no URL to download!" + exit ${_error} + fi + + while true ; do + (( _loop++ )) + log "${1}..." + _output='' + curl ${CURL_OPTS} ${_http_range_enabled} --remote-name --location ${1} + _output=$? 
+ #DEBUG=1; if [[ ${DEBUG} ]]; then log "DEBUG: curl exited ${_output}."; fi + + if (( ${_output} == 0 )); then + log "Success: ${1##*/}" + break + fi + + if (( ${_loop} == ${_attempts} )); then + _error=11 + log "Error ${_error}: couldn't download from: ${1}, giving up after ${_loop} tries." + exit ${_error} + elif (( ${_output} == 33 )); then + log "Web server doesn't support HTTP range command, purging and falling back." + _http_range_enabled='' + rm -f ${1##*/} + else + log "${_loop}/${_attempts}: curl=${_output} ${1##*/} sleep ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function fileserver() { + local _action=${1} # REQUIRED + local _host=${2} # REQUIRED, TODO: default to PE? + local _port=${3} # OPTIONAL + local _directory=${4} # OPTIONAL + + if [[ -z ${1} ]]; then + _error=38 + log "Error ${_error}: start or stop action required!" + exit ${_error} + fi + if [[ -z ${2} ]]; then + _error=39 + log "Error ${_error}: host required!" + exit ${_error} + fi + if [[ -z ${3} ]]; then + _port=8181 + fi + if [[ -z ${4} ]]; then + _directory=cache + fi + + case ${_action} in + 'start' ) + # Determine if on PE or PC with _host PE or PC, then _host=localhost + # ssh -nNT -R 8181:localhost:8181 nutanix@10.21.31.31 + pushd ${_directory} || exit + + remote_exec 'ssh' ${_host} \ + "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" + + # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ + # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 + #AutoDC2: pending + #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable + popd || exit + ;; + 'stop' ) + remote_exec 'ssh' ${_host} \ + "kill -9 $(pgrep python -a | grep ${_port} | awk '{ print $1 }')" 'OPTIONAL' + ;; + esac +} + +function finish() { + log "${0} ran for ${SECONDS} seconds._____________________" + echo +} + +function images() { + # 
https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html + local _cli='acli' + local _command + local _http_body + local _image + local _image_type + local _name + local _source='source_url' + local _test + + which "$_cli" + if (( $? > 0 )); then + _cli='nuclei' + _source='source_uri' + fi + + for _image in "${QCOW2_IMAGES[@]}" ; do + + # log "DEBUG: ${_image} image.create..." + if [[ ${_cli} == 'nuclei' ]]; then + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep -i complete \ + | grep "${_image}") + else + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep "${_image}") + fi + + if [[ ! -z ${_test} ]]; then + log "Skip: ${_image} already complete on cluster." + else + _command='' + _name="${_image}" + + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then + log 'Bypass multiple repo source checks...' + SOURCE_URL="${_image}" + else + repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! + fi + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." + # exit ${_error} + fi + + # TODO:0 TOFIX: acs-centos ugly override for today... + if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then + _name=acs-centos + fi + + if [[ ${_cli} == 'acli' ]]; then + _image_type='kDiskImage' + if (( $(echo "${SOURCE_URL}" | grep -i -e 'iso$' | wc --lines ) > 0 )); then + _image_type='kIsoImage' + fi + + _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ + container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" + else + _command+=" name=${_name} description=\"${_image}\"" + fi + + if [[ ${_cli} == 'nuclei' ]]; then + _http_body=$(cat <&1 & + if (( $? != 0 )); then + log "Warning: Image submission: $?. Continuing..." 
+ #exit 10 + fi + + if [[ ${_cli} == 'nuclei' ]]; then + log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" + log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" + fi + fi + fi + + done +} + +# Function to encode the message in the POST as web encoding. +function rawurlencode() { + local string="${1}" + local strlen=${#string} + local encoded="" + local pos c o + + for (( pos=0 ; pos&1) + _status=$? + + if (( $(echo "${_hold}" | grep websocket | wc --lines) > 0 )); then + log "Warning: Zookeeper isn't up yet." + elif (( ${_status} > 0 )); then + log "${_status} = ${_hold}, uh oh!" + else + log "Cluster info via nuclei seems good: ${_status}, moving on!" + break + fi + + if (( ${_loop} == ${_attempts} )); then + log "Error ${_error}: couldn't determine cluster information, giving up after ${_loop} tries." + exit ${_error} + else + log "${_loop}/${_attempts}: hold=${_hold} sleep ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function ntnx_download() { + local _checksum + local _error + local _meta_url + local _ncli_softwaretype="${1}" + local _source_url + + case "${_ncli_softwaretype}" in + PC | pc | PRISM_CENTRAL_DEPLOY ) + args_required 'PC_VERSION' + + if [[ "${PC_VERSION}" == "${PC_DEV_VERSION}" ]]; then + _meta_url="${PC_DEV_METAURL}" + else + _meta_url="${PC_STABLE_METAURL}" + fi + + if [[ -z ${_meta_url} ]]; then + _error=22 + log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!" + log 'Sync the following to global.var.sh...' + log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails' + log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side" + log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.' + exit ${_error} + fi + + if [[ ! 
-z ${PC_URL} ]]; then + _source_url="${PC_URL}" + fi + ;; + 'NOS' | 'nos' | 'AOS' | 'aos') + # TODO:70 nos is a prototype + args_required 'AOS_VERSION AOS_UPGRADE' + _meta_url="${AOS_METAURL}" + + if [[ -z ${_meta_url} ]]; then + _error=23 + log "Error ${_error}: unsupported AOS_UPGRADE=${AOS_UPGRADE}!" + log 'Browse to https://portal.nutanix.com/#/page/releases/nosDetails' + log " - Find ${AOS_UPGRADE} in the Additional Releases section on the lower right side" + log ' - Provide the Upgrade metadata URL to this function for both case stanzas.' + exit ${_error} + fi + + if [[ ! -z ${AOS_URL} ]]; then + _source_url="${AOS_URL}" + fi + ;; + FILES | files | AFS | afs ) + args_required 'FILES_VERSION' + _meta_url="${FILES_METAURL}" + + if [[ -z ${_meta_url} ]]; then + _error=22 + log "Error ${_error}: unsupported FILES_VERSION=${FILES_VERSION}!" + log 'Sync the following to global.var.sh...' + log 'Browse to https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA' + log " - Find ${FILES_VERSION} in the Additional Releases section on the lower right side" + log ' - Provide the metadata URL option to this function, both case stanzas.' + exit ${_error} + fi + + if [[ ! -z ${FILES_URL} ]]; then + _source_url="${FILES_URL}" + fi + ;; + * ) + _error=88 + log "Error ${_error}:: couldn't determine software-type ${_ncli_softwaretype}!" + exit ${_error} + ;; + esac + + if [[ ! -e ${_meta_url##*/} ]]; then + log "Retrieving download metadata ${_meta_url##*/} ..." + download "${_meta_url}" + else + log "Warning: using cached download ${_meta_url##*/}" + fi + + if [[ -z ${_source_url} ]]; then + dependencies 'install' 'jq' || exit 13 + _source_url=$(cat ${_meta_url##*/} | jq -r .download_url_cdn) + fi + + if (( $(pgrep curl | wc --lines | tr -d '[:space:]') > 0 )); then + pkill curl + fi + log "Retrieving Nutanix ${_ncli_softwaretype} bits..." 
+ download "${_source_url}" + + _checksum=$(md5sum ${_source_url##*/} | awk '{print $1}') + if [[ $(cat ${_meta_url##*/} | jq -r .hex_md5) != "${_checksum}" ]]; then + + _error=2 + log "Error ${_error}: md5sum ${_checksum} doesn't match on: ${_source_url##*/} removing and exit!" + rm -f ${_source_url##*/} + exit ${_error} + else + log "Success: ${_ncli_softwaretype} bits downloaded and passed MD5 checksum!" + fi + + ncli software upload software-type=${_ncli_softwaretype} \ + file-path="$(pwd)/${_source_url##*/}" \ + meta-file-path="$(pwd)/${_meta_url##*/}" + + if (( $? == 0 )) ; then + log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..." + rm -f ${_source_url##*/} ${_meta_url##*/} + else + _error=3 + log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}." + exit ${_error} + fi +} + +function pe_determine() { + # ${1} REQUIRED: run on 'PE' or 'PC' + local _error + local _hold + + dependencies 'install' 'jq' + + # ncli @PE and @PC yeild different info! So PC uses nuclei. + case ${1} in + PE | pe ) + _hold=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true cluster info) + ;; + PC | Pc | pc ) + # WORKAROUND: Entities non-JSON outputs by nuclei on lines 1-2... + _hold=$(source /etc/profile.d/nutanix_env.sh \ + && export NUCLEI_SERVER='localhost' \ + && export NUCLEI_USERNAME="${PRISM_ADMIN}" \ + && export NUCLEI_PASSWORD="${PE_PASSWORD}" \ + && nuclei cluster.list format=json 2>/dev/null \ + | grep -v 'Entities :' \ + | jq \ + '.entities[].status | select(.state == "COMPLETE") | select(.resources.network.external_ip != null)' + ) + ;; + *) + log 'Error: invoke with PC or PE argument.' + ;; + esac + + #log "DEBUG: cluster info on ${1}. |${_hold}|" + + if [[ -z "${_hold}" ]]; then + _error=12 + log "Error ${_error}: couldn't resolve cluster info on ${1}. 
|${_hold}|" + args_required 'PE_HOST' + exit ${_error} + else + case ${1} in + PE | pe ) + CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name) + PE_HOST=$(echo ${_hold} | jq -r .data.clusterExternalIPAddress) + ;; + PC | Pc | pc ) + CLUSTER_NAME=$(echo ${_hold} | jq -r .name) + PE_HOST=$(echo ${_hold} | jq -r .resources.network.external_ip) + ;; + esac + + export CLUSTER_NAME PE_HOST + log "Success: Cluster name=${CLUSTER_NAME}, PE external IP=${PE_HOST}" + fi +} + +function prism_check { + # Argument ${1} = REQUIRED: PE or PC + # Argument ${2} = OPTIONAL: number of attempts + # Argument ${3} = OPTIONAL: number of seconds per cycle + + args_required 'ATTEMPTS PE_PASSWORD SLEEP' + + local _attempts=${ATTEMPTS} + local _error=77 + local _host + local _loop=0 + local _password="${PE_PASSWORD}" + local _pw_init='Nutanix/4u' + local _sleep=${SLEEP} + local _test=0 + + #shellcheck disable=2153 + if [[ ${1} == 'PC' ]]; then + _host=${PC_HOST} + else + _host=${PE_HOST} + fi + if [[ ! -z ${2} ]]; then + _attempts=${2} + fi + + while true ; do + (( _loop++ )) + _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${_password} \ + -X POST --data '{ "kind": "cluster" }' \ + https://${_host}:9440/api/nutanix/v3/clusters/list \ + | tr -d \") # wonderful addition of "" around HTTP status code by cURL + + if [[ ! -z ${3} ]]; then + _sleep=${3} + fi + + if (( ${_test} == 401 )); then + log "Warning: unauthorized ${1} user or password on ${_host}." + + if [[ ${1} == 'PC' && ${_password} != "${_pw_init}" ]]; then + _password=${_pw_init} + log "Warning @${1}: Fallback on ${_host}: try initial password next cycle..." + #_sleep=0 #break + elif [[ ${1} == 'PC' && ${_password} == "${_pw_init}" && ${PC_VERSION} == "${PC_DEV_VERSION}" ]]; then + _password=${PE_PASSWORD} + log "Warning @${1}-dev: Fallback on ${_host}: try PE cluster password next cycle..." + #_sleep=0 #break + fi + + fi + + if (( ${_test} == 200 )); then + log "@${1}: successful." 
+ return 0 + elif (( ${_loop} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." + sleep ${_sleep} + fi + done +} + +function remote_exec() { +# Argument ${1} = REQUIRED: ssh or scp +# Argument ${2} = REQUIRED: PE, PC, or AUTH_SERVER +# Argument ${3} = REQUIRED: command configuration +# Argument ${4} = OPTIONAL: populated with anything = allowed to fail + + local _account='nutanix' + local _attempts=3 + local _error=99 + local _host + local _loop=0 + local _password="${PE_PASSWORD}" + local _pw_init="${NTNX_INIT_PASSWORD}" + local _sleep=${SLEEP} + local _test=0 + + args_required 'SSH_OPTS' + + # shellcheck disable=SC2153 + case ${2} in + 'PE' ) + _host=${PE_HOST} + ;; + 'PC' ) + _host=${PC_HOST} + _password=${_pw_init} + ;; + 'AUTH_SERVER' ) + _account='root' + _host=${AUTH_HOST} + _password=${_pw_init} + _sleep=7 + ;; + esac + + if [[ -z ${3} ]]; then + log 'Error ${_error}: missing third argument.' + exit ${_error} + fi + + if [[ ! -z ${4} ]]; then + _attempts=1 + _sleep=0 + fi + + while true ; do + (( _loop++ )) + case "${1}" in + 'SSH' | 'ssh') + #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform ${_account}@${_host} ${3}..."; fi + SSHPASS="${_password}" sshpass -e ssh -x ${SSH_OPTS} ${_account}@${_host} "${3}" + _test=$? + ;; + 'SCP' | 'scp') + #DEBUG=1; if [[ ${DEBUG} ]]; then log "_test will perform scp ${3} ${_account}@${_host}:"; fi + SSHPASS="${_password}" sshpass -e scp ${SSH_OPTS} ${3} ${_account}@${_host}: + _test=$? + ;; + *) + log "Error ${_error}: improper first argument, should be ssh or scp." 
+ exit ${_error} + ;; + esac + + if (( ${_test} > 0 )) && [[ -z ${4} ]]; then + _error=22 + log "Error ${_error}: pwd=$(pwd), _test=${_test}, _host=${_host}" + exit ${_error} + fi + + if (( ${_test} == 0 )); then + if [[ ${DEBUG} ]]; then log "${3} executed properly."; fi + return 0 + elif (( ${_loop} == ${_attempts} )); then + if [[ -z ${4} ]]; then + _error=11 + log "Error ${_error}: giving up after ${_loop} tries." + exit ${_error} + else + log "Optional: giving up." + break + fi + else + log "${_loop}/${_attempts}: _test=$?|${_test}| SLEEP ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function repo_source() { + # https://stackoverflow.com/questions/1063347/passing-arrays-as-parameters-in-bash#4017175 + local _candidates=("${!1}") # REQUIRED + local _package="${2}" # OPTIONAL + local _error=29 + local _http_code + local _index=0 + local _suffix + local _url + + if (( ${#_candidates[@]} == 0 )); then + log "Error ${_error}: Missing array!" + exit ${_error} + # else + # log "DEBUG: _candidates count is ${#_candidates[@]}" + fi + + if [[ -z ${_package} ]]; then + _suffix=${_candidates[0]##*/} + if (( $(echo "${_suffix}" | grep . | wc --lines) > 0)); then + log "Convenience: omitted package argument, added package=${_package}" + _package="${_suffix}" + fi + fi + # Prepend your local HTTP cache... + _candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" ) + + while (( ${_index} < ${#_candidates[@]} )) + do + unset SOURCE_URL + + # log "DEBUG: ${_index} ${_candidates[${_index}]}, OPTIONAL: _package=${_package}" + _url=${_candidates[${_index}]} + + if [[ -z ${_package} ]]; then + if (( $(echo "${_url}" | grep '/$' | wc --lines) == 0 )); then + log "error ${_error}: ${_url} doesn't end in trailing slash, please correct." 
+ exit ${_error} + fi + elif (( $(echo "${_url}" | grep '/$' | wc --lines) == 1 )); then + _url+="${_package}" + fi + + if (( $(echo "${_url}" | grep '^nfs' | wc --lines) == 1 )); then + log "warning: TODO: cURL can't test nfs URLs...assuming a pass!" + export SOURCE_URL="${_url}" + break + fi + + _http_code=$(curl ${CURL_OPTS} --max-time 5 --write-out '%{http_code}' --head ${_url} | tail -n1) + + if [[ (( ${_http_code} == 200 )) || (( ${_http_code} == 302 )) ]]; then + export SOURCE_URL="${_url}" + log "Found, HTTP:${_http_code} = ${SOURCE_URL}" + break + fi + log " Lost, HTTP:${_http_code} = ${_url}" + ((_index++)) + done + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Error ${_error}: didn't find any sources, last try was ${_url} with HTTP ${_http_code}." + exit ${_error} + fi +} + +function run_once() { + # TODO: PC dependent + if [[ ! -z ${PC_LAUNCH} ]] && (( $(cat ${HOME}/${PC_LAUNCH%%.sh}.log | wc ${WC_ARG}) > 20 )); then + finish + _error=2 + log "Warning ${_error}: ${PC_LAUNCH} already ran, exit!" + exit ${_error} + fi +} + +function ssh_pubkey() { + local _dir + local _directories=(\ + "${HOME}" \ + "${HOME}/ssh_keys" \ + "${HOME}/cache" \ + ) + local _name + local _test + + args_required 'EMAIL SSH_PUBKEY' + + _name=${EMAIL//\./_DOT_} + _name=${_name/@/_AT_} + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ncli cluster list-public-keys name=${_name}) + + if (( $(echo ${_test} | grep -i "Failed" | wc ${WC_ARG}) > 0 )); then + for _dir in "${_directories[@]}"; do + if [[ -e ${_dir}/${SSH_PUBKEY##*/} ]]; then + log "Note that a period and other symbols aren't allowed to be a key name." + + log "Locally adding ${_dir}/${SSH_PUBKEY##*/} under ${_name} label..." 
+ ncli cluster add-public-key name=${_name} file-path=${_dir}/${SSH_PUBKEY##*/} || true + + break + fi + done + else + log "IDEMPOTENCY: found pubkey ${_name}" + fi +} diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh new file mode 100644 index 0000000..86f6bf9 --- /dev/null +++ b/scripts/we-ts2019.sh @@ -0,0 +1,176 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. we-lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export PC_DEV_VERSION='5.10.2' + export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + export FILES_VERSION='3.2.0.1' + export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.229" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && 
authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export QCOW2_REPOS=(\ + 'http://10.42.8.50/images/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + xtract-vm-2.0.3.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_403.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + VeeamBR_9.5.4.2615.Update4.iso \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + # shellcheck disable=2206 + _pc_version=(${PC_VERSION//./ }) + + #commenting out to take images back to prevuous update + #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then + # log "PC<=5.8, Image imports..." + # ts_images + #fi + pc_project + flow_enable + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; + #IMAGES | images ) + # . 
lib.pc.sh + #ts_images + #;; +esac diff --git a/sync_upstream.sh b/sync_upstream.sh new file mode 100755 index 0000000..4dc256c --- /dev/null +++ b/sync_upstream.sh @@ -0,0 +1,3 @@ +git fetch upstream +git checkout master +git merge upstream/master diff --git a/test/LCM_Test/# GRABBING THE UUIDS b/test/LCM_Test/# GRABBING THE UUIDS new file mode 100644 index 0000000..034e00f --- /dev/null +++ b/test/LCM_Test/# GRABBING THE UUIDS @@ -0,0 +1,72 @@ +# GRABBING THE UUIDS +curl -X POST https://10.42.12.39:9440/api/nutanix/v3/groups -H 'Authorization: Basic YWRtaW46dGVjaFgyMDE5IQ==' -H 'Content-Type: application/json' -H 'Postman-Token: 113df335-5985-4145-bf7c-46aa97b2bef4' -H 'cache-control: no-cache' -d '{ + "entity_type": "lcm_available_version", + "grouping_attribute": "entity_uuid", + "group_member_count": 1000, + "group_member_attributes": [ + { + "attribute": "uuid" + }, + { + "attribute": "entity_uuid" + }, + { + "attribute": "entity_class" + }, + { + "attribute": "status" + }, + { + "attribute": "version" + }, + { + "attribute": "dependencies" + }, + { + "attribute": "order" + } + ] +}' --insecure | jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid")' | sort -u | head -3 + + + +# GRABBING THE Versions of the UUID + +curl -X POST https://10.42.12.39:9440/api/nutanix/v3/groups -H 'Authorization: Basic YWRtaW46dGVjaFgyMDE5IQ==' -H 'Content-Type: application/json' -H 'Postman-Token: 113df335-5985-4145-bf7c-46aa97b2bef4' -H 'cache-control: no-cache' -d '{ + "entity_type": "lcm_available_version", + "grouping_attribute": "entity_uuid", + "group_member_count": 1000, + "group_member_attributes": [ + { + "attribute": "uuid" + }, + { + "attribute": "entity_uuid" + }, + { + "attribute": "entity_class" + }, + { + "attribute": "status" + }, + { + "attribute": "version" + }, + { + "attribute": "dependencies" + }, + { + "attribute": "order" + } + ] +}' --insecure | jq 'if 
.group_results[].entity_results[].data[].values[].values[0]=="03a6e4a2-fa0e-4698-b0c0-e142820a2e94" then +> if .group_results[].entity_results[].data[].name=="version" then + +jq 'if .group_results[].entity_results[].data[].values[].values[0]=="03a6e4a2-fa0e-4698-b0c0-e142820a2e94" then +if .group_results[].entity_results[].data[].name=="version" then +.group_results[].entity_results[].data[].values[].values[0] end +end' + + +jq 'if .group_results[].entity_results[].data[].values[].values[0]=="03a6e4a2-fa0e-4698-b0c0-e142820a2e94" then if .group_results[].entity_results[].data[].name=="version" then .group_results[].entity_results[].data[].values[].values[0] end +end' \ No newline at end of file diff --git a/test/LCM_Test/Run the LCM inventory b/test/LCM_Test/Run the LCM inventory new file mode 100644 index 0000000..750588d --- /dev/null +++ b/test/LCM_Test/Run the LCM inventory @@ -0,0 +1,47 @@ +# Run the LCM inventory +{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"} + +# Run the upgrade of the LCM-2 steps; 1) Generate a plan; 2) execute the plan + +# Generate Plan API +https://10.42.9.39:9440/PrismGateway/services/rest/v1/genesis +{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"generate_plan\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"639b6f37-06c8-4fe0-aeca-5b2c89e61fe6\",\"2.6.0.2\"],[\"dd69fc72-df7f-4195-bb28-6f74eafe353a\",\"2.6.0.2\"]]]}}"} + +{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"generate_plan\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"06ac7598-d25a-4ba6-a6de-e728446747a1\",\"2.6.0.3\"],[\"5f99949b-7ae6-4095-b23f-f959054f6099\",\"2.6.0.3\"]]]}}"} + 
+{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method":\"generate_plan\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"293c7506-565e-40a4-93e0-75f1749581ef\",\"2.6.0.3\"],[\"83ee21f9-3468-4693-9e63-a83a657ec6a3\",\"2.6.0.3\"]]]}}"} + +# Run the upgrade API +https://10.42.9.39:9440/PrismGateway/services/rest/v1/genesis +{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_update\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",[[\"639b6f37-06c8-4fe0-aeca-5b2c89e61fe6\",\"2.6.0.2\"],[\"dd69fc72-df7f-4195-bb28-6f74eafe353a\",\"2.6.0.2\"]]]}}"} + + + +# Progress API is at for all LCM steps where the API calls return an ID; entities -> percentageCompleted: (JSON value) +https://10.42.9.39:9440/PrismGateway/services/rest/v1/progress_monitors?filterCriteria=parent_task_uuid%3D%3D817a37e2-9e57-4774-b889-57325485fd31 +817a37e2-9e57-4774-b889-57325485fd31 = UUID of the task that has been returned by the Execute plan +%3D%3D means '==' + +# Set LCM auto update +{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"configure\",\"args\":[\"http://download.nutanix.com/lcm/2.0\",null,null,true]}}"} + +# Get the UUIDs and versions of Calm and PC. +https://10.42.41.39:9440/api/nutanix/v3/groups +{"entity_type":"lcm_available_version","group_member_attributes":[{"attribute":"uuid"},{"attribute":"entity_uuid"},{"attribute":"entity_class"},{"attribute":"status"},{"attribute":"version"},{"attribute":"dependencies"},{"attribute":"order"}],"query_name":"prism:BaseGroupModel"} +This will deliver a massive nested JSON file.... + + +Get another look at the data in the cluster on existing versions in the PC. 
+{"entity_type":"lcm_entity","group_member_count":1000,"group_member_attributes":[{"attribute":"id"},{"attribute":"uuid"},{"attribute":"entity_model"},{"attribute":"version"},{"attribute":"location_id"},{"attribute":"entity_class"},{"attribute":"description"},{"attribute":"last_updated_time_usecs"},{"attribute":"request_version"},{"attribute":"_master_cluster_uuid_"}],"query_name":"prism:LCMQueryModel","filter_criteria":"entity_class==PC;_master_cluster_uuid_==[no_val]"} + + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"${_first_uuid}\\\",\\\"${_first_version}\\\"],[\\\"${_sec_uuid}\\\",\\\"${_sec_version}\\\"]]]}}\"}" ${_url_lcm}) + + _task_id=`curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\k_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"perform_update\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"${_first_uuid}\\\",\\\"${_first_version}\\\"],[\\\"${_sec_uuid}\\\",\\\"${_sec_version}\\\"]]]}}\"}" ${_url_lcm}` + +Checking if Calm is enabled via: + +https://10.42.41.39:9440/api/nutanix/v3/services/nucalm/status +Response is: +{ + "service_enablement_status": "ENABLED" +} \ No newline at end of file diff --git a/test/LCM_Test/lcm_ops.out b/test/LCM_Test/lcm_ops.out new file mode 100644 index 0000000..efa7e34 --- /dev/null +++ b/test/LCM_Test/lcm_ops.out @@ -0,0 +1,3728 @@ +2019-03-27 08:36:21 INFO zookeeper_session.py:113 lcm_ops_by_pc is attempting to connect to Zookeeper +2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory NVMUtils +2019-03-27 08:36:21 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached 
config_proto +2019-03-27 08:36:21 INFO nvm_utils.py:55 IS PC VM Call: True +2019-03-27 08:36:21 INFO nvm_utils.py:45 Retrieving PC Utilities. +2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory StagingUtils +2019-03-27 08:36:21 INFO zookeeper_session.py:113 lcm_ops_by_pc is attempting to connect to Zookeeper +2019-03-27 08:36:21 INFO operations.py:222 Found operation kDownloadOp +2019-03-27 08:36:21 INFO operations.py:222 Found operation kInventoryOp +2019-03-27 08:36:21 INFO ergon_utils.py:308 Root task uuid 3c1b3233-2377-4209-8cc1-421c2cb8c228 +2019-03-27 08:36:21 INFO zookeeper_session.py:113 lcm_ops_by_pc is attempting to connect to Zookeeper +2019-03-27 08:36:21 INFO staging_utils.py:33 Retrieving the Catalog staging utils. +2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory FoundationClient +2019-03-27 08:36:21 INFO foundation_rest_client.py:134 Getting foundation version +2019-03-27 08:36:21 INFO foundation_rest_client.py:95 Making GET request to http://localhost:8000/foundation/version +2019-03-27 08:36:21 ERROR foundation_rest_client.py:140 Failed to get foundation version. The ret code is and the response returned is +2019-03-27 08:36:21 INFO foundation_client.py:114 Retrieving the Foundation Genesis Client. +2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory PCUtils +2019-03-27 08:36:21 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto +2019-03-27 08:36:21 INFO nvm_utils.py:55 IS PC VM Call: True +2019-03-27 08:36:21 INFO nvm_utils.py:45 Retrieving PC Utilities. +2019-03-27 08:36:21 INFO base_factory.py:52 Resetting factory Downloader +2019-03-27 08:36:21 INFO downloader.py:23 Retrieving the Catalog Downloader. 
+2019-03-27 08:36:21 INFO lcm_ops_by_pc:399 Waiting for leadership change event +2019-03-27 08:36:21 INFO lcm_ops_by_pc:127 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000 +2019-03-27 08:36:21 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC. +2019-03-27 08:36:21 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39] +2019-03-27 08:36:21 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 0 [None] +2019-03-27 08:36:21 INFO lcm_actions_helper.py:275 Using ZK WAL to store actions list +2019-03-27 08:36:21 INFO actions.py:584 actionsType: 0 +2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: 9d565964-0c6d-4247-bde5-903e76a31fd1 +2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "release.karbon.update" +status { + state: 1000 + description: "Created LCM task WAL for an inventory sub-task" +} +env_list: "pc" + +2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740 +2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "nutanix.pc.update" +status { + state: 1000 + description: "Created LCM task WAL for an inventory sub-task" +} +env_list: "pc" + +2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8 +2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "release.epsilon.update" +status { + state: 1000 + description: "Created LCM task WAL for an inventory sub-task" +} +env_list: "pc" + +2019-03-27 08:36:21 INFO actions.py:588 Getting flags for task: b462c306-b2a6-48dd-8ecc-8237e3006d14 +2019-03-27 08:36:21 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: 
"release.calm.update" +status { + state: 1000 + description: "Created LCM task WAL for an inventory sub-task" +} +env_list: "pc" + +2019-03-27 08:36:21 ERROR lcm_genesis.py:438 Failed to get host type +2019-03-27 08:36:21 INFO actions.py:618 De-duplicated flag list: [] +2019-03-27 08:36:21 INFO lcm_actions_helper.py:93 action_list: [] +2019-03-27 08:36:21 INFO lcm_actions_helper.py:359 No actions found. +2019-03-27 08:36:21 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC +2019-03-27 08:36:21 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 30 [30] +2019-03-27 08:36:21 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39] +2019-03-27 08:36:21 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 30 [None] +2019-03-27 08:36:21 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39] +2019-03-27 08:36:21 INFO catalog_staging_utils.py:105 Staging module release.karbon.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:36:21 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:36:32 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1', '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8', '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655'] +2019-03-27 08:36:32 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:36:32 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:36:33 INFO catalog_staging_utils.py:447 
Extracting /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 +2019-03-27 08:36:33 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 +2019-03-27 08:36:34 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 +2019-03-27 08:36:34 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 +2019-03-27 08:36:35 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:36:35 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:36:36 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 +2019-03-27 08:36:36 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 +2019-03-27 08:36:37 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 +2019-03-27 08:36:37 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 +2019-03-27 08:36:38 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:36:38 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:36:38 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation +2019-03-27 08:36:38 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 40 [40] +2019-03-27 08:36:38 INFO lcm_ops_by_pc:160 State [1002], Handler [_perform_operation_by_pc], PC - [10.42.12.39] +2019-03-27 08:36:38 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 40 [None] +2019-03-27 08:36:38 INFO lcm_ops_by_pc:250 
Performing 101 operation +2019-03-27 08:36:39 INFO lcm_ops_by_pc:295 Output: DEBUG: Currently installed version of karbon-core is 0.8.2 +DEBUG: Currently installed version of karbon-ui is 0.8.2 +##START## +PC,None,Karbon,Karbon container service,0.8.2,1 +##END## + +2019-03-27 08:36:39 INFO lcm_ops_by_pc:302 Inventory result: ['PC', 'None', 'Karbon', 'Karbon container service', '0.8.2', '1'] +2019-03-27 08:36:39 INFO cpdb_utils.py:1143 Creating new entity: c9ee6d12-7141-453f-a345-115e392e27e1 +2019-03-27 08:36:39 INFO cpdb_utils.py:1161 Family: , Class: PC, Model: Karbon +2019-03-27 08:36:39 INFO cpdb_utils.py:888 No available version tables exists to delete +2019-03-27 08:36:39 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC +2019-03-27 08:36:39 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 60 [60] +2019-03-27 08:36:39 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39] +2019-03-27 08:36:39 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 60 [None] +2019-03-27 08:36:39 INFO lcm_ops_by_pc:322 Performing clean up post operation +2019-03-27 08:36:40 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation +2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 70 [70] +2019-03-27 08:36:40 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39] +2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 2, Percentage: 70 [None] +2019-03-27 08:36:40 INFO lcm_ops_by_pc:333 Deferring post-actions till the last task of the batch. 
+2019-03-27 08:36:40 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC +2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 9d565964-0c6d-4247-bde5-903e76a31fd1, status: 3, Percentage: 100 [100] +2019-03-27 08:36:40 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful +2019-03-27 08:36:40 INFO lcm_ops_by_pc:127 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000 +2019-03-27 08:36:40 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC. +2019-03-27 08:36:40 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39] +2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 0 [None] +2019-03-27 08:36:40 INFO lcm_ops_by_pc:192 Pre-actions already executed. Skipping. +2019-03-27 08:36:40 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC +2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 30 [30] +2019-03-27 08:36:40 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39] +2019-03-27 08:36:40 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 30 [None] +2019-03-27 08:36:40 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39] +2019-03-27 08:36:40 INFO catalog_staging_utils.py:105 Staging module nutanix.pc.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:36:40 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:36:48 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/216663c2-9dde-42f3-be4d-6b9ee86476d9', '/home/nutanix/tmp/lcm_staging/3203a9de-6158-4830-9dee-19eb1c13e250', '/home/nutanix/tmp/lcm_staging/3c5ee449-6b6e-4640-890f-c546f794ca6a', 
'/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b'] +2019-03-27 08:36:48 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/216663c2-9dde-42f3-be4d-6b9ee86476d9 +2019-03-27 08:36:48 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/216663c2-9dde-42f3-be4d-6b9ee86476d9 +2019-03-27 08:36:49 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/3203a9de-6158-4830-9dee-19eb1c13e250 +2019-03-27 08:36:49 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/3203a9de-6158-4830-9dee-19eb1c13e250 +2019-03-27 08:36:50 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/3c5ee449-6b6e-4640-890f-c546f794ca6a +2019-03-27 08:36:50 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/3c5ee449-6b6e-4640-890f-c546f794ca6a +2019-03-27 08:36:51 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:36:51 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:36:52 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:36:52 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:36:52 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation +2019-03-27 08:36:52 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 40 [40] +2019-03-27 08:36:52 INFO lcm_ops_by_pc:160 State [1002], Handler [_perform_operation_by_pc], PC - [10.42.12.39] +2019-03-27 08:36:52 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 40 [None] +2019-03-27 08:36:52 INFO lcm_ops_by_pc:250 Performing 101 operation +2019-03-27 08:36:55 INFO lcm_ops_by_pc:295 Output: ##START## +PC CORE CLUSTER,None,PC,PC version,5.10.2,1 +##END## + 
+2019-03-27 08:36:55 INFO lcm_ops_by_pc:302 Inventory result: ['PC CORE CLUSTER', 'None', 'PC', 'PC version', '5.10.2', '1'] +2019-03-27 08:36:55 INFO cpdb_utils.py:1143 Creating new entity: d6edff2c-f59f-4754-978b-06b6237796b4 +2019-03-27 08:36:55 INFO cpdb_utils.py:1161 Family: , Class: PC CORE CLUSTER, Model: PC +2019-03-27 08:36:55 INFO cpdb_utils.py:888 No available version tables exists to delete +2019-03-27 08:36:55 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC +2019-03-27 08:36:55 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 60 [60] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39] +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 60 [None] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:322 Performing clean up post operation +2019-03-27 08:36:56 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 70 [70] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39] +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 2, Percentage: 70 [None] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:333 Deferring post-actions till the last task of the batch. 
+2019-03-27 08:36:56 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740, status: 3, Percentage: 100 [100] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful +2019-03-27 08:36:56 INFO lcm_ops_by_pc:127 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000 +2019-03-27 08:36:56 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC. +2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39] +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 0 [None] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:192 Pre-actions already executed. Skipping. +2019-03-27 08:36:56 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 30 [30] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39] +2019-03-27 08:36:56 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 30 [None] +2019-03-27 08:36:56 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39] +2019-03-27 08:36:56 INFO catalog_staging_utils.py:105 Staging module release.epsilon.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:36:56 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:37:06 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771', '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8', 
'/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'] +2019-03-27 08:37:06 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:37:06 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:37:07 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 +2019-03-27 08:37:07 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 +2019-03-27 08:37:08 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 +2019-03-27 08:37:08 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 +2019-03-27 08:37:09 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:37:09 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:37:10 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 +2019-03-27 08:37:10 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 +2019-03-27 08:37:11 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:37:11 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:37:11 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation +2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 40 [40] +2019-03-27 08:37:11 INFO lcm_ops_by_pc:160 State [1002], Handler 
[_perform_operation_by_pc], PC - [10.42.12.39] +2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 40 [None] +2019-03-27 08:37:11 INFO lcm_ops_by_pc:250 Performing 101 operation +2019-03-27 08:37:11 INFO lcm_ops_by_pc:295 Output: ##START## +PC,None,Epsilon,PC Container based orchestration engine service,2.5.1,1 +##END## + +2019-03-27 08:37:11 INFO lcm_ops_by_pc:302 Inventory result: ['PC', 'None', 'Epsilon', 'PC Container based orchestration engine service', '2.5.1', '1'] +2019-03-27 08:37:11 INFO cpdb_utils.py:1143 Creating new entity: 758942f1-d42d-4d49-99fc-b73e2f2dca30 +2019-03-27 08:37:11 INFO cpdb_utils.py:1161 Family: , Class: PC, Model: Epsilon +2019-03-27 08:37:11 INFO cpdb_utils.py:888 No available version tables exists to delete +2019-03-27 08:37:11 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC +2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 60 [60] +2019-03-27 08:37:11 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39] +2019-03-27 08:37:11 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 60 [None] +2019-03-27 08:37:11 INFO lcm_ops_by_pc:322 Performing clean up post operation +2019-03-27 08:37:12 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation +2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 70 [70] +2019-03-27 08:37:12 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39] +2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 2, Percentage: 70 [None] +2019-03-27 08:37:12 INFO lcm_ops_by_pc:333 Deferring post-actions till the last task of the batch. 
+2019-03-27 08:37:12 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC +2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8, status: 3, Percentage: 100 [100] +2019-03-27 08:37:12 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful +2019-03-27 08:37:12 INFO lcm_ops_by_pc:127 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, Operation: 101, Environment: 304, PC: 10.42.12.39, State: 1000 +2019-03-27 08:37:12 INFO lcm_ops_by_pc:149 Starting to run state machine for LCM ops on PC. +2019-03-27 08:37:12 INFO lcm_ops_by_pc:160 State [1000], Handler [_execute_pre_actions], PC - [10.42.12.39] +2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 0 [None] +2019-03-27 08:37:12 INFO lcm_ops_by_pc:192 Pre-actions already executed. Skipping. +2019-03-27 08:37:12 INFO ergon_utils.py:189 Updating task with state 1000, message Finished to execute pre-actions on PC +2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 30 [30] +2019-03-27 08:37:12 INFO lcm_ops_by_pc:160 State [1001], Handler [_prepare_pc_for_operation], PC - [10.42.12.39] +2019-03-27 08:37:12 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 30 [None] +2019-03-27 08:37:12 INFO lcm_ops_by_pc:205 Preparing PC of [10.42.12.39] +2019-03-27 08:37:12 INFO catalog_staging_utils.py:105 Staging module release.calm.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:37:12 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:37:21 INFO catalog_staging_utils.py:444 tar files: ['/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac', '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867', '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc', 
'/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'] +2019-03-27 08:37:21 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac +2019-03-27 08:37:22 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac +2019-03-27 08:37:22 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 +2019-03-27 08:37:23 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 +2019-03-27 08:37:23 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc +2019-03-27 08:37:24 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc +2019-03-27 08:37:24 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:37:25 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:37:25 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:37:26 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:37:26 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:37:26 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:37:26 INFO ergon_utils.py:189 Updating task with state 1001, message Finished to prepare PC for operation +2019-03-27 08:37:26 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 40 [40] +2019-03-27 08:37:26 INFO lcm_ops_by_pc:160 State [1002], Handler 
[_perform_operation_by_pc], PC - [10.42.12.39] +2019-03-27 08:37:26 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 40 [None] +2019-03-27 08:37:26 INFO lcm_ops_by_pc:250 Performing 101 operation +2019-03-27 08:37:27 INFO lcm_ops_by_pc:295 Output: ##START## +PC,None,Calm,PC Container based cloud application lifecycle management service,2.4.0,1 +##END## + +2019-03-27 08:37:27 INFO lcm_ops_by_pc:302 Inventory result: ['PC', 'None', 'Calm', 'PC Container based cloud application lifecycle management service', '2.4.0', '1'] +2019-03-27 08:37:27 INFO cpdb_utils.py:1143 Creating new entity: 03a6e4a2-fa0e-4698-b0c0-e142820a2e94 +2019-03-27 08:37:27 INFO cpdb_utils.py:1161 Family: , Class: PC, Model: Calm +2019-03-27 08:37:27 INFO cpdb_utils.py:888 No available version tables exists to delete +2019-03-27 08:37:27 INFO ergon_utils.py:189 Updating task with state 1002, message Finished to perform operation by PC +2019-03-27 08:37:27 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 60 [60] +2019-03-27 08:37:27 INFO lcm_ops_by_pc:160 State [1003], Handler [_post_ops_pc_cleanup], PC - [10.42.12.39] +2019-03-27 08:37:27 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 60 [None] +2019-03-27 08:37:27 INFO lcm_ops_by_pc:322 Performing clean up post operation +2019-03-27 08:37:28 INFO ergon_utils.py:189 Updating task with state 1003, message Finished to perform cleanup on PC post operation +2019-03-27 08:37:28 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 70 [70] +2019-03-27 08:37:28 INFO lcm_ops_by_pc:160 State [1004], Handler [_execute_post_actions], PC - [10.42.12.39] +2019-03-27 08:37:28 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 2, Percentage: 70 [None] +2019-03-27 08:37:28 INFO lcm_actions_helper.py:275 Using ZK WAL to store actions list +2019-03-27 08:37:28 INFO actions.py:584 
actionsType: 1 +2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: 9d565964-0c6d-4247-bde5-903e76a31fd1 +2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "release.karbon.update" +status { + state: 1004 + description: "Finished to execute post-actions on PC" +} +env_list: "pc" + +2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: 577f1e4d-b0ce-4743-82ff-13cee2a1d740 +2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "nutanix.pc.update" +status { + state: 1004 + description: "Finished to execute post-actions on PC" +} +env_list: "pc" + +2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: e1f1059b-d996-4a4b-91b0-1bf1899b67e8 +2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "release.epsilon.update" +status { + state: 1004 + description: "Finished to execute post-actions on PC" +} +env_list: "pc" + +2019-03-27 08:37:28 INFO actions.py:588 Getting flags for task: b462c306-b2a6-48dd-8ecc-8237e3006d14 +2019-03-27 08:37:28 INFO actions.py:592 task_info: location_uuid: "a269d4be-0de3-48fe-96a2-3e3b4c48dca5" +reference_name: "release.calm.update" +status { + state: 1003 + description: "Finished to perform cleanup on PC post operation" + operation_done: true +} +env_list: "pc" + +2019-03-27 08:37:28 ERROR lcm_genesis.py:438 Failed to get host type +2019-03-27 08:37:28 INFO actions.py:618 De-duplicated flag list: [] +2019-03-27 08:37:28 INFO lcm_actions_helper.py:93 action_list: [] +2019-03-27 08:37:28 INFO lcm_actions_helper.py:359 No actions found. 
+2019-03-27 08:37:28 INFO ergon_utils.py:189 Updating task with state 1004, message Finished to execute post-actions on PC +2019-03-27 08:37:28 INFO lcm_ergon.py:322 Task: b462c306-b2a6-48dd-8ecc-8237e3006d14, status: 3, Percentage: 100 [100] +2019-03-27 08:37:28 INFO lcm_ops_by_pc:141 Operation on 10.42.12.39 is successful +2019-03-27 08:37:28 INFO lcm_ops_by_pc:381 LCM operation 101 for 304 is successful +2019-03-27 08:37:51 DEBUG zookeeper_session.py:90 Using host_port_list: zk1:9876 +2019-03-27 08:37:51 INFO zookeeper_session.py:113 lcm_ops_for_rim is attempting to connect to Zookeeper +2019-03-27 08:37:51 INFO lcm_ops_for_rim:143 Setting Factories for RIM operations! +2019-03-27 08:37:51 INFO base_factory.py:63 Setting factory NVMUtils implementation +2019-03-27 08:37:51 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto +2019-03-27 08:37:51 INFO nvm_utils.py:55 IS PC VM Call: True +2019-03-27 08:37:51 INFO nvm_utils.py:45 Retrieving PC Utilities. +2019-03-27 08:37:51 INFO base_factory.py:63 Setting factory PCUtils implementation +2019-03-27 08:37:51 INFO genesis_utils.py:2113 Trying to fetch cluster_functions from cached config_proto +2019-03-27 08:37:51 INFO nvm_utils.py:55 IS PC VM Call: True +2019-03-27 08:37:51 INFO nvm_utils.py:45 Retrieving PC Utilities. 
+2019-03-27 08:37:51 INFO lcm_ops_for_rim:105 Fetching available versions from repository +2019-03-27 08:37:51 INFO repository_image_utils.py:345 Performing Inventory for Repository Image +2019-03-27 08:37:51 DEBUG repository_image_utils.py:299 Fetching entities currently inventoried +2019-03-27 08:37:51 INFO lcm_ops_for_rim:173 Waiting for leadership change event +2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "c9ee6d12-7141-453f-a345-115e392e27e1" +location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66" +entity_class: "PC" +entity_model: "Karbon" +version: "0.8.2" +hw_family: "" +description: "Karbon container service" +count: 1 +last_detected_time_usecs: 1553700999578310 + to entity set +2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "d6edff2c-f59f-4754-978b-06b6237796b4" +location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66" +entity_class: "PC CORE CLUSTER" +entity_model: "PC" +version: "5.10.2" +hw_family: "" +description: "PC version" +count: 1 +last_detected_time_usecs: 1553701015964970 + to entity set +2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30" +location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66" +entity_class: "PC" +entity_model: "Epsilon" +version: "2.5.1" +hw_family: "" +description: "PC Container based orchestration engine service" +count: 1 +last_detected_time_usecs: 1553701031786357 + to entity set +2019-03-27 08:37:51 DEBUG repository_image_utils.py:310 Adding uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +location_id: "pc:e636212f-3d79-4a4b-8e78-afa94c05cb66" +entity_class: "PC" +entity_model: "Calm" +version: "2.4.0" +hw_family: "" +description: "PC Container based cloud application lifecycle management service" +count: 1 +last_detected_time_usecs: 1553701047595510 + to entity set +2019-03-27 08:37:51 INFO repository_image_utils.py:316 Entity Dict created is as follows: defaultdict(, {('PC', 'CALM'): 
[u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66'], ('PC', 'EPSILON'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66'], ('PC', 'KARBON'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66'], ('PC CORE CLUSTER', 'PC'): [u'pc:e636212f-3d79-4a4b-8e78-afa94c05cb66']}) +2019-03-27 08:37:51 DEBUG repository_image_utils.py:364 Fetching modules of type: update +2019-03-27 08:37:51 DEBUG repository_image_utils.py:374 Looping over all the update modules to perform inventory only on those modules which support repository image +2019-03-27 08:37:51 DEBUG repository_image_utils.py:387 Managed Entity with entity class: PC and entity model: Karbon supports repository image module +2019-03-27 08:37:51 INFO repository_image_utils.py:1086 Retrieving module with reference name release.karbon.repository_image +2019-03-27 08:37:51 INFO repository_image_utils.py:779 Getting tag list for: release.karbon.repository_image +2019-03-27 08:37:51 DEBUG repository_image_utils.py:783 Tag List for release.karbon.repository_image is [] +2019-03-27 08:37:51 INFO repository_image_utils.py:756 Getting flag list for: release.karbon.repository_image +2019-03-27 08:37:51 DEBUG repository_image_utils.py:760 Flag List for release.karbon.repository_image is [] +2019-03-27 08:37:51 DEBUG repository_image_utils.py:893 Building repository_image_base_url +2019-03-27 08:37:51 DEBUG zookeeper_session.py:90 Using host_port_list: zk1:9876 +2019-03-27 08:37:51 INFO zookeeper_session.py:113 lcm_ops_for_rim is attempting to connect to Zookeeper +2019-03-27 08:37:51 DEBUG configuration.py:158 Found config version: "2.1.5579" +url: "http://download.nutanix.com/lcm/2.0" +auto_update_enabled: false +lcm_standalone_ui_enabled: false +lcm_pc_enabled: false +deprecated_software_entities: "Firmware" + +2019-03-27 08:37:51 INFO configuration.py:137 Read config: {'url': u'http://download.nutanix.com/lcm/2.0', 'deprecated_software_entities': [u'Firmware'], 'lcm_standalone_ui_enabled': False, 'lcm_pc_enabled': False, 'version': u'2.1.5579', 
'auto_update_enabled': False} +2019-03-27 08:37:51 DEBUG repository_image_utils.py:896 URL present in config is: http://download.nutanix.com/lcm/2.0 +2019-03-27 08:37:51 INFO repository_image_utils.py:900 Repository Image Base URL built by framework: http://download.nutanix.com/lcm/2.0/builds/ +2019-03-27 08:37:51 INFO repository_image_utils.py:84 Repository Image Verification as a feature is disabled or not supported in the current AOS +2019-03-27 08:37:51 INFO repository_image_utils.py:831 Parameters created for inventory by repository image module:{'public_key_location': '/home/nutanix/cluster/config/lcm/lcm_public.pem', 'flag_list': [], 'base_url': u'http://download.nutanix.com/lcm/2.0/builds/', 'entity_model': u'Karbon', 'rim_verification_support': False, 'tag_list': [], 'proxy_env': {'JETTY_HOME': '/usr/local/nutanix/jetty', 'MY_SERVICE_NAME': 'genesis', 'ZOO_LOG_DIR': '/home/nutanix/data/logs', 'ZOO_START_LOG_FILE': '/home/nutanix/data/logs/zookeeper.log', 'CATALINA_OUT': '/home/nutanix/data/logs/catalina.out', 'LESSOPEN': '||/usr/bin/lesspipe.sh %s', 'LOGNAME': 'nutanix', 'USER': 'nutanix', 'HOME': '/home/nutanix', 'NUTANIX_BINARY_LOG_DIR': '/home/nutanix/data/binary_logs', 'PATH': '/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin', 'CASSANDRA_HOME': '/usr/local/nutanix/apache-cassandra', 'LANG': 
'en_US.UTF-8', 'CATALINA_BASE': '/home/nutanix/prism', 'SHELL': '/bin/bash', 'NUTANIX_START_LOG_FILE': '/home/nutanix/data/logs/startup.log', 'JDK_HOME': '/usr/lib/jvm/jre-1.8.0', 'SHLVL': '2', 'NUTANIX_BASE_DIR': '/usr/local/nutanix', 'HISTSIZE': '1000', 'GEVENT_RESOLVER': 'ares', 'ZOOKEEPER_HOST_PORT_LIST': 'zk1:9876', 'NUTANIX_LOG_DIR': '/home/nutanix/data/logs', 'MY_EXTERNAL_IP': '10.42.12.39', 'LIBVIRT_DEFAULT_URI': 'qemu+ssh://root@192.168.5.1/system?no_verify=1', 'JAVA_HOME': '/usr/lib/jvm/jre-1.8.0', 'CASSANDRA_INCLUDE': '/home/nutanix/config/cassandra/cassandra.in.sh', 'XDG_RUNTIME_DIR': '/run/user/1000', 'CORE_PATTERN': '|/home/nutanix/serviceability/bin/coredump.py %p %s %e', 'PERL_LWP_SSL_VERIFY_HOSTNAME': '0', 'SSH_KEY': '/home/nutanix/.ssh/id_rsa', 'TOMCAT_HOME': '/usr/local/nutanix/apache-tomcat', 'LC_ALL': 'en_US.UTF-8', 'XDG_SESSION_ID': 'c1', '_': '/home/nutanix/cluster/bin/genesis', 'ZOOCFGDIR': '/home/nutanix/config/zookeeper', 'GLOG_max_log_size': '100', 'CASSANDRA_MAX_HEAP_SIZE': '2560M', 'HOSTNAME': 'ntnx-10-42-12-39-a-pcvm', 'CASSANDRA_CONF': '/home/nutanix/config/cassandra', 'HISTCONTROL': 'ignoredups', 'PWD': '/home/nutanix', 'MAIL': '/var/spool/mail/nutanix'}} +2019-03-27 08:37:51 INFO repository_image_utils.py:421 Getting Version List for release.karbon.repository_image repository_image_module +2019-03-27 08:37:51 INFO catalog_staging_utils.py:105 Staging module release.karbon.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:37:51 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:37:51 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:51 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging +2019-03-27 08:37:52 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0 +2019-03-27 08:37:52 DEBUG ssh_client.py:188 Executing mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:52 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 mkdir -p /home/nutanix/tmp/lcm_staging +2019-03-27 08:37:52 DEBUG ssh_client.py:203 Executed mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0 +2019-03-27 08:37:52 DEBUG cpdb_utils.py:267 Module list is +2019-03-27 08:37:52 DEBUG cpdb_utils.py:820 Found modules: [{'location': u'c88e6e66-f152-49bc-adce-0b44827cdcaa', 'digest': u'd3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c'}, {'location': u'd7cfc33d-2f6d-408e-af8d-f67c9b8ffb67', 'digest': u'48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c'}, {'location': u'8512cf7d-c1a5-4da0-922c-7c6c1ec16859', 'digest': u'77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12'}, {'location': u'60dd8442-1902-4b0a-a16a-1d3248069c6c', 'digest': u'f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e'}, {'location': u'f80da7c4-eb51-4df8-95ca-c92cfbdf5afa', 'digest': u'd51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978'}, {'location': u'469d6ddf-2093-4585-b93c-bfad0aea61b7', 'digest': u'bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7'}] +2019-03-27 08:37:52 DEBUG lcm_catalog.py:197 Found catalog items: –GTîÚwM‘h²8bBe, ÌêŠaEP³Š 
ü'_½, ²÷QýóáCp¾wÀõí†ê, ýO +&.NSšqT»ó&Ò, Äp.ëL•”FP{Vÿp, Ò +&€(Lr·dù™CÒF +2019-03-27 08:37:52 DEBUG catalog_utils.py:262 Getting file from uuid: "\226GT\356\332wM\030\221h\235\2628bBe" +name: "release.karbon.update" +annotation: "d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c" +item_type: kLCM +version: 0 +opaque: "\n@d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "Wz6GM3E,\227\r\364\254\226\306\377\352" + source_list { + file_uuid: "\276\246C-\352_B\205\261\344>\245\230\236\344\350" + } +} +global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252" + version: 0 +} + +2019-03-27 08:37:52 DEBUG lcm_catalog.py:283 Found file uuids ['bea6432d-ea5f-4285-b1e4-3ea5989ee4e8'] +2019-03-27 08:37:52 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:37:53 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ'} +2019-03-27 08:37:53 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\226GT\356\332wM\030\221h\235\2628bBe" +name: "release.karbon.update" +annotation: "d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c" +item_type: kLCM +version: 0 +opaque: 
"\n@d3ccb11049624f9021065381859fbcb34f06ee25cb69ce8c32ac7c93879f841c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "Wz6GM3E,\227\r\364\254\226\306\377\352" + source_list { + file_uuid: "\276\246C-\352_B\205\261\344>\245\230\236\344\350" + } +} +global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\310\216nf\361RI\274\255\316\013D\202|\334\252" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ' +file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + 
writer.write(data)\" " +2019-03-27 08:37:53 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ' +file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:53 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request 
+import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ' +file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:54 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRVmD0hlgPSGVIlG6HaYrc1gxLaRol6aYJ8d8XunKAo58_2-_5DNKQbVCT9pB429EIuo5rSKBcEE0ijMU8xpmIq-cngctpJWhRzqNojlFVLyHQjqz_NRQmst16lQUJO38K-7hCT_VtK7ZOfpN13OqAUmdONrSdmI2jyXgqnMdS0TDcWvZMDpIP2O-yVG6L7LBZZ0V-SPdy9bLdvAaw7xT5o3LD3jbpe7qHzxEYy7pig0pqbK4-sW5YD9alse2Rw-Xk3NcPiEHnflobUuhOqRGotsIrDKRFkQfA0teQaBCObJ2_XQp1w3WtSLIOkVD9awrvEGqQleyfDpfLH8gogW4.D30qUQ.QAb5gPxBCkCQ-jSgYftfEFwJR1y1OV5e3Rc4Ou_Cep_ptYklRl0ltZzGZuQ28-9Vx8DUXwVO2luVBUMgBILyoQ' +file_name = '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:37:54 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275" +name: "nutanix.tools" +annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c" +item_type: kLCM +version: 0 +opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t" + source_list { + file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+" + } +} +global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" + version: 0 +} + +2019-03-27 08:37:54 DEBUG lcm_catalog.py:283 Found file uuids ['48310501-3d7a-4be1-a3ed-9d990e39222b'] +2019-03-27 08:37:54 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:37:54 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw'} +2019-03-27 08:37:54 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275" +name: "nutanix.tools" +annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c" +item_type: kLCM +version: 0 +opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t" + source_list { + file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+" + } +} +global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:54 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:54 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:55 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwR1AwrrDbEeKpUhAd0O0xSZxh2W0jRK0k0T4r8vsHIYRz9_tt_zCaQl16IhEyALrqcR9D0ryGCRzOv6aT4XadIkYrqYLQRiqkSjppM6TWaHFCcQaU8u_FiKE-VmtSyjhH04xn1cYyB124qdl1_kPHcmotTbo4ttL2bjh2T8KHzAg6ZhuHMcmDxk77DdlLlcV-W-WJXVbp9v5fJ5XbxE8NqpdvfKDXst8rd8Cx8jsI5NzRa1NNhefKJq2QzWpXVdw_FydrrWd4hF7787F1OYXusR6K7GCwxkRLWLgKPPIdEgNOx8uF2KdctKaZJsYiTUf5rGfwi1yFpenw7n8y9Ta4Ee.D30qUg.I7OxawJVC14sMxiCI3ISjultBu3pKZyQ9uk4E5rhNg200yr3Xu9_EhFHxmRFmh46ExyVK12vnV3FiMBhizyEdw' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:37:55 DEBUG catalog_utils.py:262 Getting file from uuid: "\262\367Q\375\363\341Cp\276w\005\300\365\355\206\352" +name: "release.karbon.update_tools" +annotation: "77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12" +item_type: kLCM +version: 0 +opaque: "\n@77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "j\310\212\237]\250C\246\225\212\375y\357\350s\225" + source_list { + file_uuid: "R\261\316|]\234Mn\267\343`\355og\'\241" + } +} +global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY" + version: 0 +} + +2019-03-27 08:37:55 DEBUG lcm_catalog.py:283 Found file uuids ['52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1'] +2019-03-27 08:37:55 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:37:56 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA'} +2019-03-27 08:37:56 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\262\367Q\375\363\341Cp\276w\005\300\365\355\206\352" +name: "release.karbon.update_tools" +annotation: "77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12" +item_type: kLCM +version: 0 +opaque: "\n@77253ffc5eeaa1c21d3af86398e549af2d1be18f23114b4e26a0308684159a12\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "j\310\212\237]\250C\246\225\212\375y\357\350s\225" + source_list { + file_uuid: "R\261\316|]\234Mn\267\343`\355og\'\241" + } +} +global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\205\022\317}\301\245M\240\222,|l\036\301hY" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA' +file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:56 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA' +file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:56 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA' +file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:57 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVClLpDUEPSGVIQLfDNEVuY4alNI2SdNOE-O8LrBzG0c-f7fd8AWnJtWjIBMiD62kEfc8KcpglGS1mqRJTNZmIWZbMRY1ZIqie17RIM6ImhUh7cuHHUpwod6tlGSXswznu4wYDqcdW7Lz8Iue5MxGl3p5dbHsxH0-S8VT4gLWmYbhzHJg85O-w35WF3FblcbMqq8Ox2Mvlert5ieC9Ux2elQf2uineij18jMA6Ng1b1NJge_OJqmUzWJfWdSeOl_PLvX5CLHr_3bmYwvRaj0B3Dd5gICOqQwQcfQ6JBuHEzofHpVi3rJQmySZGQv2nafyHUIus5f3pcL3-AoYYgUY.D30qVA.VG6ELvUBz4ffxiPXxr4eHT-XHRHuad77Irkkj4pX4hlTG1dQwn6r-IKCRD3Cy7KkR9zfIOWdHN5fbgDBrtLBXA' +file_name = '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:37:57 DEBUG catalog_utils.py:262 Getting file from uuid: "\375O\n\026&.NS\232qT\273\363\201&\322" +name: "release.karbon.repository_image" +annotation: "d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978" +item_type: kLCM +version: 0 +opaque: "\n@d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "(r\345\272)\221D\206\242\230\277\257q\271\355\034" + source_list { + file_uuid: "\231\304\200\302\364TO\213\276\215 \273\322\004Ls" + } +} +global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} 
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372" + version: 0 +} + +2019-03-27 08:37:57 DEBUG lcm_catalog.py:283 Found file uuids ['99c480c2-f454-4f8b-be8d-20bbd2044c73'] +2019-03-27 08:37:57 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:37:58 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ'} +2019-03-27 08:37:58 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\375O\n\026&.NS\232qT\273\363\201&\322" +name: "release.karbon.repository_image" +annotation: "d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978" +item_type: kLCM +version: 0 +opaque: "\n@d51b7d5a11341af55a65ed46ae19c4812e599ce3ee6b6778bcd613dea7297978\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "(r\345\272)\221D\206\242\230\277\257q\271\355\034" + source_list { + file_uuid: "\231\304\200\302\364TO\213\276\215 \273\322\004Ls" + } +} +global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\370\r\247\304\353QM\370\225\312\311,\373\337Z\372" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' 
warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ' +file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:58 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ' +file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:58 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ' +file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:59 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/99c480c2-f454-4f8b-be8d-20bbd2044c73' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwQVBLTrDbEekMqQWrodpilyWzMspWmUpJsmxH9fYOUwjn7-bL_nM0hDtkNN2kPq7UATGAZuIYVVsqA4xlismnopFkndiKe2ngtcJEkcRVEyixsItCPrfwyFiXy_WedBwsGfwj5u0FN734q9k19kHfc6oDSYkw1tJ5bTWTSdC-exVjQO95Y9k4P0HYp9nsldlR-2m7wqD1kh18-77UsAb52qfFTu2Os2e8sK-JiAsawbNqikxu7qE9uO9WhdGtsfOVxOz7f6ATHo3HdvQwo9KDUB1Td4hYG0qMoAWPocE43Cka3z90uh7rhtFUnWIRKqP03hP4Q6ZCVvT4fL5RcgX4D0.D30qVg.BlFlinKePCfToDbHZ7vQexdBJ-bQVhkjCXPvEHQ35bFEblctZthK1IwuDqXQylLiuxAfANEw9cZfLFxIlf2aDQ' +file_name = '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:37:59 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\304p\022.\353L\225\224FP{V\377p\035" +name: "release.karbon.precheck" +annotation: "bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7" +item_type: kLCM +version: 0 +opaque: "\n@bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "69\323\026\270\327N\200\273\365L8\214\031\325?" 
+ source_list { + file_uuid: "\333q\255\303\337\265Bg\215P\336\220@\377\346U" + } +} +global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267" + version: 0 +} + +2019-03-27 08:37:59 DEBUG lcm_catalog.py:283 Found file uuids ['db71adc3-dfb5-4267-8d50-de9040ffe655'] +2019-03-27 08:37:59 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:37:59 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ'} +2019-03-27 08:37:59 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\304p\022.\353L\225\224FP{V\377p\035" +name: "release.karbon.precheck" +annotation: "bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7" +item_type: kLCM +version: 0 +opaque: "\n@bdd769ece19451895bc79ac1425ba4b95c46c8ad5d3f59d3de3e84290cc46ab7\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "69\323\026\270\327N\200\273\365L8\214\031\325?" 
+ source_list { + file_uuid: "\333q\255\303\337\265Bg\215P\336\220@\377\346U" + } +} +global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "F\235m\337 \223E\205\271<\277\255\n\352a\267" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ' +file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:37:59 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 
'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ' +file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:37:59 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ' +file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:00 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/db71adc3-dfb5-4267-8d50-de9040ffe655' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwTBoIP1hlgPSGVIQLfDNEVuYoalNI2SdNOE-O8LrBzG0c-f7fd8AunIN2jJRsij72gAXccacjiM1CSbjWdiVNe1mM5VJhCVFtPHiSL1NK-1yiDRgXz8cZQmys1yUSYJu3hM-1hhJH3bim2QX-QDtzah1LmjT-0gsuF4NHwQIWJtqB9uPUemAPk7bDdlIddVuV8ty2q3L7Zy8bxevSTw2ql298oNe10Vb8UWPgbgPFvFDo202Fx8om7Y9tal8-2B0-X8dK3vEIchfLc-pbCdMQMwrcILDGRFtUuAp88-US8c2Id4u5TqhrU2JNmmSGj-NIP_EGqQjbw-Hc7nX41EggA.D30qVw.ffzezBstZwATQPKdWeMP3p5yffFceh_MSywzDLl63DEkKa1s5XwyP-Hvvawuq867soeFeogjGGjPhCBqQ99pNQ' +file_name = '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:00 DEBUG catalog_utils.py:262 Getting file from uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F" +name: "release.linux.tools" +annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e" +item_type: kLCM +version: 0 +opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "wl\207P\271OK8\263\327\214ox\207[\236" + source_list { + file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214" + } +} +global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" + version: 0 +} + +2019-03-27 08:38:00 DEBUG lcm_catalog.py:283 Found file uuids ['9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'] +2019-03-27 08:38:00 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:01 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA'} +2019-03-27 08:38:01 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F" +name: "release.linux.tools" +annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e" +item_type: kLCM +version: 0 +opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "wl\207P\271OK8\263\327\214ox\207[\236" + source_list { + file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214" + } +} +global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:01 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:01 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:02 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9rwkAQxb_LnF2pRkVyE5uDECuoaQ-lLJNkrAObzbK7aSnid--YxkM9znu_-fPmAtqRb9CSjZBG39EIuo5rSOE0WVKJi6la4LxUsyRBtSyXiZrNxCmTZIFYgdCBfPxxJB35br3KRcIunmUeVxipvk_FNugv8oFbKyh17uzFDmo-njyNpypELA0Nza3nyBQgfYf9Ls_0tsiPm3VeHI7ZXq-et5sXAXunODwqd-x1k71le_gYgfNsK3ZotMXmdifWDdvhdO18e2LZnF76-gFxGMJ36yWF7YwZgWkrvMFAVhUHATx9DokG4cQ-xPsmqRuua0OarURC86cZ_IdQg2x0_3S4Xn8BxuCBbQ.D30qWQ.Al88lPf9yTZSOhcnJGOrqKHQdo-DI0UxrfOpW0cMpBvV-gXqlORK9OZrlY51Ie2k0hKYJYQUPWsQdmbNHftjjA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:02 DEBUG ssh_client.py:188 Executing ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:02 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 ls "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:02 DEBUG ssh_client.py:203 Executed ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:02 INFO catalog_staging_utils.py:444 tar files: 
['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1', '/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8', '/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655'] +2019-03-27 08:38:02 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:02 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:02 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:03 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:03 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:03 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:03 DEBUG ssh_client.py:200 Executing cmd: 
ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:03 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39: rv 0 +2019-03-27 08:38:03 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 +2019-03-27 08:38:03 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:03 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:04 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:04 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 +2019-03-27 08:38:04 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:04 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o 
CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 +2019-03-27 08:38:04 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/52b1ce7c-5d9c-4d6e-b7e3-60ed6f6727a1 on 10.42.12.39: rv 0 +2019-03-27 08:38:04 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 +2019-03-27 08:38:04 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:04 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:05 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:05 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 +2019-03-27 08:38:05 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:05 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no 
-o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 +2019-03-27 08:38:05 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/99c480c2-f454-4f8b-be8d-20bbd2044c73 on 10.42.12.39: rv 0 +2019-03-27 08:38:05 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:05 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:05 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:06 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:06 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:06 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:06 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o 
ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:06 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39: rv 0 +2019-03-27 08:38:06 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 +2019-03-27 08:38:06 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:06 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:07 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:07 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 +2019-03-27 08:38:07 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:07 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o 
ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 +2019-03-27 08:38:07 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/bea6432d-ea5f-4285-b1e4-3ea5989ee4e8 on 10.42.12.39: rv 0 +2019-03-27 08:38:07 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 +2019-03-27 08:38:07 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:07 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:08 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:08 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 +2019-03-27 08:38:08 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:08 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o 
ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 +2019-03-27 08:38:08 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/db71adc3-dfb5-4267-8d50-de9040ffe655 on 10.42.12.39: rv 0 +2019-03-27 08:38:08 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:38:08 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:38:08 INFO repository_image_utils.py:182 Repository Image Path is as follows: release.karbon.repository_image +2019-03-27 08:38:08 DEBUG repository_image_utils.py:108 Repository Image Module Path: release.karbon.repository_image +2019-03-27 08:38:08 DEBUG repository_image_utils.py:113 Version List returned is: [Status: available Image: karbon.tar.gz Version: 1.0.0 Flag List: [] Update Library List: []] +2019-03-27 08:38:08 DEBUG repository_image_utils.py:429 Versions List is: [Status: available Image: karbon.tar.gz Version: 1.0.0 Flag List: [] Update Library List: []] +2019-03-27 08:38:08 INFO repository_image_utils.py:441 Managed Entity with entity_class: PC and entity_model: Karbon is a pc entity +2019-03-27 08:38:08 DEBUG cpdb_utils.py:572 [(UUID('c9ee6d12-7141-453f-a345-115e392e27e1'), ), (UUID('d6edff2c-f59f-4754-978b-06b6237796b4'), ), (UUID('758942f1-d42d-4d49-99fc-b73e2f2dca30'), ), (UUID('03a6e4a2-fa0e-4698-b0c0-e142820a2e94'), )] +2019-03-27 08:38:08 DEBUG cpdb_utils.py:573 location_id of entity: pc:e636212f-3d79-4a4b-8e78-afa94c05cb66 +2019-03-27 08:38:08 INFO repository_image_utils.py:257 Updating the managed entity with versions +2019-03-27 08:38:08 INFO repository_image_utils.py:269 Updating the available versions table with versions from repository +2019-03-27 08:38:08 INFO cpdb_utils.py:888 No available version tables exists to delete +2019-03-27 
08:38:08 DEBUG cpdb_utils.py:1201 Saved index 0 +2019-03-27 08:38:08 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:08 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 51d5c46b-c2dc-4f13-9b9b-b2b46fdacd81, object: +uuid: "51d5c46b-c2dc-4f13-9b9b-b2b46fdacd81" +entity_uuid: "c9ee6d12-7141-453f-a345-115e392e27e1" +version: "1.0.0" +status: "available" +dependencies: "[]" +order: 1 +entity_class: "PC" + +2019-03-27 08:38:08 DEBUG repository_image_utils.py:382 Managed Entity with entity class: PC CORE CLUSTER and entity model: PC does not support repository image module +2019-03-27 08:38:08 DEBUG repository_image_utils.py:387 Managed Entity with entity class: PC and entity model: Epsilon supports repository image module +2019-03-27 08:38:08 INFO repository_image_utils.py:1086 Retrieving module with reference name release.epsilon.repository_image +2019-03-27 08:38:08 INFO repository_image_utils.py:779 Getting tag list for: release.epsilon.repository_image +2019-03-27 08:38:08 DEBUG repository_image_utils.py:783 Tag List for release.epsilon.repository_image is [] +2019-03-27 08:38:08 INFO repository_image_utils.py:756 Getting flag list for: release.epsilon.repository_image +2019-03-27 08:38:08 DEBUG repository_image_utils.py:760 Flag List for release.epsilon.repository_image is [] +2019-03-27 08:38:08 DEBUG repository_image_utils.py:893 Building repository_image_base_url +2019-03-27 08:38:08 DEBUG configuration.py:158 Found config version: "2.1.5579" +url: "http://download.nutanix.com/lcm/2.0" +auto_update_enabled: false +lcm_standalone_ui_enabled: false +lcm_pc_enabled: false +deprecated_software_entities: "Firmware" + +2019-03-27 08:38:08 INFO configuration.py:137 Read config: {'url': u'http://download.nutanix.com/lcm/2.0', 'deprecated_software_entities': [u'Firmware'], 'lcm_standalone_ui_enabled': False, 'lcm_pc_enabled': False, 'version': u'2.1.5579', 'auto_update_enabled': False} +2019-03-27 08:38:08 DEBUG 
repository_image_utils.py:896 URL present in config is: http://download.nutanix.com/lcm/2.0 +2019-03-27 08:38:08 INFO repository_image_utils.py:900 Repository Image Base URL built by framework: http://download.nutanix.com/lcm/2.0/builds/ +2019-03-27 08:38:08 INFO repository_image_utils.py:84 Repository Image Verification as a feature is disabled or not supported in the current AOS +2019-03-27 08:38:08 INFO repository_image_utils.py:831 Parameters created for inventory by repository image module:{'public_key_location': '/home/nutanix/cluster/config/lcm/lcm_public.pem', 'flag_list': [], 'base_url': u'http://download.nutanix.com/lcm/2.0/builds/', 'entity_model': u'Epsilon', 'rim_verification_support': False, 'tag_list': [], 'proxy_env': {'JETTY_HOME': '/usr/local/nutanix/jetty', 'MY_SERVICE_NAME': 'genesis', 'ZOO_LOG_DIR': '/home/nutanix/data/logs', 'ZOO_START_LOG_FILE': '/home/nutanix/data/logs/zookeeper.log', 'CATALINA_OUT': '/home/nutanix/data/logs/catalina.out', 'LESSOPEN': '||/usr/bin/lesspipe.sh %s', 'LOGNAME': 'nutanix', 'USER': 'nutanix', 'HOME': '/home/nutanix', 'NUTANIX_BINARY_LOG_DIR': '/home/nutanix/data/binary_logs', 'PATH': '/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin', 'CASSANDRA_HOME': '/usr/local/nutanix/apache-cassandra', 'LANG': 'en_US.UTF-8', 'CATALINA_BASE': '/home/nutanix/prism', 'SHELL': 
'/bin/bash', 'NUTANIX_START_LOG_FILE': '/home/nutanix/data/logs/startup.log', 'JDK_HOME': '/usr/lib/jvm/jre-1.8.0', 'SHLVL': '2', 'NUTANIX_BASE_DIR': '/usr/local/nutanix', 'HISTSIZE': '1000', 'GEVENT_RESOLVER': 'ares', 'ZOOKEEPER_HOST_PORT_LIST': 'zk1:9876', 'NUTANIX_LOG_DIR': '/home/nutanix/data/logs', 'MY_EXTERNAL_IP': '10.42.12.39', 'LIBVIRT_DEFAULT_URI': 'qemu+ssh://root@192.168.5.1/system?no_verify=1', 'JAVA_HOME': '/usr/lib/jvm/jre-1.8.0', 'CASSANDRA_INCLUDE': '/home/nutanix/config/cassandra/cassandra.in.sh', 'XDG_RUNTIME_DIR': '/run/user/1000', 'CORE_PATTERN': '|/home/nutanix/serviceability/bin/coredump.py %p %s %e', 'PERL_LWP_SSL_VERIFY_HOSTNAME': '0', 'SSH_KEY': '/home/nutanix/.ssh/id_rsa', 'TOMCAT_HOME': '/usr/local/nutanix/apache-tomcat', 'LC_ALL': 'en_US.UTF-8', 'XDG_SESSION_ID': 'c1', '_': '/home/nutanix/cluster/bin/genesis', 'ZOOCFGDIR': '/home/nutanix/config/zookeeper', 'GLOG_max_log_size': '100', 'CASSANDRA_MAX_HEAP_SIZE': '2560M', 'HOSTNAME': 'ntnx-10-42-12-39-a-pcvm', 'CASSANDRA_CONF': '/home/nutanix/config/cassandra', 'HISTCONTROL': 'ignoredups', 'PWD': '/home/nutanix', 'MAIL': '/var/spool/mail/nutanix'}} +2019-03-27 08:38:08 INFO repository_image_utils.py:421 Getting Version List for release.epsilon.repository_image repository_image_module +2019-03-27 08:38:08 INFO catalog_staging_utils.py:105 Staging module release.epsilon.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:38:08 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:38:08 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:08 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o 
UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging +2019-03-27 08:38:09 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0 +2019-03-27 08:38:09 DEBUG ssh_client.py:188 Executing mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:09 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 mkdir -p /home/nutanix/tmp/lcm_staging +2019-03-27 08:38:09 DEBUG ssh_client.py:203 Executed mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0 +2019-03-27 08:38:09 DEBUG cpdb_utils.py:267 Module list is +2019-03-27 08:38:09 DEBUG cpdb_utils.py:820 Found modules: [{'location': u'8d8e6715-cd27-495f-b7ba-9c5570f34669', 'digest': u'1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f'}, {'location': u'd75633ce-c0ba-40a2-a447-b907a6d9fdc3', 'digest': u'4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70'}, {'location': u'd7cfc33d-2f6d-408e-af8d-f67c9b8ffb67', 'digest': u'48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c'}, {'location': u'60dd8442-1902-4b0a-a16a-1d3248069c6c', 'digest': u'f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e'}, {'location': u'1ff9f0ce-da75-43b7-91d4-f92865b99c5c', 'digest': u'57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9'}] +2019-03-27 08:38:09 DEBUG lcm_catalog.py:197 Found catalog items: ÚÐ@zFM¼>UÖ7‰“N, ?¾²ýåHGh¹j™Êµ, ÌêŠaEP³Š ü'_½, Ò +&€(Lr·dù™CÒF, âáNÀpO¸¨´|b/( +2019-03-27 08:38:09 DEBUG catalog_utils.py:262 Getting file from uuid: "\021\332\320@\217zFM\274>U\3267\211\223N" +name: 
"release.epsilon.update_tools" +annotation: "57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9" +item_type: kLCM +version: 0 +opaque: "\n@57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\333\367\214\363X\325D\223\215\346J\370.\372cT" + source_list { + file_uuid: "e\355\027Z\217GG\223\247y\234\007\002`\237\350" + } +} +global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\" + version: 0 +} + +2019-03-27 08:38:09 DEBUG lcm_catalog.py:283 Found file uuids ['65ed175a-8f47-4793-a779-9c0702609fe8'] +2019-03-27 08:38:09 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:10 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg'} +2019-03-27 08:38:10 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\021\332\320@\217zFM\274>U\3267\211\223N" +name: "release.epsilon.update_tools" +annotation: "57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9" +item_type: kLCM +version: 0 +opaque: "\n@57263978b3c71a0fda2385377855ab69dbd7ff3babcfcd28b7372c22886e7bf9\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\333\367\214\363X\325D\223\215\346J\370.\372cT" 
+ source_list { + file_uuid: "e\355\027Z\217GG\223\247y\234\007\002`\237\350" + } +} +global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\037\371\360\316\332uC\267\221\324\371(e\271\234\\" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8' +cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg' +file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:10 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# 
suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8' +cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg' +file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:10 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8' +cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg' +file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:11 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/65ed175a-8f47-4793-a779-9c0702609fe8' +cookie = 'JSESSIONID=.eJxdkEFPwzAMhf-Lz8tURgtdb9PoYVLHpG6FA0KRaV1mKU2jJAWhaf-drHQHdvTzZ_s9n0Aash1q0h4ybweawTBwAxm0cRtHy-W9aNPkQcQRxSJdJIlIo8c6XlBLaUoQaEfW_xgKE8VuvSqChIM_hn1co6fmuhV7J7_IOu51QGkwRxvaTiTzu2i-EM7jh6JpuLfsmRxkb1Duilxuq-KwWRfV_pCXcvW03TwHcOxU-1vlir1s8te8hPcZGMu6ZoNKauwuPrHpWE_WpbF9y-FydhrrG8Sgc9-9DSn0oNQMVF_jBQbSotoHwNLnlGgSWrbOXy-FuuOmUSRZh0io_jSF_xDqkJUcnw7n8y_ue4DW.D30qYg.A1_oTvepF8PFpBE2y1vYkgdSYH-kzeGtx4kbvnx0SVYlrp0DDGNB7NzDjCRt1vBYyCXIOqrPwhMvymDZ0dF8cg' +file_name = '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:11 DEBUG catalog_utils.py:262 Getting file from uuid: "?\276\262\375\345HGh\271\020j\231\004\312\031\265" +name: "release.epsilon.update" +annotation: "1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f" +item_type: kLCM +version: 0 +opaque: "\n@1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "%\276:{\n\270I?\235\215\207\351\276\320\3654" + source_list { + file_uuid: "U\000\237\001\'\303@\016\251\367QEv\243\027q" + } +} +global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi" + version: 0 +} + +2019-03-27 08:38:11 DEBUG lcm_catalog.py:283 Found file uuids ['55009f01-27c3-400e-a9f7-514576a31771'] +2019-03-27 08:38:11 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:11 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg'} +2019-03-27 08:38:11 DEBUG catalog_utils.py:278 Getting catalog item uuid: "?\276\262\375\345HGh\271\020j\231\004\312\031\265" +name: "release.epsilon.update" +annotation: "1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f" +item_type: kLCM +version: 0 +opaque: "\n@1b9979a2ab7416a6e8e87198c02e0bd0d6a5b7c9af7cd8583b78721583298d1f\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "%\276:{\n\270I?\235\215\207\351\276\320\3654" + source_list { + file_uuid: "U\000\237\001\'\303@\016\251\367QEv\243\027q" + } +} +global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\215\216g\025\315\'I_\267\272\234Up\363Fi" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg' +file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:11 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg' +file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:11 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg' +file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:12 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/55009f01-27c3-400e-a9f7-514576a31771' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw1aCx1db4j1gFSGBHQ7TFNkWndYStMoSTdNiP--wMphHP382X7PJ5CGbIeatIfc24EiGAZuIAfMmnmdPmbiQLO5mLVJJp5oloppnNTzeJrEbYYQaEfW_xgKE-VmuSiDhIM_hn1co6fmthV7J7_IOu51QGkwRxvaTqST-GGSCOfxoGgc7i17Jgf5O2w3ZSHXVblfLctqty-2cvG8Xr0E8NqpdvfKDXtdFW_FFj4iMJZ1zQaV1NhdfGLTsR6tS2P7lsPl_HSt7xCDzn33NqTQg1IRqL7GCwykRbULgKXPMdEotGydv10KdcdNo0iyDpFQ_WkK_yHUISt5fTqcz78gaYDz.D30qYw.DNZds5SJsRQEUhY7N4PHj1tpOWdnPHJYZVlg29ImF9kYkL6mNAAmjUt5bPf9JnC-If_O-4aNV5rDvKFXNqakFg' +file_name = '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:12 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275" +name: "nutanix.tools" +annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c" +item_type: kLCM +version: 0 +opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t" + source_list { + file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+" + } +} +global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" + version: 0 +} + +2019-03-27 08:38:12 DEBUG lcm_catalog.py:283 Found file uuids ['48310501-3d7a-4be1-a3ed-9d990e39222b'] +2019-03-27 08:38:12 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:13 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ'} +2019-03-27 08:38:13 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275" +name: "nutanix.tools" +annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c" +item_type: kLCM +version: 0 +opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t" + source_list { + file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+" + } +} +global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:13 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:13 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:14 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwR1_N16Q6wHpDKkQrfDNEWmcYelNI2SdNOE-O4LXTmMo59_tt_zGaQl16AhEyANrqMRdB0rSOGpnk0XlVqIxXSOYqbUUhwTnIjHuZrUtEyShBAi7cmFH0txIt-tV3mUsAunuI8rDKRuW7H18ouc59ZElDp7crHtxXz8kIwnwgc8ahqGW8eByUP6DsUuz-S2zA-bdV7uD1khV8_bzUsE-065v1du2Osme8sK-BiBdWwqtqilwebqE1XDZrAurWtrjpfTc1_fIRa9_25dTGE6rUeg2wqvMJAR5T4Cjj6HRINQs_PhdinWDSulSbKJkVD_aRr_IdQga9k_HS6XX8nKgXE.D30qZQ.5AYg8RXgZQjC-zxKKZhxfRyfBHx88fv7A7-7QxY__q_WpvHTQiD87n5DA0gnVqju-aEivjeGmgrxwfbDsdrONQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:14 DEBUG catalog_utils.py:262 Getting file from uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F" +name: "release.linux.tools" +annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e" +item_type: kLCM +version: 0 +opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "wl\207P\271OK8\263\327\214ox\207[\236" + source_list { + file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214" + } +} +global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" + version: 0 +} + +2019-03-27 08:38:14 DEBUG lcm_catalog.py:283 Found file uuids ['9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'] +2019-03-27 08:38:14 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:15 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw'} +2019-03-27 08:38:15 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F" +name: "release.linux.tools" +annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e" +item_type: kLCM +version: 0 +opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "wl\207P\271OK8\263\327\214ox\207[\236" + source_list { + file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214" + } +} +global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:15 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:15 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:16 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-LzwRRKBP0hlgPSGVILd0O0xSZxgxLaRol6aYJ8d8XWDmMo58_2-_5DNKSa9GQCZAF19MI-p4VZJAum1miKBVq-jQR6XxOAtPjQahELZrlYjlLJglE2pMLP5biRLFbr4ooYR9OcR83GEjdt2Ln5Rc5z52JKPX25GLbi_k4mYynwgc8aBqGO8eByUP2DuWuyOW2LvabdVFX-7yUq-ft5iWCt05dPSp37HWTv-UlfIzAOjYNW9TSYHv1iaplM1iX1nVHjpez861-QCx6_925mML0Wo9Adw1eYSAj6ioCjj6HRINwZOfD_VKsW1ZKk2QTI6H-0zT-Q6hF1vL2dLhcfgFPaIEW.D30qZw.Pch1ylZ2rV-jnRSo1pl7y1KkV3WKXGcF7R1G_6J4GsMkCHsEs1o4aSwAfiaZgdDOMumk_hZ9OyoXuyoNUtQ5tw' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:16 DEBUG catalog_utils.py:262 Getting file from uuid: "\342\341N\300p\177O\270\250\264|b\035/\001(" +name: "release.epsilon.repository_image" +annotation: "4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70" +item_type: kLCM +version: 0 +opaque: "\n@4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "1\t5\240\177\361A\243\226\364\274\325b\005\233F" + source_list { + file_uuid: "\354\263\026\353|\325E`\214.\001\343\212E2\330" + } +} +global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} 
+owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303" + version: 0 +} + +2019-03-27 08:38:16 DEBUG lcm_catalog.py:283 Found file uuids ['ecb316eb-7cd5-4560-8c2e-01e38a4532d8'] +2019-03-27 08:38:16 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:16 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA'} +2019-03-27 08:38:16 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\342\341N\300p\177O\270\250\264|b\035/\001(" +name: "release.epsilon.repository_image" +annotation: "4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70" +item_type: kLCM +version: 0 +opaque: "\n@4e3f5b88c09808461f93e7ddc0cc0998b6212169d5469710517f96fb75330d70\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "1\t5\240\177\361A\243\226\364\274\325b\005\233F" + source_list { + file_uuid: "\354\263\026\353|\325E`\214.\001\343\212E2\330" + } +} +global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327V3\316\300\272@\242\244G\271\007\246\331\375\303" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 
'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA' +file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:16 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA' +file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:16 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA' +file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:17 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwTxp4PSG2I9IJUhAd0O0xSZxh2W0jRK0k0T4rsvsHIYRz__bL_nM0hLrkFDJkAWXEcD6DpWkIFS6Ww6noxEivOjSBZ1Kha0qEQ6ntU0nSdJPU8g0p5c-LEUJ4rtallECbtwivu4wkDqvhVbL7_IeW5NRKmzJxfbXjwNx6PhRPiAR039cOs4MHnI3mG3LXK5KYvDelWU-0O-k8vnzfolgrdOuX9U7tjrOn_Ld_AxAOvYVGxRS4PN1Seqhk1vXVrX1hwvZ-db_YBY9P67dTGF6bQegG4rvMJARpT7CDj67BP1Qs3Oh_ulWDeslCbJJkZC_adp_IdQg6zl7elwufwCNueBBg.D30qaA.lYw9nnfU2tfYgvilT3Fo01wQAVavorABO7V2y29aRpkFeV8p5fLltz7B-84QeMdxkZkvprTUqPe9Vmi4jXhtsA' +file_name = '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:17 DEBUG ssh_client.py:188 Executing ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:17 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 ls "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:18 DEBUG ssh_client.py:203 Executed ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:18 INFO catalog_staging_utils.py:444 tar files: 
['/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771', '/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c', '/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8'] +2019-03-27 08:38:18 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:18 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:18 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:18 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:18 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:18 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:18 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:19 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39: rv 0 +2019-03-27 08:38:19 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 +2019-03-27 08:38:19 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:19 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:19 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:19 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 +2019-03-27 08:38:19 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:19 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 +2019-03-27 08:38:20 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/55009f01-27c3-400e-a9f7-514576a31771 on 10.42.12.39: rv 0 +2019-03-27 08:38:20 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 +2019-03-27 08:38:20 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:20 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:20 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:20 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 +2019-03-27 08:38:20 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:20 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 +2019-03-27 08:38:21 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/65ed175a-8f47-4793-a779-9c0702609fe8 on 10.42.12.39: rv 0 +2019-03-27 08:38:21 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:21 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:21 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:21 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:21 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:21 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:21 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:22 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39: rv 0 +2019-03-27 08:38:22 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 +2019-03-27 08:38:22 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:22 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:22 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:22 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 +2019-03-27 08:38:22 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:22 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 +2019-03-27 08:38:23 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/ecb316eb-7cd5-4560-8c2e-01e38a4532d8 on 10.42.12.39: rv 0 +2019-03-27 08:38:23 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:38:23 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:38:23 INFO repository_image_utils.py:182 Repository Image Path is as follows: release.epsilon.repository_image +2019-03-27 08:38:23 DEBUG repository_image_utils.py:108 Repository Image Module Path: release.epsilon.repository_image +2019-03-27 08:38:23 DEBUG repository_image_utils.py:113 Version List returned is: [Status: available Image: epsilon.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.4.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []] +2019-03-27 08:38:23 DEBUG repository_image_utils.py:429 Versions List is: [Status: available Image: epsilon.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.4.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library 
List: [], Status: available Image: epsilon.tar.gz Version: 2.5.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: epsilon.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []] +2019-03-27 08:38:23 INFO repository_image_utils.py:441 Managed Entity with entity_class: PC and entity_model: Epsilon is a pc entity +2019-03-27 08:38:23 DEBUG cpdb_utils.py:572 [(UUID('c9ee6d12-7141-453f-a345-115e392e27e1'), ), (UUID('d6edff2c-f59f-4754-978b-06b6237796b4'), ), (UUID('758942f1-d42d-4d49-99fc-b73e2f2dca30'), ), (UUID('03a6e4a2-fa0e-4698-b0c0-e142820a2e94'), )] +2019-03-27 08:38:23 DEBUG cpdb_utils.py:573 location_id of entity: pc:e636212f-3d79-4a4b-8e78-afa94c05cb66 +2019-03-27 08:38:23 INFO repository_image_utils.py:257 Updating the managed entity with versions +2019-03-27 08:38:23 INFO repository_image_utils.py:269 Updating the available versions table with versions from repository +2019-03-27 08:38:23 DEBUG cpdb_utils.py:1201 Saved index 4 +2019-03-27 08:38:23 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:23 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID a9c18b96-3655-42f4-ac91-f91f53261c5c, object: +uuid: "a9c18b96-3655-42f4-ac91-f91f53261c5c" +entity_uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30" +version: "2.6.0.1" +status: "available" +dependencies: "[]" +order: 1 +entity_class: "PC" + +2019-03-27 08:38:23 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:23 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 851481ff-7807-4a94-be60-22b6c97aeac2, object: +uuid: "851481ff-7807-4a94-be60-22b6c97aeac2" +entity_uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30" +version: "2.6.0.2" +status: "available" +dependencies: "[]" +order: 2 +entity_class: "PC" + 
+2019-03-27 08:38:23 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:23 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 61337a17-132c-45ba-8fe0-f441ad922dd9, object: +uuid: "61337a17-132c-45ba-8fe0-f441ad922dd9" +entity_uuid: "758942f1-d42d-4d49-99fc-b73e2f2dca30" +version: "2.6.0.3" +status: "available" +dependencies: "[]" +order: 3 +entity_class: "PC" + +2019-03-27 08:38:23 DEBUG repository_image_utils.py:387 Managed Entity with entity class: PC and entity model: Calm supports repository image module +2019-03-27 08:38:23 INFO repository_image_utils.py:1086 Retrieving module with reference name release.calm.repository_image +2019-03-27 08:38:23 INFO repository_image_utils.py:779 Getting tag list for: release.calm.repository_image +2019-03-27 08:38:23 DEBUG repository_image_utils.py:783 Tag List for release.calm.repository_image is [] +2019-03-27 08:38:23 INFO repository_image_utils.py:756 Getting flag list for: release.calm.repository_image +2019-03-27 08:38:23 DEBUG repository_image_utils.py:760 Flag List for release.calm.repository_image is [] +2019-03-27 08:38:23 DEBUG repository_image_utils.py:893 Building repository_image_base_url +2019-03-27 08:38:23 DEBUG configuration.py:158 Found config version: "2.1.5579" +url: "http://download.nutanix.com/lcm/2.0" +auto_update_enabled: false +lcm_standalone_ui_enabled: false +lcm_pc_enabled: false +deprecated_software_entities: "Firmware" + +2019-03-27 08:38:23 INFO configuration.py:137 Read config: {'url': u'http://download.nutanix.com/lcm/2.0', 'deprecated_software_entities': [u'Firmware'], 'lcm_standalone_ui_enabled': False, 'lcm_pc_enabled': False, 'version': u'2.1.5579', 'auto_update_enabled': False} +2019-03-27 08:38:23 DEBUG repository_image_utils.py:896 URL present in config is: http://download.nutanix.com/lcm/2.0 +2019-03-27 08:38:23 INFO repository_image_utils.py:900 Repository Image Base URL built by framework: http://download.nutanix.com/lcm/2.0/builds/ +2019-03-27 
08:38:23 INFO repository_image_utils.py:84 Repository Image Verification as a feature is disabled or not supported in the current AOS +2019-03-27 08:38:23 INFO repository_image_utils.py:831 Parameters created for inventory by repository image module:{'public_key_location': '/home/nutanix/cluster/config/lcm/lcm_public.pem', 'flag_list': [], 'base_url': u'http://download.nutanix.com/lcm/2.0/builds/', 'entity_model': u'Calm', 'rim_verification_support': False, 'tag_list': [], 'proxy_env': {'JETTY_HOME': '/usr/local/nutanix/jetty', 'MY_SERVICE_NAME': 'genesis', 'ZOO_LOG_DIR': '/home/nutanix/data/logs', 'ZOO_START_LOG_FILE': '/home/nutanix/data/logs/zookeeper.log', 'CATALINA_OUT': '/home/nutanix/data/logs/catalina.out', 'LESSOPEN': '||/usr/bin/lesspipe.sh %s', 'LOGNAME': 'nutanix', 'USER': 'nutanix', 'HOME': '/home/nutanix', 'NUTANIX_BINARY_LOG_DIR': '/home/nutanix/data/binary_logs', 'PATH': '/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/home/nutanix/ncc/panacea/bin:/usr/lib/jvm/jre-1.8.0/bin:/home/nutanix/ncc/bin:/home/nutanix/kvm/bin:/home/nutanix/prism/cli:/usr/local/nutanix/cluster/lib/sg3utils/bin:/usr/local/nutanix/minerva/bin:/usr/local/nutanix/cluster/bin:/usr/local/nutanix/zookeeper/bin:/usr/local/nutanix/apache-cassandra/bin:/usr/local/nutanix/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin', 'CASSANDRA_HOME': '/usr/local/nutanix/apache-cassandra', 'LANG': 'en_US.UTF-8', 'CATALINA_BASE': '/home/nutanix/prism', 'SHELL': '/bin/bash', 'NUTANIX_START_LOG_FILE': '/home/nutanix/data/logs/startup.log', 'JDK_HOME': '/usr/lib/jvm/jre-1.8.0', 'SHLVL': '2', 'NUTANIX_BASE_DIR': '/usr/local/nutanix', 'HISTSIZE': '1000', 'GEVENT_RESOLVER': 'ares', 'ZOOKEEPER_HOST_PORT_LIST': 'zk1:9876', 
'NUTANIX_LOG_DIR': '/home/nutanix/data/logs', 'MY_EXTERNAL_IP': '10.42.12.39', 'LIBVIRT_DEFAULT_URI': 'qemu+ssh://root@192.168.5.1/system?no_verify=1', 'JAVA_HOME': '/usr/lib/jvm/jre-1.8.0', 'CASSANDRA_INCLUDE': '/home/nutanix/config/cassandra/cassandra.in.sh', 'XDG_RUNTIME_DIR': '/run/user/1000', 'CORE_PATTERN': '|/home/nutanix/serviceability/bin/coredump.py %p %s %e', 'PERL_LWP_SSL_VERIFY_HOSTNAME': '0', 'SSH_KEY': '/home/nutanix/.ssh/id_rsa', 'TOMCAT_HOME': '/usr/local/nutanix/apache-tomcat', 'LC_ALL': 'en_US.UTF-8', 'XDG_SESSION_ID': 'c1', '_': '/home/nutanix/cluster/bin/genesis', 'ZOOCFGDIR': '/home/nutanix/config/zookeeper', 'GLOG_max_log_size': '100', 'CASSANDRA_MAX_HEAP_SIZE': '2560M', 'HOSTNAME': 'ntnx-10-42-12-39-a-pcvm', 'CASSANDRA_CONF': '/home/nutanix/config/cassandra', 'HISTCONTROL': 'ignoredups', 'PWD': '/home/nutanix', 'MAIL': '/var/spool/mail/nutanix'}} +2019-03-27 08:38:23 INFO repository_image_utils.py:421 Getting Version List for release.calm.repository_image repository_image_module +2019-03-27 08:38:23 INFO catalog_staging_utils.py:105 Staging module release.calm.update and dependancies to cvm 10.42.12.39 +2019-03-27 08:38:23 INFO catalog_staging_utils.py:353 Prep remote staging area /home/nutanix/tmp/lcm_staging +2019-03-27 08:38:23 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:23 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging +2019-03-27 08:38:23 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0 +2019-03-27 08:38:23 DEBUG ssh_client.py:188 Executing 
mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:23 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 mkdir -p /home/nutanix/tmp/lcm_staging +2019-03-27 08:38:24 DEBUG ssh_client.py:203 Executed mkdir -p /home/nutanix/tmp/lcm_staging on 10.42.12.39: rv 0 +2019-03-27 08:38:24 DEBUG cpdb_utils.py:267 Module list is +2019-03-27 08:38:24 DEBUG cpdb_utils.py:820 Found modules: [{'location': u'1ed8d805-f2d8-4ad8-8f4c-d41623b986d3', 'digest': u'6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5'}, {'location': u'd7cfc33d-2f6d-408e-af8d-f67c9b8ffb67', 'digest': u'48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c'}, {'location': u'b12cfd60-e347-4261-8f33-9a54c6cd0fdd', 'digest': u'c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba'}, {'location': u'60dd8442-1902-4b0a-a16a-1d3248069c6c', 'digest': u'f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e'}, {'location': u'fc61a6d3-ca50-4f6e-a966-cb87a2c12339', 'digest': u'c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d'}] +2019-03-27 08:38:24 DEBUG lcm_catalog.py:197 Found catalog items: ÌêŠaEP³Š ü'_½, ïFUñaK•™©Zy4<, Rx=ôöK8‡ š]þx¹, V ~ÛÒHϩصVý ˆ, Ò +&€(Lr·dù™CÒF +2019-03-27 08:38:24 DEBUG catalog_utils.py:262 Getting file from uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275" +name: "nutanix.tools" +annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c" +item_type: kLCM +version: 0 +opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: 
"\320;\252@\213\240O\244\215\352E\354\243\207]\t" + source_list { + file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+" + } +} +global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" + version: 0 +} + +2019-03-27 08:38:24 DEBUG lcm_catalog.py:283 Found file uuids ['48310501-3d7a-4be1-a3ed-9d990e39222b'] +2019-03-27 08:38:24 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:24 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ'} +2019-03-27 08:38:24 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\025\314\352\212\235aEP\263\002\212\013\374\'_\275" +name: "nutanix.tools" +annotation: "48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c" +item_type: kLCM +version: 0 +opaque: "\n@48b429aaba419e03519da545038974d75282d73ef9a0fee103de1c920193344c\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\320;\252@\213\240O\244\215\352E\354\243\207]\t" + source_list { + file_uuid: "H1\005\001=zK\341\243\355\235\231\0169\"+" + } +} +global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" 
+source_catalog_item_id { + global_catalog_item_uuid: "\327\317\303=/m@\216\257\215\366|\233\217\373g" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. +try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:24 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:24 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:26 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/48310501-3d7a-4be1-a3ed-9d990e39222b' +cookie = 'JSESSIONID=.eJxdkE1vwjAMhv-LzwSVwtatN8R6QCpDArodpiky1AxLaRol6aYJ8d9nWDnAwQe_fvzx-gjakW_Qko2QR9_RALqOa8ghySaYptlISSRqMqJa4Zi2Knt6HGcp0RiTZxA6kI-_jqSjXM6mpUjYxYPM4x1Gqq9TsQ36m3zg1gpKnTt4KQf1MBwlw1SFiFtDfXPrOTIFyD9gtSwLvajKzXxWVutNsdLTl8X8VcBLpVrfK1fsbV68Fyv4HIDzbHfs0GiLzflOrBu2_ena-XbPsjk_XvI7xGEIP60XF7YzZgCm3eEZBrKqWgvg6at31At79iFeN0necF0b0mzFEpp_zeANQg2y0Zenw-n0B6zogKk.D30qcA.9iOrjuvkSjpOLn1lSSxjJAoieOUtXcILHBd0mFSOWdFdm-oA7AgF93YisWoXWQHygWFC-7ERglrvn7um3imxNQ' +file_name = '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:26 DEBUG catalog_utils.py:262 Getting file from uuid: "\357FU\361\032aK\225\231\251\027Zy4<\177" +name: "release.calm.update_tools" +annotation: "c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba" +item_type: kLCM +version: 0 +opaque: "\n@c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\230U\203\350\213\254HI\236*d\351\240u\020s" + source_list { + file_uuid: "\n\347^\264\230RE\314\256\013\257B\345\362\347\254" + } +} +global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335" + version: 0 +} + +2019-03-27 08:38:26 DEBUG lcm_catalog.py:283 Found file uuids ['0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac'] +2019-03-27 08:38:26 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:26 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw'} +2019-03-27 08:38:26 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\357FU\361\032aK\225\231\251\027Zy4<\177" +name: "release.calm.update_tools" +annotation: "c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba" +item_type: kLCM +version: 0 +opaque: "\n@c94b2574a0b5060e534bfb10c53da9b2613f800b4897e771e6fa2b86376253ba\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\230U\203\350\213\254HI\236*d\351\240u\020s" + source_list { + file_uuid: "\n\347^\264\230RE\314\256\013\257B\345\362\347\254" + } +} +global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\261,\375`\343GBa\2173\232T\306\315\017\335" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw' +file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:26 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw' +file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:26 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw' +file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:27 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw2itFNRb4j1gFSGBHQ7TFPktu6wlKZRkm6aEP99gZXDOPr5s_2ezyAN2R41aQ-5tyNFMI7cQg7xMsFls0hFl9WxSGuMBVKWiXrRdck86zCZpxBoR9b_GAoT5W69KoOEoz-Ffdygp_a-FQcnv8g6HnRAaTQnG9pOPM3i-WwhnMda0TQ8WPZMDvJ32O_KQm6r8rhZl9XhWOzl6nm7eQngrVMdHpU79rop3oo9fERgLOuGDSqpsb_6xLZnPVmXxg4dh8v5-VY_IAad-x5sSKFHpSJQQ4NXGEiL6hAAS59Tokno2Dp_vxTqnttWkWQdIqH60xT-Q6hHVvL2dLhcfgG97oFo.D30qcg.5Vz8h_uo7AveShZA1_TJd1JbcrWYJx_mvD8umMIOAlU5WdLVcAJgefxZGVMFCZfJ0EfnMZxEnZHqIHPjWGgZbw' +file_name = '/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:27 DEBUG catalog_utils.py:262 Getting file from uuid: "Rx=\364\010\366K8\207\014\232]\376x\271\024" +name: "release.calm.update" +annotation: "6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5" +item_type: kLCM +version: 0 +opaque: "\n@6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "j\301\335\225\005\010I\351\247@\331\334j\327\3376" + source_list { + file_uuid: "\035L\215\207|\276B\243\225\004P\243\0273(\374" + } +} +global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323" + version: 0 +} + +2019-03-27 08:38:27 DEBUG lcm_catalog.py:283 Found file uuids ['1d4c8d87-7cbe-42a3-9504-50a3173328fc'] +2019-03-27 08:38:27 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:28 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg'} +2019-03-27 08:38:28 DEBUG catalog_utils.py:278 Getting catalog item uuid: "Rx=\364\010\366K8\207\014\232]\376x\271\024" +name: "release.calm.update" +annotation: "6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5" +item_type: kLCM +version: 0 +opaque: "\n@6dafc74096a44cb2e140f7bd18758240cede7c928c888f84788a2dce4fa3ffa5\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "j\301\335\225\005\010I\351\247@\331\334j\327\3376" + source_list { + file_uuid: "\035L\215\207|\276B\243\225\004P\243\0273(\374" + } +} +global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\036\330\330\005\362\330J\330\217L\324\026#\271\206\323" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg' +file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:28 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg' +file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:28 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg' +file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:29 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/1d4c8d87-7cbe-42a3-9504-50a3173328fc' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf9LzgR1o0DVG4IekMqQWrodpikyxAxLaRol6aYJ8d9nWDmMo58_2-_5LJRD34JFG0UefY8j0fekRS4y0Ok8SxM5SROUaQKZ3CczLTM9mSazuWZhL5gO6OOPQ54ot8tFyRL08cT76AAR9X0rdEF9oQ_UWUaxdyfP7SCn46dk_CxDhL3BYbjzFAmDyN9FtS0LtWnK3XpZNvWuqNRitVm_MHjrNPWjcsde18VbUYmPkXCe7IEcGGWhvfoE3ZIdrCvnuyPx5fx8qx8QByF8d55T2N6YkTDdAa6wQCubmgGPn0OiQTiSD_F-ieuWtDaoyHIkMH-agX8ItkBG3Z4uLpdf_A-A3w.D30qdA.AokVGtR1c_JtpdZHz_tpgT3HK2ygdduec5P0xdErMGB8GJKXqLZFMKUl8XQ_6EbTKSwtkrmE2hMwo9Z4C8cfWg' +file_name = '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:29 DEBUG catalog_utils.py:262 Getting file from uuid: "V\027\014~\333\322H\317\251\330\032\265V\375\t\210" +name: "release.calm.repository_image" +annotation: "c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d" +item_type: kLCM +version: 0 +opaque: "\n@c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\244U\230\024`xJe\272\325\227\367l\004\362\236" + source_list { + file_uuid: "\025\343\303\326\206\035L\026\254M\375TDd\230g" + } +} +global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9" + version: 0 +} + +2019-03-27 08:38:29 DEBUG lcm_catalog.py:283 Found file uuids ['15e3c3d6-861d-4c16-ac4d-fd5444649867'] +2019-03-27 08:38:29 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:29 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow'} +2019-03-27 08:38:29 DEBUG catalog_utils.py:278 Getting catalog item uuid: "V\027\014~\333\322H\317\251\330\032\265V\375\t\210" +name: "release.calm.repository_image" +annotation: "c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d" +item_type: kLCM +version: 0 +opaque: "\n@c595a650f3efe40fa03eda3d62a3d7a71965292a46cb34a222687f2c37aa998d\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "\244U\230\024`xJe\272\325\227\367l\004\362\236" + source_list { + file_uuid: "\025\343\303\326\206\035L\026\254M\375TDd\230g" + } +} +global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "\374a\246\323\312POn\251f\313\207\242\301#9" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow' +file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:29 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow' +file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:29 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow' +file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:30 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/15e3c3d6-861d-4c16-ac4d-fd5444649867' +cookie = 'JSESSIONID=.eJxdkEFvwjAMhf-Lzw3qBoWtN8R6QCpDKnQ7TFPkNe6wlKZRkm6aEP99gZXDOPr5s_2ejyAtuQ4NmQB5cAMlMAysIIc2S2eNykhkbarEbNqgeMBHFPPFAiltcTZXU4i0Jxd-LMWJcrtallHCIRziPm4wkLpuxd7LL3KeexNRGuzBxbYX2eQundwLH_BD0zjcOw5MHvI3qLZlITd1uV-vynq3Lyq5fNqsnyN46dS7W-WKvayL16KC9wSsY9OwRS0NdmefqDo2o3VpXd9yvJwfL_UNYtH7797FFGbQOgHdN3iGgYyodxFw9DkmGoWWnQ_XS7HuWClNkk2MhPpP0_gPoQ5Zy8vT4XT6BVg_gdU.D30qdQ.j2J7Pwse-DnT_EhKupUYLueVhrlHFmVOYqMuqcZad_jtyYZYKjBhPEP3q_XXF2TPJji6upm7IE1pOA75mTzCow' +file_name = '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:30 DEBUG catalog_utils.py:262 Getting file from uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F" +name: "release.linux.tools" +annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e" +item_type: kLCM +version: 0 +opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "wl\207P\271OK8\263\327\214ox\207[\236" + source_list { + file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214" + } +} +global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: 
"\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" + version: 0 +} + +2019-03-27 08:38:30 DEBUG lcm_catalog.py:283 Found file uuids ['9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'] +2019-03-27 08:38:30 DEBUG catalog_utils.py:675 Prism is up and running +2019-03-27 08:38:31 DEBUG catalog_utils.py:653 Authentication header: {'name': 'JSESSIONID', 'value': '.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA'} +2019-03-27 08:38:31 DEBUG catalog_utils.py:278 Getting catalog item uuid: "\322\n&\037\200(Lr\267d\371\231\022C\322F" +name: "release.linux.tools" +annotation: "f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e" +item_type: kLCM +version: 0 +opaque: "\n@f1ad5e467e011a6d2eed86e2536489ee323647ee6c0fdf410716fc64b9b0ab3e\020\002\032\020\3466!/=yJK\216x\257\251L\005\313f" +source_group_list { + uuid: "wl\207P\271OK8\263\327\214ox\207[\236" + source_list { + file_uuid: "\237\247HW(\346On\234o\372J\243y\243\214" + } +} +global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" +location_list { + cluster_uuid: "\000\005\205\023d\264Q$\000\000\000\000\000\000\321\267" +} +owner_cluster_uuid: "\3466!/=yJK\216x\257\251L\005\313f" +source_catalog_item_id { + global_catalog_item_uuid: "`\335\204B\031\002K\n\241j\0352H\006\234l" + version: 0 +} + with "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:31 DEBUG ssh_client.py:188 Executing "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:31 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " +2019-03-27 08:38:32 DEBUG ssh_client.py:203 Executed "python -c \"try: + from urllib import request +except: + import urllib2 as request +import ssl + +# suppress 'InsecureRequestWarning:Unverified HTTPS*' warning. 
+try: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) +except: + pass + +url = 'https://10.42.12.39:9440/file_repo/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' +cookie = 'JSESSIONID=.eJxdkE9vwjAMxb-LzwS1iD9bbwh6QCpDArodpilyG3dYStMoSTdNiO--wMphHP3887OfzyAtuRYNmQBZcD2NoO9ZQQaUzCa1auYipaQSU1Wl4gkXz4KmaVpPkqSaL2qItCcXfizFiWK3WhZRwj6coh_XGEjdXbHz8ouc585czXt7crHtxWycJuOJ8AErTcNw5zgwecjeYb8rcrkti-NmVZSHY76Xy_V28xLBW6c8PCp37HWTv-V7-BiBdWxqtqilwfZ6J6qWzXC6tK5rOG7Ozrf6AbHo_XfnYgrTaz0C3dV4hYGMKA8RcPQ5JBqEhp0P902xblkpTZJNjIT6T9P4D6EWWcvb0-Fy-QW8n4Fj.D30qdw.iesjQpoaxHhK20107iVoQ39FRTaS6PL_4qkwsddn4RAUgXmSX16-cLZuj7Ep-PUTnyuGFKm9-djPfvWnf1OwRA' +file_name = '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c' + +download_out = '~/download.out' + +print('Performing the fetch request') +req = request.Request(url, headers={'Cookie': cookie}) +try: + resp = request.urlopen(req, context=ssl._create_unverified_context()) +except (TypeError, AttributeError): + resp = request.urlopen(req) + +buf_size = 2**20 +print('Writing the catalog item to a file') + +with open(file_name, 'wb') as writer: + while True: + data = resp.read(buf_size) + if not data: + break + writer.write(data)\" " on 10.42.12.39: rv 0 +2019-03-27 08:38:32 DEBUG ssh_client.py:188 Executing ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:32 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 ls "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:32 DEBUG ssh_client.py:203 Executed ls "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:32 INFO catalog_staging_utils.py:444 tar files: 
['/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac', '/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867', '/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc', '/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b', '/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c'] +2019-03-27 08:38:32 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac +2019-03-27 08:38:32 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:32 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:33 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:33 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac +2019-03-27 08:38:33 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:33 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac +2019-03-27 08:38:33 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/0ae75eb4-9852-45cc-ae0b-af42e5f2e7ac on 10.42.12.39: rv 0 +2019-03-27 08:38:33 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 +2019-03-27 08:38:33 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:33 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:34 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:34 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 +2019-03-27 08:38:34 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:34 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 +2019-03-27 08:38:34 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/15e3c3d6-861d-4c16-ac4d-fd5444649867 on 10.42.12.39: rv 0 +2019-03-27 08:38:34 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc +2019-03-27 08:38:34 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:34 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:35 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:35 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc +2019-03-27 08:38:35 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:35 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc +2019-03-27 08:38:35 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/1d4c8d87-7cbe-42a3-9504-50a3173328fc on 10.42.12.39: rv 0 +2019-03-27 08:38:35 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:35 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:35 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:36 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:36 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:36 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:36 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b +2019-03-27 08:38:36 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/48310501-3d7a-4be1-a3ed-9d990e39222b on 10.42.12.39: rv 0 +2019-03-27 08:38:36 INFO catalog_staging_utils.py:447 Extracting /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:36 DEBUG ssh_client.py:188 Executing tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:36 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" +2019-03-27 08:38:37 DEBUG ssh_client.py:203 Executed tar zxvf "/home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c" -C "/home/nutanix/tmp/lcm_staging" on 10.42.12.39: rv 0 +2019-03-27 08:38:37 INFO catalog_staging_utils.py:456 Removing /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:37 DEBUG ssh_client.py:188 Executing rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39 using ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 +2019-03-27 08:38:37 DEBUG ssh_client.py:200 Executing cmd: ssh -q -o CheckHostIp=no -o ConnectTimeout=15 -o 
StrictHostKeyChecking=no -o TCPKeepAlive=yes -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=publickey nutanix@10.42.12.39 rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c +2019-03-27 08:38:37 DEBUG ssh_client.py:203 Executed rm -rf /home/nutanix/tmp/lcm_staging/9fa74857-28e6-4f6e-9c6f-fa4aa379a38c on 10.42.12.39: rv 0 +2019-03-27 08:38:37 INFO catalog_staging_utils.py:503 Not staging the repository image since the target version is 'None' +2019-03-27 08:38:37 INFO catalog_staging_utils.py:140 Staging is done for node 10.42.12.39 +2019-03-27 08:38:37 INFO repository_image_utils.py:182 Repository Image Path is as follows: release.calm.repository_image +2019-03-27 08:38:37 DEBUG repository_image_utils.py:108 Repository Image Module Path: release.calm.repository_image +2019-03-27 08:38:37 DEBUG repository_image_utils.py:113 Version List returned is: [Status: available Image: nucalm.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []] +2019-03-27 08:38:37 DEBUG repository_image_utils.py:429 Versions List is: [Status: available Image: nucalm.tar.gz Version: 2.4.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.5.0.1 Flag List: [u'smoke'] Update Library List: [], 
Status: available Image: nucalm.tar.gz Version: 2.6.0 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.1 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.2 Flag List: [u'smoke'] Update Library List: [], Status: available Image: nucalm.tar.gz Version: 2.6.0.3 Flag List: [u'smoke'] Update Library List: []] +2019-03-27 08:38:37 INFO repository_image_utils.py:441 Managed Entity with entity_class: PC and entity_model: Calm is a pc entity +2019-03-27 08:38:37 DEBUG cpdb_utils.py:572 [(UUID('c9ee6d12-7141-453f-a345-115e392e27e1'), ), (UUID('d6edff2c-f59f-4754-978b-06b6237796b4'), ), (UUID('758942f1-d42d-4d49-99fc-b73e2f2dca30'), ), (UUID('03a6e4a2-fa0e-4698-b0c0-e142820a2e94'), )] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:573 location_id of entity: pc:e636212f-3d79-4a4b-8e78-afa94c05cb66 +2019-03-27 08:38:37 INFO repository_image_utils.py:257 Updating the managed entity with versions +2019-03-27 08:38:37 INFO repository_image_utils.py:269 Updating the available versions table with versions from repository +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1201 Saved index 1 +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 36ef8785-241f-43eb-85db-b733a1a3d013, object: +uuid: "36ef8785-241f-43eb-85db-b733a1a3d013" +entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +version: "2.5.0" +status: "available" +dependencies: "[]" +order: 1 +entity_class: "PC" + +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 990eb0aa-e595-4e64-ac09-7602dc98e8fa, object: +uuid: "990eb0aa-e595-4e64-ac09-7602dc98e8fa" +entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +version: "2.5.0.1" +status: "available" +dependencies: "[]" +order: 2 +entity_class: "PC" + +2019-03-27 08:38:37 
DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 476e0eb7-e40e-4886-84e4-a6988edfc79b, object: +uuid: "476e0eb7-e40e-4886-84e4-a6988edfc79b" +entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +version: "2.6.0" +status: "available" +dependencies: "[]" +order: 3 +entity_class: "PC" + +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID 4b20d8e4-1d0c-4203-a3b6-fe30264f3115, object: +uuid: "4b20d8e4-1d0c-4203-a3b6-fe30264f3115" +entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +version: "2.6.0.1" +status: "available" +dependencies: "[]" +order: 4 +entity_class: "PC" + +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID e53f2a96-eb2b-4a05-83d5-2a70e95f6b4f, object: +uuid: "e53f2a96-eb2b-4a05-83d5-2a70e95f6b4f" +entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +version: "2.6.0.2" +status: "available" +dependencies: "[]" +order: 5 +entity_class: "PC" + +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1212 Adding dependency: [] +2019-03-27 08:38:37 DEBUG cpdb_utils.py:1254 Creating new LcmAvailableVersion entry with UUID e6806517-0eb8-4d7d-b722-5764c532d3db, object: +uuid: "e6806517-0eb8-4d7d-b722-5764c532d3db" +entity_uuid: "03a6e4a2-fa0e-4698-b0c0-e142820a2e94" +version: "2.6.0.3" +status: "available" +dependencies: "[]" +order: 6 +entity_class: "PC" + +2019-03-27 08:38:37 INFO repository_image_utils.py:479 Repository Image Inventory Done +2019-03-27 08:38:37 INFO lcm_ops_for_rim:155 LCM operation 101 is successful +DEBUG: Karbon rim url: http://download.nutanix.com/lcm/builds/karbon-builds/repository_metadata/metadata.json diff --git a/test/LCM_Test/lcm_run.sh b/test/LCM_Test/lcm_run.sh new file mode 100644 index 0000000..4a4494d --- /dev/null +++ 
b/test/LCM_Test/lcm_run.sh @@ -0,0 +1,142 @@ +#!/bin/bash +CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure --write-out %{http_code}' # --output /dev/null --silent --show-error +PRISM_ADMIN='admin' +PE_PASSWORD='techX2019!' +_url_lcm='https://localhost:9440/PrismGateway/services/rest/v1/genesis' +_url_progress='https://localhost:9440/PrismGateway/services/rest/v1/progress_monitors' +_url_groups='https://localhost:9440/api/nutanix/v3/groups' + +############################################################################################################################################################################### +# Routine to be run/loop till yes we are ok. +############################################################################################################################################################################### +function loop(){ + + local _attempts=40 + local _error=22 + local _loops=0 + local _sleep=30 + + if [ -z "$1" ]; then + echo "No parameter" + else + _task_id=$1 + fi + + # What is the progress of the taskid?? + while true; do + (( _loops++ )) + # Get the progress of the task + _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}?filterCriteria=parent_task_uuid%3D%3D${_task_id} | jq '.entities[0].percentageCompleted' 2>nul | tr -d \") + if (( ${_progress} == 100 )); then + echo "The step has been succesfuly run" + set _error=0 + break; + elif (( ${_loops} > ${_attempts} )); then + echo "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + echo "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" + sleep ${_sleep} + fi + done +} + +function calm_enable() { + local _http_body + local _test + + log "Enable Nutanix Calm..." 
+ _http_body=$(cat <nul | cut -d "\\" -f 4 | tr -d \") + +# If there has been a reply (task_id) then the URL has accepted by PC +if [ -z "$_task_id" ]; then + echo "LCM Inventory start has encountered an eror..." +else + echo "LCM Inventory started.." + set _loops=0 # Reset the loop counter + + # Run the progess checker + loop + + # We need to get the UUIDs and the versions to be used.. so we can start the update. They are in the /home/nutanix/data/logs/lcm_ops.out AFTER an inventory run! + _full_uuids=$(cat /home/nutanix/data/logs/lcm_ops.out | grep -A 1 entity_uuid | grep -B 1 "2.6.0.3") + # As we need to have the latest version from the LCM we need to reverse the string so we get the last (rev) version + _first_uuid=$(echo $_full_uuids |rev|cut -d":" -f 4 |rev | cut -d "\"" -f2) + _first_version="2.6.0.3" + _sec_uuid=$(echo $_full_uuids rev|rev | cut -d":" -f 2 |rev | cut -d "\"" -f2) + _sec_version=$_first_version + + #echo "This values have been found:" $_first_uuid" and " $_first_version " and " $_sec_uuid " and " $_sec_version + + # Set the parameter to create the ugrade plan + # Create the curl json string -d xyz + _json_data="-d " + _json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"generate_plan\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"" + _json_data+=$_first_uuid + _json_data+="\\\",\\\"" + _json_data+=$_first_version + _json_data+="\\\"],[\\\"" + _json_data+=$_sec_uuid + _json_data+="\\\",\\\"" + _json_data+=$_sec_version + _json_data+="\\\"]]]}}\"}" + + + # Run the generate plan task + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) + + # Notify the log server that the LCM has been creating a plan + echo "LCM Inventory has created a plan" + set _loops=0 # Reset the loop counter + + # Create new json data string + _json_data="-d " + 
_json_data+="{\"value\":\"{\\\".oid\\\":\\\"LifeCycleManager\\\",\\\".method\\\":\\\"lcm_framework_rpc\\\",\\\".kwargs\\\":{\\\"method_class\\\":\\\"LcmFramework\\\",\\\"method\\\":\\\"perform_update\\\",\\\"args\\\":[\\\"http://download.nutanix.com/lcm/2.0\\\",[[\\\"" + _json_data+=$_first_uuid + _json_data+="\\\",\\\"" + _json_data+=$_first_version + _json_data+="\\\"],[\\\"" + _json_data+=$_sec_uuid + _json_data+="\\\",\\\"" + _json_data+=$_sec_version + _json_data+="\\\"]]]}}\"}" + + + # Run the upgrade to have the latest versions + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") + + # If there has been a reply task_id then the URL has accepted by PC + if [ -z "$_task_id" ]; then + # There has been an error!!! + echo "LCM Upgrade has encountered an error!!!!" + else + # Notify the logserver that we are starting the LCM Upgrade + echo "LCM Upgrade starting..." + + # Run the progess checker + loop + fi +fi \ No newline at end of file diff --git a/test/LCM_Test/lcm_update.sh b/test/LCM_Test/lcm_update.sh new file mode 100644 index 0000000..dc76458 --- /dev/null +++ b/test/LCM_Test/lcm_update.sh @@ -0,0 +1,27 @@ +################################################################# +# Grab the json from the possible to be updated UUIDs and versions and save local in reply_json.json +################################################################# +_url_groups='https://10.42.8.39:9440/api/nutanix/v3/groups' +CURL_HTTP_OPTS=' --silent --max-time 25 --header Content-Type:application/json --header Accept:application/json --insecure ' +PRISM_ADMIN="admin" +PE_PASSWORD="techX2019!" 
+ +# Run the Curl command and save the oputput in a temp file +curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json + +# Fill the uuid array with the correct values +my_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) + +# Grabbing the versions of the UUID and put them in a versions array +for uuid in "${my_arr[@]}" +do + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply-inventory.json | tail -4 | head -n 1)) +done + +# Combine the two values to the full needed data +count=0 +while [ $count -lt ${#my_arr[@]} ] +do + echo "$count: UUID is ${my_arr[$count]} and the version is ${version_ar[$count]}" + let count=count+1 +done \ No newline at end of file diff --git a/test/LCM_Test/reply-inventory.json b/test/LCM_Test/reply-inventory.json new file mode 100644 index 0000000..1a5ee6c --- /dev/null +++ b/test/LCM_Test/reply-inventory.json @@ -0,0 +1,495 @@ +{ + "entity_type": "lcm_available_version", + "filtered_group_count": 3, + "total_entity_count": 5, + "filtered_entity_count": 5, + "group_results": [ + { + "entity_results": [ + { + "entity_id": "1d18b99f-12e8-42f5-a4b5-b41dcc803ef3", + "data": [ + { + "values": [ + { + "values": [ + "1d18b99f-12e8-42f5-a4b5-b41dcc803ef3" + ], + "time": 1554372384133118 + } + ], + "name": "uuid" + }, + { + "values": [ + { + "values": [ + "41520d2e-76ab-4cbc-88ca-42bb484ba69f" + ], + "time": 1554372384133118 + } + ], + "name": 
"entity_uuid" + }, + { + "values": [ + { + "values": [ + "PC" + ], + "time": 1554372384133118 + } + ], + "name": "entity_class" + }, + { + "values": [ + { + "values": [ + "available" + ], + "time": 1554372384133118 + } + ], + "name": "status" + }, + { + "values": [ + { + "values": [ + "1.0.0" + ], + "time": 1554372384133118 + } + ], + "name": "version" + }, + { + "values": [ + { + "values": [ + "[]" + ], + "time": 1554372384133118 + } + ], + "name": "dependencies" + }, + { + "values": [ + { + "values": [ + "1" + ], + "time": 1554372384133118 + } + ], + "name": "order" + }, + { + "values": [ + { + "values": [ + "1554372384133118" + ], + "time": 1554372384133118 + } + ], + "name": "_created_timestamp_usecs_" + } + ] + } + ], + "group_by_column_value": "41520d2e-76ab-4cbc-88ca-42bb484ba69f", + "total_entity_count": 1, + "group_summaries": {} + }, + { + "entity_results": [ + { + "entity_id": "bcab72a9-d2f9-4327-a882-da4d0675eeea", + "data": [ + { + "values": [ + { + "values": [ + "bcab72a9-d2f9-4327-a882-da4d0675eeea" + ], + "time": 1554372397678814 + } + ], + "name": "uuid" + }, + { + "values": [ + { + "values": [ + "7dbb76af-09d5-4b83-9cdb-8a3c0a63f23e" + ], + "time": 1554372397678814 + } + ], + "name": "entity_uuid" + }, + { + "values": [ + { + "values": [ + "PC" + ], + "time": 1554372397678814 + } + ], + "name": "entity_class" + }, + { + "values": [ + { + "values": [ + "available" + ], + "time": 1554372397678814 + } + ], + "name": "status" + }, + { + "values": [ + { + "values": [ + "2.6.0.3" + ], + "time": 1554372397678814 + } + ], + "name": "version" + }, + { + "values": [ + { + "values": [ + "[]" + ], + "time": 1554372397678814 + } + ], + "name": "dependencies" + }, + { + "values": [ + { + "values": [ + "1" + ], + "time": 1554372397678814 + } + ], + "name": "order" + }, + { + "values": [ + { + "values": [ + "1554372397678814" + ], + "time": 1554372397678814 + } + ], + "name": "_created_timestamp_usecs_" + } + ] + }, + { + "entity_id": 
"8e3b6079-d474-4e3a-bae5-558a2c0d5bb9", + "data": [ + { + "values": [ + { + "values": [ + "8e3b6079-d474-4e3a-bae5-558a2c0d5bb9" + ], + "time": 1554372397685975 + } + ], + "name": "uuid" + }, + { + "values": [ + { + "values": [ + "7dbb76af-09d5-4b83-9cdb-8a3c0a63f23e" + ], + "time": 1554372397685975 + } + ], + "name": "entity_uuid" + }, + { + "values": [ + { + "values": [ + "PC" + ], + "time": 1554372397685975 + } + ], + "name": "entity_class" + }, + { + "values": [ + { + "values": [ + "available" + ], + "time": 1554372397685975 + } + ], + "name": "status" + }, + { + "values": [ + { + "values": [ + "2.6.0.4" + ], + "time": 1554372397685975 + } + ], + "name": "version" + }, + { + "values": [ + { + "values": [ + "[]" + ], + "time": 1554372397685975 + } + ], + "name": "dependencies" + }, + { + "values": [ + { + "values": [ + "2" + ], + "time": 1554372397685975 + } + ], + "name": "order" + }, + { + "values": [ + { + "values": [ + "1554372397685975" + ], + "time": 1554372397685975 + } + ], + "name": "_created_timestamp_usecs_" + } + ] + } + ], + "group_by_column_value": "7dbb76af-09d5-4b83-9cdb-8a3c0a63f23e", + "total_entity_count": 2, + "group_summaries": {} + }, + { + "entity_results": [ + { + "entity_id": "18aa787f-4694-487f-8278-e95eb78da3be", + "data": [ + { + "values": [ + { + "values": [ + "18aa787f-4694-487f-8278-e95eb78da3be" + ], + "time": 1554372411718254 + } + ], + "name": "uuid" + }, + { + "values": [ + { + "values": [ + "acd9117e-d0c7-4d22-a2ac-7468dfdf07ec" + ], + "time": 1554372411718254 + } + ], + "name": "entity_uuid" + }, + { + "values": [ + { + "values": [ + "PC" + ], + "time": 1554372411718254 + } + ], + "name": "entity_class" + }, + { + "values": [ + { + "values": [ + "available" + ], + "time": 1554372411718254 + } + ], + "name": "status" + }, + { + "values": [ + { + "values": [ + "2.6.0.3" + ], + "time": 1554372411718254 + } + ], + "name": "version" + }, + { + "values": [ + { + "values": [ + "[{\"entity_class\": \"PC\", \"version\": \"2.6.0.3\", 
\"exact\": \"true\", \"entity_model\": \"Epsilon\"}]" + ], + "time": 1554372414999630 + } + ], + "name": "dependencies" + }, + { + "values": [ + { + "values": [ + "1" + ], + "time": 1554372411718254 + } + ], + "name": "order" + }, + { + "values": [ + { + "values": [ + "1554372411718254" + ], + "time": 1554372411718254 + } + ], + "name": "_created_timestamp_usecs_" + } + ] + }, + { + "entity_id": "00a06ebf-0ba1-403c-ac0f-8ae3d5a8c341", + "data": [ + { + "values": [ + { + "values": [ + "00a06ebf-0ba1-403c-ac0f-8ae3d5a8c341" + ], + "time": 1554372411724529 + } + ], + "name": "uuid" + }, + { + "values": [ + { + "values": [ + "acd9117e-d0c7-4d22-a2ac-7468dfdf07ec" + ], + "time": 1554372411724529 + } + ], + "name": "entity_uuid" + }, + { + "values": [ + { + "values": [ + "PC" + ], + "time": 1554372411724529 + } + ], + "name": "entity_class" + }, + { + "values": [ + { + "values": [ + "available" + ], + "time": 1554372411724529 + } + ], + "name": "status" + }, + { + "values": [ + { + "values": [ + "2.6.0.4" + ], + "time": 1554372411724529 + } + ], + "name": "version" + }, + { + "values": [ + { + "values": [ + "[{\"entity_class\": \"PC\", \"version\": \"2.6.0.4\", \"exact\": \"true\", \"entity_model\": \"Epsilon\"}]" + ], + "time": 1554372415349026 + } + ], + "name": "dependencies" + }, + { + "values": [ + { + "values": [ + "2" + ], + "time": 1554372411724529 + } + ], + "name": "order" + }, + { + "values": [ + { + "values": [ + "1554372411724529" + ], + "time": 1554372411724529 + } + ], + "name": "_created_timestamp_usecs_" + } + ] + } + ], + "group_by_column_value": "acd9117e-d0c7-4d22-a2ac-7468dfdf07ec", + "total_entity_count": 2, + "group_summaries": {} + } + ], + "total_group_count": 3 +} \ No newline at end of file diff --git a/test/LCM_Test/reply.json b/test/LCM_Test/reply.json new file mode 100644 index 0000000..9a53f6d --- /dev/null +++ b/test/LCM_Test/reply.json @@ -0,0 +1,3 @@ +{ + "service_enablement_status": "ENABLED" +} \ No newline at end of file diff --git 
a/test/Run PC local installation b/test/Run PC local installation new file mode 100644 index 0000000..29873bd --- /dev/null +++ b/test/Run PC local installation @@ -0,0 +1 @@ +EMAIL=nathan.cox@nutanix.com PC_HOST=10.42.41.39 PE_HOST=10.42.41.37 PE_PASSWORD=techX2019! PC_LAUNCH=ts2019.sh PC_VERSION=5.10.2 :bash -x /home/nutanix/ts2019.sh PC \ No newline at end of file diff --git a/we_push_centos_cl_disk.sh b/we_push_centos_cl_disk.sh new file mode 100644 index 0000000..b6c44d7 --- /dev/null +++ b/we_push_centos_cl_disk.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# Set the SSHPASS so we can run the sshpass command +export SSHPASS=techX2019! + +# Script needed to push the CentOS_7_Cloud image +for i in `cat /root/GTS2019-APAC/gts2019_cluster_list_group2.txt | cut -d"|" -f 1` +do + sshpass -e ssh -x -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -q nutanix@$i '/usr/local/nutanix/bin/acli image.create CentOS_Cloud_7 image_type=kDiskImage wait=true container=Images source_url=http://10.42.8.50/images/CentOS-7-x86_64-GenericCloud.qcow2' +done + + diff --git a/we_stage_workshop.sh b/we_stage_workshop.sh new file mode 100644 index 0000000..1d75c68 --- /dev/null +++ b/we_stage_workshop.sh @@ -0,0 +1,380 @@ +#!/usr/bin/env bash +# use bash -x to debug command substitution and evaluation instead of echo. 
+DEBUG= + +# For WORKSHOPS keyword mappings to scripts and variables, please use: +# - Calm || Citrix || Summit +# - PC #.# +WORKSHOPS=(\ +"Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.10.x) = Stable (AutoDC2)" \ +"Calm Workshop (AOS 5.9+/AHV PC 5.10.x) = Development" \ +"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ +"Citrix Desktop on AHV Workshop (AOS/AHV 5.6)" \ +) # Adjust function stage_clusters, below, for file/script mappings as needed + +function stage_clusters() { + # Adjust map below as needed with $WORKSHOPS + local _cluster + local _container + local _dependency + local _fields + local _libraries='global.vars.sh we-lib.common.sh ' + local _pe_launch # will be transferred and executed on PE + local _pc_launch # will be transferred and executed on PC + local _sshkey=${SSH_PUBKEY} + local _wc_arg='--lines' + local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]} + + # Map to latest and greatest of each point release + # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download + # TODO: make WORKSHOPS and map a JSON configuration file? 
+ if (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION="${PC_DEV_VERSION}" + elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION="${PC_STABLE_VERSION}" + elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.9.2 + elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.7.1.1 + elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.6.2 + fi + + # Map workshop to staging script(s) and libraries, + # _pe_launch will be executed on PE + if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='calm.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then + _pe_launch='stage_citrixhow.sh' + _pc_launch='stage_citrixhow_pc.sh' + fi + if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='we-ts2019.sh' + _pc_launch=${_pe_launch} + fi + + dependencies 'install' 'sshpass' + + if [[ -z ${PC_VERSION} ]]; then + log "WORKSHOP #${WORKSHOP_NUM} = ${_workshop} with PC-${PC_VERSION}" + fi + + # Send configuration scripts to remote clusters and execute Prism Element script + if [[ ${CLUSTER_LIST} == '-' ]]; then + echo "Login to see tasks in flight via https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440" + pe_configuration_args "${_pc_launch}" + + pushd scripts || true + eval "${PE_CONFIGURATION} ./${_pe_launch} 'PE'" >> ${HOME}/${_pe_launch%%.sh}.log 2>&1 & + unset PE_CONFIGURATION + popd || true + else + for _cluster in $(cat ${CLUSTER_LIST} | grep -v ^#) + do + set -f + # shellcheck disable=2206 + _fields=(${_cluster//|/ }) + PE_HOST=${_fields[0]} + PE_PASSWORD=${_fields[1]} + EMAIL=${_fields[2]} + + pe_configuration_args "${_pc_launch}" + + . 
scripts/global.vars.sh # re-import for relative settings + + cat <> ${_pe_launch%%.sh}.log 2>&1 &" + unset PE_CONFIGURATION + + # shellcheck disable=SC2153 + cat < Gear > Cluster Lockdown, + the following will fail silently, use ssh nutanix@{PE|PC} instead. + + $ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\ + ${SSH_OPTS} \\ + nutanix@${PE_HOST} 'date; tail -f ${_pe_launch%%.sh}.log' + You can login to PE to see tasks in flight and eventual PC registration: + https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440/ + +EOM + + if (( "$(echo ${_libraries} | grep -i lib.pc | wc ${_wc_arg})" > 0 )); then + # shellcheck disable=2153 + cat < 0 )) \ + && (( $(($OPTARG)) < $((${#WORKSHOPS[@]}-${NONWORKSHOPS}+1)) )); then + WORKSHOP_NUM=${OPTARG} + else + echo "Error: workshop not found = ${OPTARG}" + script_usage + fi + ;; + \? ) + script_usage + ;; + esac +done +shift $((OPTIND -1)) + +if [[ -z ${CLUSTER_LIST} ]]; then + get_file +fi +if [[ -z ${WORKSHOP_NUM} ]]; then + log "Warning: missing workshop number argument." 
+ select_workshop +fi + +if [[ ${WORKSHOPS[${WORKSHOP_NUM}]} == "${_VALIDATE}" ]]; then + validate_clusters +elif (( ${WORKSHOP_NUM} == ${#WORKSHOPS[@]} - 1 )); then + echo ${WORKSHOPS[${WORKSHOP_NUM}]} + finish +elif (( ${WORKSHOP_NUM} == ${#WORKSHOPS[@]} - 2 )); then + echo ${WORKSHOPS[${WORKSHOP_NUM}]} +elif (( ${WORKSHOP_NUM} > 0 && ${WORKSHOP_NUM} <= ${#WORKSHOPS[@]} - 3 )); then + stage_clusters +else + #log "DEBUG: WORKSHOP_NUM=${WORKSHOP_NUM}" + script_usage +fi From 1ffd6491321604a18f407a452a46f219bf6b843a Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 4 Apr 2019 15:44:33 -0700 Subject: [PATCH 039/691] Updates for Bootcamps 5.10 release --- scripts/global.vars.sh | 38 +++++++++------- scripts/lib.common.sh | 2 + scripts/lib.pc.sh | 100 +++++++++-------------------------------- stage_workshop.sh | 11 ++--- 4 files changed, 51 insertions(+), 100 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 54b0d22..12d9d11 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -7,7 +7,9 @@ # - Find ${PC_VERSION} in the Additional Releases section on the lower right side # - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL PC_DEV_VERSION='5.10.2' - PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.1.1/pcdeploy-5.10.2.json' + PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' + PC_CURRENT_VERSION='5.10.2' + PC_CURRENT_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' PC_STABLE_VERSION='5.8.2' PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json' # Sync the following to lib.common.sh::ntnx_download-Case=FILES @@ -15,7 +17,11 @@ PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/ # - Find ${FILES_VERSION} in the Additional Releases section on the lower right side # - Provide "Upgrade Metadata File" 
URL to FILES_METAURL FILES_VERSION='3.2.0.1' - FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' # 2019-02-15: override until metadata URL fixed # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' @@ -37,7 +43,7 @@ NTNX_INIT_PASSWORD='nutanix/4u' # https://stedolan.github.io/jq/download/#checksums_and_signatures JQ_REPOS=(\ - 'http://10.42.8.50/images/' \ + #'http://10.42.8.50/images/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) @@ -53,16 +59,18 @@ NTNX_INIT_PASSWORD='nutanix/4u' Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) - ISO_IMAGES=(\ - CentOS7.iso \ - Windows2016.iso \ - Windows2012R2.iso \ - Windows10.iso \ - Nutanix-VirtIO-1.1.3.iso \ - SQLServer2014SP3.iso \ - XenApp_and_XenDesktop_7_18.iso \ + ISO_IMAGES=(\ + CentOS7.iso \ + Windows2016.iso \ + Windows2012R2.iso \ + Windows10.iso \ + Nutanix-VirtIO-1.1.3.iso \ + SQLServer2014SP3.iso \ + XenApp_and_XenDesktop_7_18.iso \ ) # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso # 
http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 @@ -158,8 +166,8 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate PC_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_DEV_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_DEV_METAURL} + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_CURRENT_METAURL} QCOW2_IMAGES=(\ Centos7-Base.qcow2 \ @@ -242,8 +250,8 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate PC_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_DEV_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_DEV_METAURL} + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_CURRENT_METAURL} QCOW2_IMAGES=(\ Centos7-Base.qcow2 \ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index c84268a..596a022 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -488,6 +488,8 @@ function ntnx_download() { if [[ "${PC_VERSION}" == "${PC_DEV_VERSION}" ]]; then _meta_url="${PC_DEV_METAURL}" + elif [[ "${PC_VERSION}" == "${PC_CURRENT_VERSION}" ]]; then + _meta_url="${PC_CURRENT_METAURL}" else _meta_url="${PC_STABLE_METAURL}" fi diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 2aca9af..2942047 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -2,66 +2,6 @@ # -x # Dependencies: curl, ncli, nuclei, jq 
-############################################################################################################################################################################### -# Routine to update Calm, but can be done via the LCM!!!! -############################################################################################################################################################################### - -function calm_update() { - local _attempts=12 - local _calm_bin=/usr/local/nutanix/epsilon - local _container - local _error=19 - local _loop=0 - local _sleep=10 - local _url=http://${AUTH_HOST}:8080 - - if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then - log "Bypassing download of updated containers." - else - dependencies 'install' 'sshpass' || exit 13 - remote_exec 'ssh' 'AUTH_SERVER' \ - 'if [[ ! -e nucalm.tar ]]; then smbclient -I 10.21.249.12 \\\\pocfs\\images --user ${1} --command "prompt ; cd /Calm-EA/pc-'${PC_VERSION}'/ ; mget *tar"; echo; ls -lH *tar ; fi' \ - 'OPTIONAL' - - while true ; do - (( _loop++ )) - _test=$(curl ${CURL_HTTP_OPTS} ${_url} \ - | tr -d \") # wonderful addition of "" around HTTP status code by cURL - - if (( ${_test} == 200 )); then - log "Success reaching ${_url}" - break; - elif (( ${_loop} > ${_attempts} )); then - log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - return ${_error} - else - log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." 
- sleep ${_sleep} - fi - done - - download ${_url}/epsilon.tar - download ${_url}/nucacallm.tar - fi - - if [[ -e ${HOME}/epsilon.tar ]] && [[ -e ${HOME}/nucalm.tar ]]; then - ls -lh ${HOME}/*tar - mkdir ${HOME}/calm.backup || true - cp ${_calm_bin}/*tar ${HOME}/calm.backup/ \ - && genesis stop nucalm epsilon \ - && docker rm -f "$(docker ps -aq)" || true \ - && docker rmi -f "$(docker images -q)" || true \ - && cp ${HOME}/*tar ${_calm_bin}/ \ - && cluster start # ~75 seconds to start both containers - - for _container in epsilon nucalm ; do - local _test=0 - while (( ${_test} < 1 )); do - _test=$(docker ps -a | grep ${_container} | grep -i healthy | wc --lines) - done - done - fi -} ############################################################################################################################################################################### # Routine to enable Flow @@ -88,10 +28,10 @@ function loop(){ local _sleep=60 local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - # What is the progress of the taskid?? + # What is the progress of the taskid?? 
while true; do (( _loops++ )) - # Get the progress of the task + # Get the progress of the task _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}?filterCriteria=parent_task_uuid%3D%3D${_task_id} | jq '.entities[0].percentageCompleted' 2>nul | tr -d \") if (( ${_progress} == 100 )); then log "The step has been succesfuly run" @@ -119,7 +59,7 @@ function lcm() { # Inventory download/run _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") - + # If there has been a reply (task_id) then the URL has accepted by PC # Changed (()) to [] so it works.... if [ -z "$_task_id" ]; then @@ -127,28 +67,28 @@ function lcm() { else log "LCM Inventory started.." set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run - + # Run the progess checker loop - + ################################################################# # Grab the json from the possible to be updated UUIDs and versions and save local in reply_json.json ################################################################# - + # Need loop so we can create the full json more dynamical # Run the Curl command and save the oputput in a temp file curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json - + # Fill the uuid array with the correct values uuid_arr=($(jq '.group_results[].entity_results[].data[] | 
select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) - + # Grabbing the versions of the UUID and put them in a versions array for uuid in "${uuid_arr[@]}" do version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply-inventory.json | tail -4 | head -n 1 | tr -d \")) done - + # Set the parameter to create the ugrade plan # Create the curl json string '-d blablablablabla' so we can call the string and not the full json data line # Begin of the JSON data payload @@ -161,7 +101,7 @@ function lcm() { do _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," let count=count+1 - done + done # Remove the last "," as we don't need it. _json_data=${_json_data%?}; @@ -171,7 +111,7 @@ function lcm() { # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) - + # Remove the temp json file as we don't need it anymore rm -rf reply_json.json @@ -179,15 +119,15 @@ function lcm() { log "LCM Inventory has created a plan" # Reset the loop counter so we restart the amount of loops we need to run - set _loops=0 - + set _loops=0 + # As the new json for the perform the upgrade only needs to have "generate_plan" changed into "perform_update" we use sed... _json_data=$(echo $_json_data | sed -e 's/generate_plan/perform_update/g') - + # Run the upgrade to have the latest versions _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") - + # If there has been a reply task_id then the URL has accepted by PC if [ -z "$_task_id" ]; then # There has been an error!!! @@ -195,7 +135,7 @@ function lcm() { else # Notify the logserver that we are starting the LCM Upgrade log "LCM Upgrade starting..." 
- + # Run the progess checker loop fi @@ -225,10 +165,10 @@ function karbon_enable() { if [[ $_response -le 0 ]]; then log "Unable to enable Karbon. As there are more dependencies on Karbon we stop the script....." exit 1 - else + else log "Karbon has been enabled..." fi - else + else log "Karbon has been enabled..." fi } @@ -621,10 +561,10 @@ EOF --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ https://localhost:9440/api/nutanix/v3/services/nucalm) log "_test=|${_test}|" - + # Check if Calm is enabled while true; do - # Get the progress of the task + # Get the progress of the task _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/api/nutanix/v3/services/nucalm/status | jq '.service_enablement_status' 2>nul | tr -d \") if [[ ${_progress} == "ENABLED" ]]; then log "Calm has been Enabled..." diff --git a/stage_workshop.sh b/stage_workshop.sh index aa41a3a..2d0b584 100644 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -6,11 +6,10 @@ DEBUG= # - Calm || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Calm Workshop (AOS 5.5+/AHV PC 5.8.x) = Stable (AutoDC1)" \ -"Calm Workshop (AOS 5.8.x/AHV PC 5.10.x) = Stable (AutoDC2)" \ -"Calm Workshop (AOS 5.9+/AHV PC 5.10.x) = Development" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC1)" \ +"Calm Workshop (AOS 5.10+/AHV PC 5.10+) = Stable (AutoDC2)" \ +"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ -"Citrix Desktop on AHV Workshop (AOS/AHV 5.6)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -29,8 +28,10 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" + elif (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION="${PC_CURRENT_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then From e5ec5d5b48277e76b83031b32a02e3ae69062b1b Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 4 Apr 2019 17:24:42 -0700 Subject: [PATCH 040/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 12d9d11..6fdd678 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -44,7 +44,7 @@ NTNX_INIT_PASSWORD='nutanix/4u' # https://stedolan.github.io/jq/download/#checksums_and_signatures JQ_REPOS=(\ #'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) QCOW2_REPOS=(\ From 1ab48ad229732102b4962164777e9aa4bf69910e Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 4 Apr 2019 17:35:41 -0700 Subject: [PATCH 041/691] Update global.vars.sh --- scripts/global.vars.sh | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6fdd678..72615ce 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -6,12 +6,13 @@ # Browse to: https://portal.nutanix.com/#/page/releases/prismDetails # - Find ${PC_VERSION} in the Additional Releases section on the lower right side # - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL - PC_DEV_VERSION='5.10.2' - 
PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' - PC_CURRENT_VERSION='5.10.2' - PC_CURRENT_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' -PC_STABLE_VERSION='5.8.2' -PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json' + PC_DEV_VERSION='5.10.2' + PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' + PC_CURRENT_VERSION='5.10.2' + PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_VERSION='5.8.2' + PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json' # Sync the following to lib.common.sh::ntnx_download-Case=FILES # Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA # - Find ${FILES_VERSION} in the Additional Releases section on the lower right side From 9a91d044c31f3a1ad15ffca2d66fbe31c740e546 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 4 Apr 2019 18:33:42 -0700 Subject: [PATCH 042/691] Update calm.sh --- scripts/calm.sh | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index e8925e9..c3c346f 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -31,17 +31,25 @@ case ${1} in if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ && prism_check 'PC' \ - && pc_configure \ - && pc_configure \ - && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' - log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${PE_HOST}:9440" - log "PC = https://${PC_HOST}:9440" + if (( $? 
== 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" - #files_install & # parallel, optional. Versus: $0 'files' & + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & - finish + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi else finish _error=18 @@ -96,7 +104,7 @@ case ${1} in && calm_enable \ && lcm \ && images \ - && pc_cluster_img_import \ + #&& pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From d497d829b36cf8a037143dec07331c8b4c18698c Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 4 Apr 2019 19:26:51 -0700 Subject: [PATCH 043/691] Update calm.sh --- scripts/calm.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index c3c346f..56f460d 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -104,7 +104,6 @@ case ${1} in && calm_enable \ && lcm \ && images \ - #&& pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
From 13e5d2694c30d7e7c752660de7513310ef51e2f3 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 5 Apr 2019 09:27:24 +0200 Subject: [PATCH 044/691] Updated Karbon and Flow enable --- scripts/calm.sh | 1 + scripts/lib.pc.sh | 35 ++++++++++++++++++++++++++++------- 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index 56f460d..d585519 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -102,6 +102,7 @@ case ${1} in ssp_auth \ && calm_enable \ + && karbon_enable \ && lcm \ && images \ && prism_check 'PC' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 2942047..16a8f18 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -8,14 +8,34 @@ ############################################################################################################################################################################### function flow_enable() { - ## (API; Didn't work. Used nuclei instead) - ## https://localhost:9440/api/nutanix/v3/services/microseg - ## {"state":"ENABLE"} - # To disable flow run the following on PC: nuclei microseg.disable + local _attempts=30 + local _loops=0 + local _sleep=60 + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _url_flow='https://localhost:9440/api/nutanix/v3/services/microseg' + + # Create the JSON payload + _json_data='{"state":"ENABLE"}' log "Enable Nutanix Flow..." - nuclei microseg.enable 2>/dev/null - nuclei microseg.get_status 2>/dev/null + + # Enabling Flow and put the task id in a variable + _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) + + # Try one more time then fail, but continue + if [ -z $_task_id ] then + log "Flow not yet enabled. Will retry...." + _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) + + if [ -z $_task_id ] then + log "Flow still not enabled.... 
***Not retrying. Please enable via UI.***" + fi + else + log "Flow has been enebaled..." + fi + + + } ############################################################################################################################################################################### @@ -86,7 +106,7 @@ function lcm() { # Grabbing the versions of the UUID and put them in a versions array for uuid in "${uuid_arr[@]}" do - version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply-inventory.json | tail -4 | head -n 1 | tr -d \")) + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | tr -d \")) done # Set the parameter to create the ugrade plan @@ -101,6 +121,7 @@ function lcm() { do _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," let count=count+1 + log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" done # Remove the last "," as we don't need it. From 3e016c7f9e5725747cae6f6c59af9348e1863d1c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 5 Apr 2019 09:35:23 +0200 Subject: [PATCH 045/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 16a8f18..c50e55c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -23,11 +23,11 @@ function flow_enable() { _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) # Try one more time then fail, but continue - if [ -z $_task_id ] then + if [ -z $_task_id ]; then log "Flow not yet enabled. Will retry...." 
_task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) - if [ -z $_task_id ] then + if [ -z $_task_id ]; then log "Flow still not enabled.... ***Not retrying. Please enable via UI.***" fi else From eabf991204d197d4b6681fb877db2394ebe907dd Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 5 Apr 2019 09:46:55 +0200 Subject: [PATCH 046/691] Update ts2019.sh --- scripts/ts2019.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 9e080ac..ed4b286 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -137,6 +137,7 @@ case ${1} in ssp_auth \ && calm_enable \ + && karbon_enable \ && lcm \ && images \ && prism_check 'PC' From 8f614e2ea2d5f0ffd7b20850f42e244f7226dd6b Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 5 Apr 2019 15:42:57 -0700 Subject: [PATCH 047/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 7c1dc01..6b9c703 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -42,7 +42,8 @@ function authentication_source() { local _autodc_status="systemctl show ${_autodc_service} --property=SubState" local _autodc_success='SubState=running' - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC-2.0..." 
_autodc_auth=" --username=${AUTH_ADMIN_USER} --password=${AUTH_ADMIN_PASS}" _autodc_index='' From 0ac740470a061b8d5ece4c05025b4265d41e4855 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 5 Apr 2019 17:44:26 -0700 Subject: [PATCH 048/691] Updates for local Image Pull --- scripts/global.vars.sh | 10 ++++++---- scripts/lib.common.sh | 29 ++++++++++++++++------------- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 72615ce..4c31c5e 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -8,11 +8,13 @@ # - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL PC_DEV_VERSION='5.10.2' PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' + PC_DEV_URL='' PC_CURRENT_VERSION='5.10.2' PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' PC_STABLE_VERSION='5.8.2' - PC_STABLE_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.8.2/v1/pc_deploy-5.8.2.json' + PC_STABLE_METAURL='http://10.42.8.50/images/pcdeploy-5.8.2.json' + PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' # Sync the following to lib.common.sh::ntnx_download-Case=FILES # Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA # - Find ${FILES_VERSION} in the Additional Releases section on the lower right side @@ -166,7 +168,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_DHCP_START="${IPV4_PREFIX}.100" NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar 
PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json PC_STABLE_METAURL=${PC_CURRENT_METAURL} @@ -250,7 +252,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_DHCP_START="${IPV4_PREFIX}.100" NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json PC_STABLE_METAURL=${PC_CURRENT_METAURL} diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 596a022..28384ef 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -488,25 +488,28 @@ function ntnx_download() { if [[ "${PC_VERSION}" == "${PC_DEV_VERSION}" ]]; then _meta_url="${PC_DEV_METAURL}" + _source_url="${PC_DEV_URL}" elif [[ "${PC_VERSION}" == "${PC_CURRENT_VERSION}" ]]; then _meta_url="${PC_CURRENT_METAURL}" + _source_url="${PC_CURRENT_URL}" else _meta_url="${PC_STABLE_METAURL}" + _source_url="${PC_STABLE_URL}" fi - if [[ -z ${_meta_url} ]]; then - _error=22 - log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!" - log 'Sync the following to global.var.sh...' - log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails' - log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side" - log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.' - exit ${_error} - fi - - if [[ ! -z ${PC_URL} ]]; then - _source_url="${PC_URL}" - fi + #if [[ -z ${_meta_url} ]]; then + # _error=22 + # log "Error ${_error}: unsupported PC_VERSION=${PC_VERSION}!" + # log 'Sync the following to global.var.sh...' 
+ # log 'Browse to https://portal.nutanix.com/#/page/releases/prismDetails' + # log " - Find ${PC_VERSION} in the Additional Releases section on the lower right side" + # log ' - Provide the metadata URL for the "PC 1-click deploy from PE" option to this function, both case stanzas.' + # exit ${_error} + #fi + + #if [[ ! -z ${PC_URL} ]]; then + # _source_url="${PC_URL}" + #fi ;; 'NOS' | 'nos' | 'AOS' | 'aos') # TODO:70 nos is a prototype From 70208a43fcab984c641c4add4d46d4a2af1556a0 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 5 Apr 2019 18:28:27 -0700 Subject: [PATCH 049/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4c31c5e..4f42f65 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -13,7 +13,7 @@ PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' PC_STABLE_VERSION='5.8.2' - PC_STABLE_METAURL='http://10.42.8.50/images/pcdeploy-5.8.2.json' + PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' # Sync the following to lib.common.sh::ntnx_download-Case=FILES # Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA From cbc41e2d93e11d2530e8d661547533a2cb7f973c Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 5 Apr 2019 20:03:26 -0700 Subject: [PATCH 050/691] Update calm.sh --- scripts/calm.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/calm.sh b/scripts/calm.sh index d585519..e2fc1e0 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -105,6 +105,7 @@ case ${1} in && karbon_enable \ && lcm \ && images \ + && pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
From d28031671b4d6f69332569b47852f94c7469781c Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 7 Apr 2019 17:19:24 -0700 Subject: [PATCH 051/691] Updates for Bootcamp Specific file --- scripts/bootcamp.sh | 133 ++++++++++++++++++++++++++++++++++++++++++++ scripts/calm.sh | 2 - stage_workshop.sh | 9 ++- 3 files changed, 140 insertions(+), 4 deletions(-) create mode 100644 scripts/bootcamp.sh diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh new file mode 100644 index 0000000..e2fc1e0 --- /dev/null +++ b/scripts/bootcamp.sh @@ -0,0 +1,133 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" 
+ log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && lcm \ + && images \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + flow_enable + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/calm.sh b/scripts/calm.sh index e2fc1e0..5cf5c97 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -102,8 +102,6 @@ case ${1} in ssp_auth \ && calm_enable \ - && karbon_enable \ - && lcm \ && images \ && pc_cluster_img_import \ && prism_check 'PC' diff --git a/stage_workshop.sh b/stage_workshop.sh index 2d0b584..06d9d21 100644 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -6,8 +6,8 @@ DEBUG= # - Calm || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC1)" \ -"Calm Workshop (AOS 5.10+/AHV PC 5.10+) = Stable (AutoDC2)" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ +"Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ "Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -44,6 +44,11 @@ function stage_clusters() { # Map workshop to staging script(s) and libraries, # _pe_launch will be executed on PE + if (( $(echo ${_workshop} | grep -i Bootcamp | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm.sh' From 83c94e5c1feac9019077593a82879cbb7c9c6aa4 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Sun, 7 Apr 2019 19:08:51 -0700 Subject: [PATCH 052/691] updates --- bootstrap.sh | 2 +- release.json | 1 + stage_workshop.sh | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index 9969368..9897677 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -131,7 +131,7 @@ cat < Date: Mon, 8 Apr 2019 09:38:10 +0200 Subject: [PATCH 053/691] Update lib.pc.org.sh --- scripts/lib.pc.org.sh | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.org.sh b/scripts/lib.pc.org.sh index fadfe4a..8a0ebaa 100644 --- a/scripts/lib.pc.org.sh +++ b/scripts/lib.pc.org.sh @@ -1,4 +1,4 @@ -65#!/usr/bin/env bash +#!/usr/bin/env bash # -x # Dependencies: curl, ncli, nuclei, jq From a0754aa5fc1119f2f3bad4f49e56bb797eca77fb Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 8 Apr 2019 10:28:26 +0200 Subject: [PATCH 054/691] Made the PC_CURRENT_VERSION deciding If teh PC_CURRENT_VERSION = "5.10.2" then we can run LCM and Karbon enable. Otherwise we skip these two steps.. --- scripts/calm.sh | 11 +++++++++-- scripts/ts2019.sh | 13 +++++++++---- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index 5cf5c97..c22a268 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -101,8 +101,15 @@ case ${1} in && pc_smtp ssp_auth \ - && calm_enable \ - && images \ + && calm_enable + + # IF the PC_version is 5.10 then we can rnun LCM and Karbon enable + if [ $PC_CURRENT_VERSION=='5.10.2' ]; then + karbon_enable + lcm + fi + + images \ && pc_cluster_img_import \ && prism_check 'PC' diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index ed4b286..5fa63fc 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -136,10 +136,15 @@ case ${1} in && pc_smtp ssp_auth \ - && calm_enable \ - && karbon_enable \ - && lcm \ - && images \ + && calm_enable + + # IF the PC_version is 5.10 then we can rnun LCM and Karbon enable + if [ $PC_CURRENT_VERSION=='5.10.2' ]; then + karbon_enable + lcm + fi + + images \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 277157d17717010125716eb7ca32f7f13d2064b2 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 8 Apr 2019 10:31:40 +0200 Subject: [PATCH 055/691] Small order changes due to Flow AFTER LCM. As Flow enable is after LCM, we might end up in issues. 
Chnaged the order to enable flow first beofere we start LCM (if possible) --- scripts/calm.sh | 4 ++-- scripts/ts2019.sh | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index c22a268..78b4672 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -101,7 +101,8 @@ case ${1} in && pc_smtp ssp_auth \ - && calm_enable + && calm_enable \ + && flow_enable # IF the PC_version is 5.10 then we can rnun LCM and Karbon enable if [ $PC_CURRENT_VERSION=='5.10.2' ]; then @@ -115,7 +116,6 @@ case ${1} in log "Non-blocking functions (in development) follow." pc_project - flow_enable pc_admin # ntnx_download 'AOS' # function in lib.common.sh diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 5fa63fc..c06ef45 100644 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -136,7 +136,8 @@ case ${1} in && pc_smtp ssp_auth \ - && calm_enable + && calm_enable \ + && flow_enable # IF the PC_version is 5.10 then we can rnun LCM and Karbon enable if [ $PC_CURRENT_VERSION=='5.10.2' ]; then @@ -157,7 +158,6 @@ case ${1} in # ts_images #fi pc_project - flow_enable pc_admin # ntnx_download 'AOS' # function in lib.common.sh From a7e6e014dd8febf26875d280ec050ad84b1936b9 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 10 Apr 2019 08:08:25 -0700 Subject: [PATCH 056/691] Nathans updates --- bootstrap.sh | 2 +- scripts/calm.sh | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index 9897677..42ec6dd 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -102,7 +102,7 @@ elif [[ ! 
-d ${REPOSITORY}-${BRANCH} ]]; then fi pushd ${REPOSITORY}-${BRANCH}/ \ - && chmod -R u+x *sh + && chmod -R u+x *.sh if [[ -e release.json ]]; then echo -e "\n${ARCHIVE}::$(basename $0) release: $(grep FullSemVer release.json | awk -F\" '{print $4}')" diff --git a/scripts/calm.sh b/scripts/calm.sh index 5cf5c97..eb6d81c 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -101,7 +101,6 @@ case ${1} in && pc_smtp ssp_auth \ - && calm_enable \ && images \ && pc_cluster_img_import \ && prism_check 'PC' From ce4ddf2affdc4541eb9f03e9c766efbaa648a11b Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 10 Apr 2019 08:12:55 -0700 Subject: [PATCH 057/691] Update bootcamp.sh --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index e2fc1e0..59ca059 100644 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -105,12 +105,12 @@ case ${1} in && karbon_enable \ && lcm \ && images \ + && flow_enable \ && pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." pc_project - flow_enable pc_admin # ntnx_download 'AOS' # function in lib.common.sh From 6361d95c9995fde89b3332abae14172b69481267 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 10 Apr 2019 20:43:42 -0700 Subject: [PATCH 058/691] Updating Globals for new Central Workshop IMage repo location --- scripts/global.vars.sh | 82 ++++++++++++++++++++++++++++++------------ 1 file changed, 59 insertions(+), 23 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4f42f65..eac7ca9 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -40,23 +40,26 @@ NTNX_INIT_PASSWORD='nutanix/4u' STORAGE_DEFAULT='Default' STORAGE_IMAGES='Images' +################################## +# +# Look for JQ, AutoDC, and QCOW2 Repos in DC specific below. 
+# +################################## + # Conventions for *_REPOS arrays -- the URL must end with either: # - trailing slash, which imples _IMAGES argument to function repo_source() # - or full package filename. # https://stedolan.github.io/jq/download/#checksums_and_signatures - JQ_REPOS=(\ - #'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ - ) - QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #'http://10.21.250.221/images/tech-enablement/' \ - #'http://10.21.250.221/images/ahv/techsummit/' \ - #'http://10.132.128.50:81/share/saved-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + #JQ_REPOS=(\ + #'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + #) + #QCOW2_REPOS=(\ + #'http://10.42.8.50/images/' \ + #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #) QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ @@ -111,15 +114,12 @@ SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' - AUTODC_REPOS=(\ - 'http://10.42.8.50/images/AutoDC.qcow2' \ - 'http://10.42.8.50/images/AutoDC2.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ -) + #AUTODC_REPOS=(\ + #'http://10.42.8.50/images/AutoDC.qcow2' \ + #'http://10.42.8.50/images/AutoDC2.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + 
#'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ +#) # For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) # https://sewiki.nutanix.com/index.php/HPOC_IP_Schema @@ -147,7 +147,20 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.55 ) # HPOC us-east = DUR - DNS_SERVERS='10.55.251.10' + JQ_REPOS=(\ + 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + QCOW2_REPOS=(\ + 'http://10.55.251.38/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + ) + AUTODC_REPOS=(\ + 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) + DNS_SERVERS='10.55.251.10,10.55.251.11' NW2_NAME='Secondary' NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" @@ -155,7 +168,20 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.42 ) # HPOC us-west = PHX - DNS_SERVERS='10.42.196.10' + JQ_REPOS=(\ + 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + QCOW2_REPOS=(\ + 'http://10.42.194.11/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + ) + AUTODC_REPOS=(\ + 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) + DNS_SERVERS='10.42.196.10,10.42.194.10 ' NW2_NAME='Secondary' NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" @@ -163,6 +189,16 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR + JQ_REPOS=(\ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + 
#'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + QCOW2_REPOS=(\ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + ) + AUTODC_REPOS=(\ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) DNS_SERVERS='10.132.71.40' NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" NW1_DHCP_START="${IPV4_PREFIX}.100" From 5a50360cd841df3e0919947fea0d8695d9b111f3 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 10 Apr 2019 21:05:56 -0700 Subject: [PATCH 059/691] Ypdated Perms --- scripts/bootcamp.sh | 0 scripts/ts2019.sh | 0 scripts/ts2019_mlavi.sh | 179 ---------------------------------------- 3 files changed, 179 deletions(-) mode change 100644 => 100755 scripts/bootcamp.sh mode change 100644 => 100755 scripts/ts2019.sh delete mode 100644 scripts/ts2019_mlavi.sh diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh old mode 100644 new mode 100755 diff --git a/scripts/ts2019_mlavi.sh b/scripts/ts2019_mlavi.sh deleted file mode 100644 index d8d6ca7..0000000 --- a/scripts/ts2019_mlavi.sh +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/bin/env bash -# -x -function ts_images() { - export QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share - export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ - Windows2012R2.iso \ - SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ - xtract-vm-2.0.3.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_320.qcow2 \ - hycu-3.5.0-6253.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - VeeamBR_9.5.4.2615.Update4.iso \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) - - images && pc_cluster_img_import -} -#__main()__________ - -# Source Nutanix environment (PATH + aliases), then common routines + global variables -. 
/etc/profile.d/nutanix_env.sh -. lib.common.sh -. global.vars.sh -begin - -args_required 'EMAIL PE_PASSWORD PC_VERSION' - -#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization -# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! - -case ${1} in - PE | pe ) - . lib.pe.sh - - export PC_DEV_VERSION='5.10.1.1' - export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.1.1.json' - export PC_URL='http://10.42.8.50/images/euphrates-5.10.1.1-stable-prism_central.tar' - #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' - #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' - export FILES_VERSION='3.2.0.1' - export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.229" - - args_required 'PE_HOST PC_LAUNCH' - ssh_pubkey & # non-blocking, parallel suitable - - dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ - && pe_license \ - && pe_init \ - && network_configure \ - && authentication_source \ - && pe_auth - - if (( $? == 0 )) ; then - pc_install "${NW1_NAME}" \ - && prism_check 'PC' \ - - if (( $? == 0 )) ; then - _command="EMAIL=${EMAIL} \ - PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" - - cluster_check \ - && log "Remote asynchroneous PC Image import script... 
${_command}" \ - && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & - - pc_configure \ - && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${PE_HOST}:9440" - log "PC = https://${PC_HOST}:9440" - - files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & - #dependencies 'remove' 'sshpass' - finish - fi - else - finish - _error=18 - log "Error ${_error}: in main functional chain, exit!" - exit ${_error} - fi - ;; - PC | pc ) - . lib.pc.sh - - run_once - - dependencies 'install' 'jq' || exit 13 - - ssh_pubkey & # non-blocking, parallel suitable - - pc_passwd - ntnx_cmd # check cli services available? - - export NUCLEI_SERVER='localhost' - export NUCLEI_USERNAME="${PRISM_ADMIN}" - export NUCLEI_PASSWORD="${PE_PASSWORD}" - # nuclei -debug -username admin -server localhost -password x vm.list - - if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX - log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" - pe_determine ${1} - . global.vars.sh # re-populate PE_HOST dependencies - else - CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ - jq -r .data[0].clusterDetails.clusterName) - if [[ ${CLUSTER_NAME} != '' ]]; then - log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." - fi - fi - - if [[ ! -z "${2}" ]]; then # hidden bonus - log "Don't forget: $0 first.last@nutanixdc.local%password" - calm_update && exit 0 - fi - - export ATTEMPTS=2 - export SLEEP=10 - - pc_init \ - && pc_dns_add \ - && pc_ui \ - && pc_auth \ - && pc_smtp - - ssp_auth \ - && calm_enable \ - && lcm \ - && images \ - && prism_check 'PC' - - log "Non-blocking functions (in development) follow." - # shellcheck disable=2206 - _pc_version=(${PC_VERSION//./ }) - - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then - log "PC<=5.8, Image imports..." 
- ts_images - fi - pc_project - flow_enable - pc_admin - # ntnx_download 'AOS' # function in lib.common.sh - - unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD - - if (( $? == 0 )); then - #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ - #&& - log "PC = https://${PC_HOST}:9440" - finish - else - _error=19 - log "Error ${_error}: failed to reach PC!" - exit ${_error} - fi - ;; - FILES | files | afs ) - files_install - ;; - IMAGES | images ) - . lib.pc.sh - ts_images - ;; -esac From 6c4687623448eaf72670a4aa0d023935e428fe02 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 10 Apr 2019 22:00:13 -0700 Subject: [PATCH 060/691] Update global.vars.sh --- scripts/global.vars.sh | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index eac7ca9..ee23eea 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -10,18 +10,18 @@ PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' PC_DEV_URL='' PC_CURRENT_VERSION='5.10.2' - PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' PC_STABLE_VERSION='5.8.2' - PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' - PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' + #PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' + #PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' # Sync the following to lib.common.sh::ntnx_download-Case=FILES # Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA # - Find ${FILES_VERSION} in the Additional Releases section on the lower right side # - Provide "Upgrade Metadata 
File" URL to FILES_METAURL FILES_VERSION='3.2.0.1' - FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' @@ -147,7 +147,13 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.55 ) # HPOC us-east = DUR - JQ_REPOS=(\ +PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' +PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' + PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.55.251.38/workshop_staging/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ @@ -168,6 +174,12 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.42 ) # HPOC us-west = PHX +PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' 
+PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.42.194.11/workshop_staging/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From 05e4655f3551ae54a97991bde3009f336a4943da Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 10 Apr 2019 22:18:45 -0700 Subject: [PATCH 061/691] Update lib.pe.sh --- scripts/lib.pe.sh | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6b9c703..d86d72f 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -54,16 +54,12 @@ function authentication_source() { _autodc_success=' * status: started' # REVIEW: override global.vars - export AUTODC_REPOS=(\ - 'http://10.42.8.50/images/AutoDC2.qcow2' \ - 'http://10.42.8.50/images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - #'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \ - #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ - ) + #export AUTODC_REPOS=(\ + #'http://10.42.8.50/images/AutoDC2.qcow2' \ + #'http://10.42.8.50/images/AutoDC.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + #) fi dns_check "dc${_autodc_index}.${AUTH_FQDN}" From d1aca2a8f7bbcfc24428e96e7f71ef4469d70920 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 11 Apr 2019 08:52:30 -0700 Subject: [PATCH 062/691] Updates to ts2019.sh for 
Willems test. --- scripts/calm.sh | 2 +- scripts/lib.pc.sh | 2 +- scripts/lib.pe.sh | 2 +- scripts/ts2019.sh | 24 ++++++++++++------------ stage_workshop.sh | 2 +- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index bb1cc95..6a27e51 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -35,7 +35,7 @@ case ${1} in if (( $? == 0 )) ; then _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" cluster_check \ && log "Remote asynchroneous PC Image import script... ${_command}" \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c50e55c..af5456e 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -31,7 +31,7 @@ function flow_enable() { log "Flow still not enabled.... ***Not retrying. Please enable via UI.***" fi else - log "Flow has been enebaled..." + log "Flow has been Enabled..." fi diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d86d72f..f93ece0 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -297,7 +297,7 @@ function pc_configure() { ##################################################################################### _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index c06ef45..bc9fe15 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -18,14 +18,14 @@ case ${1} in PE | pe ) . 
lib.pe.sh - export PC_DEV_VERSION='5.10.2' - export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_VERSION='5.10.2' + #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' - export FILES_VERSION='3.2.0.1' - export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_VERSION='3.2.0.1' + #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' export NW2_DHCP_START="${IPV4_PREFIX}.132" @@ -137,14 +137,14 @@ case ${1} in ssp_auth \ && calm_enable \ - && flow_enable + #&& flow_enable # IF the PC_version is 5.10 then we can rnun LCM and Karbon enable - if [ $PC_CURRENT_VERSION=='5.10.2' ]; then - karbon_enable - lcm - fi - + #if [ $PC_CURRENT_VERSION=='5.10.2' ]; then + # karbon_enable + # lcm + #fi + images \ && prism_check 'PC' diff --git a/stage_workshop.sh b/stage_workshop.sh index 2f50ce0..8842e9f 100644 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -150,7 +150,7 @@ EoM fi log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash 
/home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From 2929c44b6b20f00513236961a5e7ab77ba31d202 Mon Sep 17 00:00:00 2001 From: wessenstam Date: Thu, 11 Apr 2019 20:19:34 -0700 Subject: [PATCH 063/691] Changes to lib.pc.sh and lib.common.sh --- quick.txt | 2 +- scripts/bootcamp.sh | 2 +- scripts/lib.common.sh | 61 ++++++++++++++++++++++++++++++++++++++++--- scripts/lib.pc.sh | 46 +++++++++++++++++++++----------- stage_workshop.sh | 0 5 files changed, 90 insertions(+), 21 deletions(-) mode change 100644 => 100755 stage_workshop.sh diff --git a/quick.txt b/quick.txt index 0b7dad2..c75bee7 100644 --- a/quick.txt +++ b/quick.txt @@ -1 +1 @@ -10.42.41.37|techX2019!|nathan.cox@nutanix.com \ No newline at end of file +10.42.9.37|techX2019!|willem@nutanix.com diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 59ca059..f0a5e60 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -35,7 +35,7 @@ case ${1} in if (( $? == 0 )) ; then _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" cluster_check \ && log "Remote asynchroneous PC Image import script... 
${_command}" \ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 28384ef..8deef1e 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -1,6 +1,16 @@ #!/usr/bin/env bash # dependencies: dig +################################################################################## +# List of date, who and change made to the file +# -------------------------------------------------------------------------------- +# 12-04-2019 - Willem Essenstam +# Changed the run_once function so it checks not on lines in the log file but +# on if the PC is configured by trying to log in using the set password +################################################################################## + +################################################################################## + function args_required() { local _argument local _error=88 @@ -23,6 +33,8 @@ function args_required() { fi } +################################################################################## + function begin() { local _release @@ -33,6 +45,8 @@ function begin() { log "$(basename ${0})${_release} start._____________________" } +################################################################################## + function dependencies { local _argument local _error @@ -136,6 +150,8 @@ function dependencies { esac } +################################################################################## + function dns_check() { local _dns local _error @@ -158,6 +174,8 @@ function dns_check() { fi } +################################################################################## + function download() { local _attempts=5 local _error=0 @@ -200,6 +218,8 @@ function download() { done } +################################################################################## + function fileserver() { local _action=${1} # REQUIRED local _host=${2} # REQUIRED, TODO: default to PE? 
@@ -245,11 +265,17 @@ function fileserver() { esac } +################################################################################## + + function finish() { log "${0} ran for ${SECONDS} seconds._____________________" echo } +################################################################################## + + function images() { # https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html local _cli='acli' @@ -436,6 +462,9 @@ EOF done } +################################################################################## + + function log() { local _caller @@ -443,6 +472,9 @@ function log() { echo "$(date '+%Y-%m-%d %H:%M:%S')|$$|${_caller}|${1}" } +################################################################################## + + function ntnx_cmd() { local _attempts=25 local _error=10 @@ -475,6 +507,9 @@ function ntnx_cmd() { done } +################################################################################## + + function ntnx_download() { local _checksum local _error @@ -597,6 +632,9 @@ function ntnx_download() { fi } +################################################################################## + + function pe_determine() { # ${1} REQUIRED: run on 'PE' or 'PC' local _error @@ -651,6 +689,9 @@ function pe_determine() { fi } +################################################################################## + + function prism_check { # Argument ${1} = REQUIRED: PE or PC # Argument ${2} = OPTIONAL: number of attempts @@ -716,6 +757,9 @@ function prism_check { done } +################################################################################## + + function remote_exec() { # Argument ${1} = REQUIRED: ssh or scp # Argument ${2} = REQUIRED: PE, PC, or AUTH_SERVER @@ -805,6 +849,9 @@ function remote_exec() { done } +################################################################################## + + function repo_source() { # 
https://stackoverflow.com/questions/1063347/passing-arrays-as-parameters-in-bash#4017175 local _candidates=("${!1}") # REQUIRED @@ -872,16 +919,22 @@ function repo_source() { fi } +################################################################################## + + function run_once() { - # TODO: PC dependent - if [[ ! -z ${PC_LAUNCH} ]] && (( $(cat ${HOME}/${PC_LAUNCH%%.sh}.log | wc ${WC_ARG}) > 20 )); then - finish + # Try to login to the PC UI using an API and use the NEW to be password so we can check if PC config has run.... + _Configured_PC=$(curl -X POST https://${PC_HOST}:9440/api/nutanix/v3/clusters/list --user ${PRISM_ADMIN}:${PE_PASSWORD} -H 'Content-Type: application/json' -d '{ "kind": "cluster" }' --insecure --silent | grep "AUTHENTICATION_REQUIRED" | wc -l) + if [[ $_Configured_PC -lt 1 ]]; then _error=2 - log "Warning ${_error}: ${PC_LAUNCH} already ran, exit!" + log "Warning ${_error}: ${PC_LAUNCH} already ran and configured PRISM Central, exit!" exit ${_error} fi } +################################################################################## + + function ssh_pubkey() { local _dir local _directories=(\ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index af5456e..a96aeba 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -2,6 +2,12 @@ # -x # Dependencies: curl, ncli, nuclei, jq +############################################################################################################################################################################### +# 12th of April 2019 - Willem Essenstam +# Added a "-d" character in the flow_enable so the command would run. +# Changed the Karbon Eanable function so it also checks that Karbon has been enabled. 
+############################################################################################################################################################################### + ############################################################################################################################################################################### # Routine to enable Flow @@ -20,7 +26,7 @@ function flow_enable() { log "Enable Nutanix Flow..." # Enabling Flow and put the task id in a variable - _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) + _task_id=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) # Try one more time then fail, but continue if [ -z $_task_id ]; then @@ -171,27 +177,37 @@ function lcm() { function karbon_enable() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _loop=0 - local _json_data_enable="-d '{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}'" + local _json_data_set_enable="-d '{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}'" + local _json_is_enable="-d '{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"}' " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" # Send the enable command to the PC IP using localhost log "Enable the Karbon service on the PC..." 
- _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_enable ${_httpURL}| grep true | wc -l) + # Start the enablement process + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_set_enable ${_httpURL}| grep true | wc -l) - if [[ $_response -le 0 ]]; then - log "Retrying to enable Karbon services one more time...." - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_enable ${_httpURL} | grep true | wc -l) - - if [[ $_response -le 0 ]]; then - log "Unable to enable Karbon. As there are more dependencies on Karbon we stop the script....." - exit 1 - else - log "Karbon has been enabled..." - fi + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... + if [[ $_response -eq 1 ]]; then + # Check if Karbon has been enabled + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_is_enable ${_httpURL}| grep true | wc -l) + while [ $_response -ne 1 ]; do + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_is_enable ${_httpURL}| grep true | wc -l) + done + log "Karbon has been enabled." + break else - log "Karbon has been enabled..." - fi + log "Retrying to enable Karbon one more time." + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_set_enable ${_httpURL}| grep true | wc -l) + if [[ $_response -eq 1 ]]; then + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_is_enable ${_httpURL}| grep true | wc -l) + if [ $_response -lt 1 ]; then + log "Karbon isn't enabled. Please use the UI to enable it." + else + log "Karbon has been enabled." 
+ fi + fi + fi } diff --git a/stage_workshop.sh b/stage_workshop.sh old mode 100644 new mode 100755 From 627d93fb508d7b9384e22583ef47f985d074699d Mon Sep 17 00:00:00 2001 From: wessenstam Date: Fri, 12 Apr 2019 00:24:06 -0700 Subject: [PATCH 064/691] Solved Flow, Karbon and a rare Calm anable situation --- scripts/bootcamp.sh | 9 +++++++++ scripts/lib.pc.sh | 47 +++++++++++++++++++-------------------------- stage_workshop.sh | 1 + 3 files changed, 30 insertions(+), 27 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index f0a5e60..54bce2f 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -33,6 +33,15 @@ case ${1} in && prism_check 'PC' \ if (( $? == 0 )) ; then + ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter + # if [ ! -z DEBUG ]; then + # bash_cmd='bash' + # else + # bash_cmd='bash -x' + # fi + # _command="EMAIL=${EMAIL} \ + # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index a96aeba..02bcdc4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -5,7 +5,7 @@ ############################################################################################################################################################################### # 12th of April 2019 - Willem Essenstam # Added a "-d" character in the flow_enable so the command would run. -# Changed the Karbon Eanable function so it also checks that Karbon has been enabled. +# Changed the Karbon Eanable function so it also checks that Karbon has been enabled. 
Some small typos changed so the Karbon part should work ############################################################################################################################################################################### @@ -26,7 +26,7 @@ function flow_enable() { log "Enable Nutanix Flow..." # Enabling Flow and put the task id in a variable - _task_id=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) + _task_id=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow | jq '.task_uuid' | tr -d \") # Try one more time then fail, but continue if [ -z $_task_id ]; then @@ -177,30 +177,26 @@ function lcm() { function karbon_enable() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _loop=0 - local _json_data_set_enable="-d '{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}'" - local _json_is_enable="-d '{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"}' " + local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" + local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" - - # Send the enable command to the PC IP using localhost - log "Enable the 
Karbon service on the PC..." - + # Start the enablement process - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_set_enable ${_httpURL}| grep true | wc -l) - + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... if [[ $_response -eq 1 ]]; then # Check if Karbon has been enabled - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_is_enable ${_httpURL}| grep true | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) while [ $_response -ne 1 ]; do - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_is_enable ${_httpURL}| grep true | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) done log "Karbon has been enabled." - break else log "Retrying to enable Karbon one more time." - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data_set_enable ${_httpURL}| grep true | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) if [[ $_response -eq 1 ]]; then - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_is_enable ${_httpURL}| grep true | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) if [ $_response -lt 1 ]; then log "Karbon isn't enabled. Please use the UI to enable it." 
else @@ -210,8 +206,6 @@ function karbon_enable() { fi } - - ############################################################################################################################################################################### # Routine for PC_Admin ############################################################################################################################################################################### @@ -586,17 +580,16 @@ function calm_enable() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' log "Enable Nutanix Calm..." - _http_body=$(cat <> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION From c4668ce6eb12bd62e3cc6d20cc9797b3030a171d Mon Sep 17 00:00:00 2001 From: wessenstam Date: Fri, 12 Apr 2019 00:38:49 -0700 Subject: [PATCH 065/691] Removed bash -x in the scripts --- scripts/lib.pe.sh | 4 +++- stage_workshop.sh | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index f93ece0..9fbbfda 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -295,9 +295,11 @@ function pc_configure() { ##################################################################################### ### Handing of to the PC for rest of the installation ##################################################################################### + + ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
diff --git a/stage_workshop.sh b/stage_workshop.sh index 1a4074c..d360760 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -151,7 +151,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From 9bde6b99d33719f5d6752699175d01c2a2b875ed Mon Sep 17 00:00:00 2001 From: wessenstam Date: Fri, 12 Apr 2019 01:03:40 -0700 Subject: [PATCH 066/691] Added some readability stuff --- scripts/lib.pe.sh | 41 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 9fbbfda..fd74e42 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -2,6 +2,9 @@ # -x # Dependencies: acli, ncli, jq, sshpass, curl, md5sum, pgrep, wc, tr, pkill +############################################################################################################################################################################### +# Routine to set the acli command +############################################################################################################################################################################### function acli() { local _cmd @@ -10,6 +13,9 @@ function acli() { # DEBUG=1 && if [[ ${DEBUG} ]]; then log "$@"; fi } +############################################################################################################################################################################### +# Routine to install the AutoDC and join the Domain 
+############################################################################################################################################################################### function authentication_source() { local _attempts local _error=13 @@ -157,6 +163,10 @@ function authentication_source() { esac } +############################################################################################################################################################################### +# Routine to get the Nutanix Files injected +############################################################################################################################################################################### + function files_install() { local _ncli_softwaretype='FILE_SERVER' local _ncli_software_type='afs' @@ -178,6 +188,9 @@ function files_install() { fi } +############################################################################################################################################################################### +# Routine to crerate the networks +############################################################################################################################################################################### function network_configure() { local _network_name="${NW1_NAME}" @@ -210,6 +223,10 @@ function network_configure() { fi } +############################################################################################################################################################################### +# Routine to check if the registration of PE was successful +############################################################################################################################################################################### + function cluster_check() { local _attempts=20 local _loop=0 @@ -260,13 +277,17 @@ function cluster_check() { } 
+############################################################################################################################################################################### +# Routine to configure the PC and handoff to the PC local installation +############################################################################################################################################################################### + function pc_configure() { args_required 'PC_LAUNCH RELEASE' local _command local _container local _dependencies="global.vars.sh lib.pc.sh ${PC_LAUNCH}" - # If we are being called via the we-ts2019.sh, we need to change the lib.common.sh to we-lib.common.sh + # If we are being called via the we-*.sh, we need to change the lib.common.sh to we-lib.common.sh if [[ ${PC_LAUNCH} != *"we-"* ]]; then _dependencies+=" lib.common.sh" else @@ -305,6 +326,9 @@ function pc_configure() { log "PC Configuration complete: try Validate Staged Clusters now." } +############################################################################################################################################################################### +# Routine to install the PC in the PE +############################################################################################################################################################################### function pc_install() { local _ncli_softwaretype='PRISM_CENTRAL_DEPLOY' local _nw_name="${1}" @@ -385,6 +409,9 @@ EOF fi } +############################################################################################################################################################################### +# Routine to set the PE to use the AutoDC for authentication +############################################################################################################################################################################### function pe_auth() { local _aos local _aos_version @@ -428,6 +455,9 @@ function pe_auth() { 
fi } +############################################################################################################################################################################### +# Routine set PE's initial configuration +############################################################################################################################################################################### function pe_init() { args_required 'DATA_SERVICE_IP EMAIL \ SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \ @@ -471,6 +501,9 @@ function pe_init() { fi } +############################################################################################################################################################################### +# Routine to accept the EULA and disable pulse +############################################################################################################################################################################### function pe_license() { local _test args_required 'CURL_POST_OPTS PE_PASSWORD' @@ -511,6 +544,9 @@ function pe_license() { fi } +############################################################################################################################################################################### +# Routine to unregister PE from PC +############################################################################################################################################################################### function pc_unregister { local _cluster_uuid local _pc_uuid @@ -541,6 +577,9 @@ function pc_unregister { pc_destroy } +############################################################################################################################################################################### +# Routine to destroy the PC VM +############################################################################################################################################################################### function 
pc_destroy() { local _vm From 44249bd2ed19899abc4b743da94d840023943d64 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 12 Apr 2019 09:47:17 -0700 Subject: [PATCH 067/691] Bringing SSHpass local. --- scripts/calm.sh | 5 +++-- scripts/global.vars.sh | 14 +++++++++++--- scripts/lib.pe.sh | 2 +- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index 6a27e51..4d5e36f 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -37,8 +37,9 @@ case ${1} in PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" - cluster_check \ - && log "Remote asynchroneous PC Image import script... ${_command}" \ + #cluster_check \ + #&& log "Remote asynchroneous PC Image import script... ${_command}" \ + log "Remote asynchroneous PC Image import script... ${_command}" \ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & pc_configure \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ee23eea..93d053c 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -83,9 +83,9 @@ NTNX_INIT_PASSWORD='nutanix/4u' # https://pkgs.org/download/sshpass # https://sourceforge.net/projects/sshpass/files/sshpass/ - SSHPASS_REPOS=(\ - 'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ - ) + #SSHPASS_REPOS=(\ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + #) # shellcheck disable=2206 OCTET=(${PE_HOST//./ }) # zero index @@ -158,6 +158,10 @@ PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) + SSHPASS_REPOS=(\ + 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + 
#'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -185,6 +189,10 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) + SSHPASS_REPOS=(\ + 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index fd74e42..74761c2 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -320,7 +320,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
From f3e4f79dbb437a02700a8e75a1b3768fd052ae6b Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 12 Apr 2019 10:18:33 -0700 Subject: [PATCH 068/691] updates for PC 5.8.x registration --- scripts/calm.sh | 5 ++--- scripts/lib.pe.sh | 2 ++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index 4d5e36f..6a27e51 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -37,9 +37,8 @@ case ${1} in PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" - #cluster_check \ - #&& log "Remote asynchroneous PC Image import script... ${_command}" \ - log "Remote asynchroneous PC Image import script... ${_command}" \ + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & pc_configure \ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 74761c2..b33e421 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -240,6 +240,8 @@ function cluster_check() { if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then log "PC>=5.10, checking multicluster state..." + elif (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then + log "PC>=5.8, checking multicluster state..." 
while true ; do (( _loop++ )) From 0ff972542d6f345efe280ace8ab9c57ac9eb6af0 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 12 Apr 2019 15:06:52 -0700 Subject: [PATCH 069/691] Bringing back enable Calm in 5.8.2 --- scripts/bootcamp.sh | 2 +- scripts/calm.sh | 3 ++- scripts/lib.pe.sh | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 54bce2f..3f9e024 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -44,7 +44,7 @@ case ${1} in # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" cluster_check \ && log "Remote asynchroneous PC Image import script... ${_command}" \ diff --git a/scripts/calm.sh b/scripts/calm.sh index 6a27e51..707e15e 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -35,7 +35,7 @@ case ${1} in if (( $? == 0 )) ; then _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} IMAGES" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" cluster_check \ && log "Remote asynchroneous PC Image import script... 
${_command}" \ @@ -101,6 +101,7 @@ case ${1} in && pc_smtp ssp_auth \ + && calm_enable \ && images \ && pc_cluster_img_import \ && prism_check 'PC' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index b33e421..69c61c0 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -322,7 +322,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." From f8f806fa04457aa59ee637190f1cbdd112545010 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 15 Apr 2019 11:20:19 -0700 Subject: [PATCH 070/691] Updated AutoDC Locations --- scripts/global.vars.sh | 13 +++++-------- scripts/lib.pe.sh | 12 ++++++------ 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7fb2b23..54efb83 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -31,9 +31,7 @@ NTNX_INIT_PASSWORD='nutanix/4u' 'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) QCOW2_REPOS=(\ - 'http://10.21.250.221/images/tech-enablement/' \ - 'http://10.21.250.221/images/ahv/techsummit/' \ - 'http://10.132.128.50:81/share/saved-images/' \ + 'http://10.42.8.50/images/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share QCOW2_IMAGES=(\ @@ -85,11 +83,10 @@ AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' AUTODC_REPOS=(\ - 'http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2' \ - 
'https://s3.amazonaws.com/get-ahv-images/AutoDC-04282018.qcow2' \ - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + 'http://10.42.8.50/images/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) # For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 44beb90..a75429e 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -53,13 +53,13 @@ function authentication_source() { _autodc_success=' * status: started' # REVIEW: override global.vars - export AUTODC_REPOS=(\ - 'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \ - 'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + #export AUTODC_REPOS=(\ + #'http://10.132.128.50:81/share/saved-images/autodc-2.0.qcow2' \ + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 'http://10.59.103.143:8000/autodc-2.0.qcow2' \ - ) - fi + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + #) + #fi dns_check "dc${_autodc_index}.${AUTH_FQDN}" _result=$? From 864ff184bf657828844393a72a29491cf82c5da7 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 15 Apr 2019 11:27:20 -0700 Subject: [PATCH 071/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index a75429e..4245612 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -59,7 +59,7 @@ function authentication_source() { # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ #) - #fi + fi dns_check "dc${_autodc_index}.${AUTH_FQDN}" _result=$? 
From 60b7b17e42e78ce7ffd439d49890dabd05643c46 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 15 Apr 2019 11:40:23 -0700 Subject: [PATCH 072/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4245612..8c5b13f 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -42,7 +42,7 @@ function authentication_source() { local _autodc_status="systemctl show ${_autodc_service} --property=SubState" local _autodc_success='SubState=running' - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then + if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC-2.0..." _autodc_auth=" --username=${AUTH_ADMIN_USER} --password=${AUTH_ADMIN_PASS}" _autodc_index='' From f516d58801c781c674aa08086549e12678e8f241 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 15 Apr 2019 14:45:00 -0700 Subject: [PATCH 073/691] Update lib.pe.sh --- scripts/lib.pe.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 69c61c0..917041e 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -50,7 +50,7 @@ function authentication_source() { #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then - log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC-2.0..." + log "PC_VERSION ${PC_VERSION} >= 5.9, setting AutoDC2..." _autodc_auth=" --username=${AUTH_ADMIN_USER} --password=${AUTH_ADMIN_PASS}" _autodc_index='' _autodc_release=2 @@ -240,8 +240,8 @@ function cluster_check() { if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then log "PC>=5.10, checking multicluster state..." - elif (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then - log "PC>=5.8, checking multicluster state..." 
+ #elif (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then + # log "PC>=5.8, checking multicluster state..." while true ; do (( _loop++ )) From 52795c6ce45614265eb99225ee67f8fa2dffa03d Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 15 Apr 2019 15:41:35 -0700 Subject: [PATCH 074/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index d360760..7e01a95 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -6,10 +6,10 @@ DEBUG= # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ "Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ +#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { From 8761318ab4765012b70401f3af78ea405059359c Mon Sep 17 00:00:00 2001 From: wessenstam Date: Mon, 15 Apr 2019 17:03:46 -0700 Subject: [PATCH 075/691] Small typos on AFS qcow2 location changed --- scripts/global.vars.sh | 4 ++-- scripts/lib.pe.sh | 2 +- stage_workshop.sh | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 93d053c..c042747 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -152,7 +152,7 @@ PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - 
FILES_URL='http://10.55.251.38/workshop_staging/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -183,7 +183,7 @@ PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 917041e..fec4974 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -322,7 +322,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
diff --git a/stage_workshop.sh b/stage_workshop.sh index 7e01a95..bf1e62e 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -9,7 +9,7 @@ WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ "Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ -#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -151,7 +151,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From 020a0a408bb2921afc3c624a96238618821283b1 Mon Sep 17 00:00:00 2001 From: wessenstam Date: Mon, 15 Apr 2019 17:06:02 -0700 Subject: [PATCH 076/691] Disabled calm 5.8.2 workshop and remove the -x in bash -x --- scripts/lib.pe.sh | 2 +- stage_workshop.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index fec4974..917041e 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -322,7 +322,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... 
${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." diff --git a/stage_workshop.sh b/stage_workshop.sh index bf1e62e..7e01a95 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -9,7 +9,7 @@ WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ "Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ -"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ +#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -151,7 +151,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From 230e5f699adc69440bdeb402ffc5b8402da9e5ca Mon Sep 17 00:00:00 2001 From: wessenstam Date: Mon, 15 Apr 2019 23:17:13 -0700 Subject: [PATCH 077/691] Solving PE Register to PC and image upload in AOS and PC 5.8.2 --- scripts/lib.common.sh | 22 +++++------ scripts/lib.pc.sh | 9 +++++ scripts/lib.pe.sh | 86 ++++++++++++++++++++++--------------------- stage_workshop.sh | 4 +- 4 files changed, 67 insertions(+), 54 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 8deef1e..66a0467 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -278,20 +278,20 @@ function finish() { function images() { # https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html - local _cli='acli' + local _cli='nuclei' local 
_command local _http_body local _image local _image_type local _name - local _source='source_url' + local _source='source_uri' local _test - which "$_cli" - if (( $? > 0 )); then - _cli='nuclei' - _source='source_uri' - fi + #which "$_cli" + #if (( $? > 0 )); then + # _cli='nuclei' + # _source='source_uri' + #fi ####################################### # For doing Disk IMAGES @@ -305,10 +305,10 @@ function images() { && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - else - _test=$(source /etc/profile.d/nutanix_env.sh \ - && ${_cli} image.list 2>&1 \ - | grep "${_image}") + #else + # _test=$(source /etc/profile.d/nutanix_env.sh \ + # && ${_cli} image.list 2>&1 \ + # | grep "${_image}") fi if [[ ! -z ${_test} ]]; then diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 02bcdc4..7a95718 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -580,6 +580,15 @@ function calm_enable() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' log "Enable Nutanix Calm..." + # Need to check if the PE to PC registration has been done before we move forward to enable Calm. If we've done that, move on. 
+ _json_data="{\"perform_validation_only\":true}" + _response=($(curl $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "${_json_data}" https://localhost:9440/api/nutanix/v3/services/nucalm | jq '.validation_result_list[].has_passed')) + while [ ${#_response[@]} -lt 4 ]; do + _response=($(curl $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "${_json_data}" https://localhost:9440/api/nutanix/v3/services/nucalm | jq '.validation_result_list[].has_passed')) + sleep 10 + done + + _http_body='{"enable_nutanix_apps":true,"state":"ENABLE"}' _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d "${_http_body}" https://localhost:9440/api/nutanix/v3/services/nucalm) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 917041e..83c7185 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -234,48 +234,52 @@ function cluster_check() { local _sleep=60 local _test=1 local _test_exit + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' # shellcheck disable=2206 - _pc_version=(${PC_VERSION//./ }) - - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then - log "PC>=5.10, checking multicluster state..." - #elif (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 8 )); then - # log "PC>=5.8, checking multicluster state..." - - while true ; do - (( _loop++ )) - - _test=$(ncli --json=true multicluster get-cluster-state | \ - jq -r .data[0].clusterDetails.multicluster) - _test_exit=$? - log "Cluster status: |${_test}|, exit: ${_test_exit}." - - if [[ ${_test} != 'true' ]]; then - _test=$(ncli multicluster add-to-multicluster \ - external-ip-address-or-svm-ips=${PC_HOST} \ - username=${PRISM_ADMIN} password=${PE_PASSWORD}) - _test_exit=$? - log "Manual join PE to PC = |${_test}|, exit: ${_test_exit}." - fi - - _test=$(ncli --json=true multicluster get-cluster-state | \ - jq -r .data[0].clusterDetails.multicluster) - _test_exit=$? 
- log "Cluster status: |${_test}|, exit: ${_test_exit}." - - if [[ ${_test} == 'true' ]]; then - log "PE to PC = cluster registration: successful." - return 0 - elif (( ${_loop} > ${_attempts} )); then - log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - return ${_error} - else - log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." - sleep ${_sleep} - fi - done - fi + #_pc_version=(${PC_VERSION//./ }) + + #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then + # log "PC>=5.10, checking multicluster state..." + # while true ; do + # (( _loop++ )) + + # _test=$(ncli --json=true multicluster get-cluster-state | jq -r .data[0].clusterDetails.multicluster) + # _test_exit=$? + # log "Cluster status: |${_test}|, exit: ${_test_exit}." + + # if [[ ${_test} != 'true' ]]; then + # _test=$(ncli multicluster add-to-multicluster \ + # external-ip-address-or-svm-ips=${PC_HOST} \ + # username=${PRISM_ADMIN} password=${PE_PASSWORD}) + # _test_exit=$? + # log "Manual join PE to PC = |${_test}|, exit: ${_test_exit}." + # fi + + # _test=$(ncli --json=true multicluster get-cluster-state | \ + # jq -r .data[0].clusterDetails.multicluster) + # _test_exit=$? + # log "Cluster status: |${_test}|, exit: ${_test_exit}." + + # if [[ ${_test} == 'true' ]]; then + # log "PE to PC = cluster registration: successful." + # return 0 + # elif (( ${_loop} > ${_attempts} )); then + # log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + # return ${_error} + # else + # log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." 
+ # sleep ${_sleep} + # fi + # done + #fi + + #if (( ${_pc_version[0]} -ge 5 && ${_pc_version[1]} -eq 8 )); then + log "PC is version 5.8, enabling and checking" + # Enable the PE to PC registration + _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" + _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') + #fi } @@ -322,7 +326,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
diff --git a/stage_workshop.sh b/stage_workshop.sh index 7e01a95..bf1e62e 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -9,7 +9,7 @@ WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ "Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ -#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -151,7 +151,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From cb80860f7fb26030aa344d82f3a2e476c67b08fa Mon Sep 17 00:00:00 2001 From: wessenstam Date: Mon, 15 Apr 2019 23:27:36 -0700 Subject: [PATCH 078/691] Remove bash -x to bash so it runs in none debug mode --- scripts/lib.pe.sh | 2 +- stage_workshop.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 83c7185..f3fc169 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -326,7 +326,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... 
${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." diff --git a/stage_workshop.sh b/stage_workshop.sh index bf1e62e..15c62ac 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -151,7 +151,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From e4d6951c83bd0e797df559f84804d8b3d39b10fc Mon Sep 17 00:00:00 2001 From: wessenstam Date: Mon, 15 Apr 2019 23:30:45 -0700 Subject: [PATCH 079/691] Change to CHANGE.MD file with the latest changes --- CHANGELOG.MD | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 515e356..fe8524d 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -6,6 +6,24 @@

Changes made

+* 2019-04-12 willem@nutanix.com + +

Stageworkshop part

+ * Changed the following files: + * scripts/lib.pe.sh; Changed the way we check if the PC is running + Changed the PE to PC registration + + * scripts/lib.pc.sh; Before we can enable calm we need to make sure the system is ready for it. + * scripts/lib.common.sh; Removed the acli as 5.8.2 has an error on that. All on nuclei now. + * stage_workshop.sh; Re-enabled calm workshop (AOS and PC 5.8.2) + +

Call back server part

+ + * No changes + + +

Changes made

+ * 2019-04-04 willem@nutanix.com From 9d208f95f1031f105f91219108f16d6d015dcd4e Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 16 Apr 2019 09:09:03 -0700 Subject: [PATCH 080/691] Update lib.common.sh --- scripts/lib.common.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 66a0467..ec573c1 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -4,8 +4,8 @@ ################################################################################## # List of date, who and change made to the file # -------------------------------------------------------------------------------- -# 12-04-2019 - Willem Essenstam -# Changed the run_once function so it checks not on lines in the log file but +# 12-04-2019 - Willem Essenstam +# Changed the run_once function so it checks not on lines in the log file but # on if the PC is configured by trying to log in using the set password ################################################################################## @@ -389,10 +389,10 @@ EOF && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - else - _test=$(source /etc/profile.d/nutanix_env.sh \ - && ${_cli} image.list 2>&1 \ - | grep "${_image}") + #else + # _test=$(source /etc/profile.d/nutanix_env.sh \ + # && ${_cli} image.list 2>&1 \ + # | grep "${_image}") fi if [[ ! 
-z ${_test} ]]; then From 31793697e990cb3faf9f84c8b5f4310fe8c18530 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 16 Apr 2019 11:49:16 -0700 Subject: [PATCH 081/691] Update ts2019.sh --- scripts/ts2019.sh | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index bc9fe15..5c1c0bf 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -137,26 +137,14 @@ case ${1} in ssp_auth \ && calm_enable \ - #&& flow_enable - - # IF the PC_version is 5.10 then we can rnun LCM and Karbon enable - #if [ $PC_CURRENT_VERSION=='5.10.2' ]; then - # karbon_enable - # lcm - #fi - - images \ + && lcm \ + && images \ + && karbon_enable \ + && flow_enable \ + && pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." - # shellcheck disable=2206 - _pc_version=(${PC_VERSION//./ }) - - #commenting out to take images back to prevuous update - #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} <= 8 )); then - # log "PC<=5.8, Image imports..." - # ts_images - #fi pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh From 1ddd6ec684f929a0094a3779a619f6bff22db0c6 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 16 Apr 2019 11:50:41 -0700 Subject: [PATCH 082/691] Update ts2019.sh --- scripts/ts2019.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index 5c1c0bf..a1d5520 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -73,10 +73,10 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export QCOW2_REPOS=(\ - 'http://10.42.8.50/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ - ) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + #export QCOW2_REPOS=(\ + #'http://10.42.8.50/images/' \ + #'https://s3.amazonaws.com/get-ahv-images/' \ + #) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share export QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ From 11b698e67b0adffdb4291b4ee03e7d22487180a1 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 17 Apr 2019 09:07:19 -0700 Subject: [PATCH 083/691] Update global.vars.sh Adding in the new 1-Node Subnet --- scripts/global.vars.sh | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index c042747..f33af0a 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -208,6 +208,37 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" ;; + 10.38 ) # HPOC us-west = PHX + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ + 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + SSHPASS_REPOS=(\ + 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + 
#'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) + QCOW2_REPOS=(\ + 'http://10.42.194.11/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + ) + AUTODC_REPOS=(\ + 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) + DNS_SERVERS='10.42.196.10,10.42.194.10 ' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From 795012caddac7aa3958cce47f2a3b07469db65c1 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 17 Apr 2019 09:08:18 -0700 Subject: [PATCH 084/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 24667b1..5e2a38b 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -210,7 +210,7 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" ;; - 10.38 ) # HPOC us-west = PHX + 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' From 2db3989942d727ad20e7c69e35e8b1b886a6c975 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 23 Apr 2019 14:46:32 -0700 Subject: [PATCH 085/691] Update global.vars.sh --- scripts/global.vars.sh | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 5e2a38b..40e8c7a 100644 --- 
a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -234,12 +234,15 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.42.196.10,10.42.194.10 ' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" + #NW2_NAME='Secondary' + #NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + #NW2_SUBNET="${IPV4_PREFIX}.129/25" + #NW2_DHCP_START="${IPV4_PREFIX}.132" + #NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ From 78322d32d9a01d85f96361f111cedbc3ed8a405a Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 24 Apr 2019 15:21:20 -0700 Subject: [PATCH 086/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 40e8c7a..edf5814 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -338,7 +338,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.254" + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR DNS_SERVERS='10.132.71.40' From 8514d2638bb2c05d2dc4a94df719b4949d28f286 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 24 Apr 2019 15:24:12 -0700 Subject: [PATCH 087/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh 
index ef4c74b..635da20 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -304,7 +304,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.254" + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR DNS_SERVERS='10.132.71.40' From eff26e0c51e8597667375b9cb0e214e4bf7b8afc Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Apr 2019 17:58:35 +0200 Subject: [PATCH 088/691] Update lib.pe.sh --- scripts/lib.pe.sh | 49 ++++------------------------------------------- 1 file changed, 4 insertions(+), 45 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 83c7185..d6ad052 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -236,51 +236,10 @@ function cluster_check() { local _test_exit local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - # shellcheck disable=2206 - #_pc_version=(${PC_VERSION//./ }) - - #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then - # log "PC>=5.10, checking multicluster state..." - # while true ; do - # (( _loop++ )) - - # _test=$(ncli --json=true multicluster get-cluster-state | jq -r .data[0].clusterDetails.multicluster) - # _test_exit=$? - # log "Cluster status: |${_test}|, exit: ${_test_exit}." - - # if [[ ${_test} != 'true' ]]; then - # _test=$(ncli multicluster add-to-multicluster \ - # external-ip-address-or-svm-ips=${PC_HOST} \ - # username=${PRISM_ADMIN} password=${PE_PASSWORD}) - # _test_exit=$? - # log "Manual join PE to PC = |${_test}|, exit: ${_test_exit}." - # fi - - # _test=$(ncli --json=true multicluster get-cluster-state | \ - # jq -r .data[0].clusterDetails.multicluster) - # _test_exit=$? - # log "Cluster status: |${_test}|, exit: ${_test_exit}." 
- - # if [[ ${_test} == 'true' ]]; then - # log "PE to PC = cluster registration: successful." - # return 0 - # elif (( ${_loop} > ${_attempts} )); then - # log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - # return ${_error} - # else - # log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." - # sleep ${_sleep} - # fi - # done - #fi - - #if (( ${_pc_version[0]} -ge 5 && ${_pc_version[1]} -eq 8 )); then - log "PC is version 5.8, enabling and checking" - # Enable the PE to PC registration - _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" - _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') - #fi - + log "PC is version 5.8, enabling and checking" + # Enable the PE to PC registration + _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" + _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') } ############################################################################################################################################################################### From 7a21393b9c5cee394ecaad15c29fae7325f05b78 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Apr 2019 18:13:16 +0200 Subject: [PATCH 089/691] Updated lib.pe.sh so we caln also use this in none HPOC environments The change is to skip SMTP configuration including sending email if we are not in HPOC (SMTP_SERVER_ADDRESS is empty) --- scripts/lib.pe.sh | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 58deefb..e3fb9f3 100644 --- a/scripts/lib.pe.sh 
+++ b/scripts/lib.pe.sh @@ -425,21 +425,32 @@ function pe_auth() { # Routine set PE's initial configuration ############################################################################################################################################################################### function pe_init() { - args_required 'DATA_SERVICE_IP EMAIL \ - SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \ - STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ - SLEEP ATTEMPTS' + if [[ -z ${SMTP_SERVER_ADDRESS} ]]; then + # We are not running in HPOC so email is not needed, unless set manually + args_required 'DATA_SERVICE_IP EMAIL \ + STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ + SLEEP ATTEMPTS' + else + args_required 'DATA_SERVICE_IP EMAIL \ + SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \ + STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ + SLEEP ATTEMPTS' + fi if [[ `ncli cluster get-params | grep 'External Data' | \ awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then log "IDEMPOTENCY: Data Services IP set, skip." else - log "Configure SMTP" - ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ + # If we are running in HPOC so email can be send and defined + if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then + log "Configure SMTP" + + ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS} - ${HOME}/serviceability/bin/email-alerts --to_addresses="${EMAIL}" \ - --subject="[pe_init:Config SMTP:alert test] $(ncli cluster get-params)" \ - && ${HOME}/serviceability/bin/send-email + ${HOME}/serviceability/bin/email-alerts --to_addresses="${EMAIL}" \ + --subject="[pe_init:Config SMTP:alert test] $(ncli cluster get-params)" \ + && ${HOME}/serviceability/bin/send-email + fi log "Configure NTP" ncli cluster add-to-ntp-servers servers=${NTP_SERVERS} From 3ff33a8f8cf83fc3b4f91d9bb87dec834d8a8e7c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 29 Apr 2019 15:06:12 +0200 Subject: [PATCH 090/691] small change to the global.vars.sh The change makes it possible to alos use the right wc arguments on Alpine linux (Dark Site container implementation). --- scripts/global.vars.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index edf5814..ce5bb55 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -389,3 +389,6 @@ WC_ARG='--lines' if [[ ${OS_NAME} == 'Darwin' ]]; then WC_ARG='-l' fi +if [[ ${OS_NAME} == 'alpine' ]]; then + WC_ARG='-l' +fi From 20e116b7c2cf55db06ebc8891aafe6debf6d90fc Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 29 Apr 2019 15:55:13 +0200 Subject: [PATCH 091/691] Small change to the bootcamp.sh This chnage makes it possible to run in the dark_site after pulling the repo. In the dark_site we don't have any SMTP server defined automatically... --- scripts/bootcamp.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 3f9e024..9c12ac7 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -106,8 +106,10 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ From e1d651ae1e537edf568c94abe90218fdaa6ba4f5 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 29 Apr 2019 17:01:45 +0200 Subject: [PATCH 092/691] Changes needed for the dark_site container to work. We have no SMTP server, so skipping the SMTP configuration --- scripts/bootcamp.sh | 2 ++ scripts/calm.sh | 8 ++++++-- scripts/citrix.sh | 9 +++++++-- scripts/global.vars.sh | 2 +- scripts/ts2019.sh | 8 ++++++-- scripts/we-ts2019.sh | 8 ++++++-- 6 files changed, 28 insertions(+), 9 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 9c12ac7..a130b5d 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -107,6 +107,8 @@ case ${1} in && pc_dns_add \ && pc_ui \ && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then pc_smtp fi diff --git a/scripts/calm.sh b/scripts/calm.sh index 707e15e..bc4840c 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -97,8 +97,12 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ diff --git a/scripts/citrix.sh b/scripts/citrix.sh index f837313..9851f00 100644 --- a/scripts/citrix.sh +++ b/scripts/citrix.sh @@ -148,8 +148,13 @@ EOF" pc_init \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi + ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ce5bb55..604faea 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -385,7 +385,7 @@ elif [[ $(uname -s) == 'Darwin' ]]; then OS_NAME='Darwin' fi -WC_ARG='--lines' +WC_ARG='-l' if [[ ${OS_NAME} == 'Darwin' ]]; then WC_ARG='-l' fi diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index a1d5520..a82c281 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,8 +132,12 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh index 86f6bf9..42a9ede 100644 --- a/scripts/we-ts2019.sh +++ b/scripts/we-ts2019.sh @@ -130,8 +130,12 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ From 44a386e43f8d3f65f494629edb1097e3a584e5fe Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 29 Apr 2019 09:17:38 -0700 Subject: [PATCH 093/691] Cleanup Pass 1 --- scripts/global.vars.sh | 98 ---------- scripts/localhost.sh | 99 ++++++++++ scripts/stage_citrixhow.sh | 355 ---------------------------------- scripts/stage_citrixhow_pc.sh | 66 ------- 4 files changed, 99 insertions(+), 519 deletions(-) create mode 100644 scripts/localhost.sh delete mode 100644 scripts/stage_citrixhow.sh delete mode 100644 scripts/stage_citrixhow_pc.sh diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 604faea..c51c513 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -276,104 +276,6 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' ;; esac -HTTP_CACHE_HOST='localhost' -HTTP_CACHE_PORT=8181 - - AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file - AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" - LDAP_PORT=389 - AUTH_FQDN='ntnxlab.local' - AUTH_DOMAIN='NTNXLAB' -AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} -AUTH_ADMIN_PASS='nutanix/4u' -AUTH_ADMIN_GROUP='SSP Admins' - AUTODC_REPOS=(\ - 'http://10.42.8.50/images/AutoDC.qcow2' \ - 'http://10.42.8.50/images/AutoDC2.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - - #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ - -) - -# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) -# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema -case "${OCTET[0]}.${OCTET[1]}" in - 10.20 ) #Marketing: us-west = SV - DNS_SERVERS='10.21.253.10' - ;; - 10.21 ) #HPOC: us-west = SV - if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then - log 'GPU 
cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' - exit 0 - fi - - # backup cluster; override relative IP addressing - if (( ${OCTET[2]} == 249 )); then - AUTH_HOST="${IPV4_PREFIX}.118" - PC_HOST="${IPV4_PREFIX}.119" - fi - - DNS_SERVERS='10.21.253.10,10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.55 ) # HPOC us-east = DUR - DNS_SERVERS='10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.42 ) # HPOC us-west = PHX - DNS_SERVERS='10.42.196.10' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" - NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" - # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_CURRENT_METAURL} - - QCOW2_IMAGES=(\ - Centos7-Base.qcow2 \ - Centos7-Update.qcow2 \ - Windows2012R2.qcow2 \ - panlm-img-52.qcow2 \ - kx_k8s_01.qcow2 \ - kx_k8s_02.qcow2 \ - kx_k8s_03.qcow2 \ - ) - ;; -esac - -HTTP_CACHE_HOST='localhost' -HTTP_CACHE_PORT=8181 - - ATTEMPTS=40 - SLEEP=60 # pause (in seconds) between ATTEMPTS - - CURL_OPTS='--insecure --silent --show-error' # --verbose' -CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header 
Accept:application/json --output /dev/null" -CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" - SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' - SSH_OPTS+=' -q' # -v' - # Find operating system and set dependencies if [[ -e /etc/lsb-release ]]; then # Linux Standards Base diff --git a/scripts/localhost.sh b/scripts/localhost.sh new file mode 100644 index 0000000..a2896cd --- /dev/null +++ b/scripts/localhost.sh @@ -0,0 +1,99 @@ + + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + + AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file + AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" + LDAP_PORT=389 + AUTH_FQDN='ntnxlab.local' + AUTH_DOMAIN='NTNXLAB' +AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} +AUTH_ADMIN_PASS='nutanix/4u' +AUTH_ADMIN_GROUP='SSP Admins' + AUTODC_REPOS=(\ + 'http://10.42.8.50/images/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + +) + +# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) +# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema +case "${OCTET[0]}.${OCTET[1]}" in + 10.20 ) #Marketing: us-west = SV + DNS_SERVERS='10.21.253.10' + ;; + 10.21 ) #HPOC: us-west = SV + if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then + log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' + exit 0 + fi + + # backup cluster; override relative IP addressing + if (( ${OCTET[2]} == 249 )); then + AUTH_HOST="${IPV4_PREFIX}.118" + PC_HOST="${IPV4_PREFIX}.119" + fi + + DNS_SERVERS='10.21.253.10,10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.55 ) # HPOC us-east = DUR + DNS_SERVERS='10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.42 ) # HPOC us-west = PHX + DNS_SERVERS='10.42.196.10' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + NW1_DHCP_START="${IPV4_PREFIX}.100" + NW1_DHCP_END="${IPV4_PREFIX}.250" + # PC deploy file local override, TODO:30 make an PC_URL array and eliminate + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_CURRENT_METAURL} + + QCOW2_IMAGES=(\ + Centos7-Base.qcow2 \ + Centos7-Update.qcow2 \ + Windows2012R2.qcow2 \ + panlm-img-52.qcow2 \ + kx_k8s_01.qcow2 \ + kx_k8s_02.qcow2 \ + kx_k8s_03.qcow2 \ + ) + ;; +esac + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + + ATTEMPTS=40 + SLEEP=60 # pause (in seconds) between ATTEMPTS + + CURL_OPTS='--insecure --silent --show-error' # --verbose' +CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output 
/dev/null" +CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" + SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' + SSH_OPTS+=' -q' # -v' diff --git a/scripts/stage_citrixhow.sh b/scripts/stage_citrixhow.sh deleted file mode 100644 index 6ae85a5..0000000 --- a/scripts/stage_citrixhow.sh +++ /dev/null @@ -1,355 +0,0 @@ -#!/bin/bash -# -# Please configure according to your needs -# -function pc_remote_exec { - sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null nutanix@10.21.${MY_HPOC_NUMBER}.39 "$@" -} -function pc_send_file { - sshpass -p nutanix/4u scp -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null "$1" nutanix@10.21.${MY_HPOC_NUMBER}.39:/home/nutanix/"$1" -} - -# Loging date format -#Never:0 Make logging format configurable -#MY_LOG_DATE='date +%Y-%m-%d %H:%M:%S' -# Script file name -MY_SCRIPT_NAME=`basename "$0"` -# Derive HPOC number from IP 3rd byte -#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1) -MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') -array=(${MY_CVM_IP//./ }) -MY_HPOC_NUMBER=${array[2]} -# HPOC Password (if commented, we assume we get that from environment) -#MY_PE_PASSWORD='nx2TechXXX!' 
-STORAGE_POOL='SP01' -STORAGE_DEFAULT='Default' -STORAGE_IMAGES='Images' -MY_DOMAIN_FQDN='ntnxlab.local' -MY_DOMAIN_NAME='NTNXLAB' -MY_DOMAIN_USER='administrator@ntnxlab.local' -MY_DOMAIN_PASS='nutanix/4u' -MY_DOMAIN_ADMIN_GROUP='SSP Admins' -MY_DOMAIN_URL="ldaps://10.21.${MY_HPOC_NUMBER}.40/" -MY_PRIMARY_NET_NAME='Primary' -MY_PRIMARY_NET_VLAN='0' -MY_SECONDARY_NET_NAME='Secondary' -MY_SECONDARY_NET_VLAN="${MY_HPOC_NUMBER}1" -MY_PC_SRC_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central.tar' -MY_PC_META_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central_metadata.json' -MY_AFS_SRC_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable.qcow2' -MY_AFS_META_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable-metadata.json' - -# From this point, we assume: -# IP Range: 10.21.${MY_HPOC_NUMBER}.0/25 -# Gateway: 10.21.${MY_HPOC_NUMBER}.1 -# DNS: 10.21.253.10,10.21.253.11 -# Domain: nutanixdc.local -# DHCP Pool: 10.21.${MY_HPOC_NUMBER}.50 - 10.21.${MY_HPOC_NUMBER}.120 -# -# DO NOT CHANGE ANYTHING BELOW THIS LINE UNLESS YOU KNOW WHAT YOU'RE DOING!! 
-# -# Source Nutanix environments (for PATH and other things) -source /etc/profile.d/nutanix_env.sh -# Logging function -function my_log { - #echo `$MY_LOG_DATE`" $1" - echo $(date "+%Y-%m-%d %H:%M:%S") $1 -} -# Check if we got a password from environment or from the settings above, otherwise exit before doing anything -if [[ -z ${MY_PE_PASSWORD+x} ]]; then - my_log "No password provided, exiting" - exit -1 -fi -my_log "My PID is $$" -my_log "Installing sshpass" -sudo rpm -ivh https://fr2.rpmfind.net/linux/epel/7/x86_64/Packages/s/sshpass-1.06-1.el7.x86_64.rpm -# Configure SMTP -my_log "Configure SMTP" -ncli cluster set-smtp-server address=nutanix-com.mail.protection.outlook.com from-email-address=cluster@nutanix.com port=25 -# Configure NTP -my_log "Configure NTP" -ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org -# Rename default storage container to STORAGE_DEFAULT -my_log "Rename default container to ${STORAGE_DEFAULT}" -default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') -ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" -# Rename default storage pool to STORAGE_POOL -my_log "Rename default storage pool to ${STORAGE_POOL}" -default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) -ncli sp edit name="${default_sp}" new-name="${STORAGE_POOL}" -# Check if there is a container named STORAGE_IMAGES, if not create one -my_log "Check if there is a container named ${STORAGE_IMAGES}, if not create one" -(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^${STORAGE_IMAGES}" 2>&1 > /dev/null) \ - && echo "Container ${STORAGE_IMAGES} already exists" \ - || ncli container create name="${STORAGE_IMAGES}" sp-name="${STORAGE_POOL}" -# Set external IP address: -#ncli cluster edit-params external-ip-address=10.21.${MY_HPOC_NUMBER}.37 
-# Set Data Services IP address: -my_log "Set Data Services IP address to 10.21.${MY_HPOC_NUMBER}.38" -ncli cluster edit-params external-data-services-ip-address=10.21.${MY_HPOC_NUMBER}.38 - -# Importing images -MY_IMAGE="AutoDC" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." - sleep 5 -done - -MY_IMAGE="CentOS" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/CentOS7-04282018.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." - sleep 5 -done - -MY_IMAGE="Windows2012" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows2012R2-04282018.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." 
- sleep 5 -done - -MY_IMAGE="Windows10" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows10-1709-04282018.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." - sleep 5 -done - -MY_IMAGE="XenDesktop-7.15.iso" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kIsoImage source_url=http://10.21.250.221/images/ahv/techsummit/XD715.iso wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." 
- sleep 5 -done - -# Remove existing VMs, if any -my_log "Removing \"Windows 2012\" VM if it exists" -acli -y vm.delete Windows\ 2012\ VM delete_snapshots=true -my_log "Removing \"Windows 10\" VM if it exists" -acli -y vm.delete Windows\ 10\ VM delete_snapshots=true -my_log "Removing \"CentOS\" VM if it exists" -acli -y vm.delete CentOS\ VM delete_snapshots=true - -# Remove Rx-Automation-Network network -my_log "Removing \"Rx-Automation-Network\" Network if it exists" -acli -y net.delete Rx-Automation-Network - -# Create primary network -my_log "Create primary network:" -my_log "Name: ${MY_PRIMARY_NET_NAME}" -my_log "VLAN: ${MY_PRIMARY_NET_VLAN}" -my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.1/25" -my_log "Domain: ${MY_DOMAIN_NAME}" -my_log "Pool: 10.21.${MY_HPOC_NUMBER}.50 to 10.21.${MY_HPOC_NUMBER}.125" -acli net.create ${MY_PRIMARY_NET_NAME} vlan=${MY_PRIMARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.1/25 -acli net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME} -acli net.add_dhcp_pool ${MY_PRIMARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.50 end=10.21.${MY_HPOC_NUMBER}.125 - -# Create secondary network -if [[ ${MY_SECONDARY_NET_NAME} ]]; then - my_log "Create secondary network:" - my_log "Name: ${MY_SECONDARY_NET_NAME}" - my_log "VLAN: ${MY_SECONDARY_NET_VLAN}" - my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.129/25" - my_log "Domain: ${MY_DOMAIN_NAME}" - my_log "Pool: 10.21.${MY_HPOC_NUMBER}.132 to 10.21.${MY_HPOC_NUMBER}.253" - acli net.create ${MY_SECONDARY_NET_NAME} vlan=${MY_SECONDARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.129/25 - acli net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME} - acli net.add_dhcp_pool ${MY_SECONDARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.132 end=10.21.${MY_HPOC_NUMBER}.253 -fi - -# Create AutoDC & power on -my_log "Create DC VM based on AutoDC image" -acli vm.create DC num_vcpus=2 num_cores_per_vcpu=1 
memory=4G -acli vm.disk_create DC cdrom=true empty=true -acli vm.disk_create DC clone_from_image=AutoDC -acli vm.nic_create DC network=${MY_PRIMARY_NET_NAME} ip=10.21.${MY_HPOC_NUMBER}.40 -my_log "Power on DC VM" -acli vm.on DC - -# Need to wait for AutoDC to be up (30?60secs?) -my_log "Waiting 60sec to give DC VM time to start" -sleep 60 - -# Configure PE external authentication -my_log "Configure PE external authentication" -ncli authconfig add-directory directory-type=ACTIVE_DIRECTORY connection-type=LDAP directory-url="${MY_DOMAIN_URL}" domain="${MY_DOMAIN_FQDN}" name="${MY_DOMAIN_NAME}" service-account-username="${MY_DOMAIN_USER}" service-account-password="${MY_DOMAIN_PASS}" - -# Configure PE role mapping -my_log "Configure PE role mapping" -ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${MY_DOMAIN_NAME}" entity-values="${MY_DOMAIN_ADMIN_GROUP}" - -# Reverse Lookup Zone -my_log "Creating Reverse Lookup Zone on DC VM" -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "samba-tool dns zonecreate dc1 ${MY_HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart" - -# Create custom OUs -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "apt install ldb-tools -y -q" - -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "cat << EOF > ous.ldif -dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: Non-Persistent Desktop OU - -dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: Persistent Desktop OU - -dn: OU=XenAppServer,DC=NTNXLAB,DC=local -changetype: add -objectClass: 
top -objectClass: organizationalunit -description: XenApp Server OU -EOF" - -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart" - -# Provision local Prism account for XD MCS Plugin -my_log "Create PE user account xd for MCS Plugin" -ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com -ncli user grant-cluster-admin-role user-name=xd - -# Get UUID from cluster -my_log "Get UUIDs from cluster:" -MY_NET_UUID=$(acli net.get ${MY_PRIMARY_NET_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs) -my_log "${MY_PRIMARY_NET_NAME} UUID is ${MY_NET_UUID}" -MY_CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) -my_log "${STORAGE_DEFAULT} UUID is ${MY_CONTAINER_UUID}" - -# Validate EULA on PE -my_log "Validate EULA on PE" -curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST \ - https://127.0.0.1:9440/PrismGateway/services/rest/v1/eulas/accept \ - -d '{ - "username": "SE", - "companyName": "NTNX", - "jobTitle": "SE" -}' - -# Disable Pulse in PE -my_log "Disable Pulse in PE" -curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X PUT \ - https://127.0.0.1:9440/PrismGateway/services/rest/v1/pulse \ - -d '{ - "defaultNutanixEmail": null, - "emailContactList": null, - "enable": false, - "enableDefaultNutanixEmail": false, - "isPulsePromptNeeded": false, - "nosVersion": null, - "remindLater": null, - "verbosityType": null -}' - -# AFS Download -my_log "Download AFS image from ${MY_AFS_SRC_URL}" -wget -nv ${MY_AFS_SRC_URL} -my_log "Download AFS metadata JSON from ${MY_AFS_META_URL}" -wget -nv ${MY_AFS_META_URL} - -# Staging AFS -my_log "Stage AFS" -ncli software upload file-path=/home/nutanix/${MY_AFS_SRC_URL##*/} 
meta-file-path=/home/nutanix/${MY_AFS_META_URL##*/} software-type=FILE_SERVER - -# Freeing up space -my_log "Delete AFS sources to free some space" -rm ${MY_AFS_SRC_URL##*/} ${MY_AFS_META_URL##*/} - -# Prism Central Download -my_log "Download PC tarball from ${MY_PC_SRC_URL}" -wget -nv ${MY_PC_SRC_URL} -my_log "Download PC metadata JSON from ${MY_PC_META_URL}" -wget -nv ${MY_PC_META_URL} - -# Staging Prism Central -my_log "Stage Prism Central" -ncli software upload file-path=/home/nutanix/${MY_PC_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_PC_META_URL##*/} software-type=PRISM_CENTRAL_DEPLOY - -# Freeing up space -my_log "Delete PC sources to free some space" -rm ${MY_PC_SRC_URL##*/} ${MY_PC_META_URL##*/} - -# Deploy Prism Central -my_log "Deploy Prism Central" -MY_DEPLOY_BODY=$(cat <> pcconfig.log 2>&1 &" -my_log "Removing sshpass" -sudo rpm -e sshpass -my_log "PE Configuration complete" diff --git a/scripts/stage_citrixhow_pc.sh b/scripts/stage_citrixhow_pc.sh deleted file mode 100644 index 2cacfda..0000000 --- a/scripts/stage_citrixhow_pc.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -#MY_PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz' - -# Script file name -MY_SCRIPT_NAME=`basename "$0"` - -# Source Nutanix environments (for PATH and other things) -. /etc/profile.d/nutanix_env.sh -. 
lib.common.sh # source common routines -Dependencies 'install'; - -# Derive HPOC number from IP 3rd byte -#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1) - MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') - array=(${MY_CVM_IP//./ }) -MY_HPOC_NUMBER=${array[2]} - -CURL_OPTS="${CURL_OPTS} --user admin:${MY_PE_PASSWORD}" #lib.common.sh initialized -#CURL_OPTS="${CURL_OPTS} --verbose" - -# Set Prism Central Password to Prism Element Password -my_log "Setting PC password to PE password" -ncli user reset-password user-name="admin" password="${MY_PE_PASSWORD}" - -# Add NTP Server\ -my_log "Configure NTP on PC" -ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org - -# Accept Prism Central EULA -my_log "Validate EULA on PC" -curl ${CURL_OPTS} \ - https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/eulas/accept \ - -d '{ - "username": "SE", - "companyName": "NTNX", - "jobTitle": "SE" -}' - -# Disable Prism Central Pulse -my_log "Disable Pulse on PC" -curl ${CURL_OPTS} -X PUT \ - https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/pulse \ - -d '{ - "emailContactList":null, - "enable":false, - "verbosityType":null, - "enableDefaultNutanixEmail":false, - "defaultNutanixEmail":null, - "nosVersion":null, - "isPulsePromptNeeded":false, - "remindLater":null -}' - -# Prism Central upgrade -#my_log "Download PC upgrade image: ${MY_PC_UPGRADE_URL##*/}" -#wget -nv ${MY_PC_UPGRADE_URL} - -#my_log "Prepare PC upgrade image" -#tar -xzf ${MY_PC_UPGRADE_URL##*/} -#rm ${MY_PC_UPGRADE_URL##*/} - -#my_log "Upgrade PC" -#cd /home/nutanix/install ; ./bin/cluster -i . 
-p upgrade - -my_log "PC Configuration complete on `$date`" From 444fc72612cc4dc08c6e09883681c0e1ceef8f3f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 29 Apr 2019 10:07:23 -0700 Subject: [PATCH 094/691] Update global.vars.sh Cleanup --- scripts/global.vars.sh | 252 ++++++++++++++--------------------------- 1 file changed, 84 insertions(+), 168 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index c51c513..acb0687 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -1,44 +1,19 @@ #!/usr/bin/env bash # shellcheck disable=SC2034 - RELEASE='release.json' -# Sync the following to lib.common.sh::ntnx_download-Case=PC -# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails -# - Find ${PC_VERSION} in the Additional Releases section on the lower right side -# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL - PC_DEV_VERSION='5.10.2' - PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' - PC_DEV_URL='' - PC_CURRENT_VERSION='5.10.2' - #PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - PC_STABLE_VERSION='5.8.2' - #PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' - #PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' -# Sync the following to lib.common.sh::ntnx_download-Case=FILES -# Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA -# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side -# - Provide "Upgrade Metadata File" URL to FILES_METAURL - FILES_VERSION='3.2.0.1' - #FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export 
FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' - # 2019-02-15: override until metadata URL fixed - # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - # Revert by overriding again... - #FILES_VERSION='3.2.0' - #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' - #FILES_URL= - +RELEASE='release.json' +PC_DEV_VERSION='5.10.2' +PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' +PC_DEV_URL='' +PC_CURRENT_VERSION='5.10.2' +PC_STABLE_VERSION='5.8.2' +FILES_VERSION='3.2.0.1' NTNX_INIT_PASSWORD='nutanix/4u' - PRISM_ADMIN='admin' - SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" - STORAGE_POOL='SP01' - STORAGE_DEFAULT='Default' - STORAGE_IMAGES='Images' +PRISM_ADMIN='admin' +SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" +STORAGE_POOL='SP01' +STORAGE_DEFAULT='Default' +STORAGE_IMAGES='Images' ################################## # @@ -46,23 +21,7 @@ NTNX_INIT_PASSWORD='nutanix/4u' # ################################## - # Conventions for *_REPOS arrays -- the URL must end with either: - # - trailing slash, which imples _IMAGES argument to function repo_source() - # - or full package filename. 
- - # https://stedolan.github.io/jq/download/#checksums_and_signatures - - #JQ_REPOS=(\ - #'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ - #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ - #) - #QCOW2_REPOS=(\ - #'http://10.42.8.50/images/' \ - #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #) - - QCOW2_IMAGES=(\ +QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ @@ -71,7 +30,8 @@ NTNX_INIT_PASSWORD='nutanix/4u' ERA-Server-build-1.0.1.qcow2 \ 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) - ISO_IMAGES=(\ + +ISO_IMAGES=(\ CentOS7.iso \ Windows2016.iso \ Windows2012R2.iso \ @@ -80,188 +40,144 @@ NTNX_INIT_PASSWORD='nutanix/4u' SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ ) - # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso - # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 - - # https://pkgs.org/download/sshpass - # https://sourceforge.net/projects/sshpass/files/sshpass/ - #SSHPASS_REPOS=(\ - #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ - #) # shellcheck disable=2206 - OCTET=(${PE_HOST//./ }) # zero index - IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} +OCTET=(${PE_HOST//./ }) # zero index +IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) - PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) - DNS_SERVERS='8.8.8.8' - NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' - NW1_NAME='Primary' - NW1_VLAN=0 -# Assuming HPOC defaults - NW1_SUBNET="${IPV4_PREFIX}.1/25" - NW1_DHCP_START="${IPV4_PREFIX}.50" - NW1_DHCP_END="${IPV4_PREFIX}.125" -# https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F 
-#SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' -SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' - SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' - SMTP_SERVER_PORT=25 +PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) +DNS_SERVERS='8.8.8.8' +NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + +NW1_NAME='Primary' +NW1_VLAN=0 +NW1_SUBNET="${IPV4_PREFIX}.1/25" +NW1_DHCP_START="${IPV4_PREFIX}.50" +NW1_DHCP_END="${IPV4_PREFIX}.125" + +NW2_NAME='Secondary' +NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) +NW2_SUBNET="${IPV4_PREFIX}.129/25" +NW2_DHCP_START="${IPV4_PREFIX}.132" +NW2_DHCP_END="${IPV4_PREFIX}.253" - AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file - AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" - LDAP_PORT=389 - AUTH_FQDN='ntnxlab.local' - AUTH_DOMAIN='NTNXLAB' +SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' +SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' +SMTP_SERVER_PORT=25 + +AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file +AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" +LDAP_PORT=389 +AUTH_FQDN='ntnxlab.local' +AUTH_DOMAIN='NTNXLAB' AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' - #AUTODC_REPOS=(\ - #'http://10.42.8.50/images/AutoDC.qcow2' \ - #'http://10.42.8.50/images/AutoDC2.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ -#) -# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) + +# For Nutanix HPOC/Marketing clusters (RTP 10.55, PHC 10.42, PHX 10.38) # https://sewiki.nutanix.com/index.php/HPOC_IP_Schema case "${OCTET[0]}.${OCTET[1]}" in - 10.20 ) #Marketing: us-west = SV - DNS_SERVERS='10.21.253.10' - ;; - 10.21 ) #HPOC: us-west = SV - if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then - log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' - exit 0 - fi - - # backup cluster; override relative IP addressing - if (( ${OCTET[2]} == 249 )); then - AUTH_HOST="${IPV4_PREFIX}.118" - PC_HOST="${IPV4_PREFIX}.119" - fi - DNS_SERVERS='10.21.253.10,10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; 10.55 ) # HPOC us-east = DUR -PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' -PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - 
AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.55.251.10,10.55.251.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX -PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' -PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 
'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.42.196.10,10.42.194.10 ' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + DNS_SERVERS='10.42.196.10,10.42.194.10 ' ;; - 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + 10.38 ) # HPOC us-west = PHX 1-Node Clusters + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ 
#'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + UTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) - DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" - #NW2_NAME='Secondary' - #NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - #NW2_SUBNET="${IPV4_PREFIX}.129/25" - #NW2_DHCP_START="${IPV4_PREFIX}.132" - #NW2_DHCP_END="${IPV4_PREFIX}.253" + NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - JQ_REPOS=(\ + JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" + NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + 
PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json PC_STABLE_METAURL=${PC_CURRENT_METAURL} QCOW2_IMAGES=(\ From 889b1eb0ac50aea42d3a7cd5d3fabe776a5a361c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 29 Apr 2019 21:31:38 +0200 Subject: [PATCH 095/691] Reintroducing the needed parameters... Returning some parameters/variables that are needed by the script. --- cluster.txt | 2 +- scripts/global.vars.sh | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/cluster.txt b/cluster.txt index 1c28991..c75bee7 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1 +1 @@ -10.42.86.37|techX2019!|willem@nutanix.com +10.42.9.37|techX2019!|willem@nutanix.com diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index acb0687..c782a64 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -14,6 +14,15 @@ SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" STORAGE_POOL='SP01' STORAGE_DEFAULT='Default' STORAGE_IMAGES='Images' +ATTEMPTS=40 +SLEEP=60 + +# Curl and SSH settings +CURL_OPTS='--insecure --silent --show-error' # --verbose' +CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null" +CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" +SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' +SSH_OPTS+=' -q' # -v' ################################## # From 74c5afb4bfcde7d9100c47f79621a35c981ddd6d Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 30 Apr 2019 10:19:41 -0700 Subject: [PATCH 096/691] Update lib.pe.sh --- scripts/lib.pe.sh | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index e3fb9f3..7e52b44 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -59,14 +59,6 @@ function 
authentication_source() { _autodc_restart="sleep 2 && service ${_autodc_service} stop && sleep 5 && service ${_autodc_service} start" _autodc_status="service ${_autodc_service} status" _autodc_success=' * status: started' - - # REVIEW: override global.vars - #export AUTODC_REPOS=(\ - #'http://10.42.8.50/images/AutoDC2.qcow2' \ - #'http://10.42.8.50/images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - #) fi dns_check "dc${_autodc_index}.${AUTH_FQDN}" @@ -444,7 +436,7 @@ function pe_init() { # If we are running in HPOC so email can be send and defined if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then log "Configure SMTP" - + ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS} ${HOME}/serviceability/bin/email-alerts --to_addresses="${EMAIL}" \ From a1999736119bc5de5d9d71d01b5679973cd2be84 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 30 Apr 2019 10:55:51 -0700 Subject: [PATCH 097/691] Cleanup --- scripts/global.vars.sh | 6 +++--- scripts/lib.common.sh | 15 +-------------- scripts/lib.pc.sh | 8 ++++---- 3 files changed, 8 insertions(+), 21 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index c782a64..a0c90b8 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -112,7 +112,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.55.251.10,10.55.251.11' + DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' ;; 10.42 ) # HPOC us-west = PHX PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' @@ -138,7 +138,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.42.196.10,10.42.194.10 ' + 
DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' @@ -160,7 +160,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - UTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index ec573c1..b62137f 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -251,11 +251,6 @@ function fileserver() { remote_exec 'ssh' ${_host} \ "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" - - # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ - # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 - #AutoDC2: pending - #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable popd || exit ;; 'stop' ) @@ -287,11 +282,6 @@ function images() { local _source='source_uri' local _test - #which "$_cli" - #if (( $? > 0 )); then - # _cli='nuclei' - # _source='source_uri' - #fi ####################################### # For doing Disk IMAGES @@ -305,10 +295,7 @@ function images() { && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - #else - # _test=$(source /etc/profile.d/nutanix_env.sh \ - # && ${_cli} image.list 2>&1 \ - # | grep "${_image}") + fi if [[ ! 
-z ${_test} ]]; then diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7a95718..b4897ef 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -180,10 +180,10 @@ function karbon_enable() { local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" - + # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... if [[ $_response -eq 1 ]]; then # Check if Karbon has been enabled @@ -203,7 +203,7 @@ function karbon_enable() { log "Karbon has been enabled." 
fi fi - fi + fi } ############################################################################################################################################################################### @@ -428,7 +428,7 @@ function pc_passwd() { } ############################################################################################################################################################################### -# Routine to setp up the SSP authentication to use the AutoDC1 or 2 server +# Routine to setp up the SSP authentication to use the AutoDC server ############################################################################################################################################################################### function ssp_auth() { From e9413649a9ace67478574aaf86d369ac0bd64014 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 30 Apr 2019 10:59:34 -0700 Subject: [PATCH 098/691] Update global.vars.sh --- scripts/global.vars.sh | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index a0c90b8..1f19661 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,9 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.10.2' -PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' -PC_DEV_URL='' +PC_DEV_VERSION='5.10.3' PC_CURRENT_VERSION='5.10.2' PC_STABLE_VERSION='5.8.2' FILES_VERSION='3.2.0.1' @@ -89,6 +87,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' 
PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' @@ -115,6 +115,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' ;; 10.42 ) # HPOC us-west = PHX + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' @@ -141,6 +143,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' From ce695dba359bf0573de077a7d4ee55aafce4fa45 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 30 Apr 2019 15:37:50 -0700 Subject: [PATCH 099/691] Updates for PC 5.10.3 & Files 3.5.0 --- scripts/global.vars.sh | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1f19661..dd5bc72 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,9 +3,9 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='5.10.3' -PC_CURRENT_VERSION='5.10.2' +PC_CURRENT_VERSION='5.10.3' PC_STABLE_VERSION='5.8.2' -FILES_VERSION='3.2.0.1' +FILES_VERSION='3.5.0' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -89,12 +89,12 @@ 
case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -117,12 +117,12 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' 
PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -145,12 +145,12 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From 13d6f6e466306e796c5a0141cfa5e727a61ada47 Mon Sep 17 00:00:00 2001 From: 
Nathan C Date: Wed, 1 May 2019 10:13:17 -0700 Subject: [PATCH 100/691] Update global.vars.sh --- scripts/global.vars.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index dd5bc72..6a8aa79 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,7 +35,8 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + 'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ + #'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) ISO_IMAGES=(\ From ac0c69237e70b4ca449de249a056617a3d2df16b Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 1 May 2019 14:48:02 -0700 Subject: [PATCH 101/691] Update lib.common.sh --- scripts/lib.common.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index b62137f..9accf60 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -322,6 +322,11 @@ function images() { _name=acs-centos fi + # TODO:0 TOFIX: acs-centos ugly override for today... 
+ if (( $(echo "${_image}" | grep -i 'centos7-0.0' | wc --lines ) > 0 )); then + _name=karbon-centos7.5.1804-ntnx-0.0 + fi + if [[ ${_cli} == 'acli' ]]; then _image_type='kDiskImage' _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ From f3b427852d9d6468e9825e0142dc0f3ae264b348 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 2 May 2019 09:48:38 -0700 Subject: [PATCH 102/691] updates --- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6a8aa79..3aeb676 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,7 +35,7 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - 'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ + #'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ #'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b4897ef..67ed089 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -206,6 +206,42 @@ function karbon_enable() { fi } +############################################################################################################################################################################### +# Download Karbon CentOS Image +############################################################################################################################################################################### + +function karbon_image_download() { + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _loop=0 + local 
_json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" + local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " + local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" + + # Start the enablement process + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... + if [[ $_response -eq 1 ]]; then + # Check if Karbon has been enabled + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + while [ $_response -ne 1 ]; do + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + done + log "Karbon has been enabled." + else + log "Retrying to enable Karbon one more time." + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + if [[ $_response -eq 1 ]]; then + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + if [ $_response -lt 1 ]; then + log "Karbon isn't enabled. Please use the UI to enable it." + else + log "Karbon has been enabled." 
+ fi + fi + fi +} + ############################################################################################################################################################################### # Routine for PC_Admin ############################################################################################################################################################################### From e9097bd99918ec764ca6fc5684a3059b0285d825 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 14 May 2019 16:40:49 -0600 Subject: [PATCH 103/691] Updates for Prism Pro Data seeding --- scripts/bootcamp.sh | 3 ++- scripts/global.vars.sh | 3 +++ scripts/lib.pc.sh | 25 ++++++++++++++++++++++--- 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index a130b5d..a57bef7 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -106,7 +106,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth + && seedPC \ + && pc_auth # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 3aeb676..8981b48 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -113,6 +113,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) + PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' ;; 10.42 ) # HPOC us-west = PHX @@ -141,6 +142,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters @@ -169,6 +171,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 67ed089..40f5d35 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -215,7 +215,7 @@ function karbon_image_download() { local _loop=0 local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " - local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" + local 
_httpURL="https://localhost:7050/acs/image/download" # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) @@ -234,9 +234,9 @@ function karbon_image_download() { if [[ $_response -eq 1 ]]; then _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) if [ $_response -lt 1 ]; then - log "Karbon isn't enabled. Please use the UI to enable it." + log "Karbon CentOS image has not been downloaded." else - log "Karbon has been enabled." + log "Karbon CentOS image has been downloaded." fi fi fi @@ -463,6 +463,25 @@ function pc_passwd() { # log "cURL reset password _test=${_test}" } +############################################################################################################################################################################### +# Seed PC data for Prism Pro Labs +############################################################################################################################################################################### + +function seedPC() { + local _test + local _setup + + _test=$(curl -L ${PC_DATA} -o /home/nutanix/seedPC.zip) + log "Pulling Prism Data| PC_DATA ${PC_DATA}|${_test}" + unzip /home/nutanix/seedPC.zip + pushd /home/nutanix/lab/ + + _setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) + log "Running Setup Script|$_setup" + + popd +} + ############################################################################################################################################################################### # Routine to setp up the SSP authentication to use the AutoDC server ############################################################################################################################################################################### From 6493865c3750545f565a399f8a239a447fd2419a 
Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 16 May 2019 09:32:54 -0700 Subject: [PATCH 104/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index 42ec6dd..b68142c 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=jncox + ORGANIZATION=nutanixworkshops REPOSITORY=stageworkshop BRANCH=master else From d0a2ba8f9ad72c1e1399b8993bde5b258e123e83 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 08:55:04 -0700 Subject: [PATCH 105/691] Update stage_workshop.sh --- stage_workshop.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 15c62ac..b7394f6 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -7,7 +7,7 @@ DEBUG= # - PC #.# WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ -"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ +#"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ "Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -267,10 +267,10 @@ function select_workshop() { get_file break ;; - "Validate Staged Clusters") - validate_clusters - break - ;; + #"Validate Staged Clusters") + # validate_clusters + # break + # ;; "Quit") exit ;; @@ -310,14 +310,14 @@ function select_workshop() { . 
scripts/global.vars.sh begin - _VALIDATE='Validate Staged Clusters' +# _VALIDATE='Validate Staged Clusters' _CLUSTER_FILE='Cluster Input File' CLUSTER_LIST= # NONWORKSHOPS appended to WORKSHOPS WORKSHOP_COUNT=${#WORKSHOPS[@]} WORKSHOPS[${#WORKSHOPS[@]}]="Change ${_CLUSTER_FILE}" -WORKSHOPS[${#WORKSHOPS[@]}]=${_VALIDATE} +#WORKSHOPS[${#WORKSHOPS[@]}]=${_VALIDATE} WORKSHOPS[${#WORKSHOPS[@]}]="Quit" let NONWORKSHOPS=${#WORKSHOPS[@]}-${WORKSHOP_COUNT} From fad197af60b3e5b5ff5374fba3fa6e542e7b5032 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:29:25 -0700 Subject: [PATCH 106/691] Update stage_workshop.sh --- stage_workshop.sh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index b7394f6..018d496 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -34,12 +34,6 @@ function stage_clusters() { export PC_VERSION="${PC_CURRENT_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.9.2 - elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.7.1.1 - elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.6.2 fi # Map workshop to staging script(s) and libraries, From 938ccfbba604e15bf4aec5cf03ca3afe711f620f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:29:35 -0700 Subject: [PATCH 107/691] Update stage_workshop.sh --- stage_workshop.sh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index b7394f6..018d496 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -34,12 +34,6 @@ function stage_clusters() { export PC_VERSION="${PC_CURRENT_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" - elif (( $(echo ${_workshop} | 
grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.9.2 - elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.7.1.1 - elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.6.2 fi # Map workshop to staging script(s) and libraries, From 896d9728c06422bd086c108cb47193077403fa2e Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:43:28 -0700 Subject: [PATCH 108/691] Revert "Update stage_workshop.sh" This reverts commit fad197af60b3e5b5ff5374fba3fa6e542e7b5032. --- stage_workshop.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/stage_workshop.sh b/stage_workshop.sh index 018d496..b7394f6 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -34,6 +34,12 @@ function stage_clusters() { export PC_VERSION="${PC_CURRENT_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" + elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.9.2 + elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.7.1.1 + elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then + export PC_VERSION=5.6.2 fi # Map workshop to staging script(s) and libraries, From cf554e6dac93039b51758ea12b90219411fe09e6 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:43:40 -0700 Subject: [PATCH 109/691] Revert "Merge pull request #18 from jncox/master" This reverts commit bd4ae8c3b1717267ee198f341da35248161f697b, reversing changes made to 5978b484333c73a5c171498eadea0076dffeb3b1. 
--- stage_workshop.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index b7394f6..15c62ac 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -7,7 +7,7 @@ DEBUG= # - PC #.# WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ -#"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ +"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ "Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -267,10 +267,10 @@ function select_workshop() { get_file break ;; - #"Validate Staged Clusters") - # validate_clusters - # break - # ;; + "Validate Staged Clusters") + validate_clusters + break + ;; "Quit") exit ;; @@ -310,14 +310,14 @@ function select_workshop() { . scripts/global.vars.sh begin -# _VALIDATE='Validate Staged Clusters' + _VALIDATE='Validate Staged Clusters' _CLUSTER_FILE='Cluster Input File' CLUSTER_LIST= # NONWORKSHOPS appended to WORKSHOPS WORKSHOP_COUNT=${#WORKSHOPS[@]} WORKSHOPS[${#WORKSHOPS[@]}]="Change ${_CLUSTER_FILE}" -#WORKSHOPS[${#WORKSHOPS[@]}]=${_VALIDATE} +WORKSHOPS[${#WORKSHOPS[@]}]=${_VALIDATE} WORKSHOPS[${#WORKSHOPS[@]}]="Quit" let NONWORKSHOPS=${#WORKSHOPS[@]}-${WORKSHOP_COUNT} From d82e0ac3fa13b4b6d841df6ffb651f5ed2d3f086 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:44:01 -0700 Subject: [PATCH 110/691] Revert "Merge branch 'master' of https://github.com/jncox/stageworkshop" This reverts commit a33812a111a4a332d9048d4ebd38f696616e413b, reversing changes made to d0a2ba8f9ad72c1e1399b8993bde5b258e123e83. 
--- bootstrap.sh | 2 +- scripts/global.vars.sh | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index b68142c..42ec6dd 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=nutanixworkshops + ORGANIZATION=jncox REPOSITORY=stageworkshop BRANCH=master else diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f622f6b..8981b48 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -188,10 +188,8 @@ case "${OCTET[0]}.${OCTET[1]}" in AUTODC_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - - 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" NW1_DHCP_START="${IPV4_PREFIX}.100" NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate From 909ea91482bfc96628a1047dfba96e1905aed67f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:52:47 -0700 Subject: [PATCH 111/691] Updates to remove Localhost --- cluster.txt | 2 +- scripts/bootcamp.sh | 9 +- scripts/calm.sh | 8 +- scripts/citrix.sh | 9 +- scripts/global.vars.sh | 268 ++++++++++++++----------- scripts/lib.common.sh | 20 +- scripts/lib.pc.sh | 63 +----- scripts/lib.pe.sh | 86 +++++--- scripts/localhost.sh | 99 ---------- scripts/stage_citrixhow.sh | 355 ++++++++++++++++++++++++++++++++++ scripts/stage_citrixhow_pc.sh | 66 +++++++ scripts/ts2019.sh | 8 +- scripts/we-ts2019.sh | 8 +- stage_workshop.sh | 2 +- 14 files changed, 666 insertions(+), 337 deletions(-) delete mode 100644 scripts/localhost.sh create mode 100644 scripts/stage_citrixhow.sh create mode 100644 scripts/stage_citrixhow_pc.sh diff --git a/cluster.txt 
b/cluster.txt index c75bee7..1c28991 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1 +1 @@ -10.42.9.37|techX2019!|willem@nutanix.com +10.42.86.37|techX2019!|willem@nutanix.com diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index a57bef7..3f9e024 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -106,13 +106,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && seedPC \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/calm.sh b/scripts/calm.sh index bc4840c..707e15e 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -97,12 +97,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/citrix.sh b/scripts/citrix.sh index 9851f00..f837313 100644 --- a/scripts/citrix.sh +++ b/scripts/citrix.sh @@ -148,13 +148,8 @@ EOF" pc_init \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi - + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 8981b48..596f1e4 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -1,19 +1,46 @@ #!/usr/bin/env bash # shellcheck disable=SC2034 -RELEASE='release.json' -PC_DEV_VERSION='5.10.3' -PC_CURRENT_VERSION='5.10.3' -PC_STABLE_VERSION='5.8.2' -FILES_VERSION='3.5.0' + RELEASE='release.json' +# Sync the following to lib.common.sh::ntnx_download-Case=PC +# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails +# - Find ${PC_VERSION} in the Additional Releases section on the lower right side +# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL + PC_DEV_VERSION='5.10.2' + PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' + PC_DEV_URL='' + PC_CURRENT_VERSION='5.10.2' + #PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_VERSION='5.8.2' + #PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' + #PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' +# Sync the following to lib.common.sh::ntnx_download-Case=FILES +# Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA +# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side +# - Provide "Upgrade Metadata File" URL to FILES_METAURL + FILES_VERSION='3.2.0.1' + #FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export 
FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' + # 2019-02-15: override until metadata URL fixed + # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + # Revert by overriding again... + #FILES_VERSION='3.2.0' + #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' + #FILES_URL= + NTNX_INIT_PASSWORD='nutanix/4u' -PRISM_ADMIN='admin' -SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" -STORAGE_POOL='SP01' -STORAGE_DEFAULT='Default' -STORAGE_IMAGES='Images' -ATTEMPTS=40 -SLEEP=60 + PRISM_ADMIN='admin' + SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" + STORAGE_POOL='SP01' + STORAGE_DEFAULT='Default' + STORAGE_IMAGES='Images' + ATTEMPTS=40 + SLEEP=60 # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' @@ -28,18 +55,32 @@ SSH_OPTS+=' -q' # -v' # ################################## -QCOW2_IMAGES=(\ + # Conventions for *_REPOS arrays -- the URL must end with either: + # - trailing slash, which imples _IMAGES argument to function repo_source() + # - or full package filename. 
+ + # https://stedolan.github.io/jq/download/#checksums_and_signatures + + #JQ_REPOS=(\ + #'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + #) + #QCOW2_REPOS=(\ + #'http://10.42.8.50/images/' \ + #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #) + + QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - #'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ - #'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) - -ISO_IMAGES=(\ + ISO_IMAGES=(\ CentOS7.iso \ Windows2016.iso \ Windows2012R2.iso \ @@ -48,153 +89,152 @@ ISO_IMAGES=(\ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ ) + # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso + # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 + + # https://pkgs.org/download/sshpass + # https://sourceforge.net/projects/sshpass/files/sshpass/ + #SSHPASS_REPOS=(\ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + #) # shellcheck disable=2206 -OCTET=(${PE_HOST//./ }) # zero index -IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} + OCTET=(${PE_HOST//./ }) # zero index + IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) -PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) -DNS_SERVERS='8.8.8.8' -NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' - -NW1_NAME='Primary' -NW1_VLAN=0 -NW1_SUBNET="${IPV4_PREFIX}.1/25" -NW1_DHCP_START="${IPV4_PREFIX}.50" -NW1_DHCP_END="${IPV4_PREFIX}.125" - -NW2_NAME='Secondary' -NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) -NW2_SUBNET="${IPV4_PREFIX}.129/25" 
-NW2_DHCP_START="${IPV4_PREFIX}.132" -NW2_DHCP_END="${IPV4_PREFIX}.253" - + PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) + DNS_SERVERS='8.8.8.8' + NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + NW1_NAME='Primary' + NW1_VLAN=0 +# Assuming HPOC defaults + NW1_SUBNET="${IPV4_PREFIX}.1/25" + NW1_DHCP_START="${IPV4_PREFIX}.50" + NW1_DHCP_END="${IPV4_PREFIX}.125" +# https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F +#SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' -SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' -SMTP_SERVER_PORT=25 - -AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file -AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" -LDAP_PORT=389 -AUTH_FQDN='ntnxlab.local' -AUTH_DOMAIN='NTNXLAB' + SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' + SMTP_SERVER_PORT=25 + + AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file + AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" + LDAP_PORT=389 + AUTH_FQDN='ntnxlab.local' + AUTH_DOMAIN='NTNXLAB' AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' + #AUTODC_REPOS=(\ + #'http://10.42.8.50/images/AutoDC.qcow2' \ + #'http://10.42.8.50/images/AutoDC2.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ +#) - -# For Nutanix HPOC/Marketing clusters (RTP 10.55, PHC 10.42, PHX 10.38) +# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) # https://sewiki.nutanix.com/index.php/HPOC_IP_Schema case "${OCTET[0]}.${OCTET[1]}" in + 10.20 ) #Marketing: us-west = SV + DNS_SERVERS='10.21.253.10' + ;; + 10.21 ) #HPOC: us-west = SV + if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then + log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' + exit 0 + fi + + # backup cluster; override relative IP addressing + if (( ${OCTET[2]} == 249 )); then + AUTH_HOST="${IPV4_PREFIX}.118" + PC_HOST="${IPV4_PREFIX}.119" + fi + DNS_SERVERS='10.21.253.10,10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' +PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' +PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' 
\ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' - DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' + DNS_SERVERS='10.55.251.10,10.55.251.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' +PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' +PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 
'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' + DNS_SERVERS='10.42.196.10,10.42.194.10 ' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; - 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - JQ_REPOS=(\ - 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ - ) - SSHPASS_REPOS=(\ - 
'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ - #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ - ) - QCOW2_REPOS=(\ - 'http://10.42.194.11/workshop_staging/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - ) - AUTODC_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - ) - PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) - DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" - ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - JQ_REPOS=(\ + JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" + NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json PC_STABLE_METAURL=${PC_CURRENT_METAURL} QCOW2_IMAGES=(\ @@ -209,6 +249,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ;; esac + # Find operating system and set dependencies if [[ -e /etc/lsb-release ]]; 
then # Linux Standards Base @@ -220,10 +261,7 @@ elif [[ $(uname -s) == 'Darwin' ]]; then OS_NAME='Darwin' fi -WC_ARG='-l' +WC_ARG='--lines' if [[ ${OS_NAME} == 'Darwin' ]]; then WC_ARG='-l' fi -if [[ ${OS_NAME} == 'alpine' ]]; then - WC_ARG='-l' -fi diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 9accf60..ec573c1 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -251,6 +251,11 @@ function fileserver() { remote_exec 'ssh' ${_host} \ "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" + + # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ + # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 + #AutoDC2: pending + #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable popd || exit ;; 'stop' ) @@ -282,6 +287,11 @@ function images() { local _source='source_uri' local _test + #which "$_cli" + #if (( $? > 0 )); then + # _cli='nuclei' + # _source='source_uri' + #fi ####################################### # For doing Disk IMAGES @@ -295,7 +305,10 @@ function images() { && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - + #else + # _test=$(source /etc/profile.d/nutanix_env.sh \ + # && ${_cli} image.list 2>&1 \ + # | grep "${_image}") fi if [[ ! -z ${_test} ]]; then @@ -322,11 +335,6 @@ function images() { _name=acs-centos fi - # TODO:0 TOFIX: acs-centos ugly override for today... 
- if (( $(echo "${_image}" | grep -i 'centos7-0.0' | wc --lines ) > 0 )); then - _name=karbon-centos7.5.1804-ntnx-0.0 - fi - if [[ ${_cli} == 'acli' ]]; then _image_type='kDiskImage' _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 40f5d35..7a95718 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -180,10 +180,10 @@ function karbon_enable() { local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" - + # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... if [[ $_response -eq 1 ]]; then # Check if Karbon has been enabled @@ -203,43 +203,7 @@ function karbon_enable() { log "Karbon has been enabled." 
fi fi - fi -} - -############################################################################################################################################################################### -# Download Karbon CentOS Image -############################################################################################################################################################################### - -function karbon_image_download() { - local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - local _loop=0 - local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" - local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " - local _httpURL="https://localhost:7050/acs/image/download" - - # Start the enablement process - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - - # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... - if [[ $_response -eq 1 ]]; then - # Check if Karbon has been enabled - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) - while [ $_response -ne 1 ]; do - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) - done - log "Karbon has been enabled." - else - log "Retrying to enable Karbon one more time." 
- _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - if [[ $_response -eq 1 ]]; then - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) - if [ $_response -lt 1 ]; then - log "Karbon CentOS image has not been downloaded." - else - log "Karbon CentOS image has been downloaded." - fi - fi - fi + fi } ############################################################################################################################################################################### @@ -464,26 +428,7 @@ function pc_passwd() { } ############################################################################################################################################################################### -# Seed PC data for Prism Pro Labs -############################################################################################################################################################################### - -function seedPC() { - local _test - local _setup - - _test=$(curl -L ${PC_DATA} -o /home/nutanix/seedPC.zip) - log "Pulling Prism Data| PC_DATA ${PC_DATA}|${_test}" - unzip /home/nutanix/seedPC.zip - pushd /home/nutanix/lab/ - - _setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) - log "Running Setup Script|$_setup" - - popd -} - -############################################################################################################################################################################### -# Routine to setp up the SSP authentication to use the AutoDC server +# Routine to setp up the SSP authentication to use the AutoDC1 or 2 server ############################################################################################################################################################################### function ssp_auth() { diff --git 
a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 7e52b44..27577f7 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -59,6 +59,14 @@ function authentication_source() { _autodc_restart="sleep 2 && service ${_autodc_service} stop && sleep 5 && service ${_autodc_service} start" _autodc_status="service ${_autodc_service} status" _autodc_success=' * status: started' + + # REVIEW: override global.vars + #export AUTODC_REPOS=(\ + #'http://10.42.8.50/images/AutoDC2.qcow2' \ + #'http://10.42.8.50/images/AutoDC.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + #) fi dns_check "dc${_autodc_index}.${AUTH_FQDN}" @@ -229,10 +237,51 @@ function cluster_check() { local _test_exit local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - log "PC is version 5.8, enabling and checking" - # Enable the PE to PC registration - _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" - _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') + # shellcheck disable=2206 + #_pc_version=(${PC_VERSION//./ }) + + #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then + # log "PC>=5.10, checking multicluster state..." + # while true ; do + # (( _loop++ )) + + # _test=$(ncli --json=true multicluster get-cluster-state | jq -r .data[0].clusterDetails.multicluster) + # _test_exit=$? + # log "Cluster status: |${_test}|, exit: ${_test_exit}." + + # if [[ ${_test} != 'true' ]]; then + # _test=$(ncli multicluster add-to-multicluster \ + # external-ip-address-or-svm-ips=${PC_HOST} \ + # username=${PRISM_ADMIN} password=${PE_PASSWORD}) + # _test_exit=$? + # log "Manual join PE to PC = |${_test}|, exit: ${_test_exit}." 
+ # fi + + # _test=$(ncli --json=true multicluster get-cluster-state | \ + # jq -r .data[0].clusterDetails.multicluster) + # _test_exit=$? + # log "Cluster status: |${_test}|, exit: ${_test_exit}." + + # if [[ ${_test} == 'true' ]]; then + # log "PE to PC = cluster registration: successful." + # return 0 + # elif (( ${_loop} > ${_attempts} )); then + # log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + # return ${_error} + # else + # log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." + # sleep ${_sleep} + # fi + # done + #fi + + #if (( ${_pc_version[0]} -ge 5 && ${_pc_version[1]} -eq 8 )); then + log "PC is version 5.8, enabling and checking" + # Enable the PE to PC registration + _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" + _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') + #fi + } ############################################################################################################################################################################### @@ -417,32 +466,21 @@ function pe_auth() { # Routine set PE's initial configuration ############################################################################################################################################################################### function pe_init() { - if [[ -z ${SMTP_SERVER_ADDRESS} ]]; then - # We are not running in HPOC so email is not needed, unless set manually - args_required 'DATA_SERVICE_IP EMAIL \ - STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ - SLEEP ATTEMPTS' - else - args_required 'DATA_SERVICE_IP EMAIL \ - SMTP_SERVER_ADDRESS SMTP_SERVER_FROM SMTP_SERVER_PORT \ - STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ - SLEEP ATTEMPTS' - fi + args_required 'DATA_SERVICE_IP EMAIL \ + SMTP_SERVER_ADDRESS SMTP_SERVER_FROM 
SMTP_SERVER_PORT \ + STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ + SLEEP ATTEMPTS' if [[ `ncli cluster get-params | grep 'External Data' | \ awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then log "IDEMPOTENCY: Data Services IP set, skip." else - # If we are running in HPOC so email can be send and defined - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - log "Configure SMTP" - - ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ + log "Configure SMTP" + ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS} - ${HOME}/serviceability/bin/email-alerts --to_addresses="${EMAIL}" \ - --subject="[pe_init:Config SMTP:alert test] $(ncli cluster get-params)" \ - && ${HOME}/serviceability/bin/send-email - fi + ${HOME}/serviceability/bin/email-alerts --to_addresses="${EMAIL}" \ + --subject="[pe_init:Config SMTP:alert test] $(ncli cluster get-params)" \ + && ${HOME}/serviceability/bin/send-email log "Configure NTP" ncli cluster add-to-ntp-servers servers=${NTP_SERVERS} diff --git a/scripts/localhost.sh b/scripts/localhost.sh deleted file mode 100644 index a2896cd..0000000 --- a/scripts/localhost.sh +++ /dev/null @@ -1,99 +0,0 @@ - - -HTTP_CACHE_HOST='localhost' -HTTP_CACHE_PORT=8181 - - AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file - AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" - LDAP_PORT=389 - AUTH_FQDN='ntnxlab.local' - AUTH_DOMAIN='NTNXLAB' -AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} -AUTH_ADMIN_PASS='nutanix/4u' -AUTH_ADMIN_GROUP='SSP Admins' - AUTODC_REPOS=(\ - 'http://10.42.8.50/images/AutoDC.qcow2' \ - 'http://10.42.8.50/images/AutoDC2.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - - #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - 
#'http://10.59.103.143:8000/autodc-2.0.qcow2' \ - -) - -# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) -# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema -case "${OCTET[0]}.${OCTET[1]}" in - 10.20 ) #Marketing: us-west = SV - DNS_SERVERS='10.21.253.10' - ;; - 10.21 ) #HPOC: us-west = SV - if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then - log 'GPU cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' - exit 0 - fi - - # backup cluster; override relative IP addressing - if (( ${OCTET[2]} == 249 )); then - AUTH_HOST="${IPV4_PREFIX}.118" - PC_HOST="${IPV4_PREFIX}.119" - fi - - DNS_SERVERS='10.21.253.10,10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.55 ) # HPOC us-east = DUR - DNS_SERVERS='10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.42 ) # HPOC us-west = PHX - DNS_SERVERS='10.42.196.10' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" - NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" - # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_CURRENT_METAURL} - - QCOW2_IMAGES=(\ - Centos7-Base.qcow2 \ - Centos7-Update.qcow2 \ - Windows2012R2.qcow2 \ - 
panlm-img-52.qcow2 \ - kx_k8s_01.qcow2 \ - kx_k8s_02.qcow2 \ - kx_k8s_03.qcow2 \ - ) - ;; -esac - -HTTP_CACHE_HOST='localhost' -HTTP_CACHE_PORT=8181 - - ATTEMPTS=40 - SLEEP=60 # pause (in seconds) between ATTEMPTS - - CURL_OPTS='--insecure --silent --show-error' # --verbose' -CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null" -CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" - SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' - SSH_OPTS+=' -q' # -v' diff --git a/scripts/stage_citrixhow.sh b/scripts/stage_citrixhow.sh new file mode 100644 index 0000000..6ae85a5 --- /dev/null +++ b/scripts/stage_citrixhow.sh @@ -0,0 +1,355 @@ +#!/bin/bash +# +# Please configure according to your needs +# +function pc_remote_exec { + sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null nutanix@10.21.${MY_HPOC_NUMBER}.39 "$@" +} +function pc_send_file { + sshpass -p nutanix/4u scp -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null "$1" nutanix@10.21.${MY_HPOC_NUMBER}.39:/home/nutanix/"$1" +} + +# Loging date format +#Never:0 Make logging format configurable +#MY_LOG_DATE='date +%Y-%m-%d %H:%M:%S' +# Script file name +MY_SCRIPT_NAME=`basename "$0"` +# Derive HPOC number from IP 3rd byte +#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1) +MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') +array=(${MY_CVM_IP//./ }) +MY_HPOC_NUMBER=${array[2]} +# HPOC Password (if commented, we assume we get that from environment) +#MY_PE_PASSWORD='nx2TechXXX!' 
+STORAGE_POOL='SP01' +STORAGE_DEFAULT='Default' +STORAGE_IMAGES='Images' +MY_DOMAIN_FQDN='ntnxlab.local' +MY_DOMAIN_NAME='NTNXLAB' +MY_DOMAIN_USER='administrator@ntnxlab.local' +MY_DOMAIN_PASS='nutanix/4u' +MY_DOMAIN_ADMIN_GROUP='SSP Admins' +MY_DOMAIN_URL="ldaps://10.21.${MY_HPOC_NUMBER}.40/" +MY_PRIMARY_NET_NAME='Primary' +MY_PRIMARY_NET_VLAN='0' +MY_SECONDARY_NET_NAME='Secondary' +MY_SECONDARY_NET_VLAN="${MY_HPOC_NUMBER}1" +MY_PC_SRC_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central.tar' +MY_PC_META_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central_metadata.json' +MY_AFS_SRC_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable.qcow2' +MY_AFS_META_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable-metadata.json' + +# From this point, we assume: +# IP Range: 10.21.${MY_HPOC_NUMBER}.0/25 +# Gateway: 10.21.${MY_HPOC_NUMBER}.1 +# DNS: 10.21.253.10,10.21.253.11 +# Domain: nutanixdc.local +# DHCP Pool: 10.21.${MY_HPOC_NUMBER}.50 - 10.21.${MY_HPOC_NUMBER}.120 +# +# DO NOT CHANGE ANYTHING BELOW THIS LINE UNLESS YOU KNOW WHAT YOU'RE DOING!! 
+# +# Source Nutanix environments (for PATH and other things) +source /etc/profile.d/nutanix_env.sh +# Logging function +function my_log { + #echo `$MY_LOG_DATE`" $1" + echo $(date "+%Y-%m-%d %H:%M:%S") $1 +} +# Check if we got a password from environment or from the settings above, otherwise exit before doing anything +if [[ -z ${MY_PE_PASSWORD+x} ]]; then + my_log "No password provided, exiting" + exit -1 +fi +my_log "My PID is $$" +my_log "Installing sshpass" +sudo rpm -ivh https://fr2.rpmfind.net/linux/epel/7/x86_64/Packages/s/sshpass-1.06-1.el7.x86_64.rpm +# Configure SMTP +my_log "Configure SMTP" +ncli cluster set-smtp-server address=nutanix-com.mail.protection.outlook.com from-email-address=cluster@nutanix.com port=25 +# Configure NTP +my_log "Configure NTP" +ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org +# Rename default storage container to STORAGE_DEFAULT +my_log "Rename default container to ${STORAGE_DEFAULT}" +default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') +ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" +# Rename default storage pool to STORAGE_POOL +my_log "Rename default storage pool to ${STORAGE_POOL}" +default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) +ncli sp edit name="${default_sp}" new-name="${STORAGE_POOL}" +# Check if there is a container named STORAGE_IMAGES, if not create one +my_log "Check if there is a container named ${STORAGE_IMAGES}, if not create one" +(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^${STORAGE_IMAGES}" 2>&1 > /dev/null) \ + && echo "Container ${STORAGE_IMAGES} already exists" \ + || ncli container create name="${STORAGE_IMAGES}" sp-name="${STORAGE_POOL}" +# Set external IP address: +#ncli cluster edit-params external-ip-address=10.21.${MY_HPOC_NUMBER}.37 
+# Set Data Services IP address: +my_log "Set Data Services IP address to 10.21.${MY_HPOC_NUMBER}.38" +ncli cluster edit-params external-data-services-ip-address=10.21.${MY_HPOC_NUMBER}.38 + +# Importing images +MY_IMAGE="AutoDC" +retries=1 +my_log "Importing ${MY_IMAGE} image" +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2 wait=true) =~ "complete" ]]; do + let retries++ + if [ $retries -gt 5 ]; then + my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." + acli vm.create STAGING-FAILED-${MY_IMAGE} + break + fi + my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." + sleep 5 +done + +MY_IMAGE="CentOS" +retries=1 +my_log "Importing ${MY_IMAGE} image" +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/CentOS7-04282018.qcow2 wait=true) =~ "complete" ]]; do + let retries++ + if [ $retries -gt 5 ]; then + my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." + acli vm.create STAGING-FAILED-${MY_IMAGE} + break + fi + my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." + sleep 5 +done + +MY_IMAGE="Windows2012" +retries=1 +my_log "Importing ${MY_IMAGE} image" +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows2012R2-04282018.qcow2 wait=true) =~ "complete" ]]; do + let retries++ + if [ $retries -gt 5 ]; then + my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." + acli vm.create STAGING-FAILED-${MY_IMAGE} + break + fi + my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." 
+ sleep 5 +done + +MY_IMAGE="Windows10" +retries=1 +my_log "Importing ${MY_IMAGE} image" +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows10-1709-04282018.qcow2 wait=true) =~ "complete" ]]; do + let retries++ + if [ $retries -gt 5 ]; then + my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." + acli vm.create STAGING-FAILED-${MY_IMAGE} + break + fi + my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." + sleep 5 +done + +MY_IMAGE="XenDesktop-7.15.iso" +retries=1 +my_log "Importing ${MY_IMAGE} image" +until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kIsoImage source_url=http://10.21.250.221/images/ahv/techsummit/XD715.iso wait=true) =~ "complete" ]]; do + let retries++ + if [ $retries -gt 5 ]; then + my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." + acli vm.create STAGING-FAILED-${MY_IMAGE} + break + fi + my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." 
+ sleep 5 +done + +# Remove existing VMs, if any +my_log "Removing \"Windows 2012\" VM if it exists" +acli -y vm.delete Windows\ 2012\ VM delete_snapshots=true +my_log "Removing \"Windows 10\" VM if it exists" +acli -y vm.delete Windows\ 10\ VM delete_snapshots=true +my_log "Removing \"CentOS\" VM if it exists" +acli -y vm.delete CentOS\ VM delete_snapshots=true + +# Remove Rx-Automation-Network network +my_log "Removing \"Rx-Automation-Network\" Network if it exists" +acli -y net.delete Rx-Automation-Network + +# Create primary network +my_log "Create primary network:" +my_log "Name: ${MY_PRIMARY_NET_NAME}" +my_log "VLAN: ${MY_PRIMARY_NET_VLAN}" +my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.1/25" +my_log "Domain: ${MY_DOMAIN_NAME}" +my_log "Pool: 10.21.${MY_HPOC_NUMBER}.50 to 10.21.${MY_HPOC_NUMBER}.125" +acli net.create ${MY_PRIMARY_NET_NAME} vlan=${MY_PRIMARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.1/25 +acli net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME} +acli net.add_dhcp_pool ${MY_PRIMARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.50 end=10.21.${MY_HPOC_NUMBER}.125 + +# Create secondary network +if [[ ${MY_SECONDARY_NET_NAME} ]]; then + my_log "Create secondary network:" + my_log "Name: ${MY_SECONDARY_NET_NAME}" + my_log "VLAN: ${MY_SECONDARY_NET_VLAN}" + my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.129/25" + my_log "Domain: ${MY_DOMAIN_NAME}" + my_log "Pool: 10.21.${MY_HPOC_NUMBER}.132 to 10.21.${MY_HPOC_NUMBER}.253" + acli net.create ${MY_SECONDARY_NET_NAME} vlan=${MY_SECONDARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.129/25 + acli net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME} + acli net.add_dhcp_pool ${MY_SECONDARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.132 end=10.21.${MY_HPOC_NUMBER}.253 +fi + +# Create AutoDC & power on +my_log "Create DC VM based on AutoDC image" +acli vm.create DC num_vcpus=2 num_cores_per_vcpu=1 
memory=4G +acli vm.disk_create DC cdrom=true empty=true +acli vm.disk_create DC clone_from_image=AutoDC +acli vm.nic_create DC network=${MY_PRIMARY_NET_NAME} ip=10.21.${MY_HPOC_NUMBER}.40 +my_log "Power on DC VM" +acli vm.on DC + +# Need to wait for AutoDC to be up (30?60secs?) +my_log "Waiting 60sec to give DC VM time to start" +sleep 60 + +# Configure PE external authentication +my_log "Configure PE external authentication" +ncli authconfig add-directory directory-type=ACTIVE_DIRECTORY connection-type=LDAP directory-url="${MY_DOMAIN_URL}" domain="${MY_DOMAIN_FQDN}" name="${MY_DOMAIN_NAME}" service-account-username="${MY_DOMAIN_USER}" service-account-password="${MY_DOMAIN_PASS}" + +# Configure PE role mapping +my_log "Configure PE role mapping" +ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${MY_DOMAIN_NAME}" entity-values="${MY_DOMAIN_ADMIN_GROUP}" + +# Reverse Lookup Zone +my_log "Creating Reverse Lookup Zone on DC VM" +sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ +root@10.21.${MY_HPOC_NUMBER}.40 "samba-tool dns zonecreate dc1 ${MY_HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart" + +# Create custom OUs +sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ +root@10.21.${MY_HPOC_NUMBER}.40 "apt install ldb-tools -y -q" + +sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ +root@10.21.${MY_HPOC_NUMBER}.40 "cat << EOF > ous.ldif +dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local +changetype: add +objectClass: top +objectClass: organizationalunit +description: Non-Persistent Desktop OU + +dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local +changetype: add +objectClass: top +objectClass: organizationalunit +description: Persistent Desktop OU + +dn: OU=XenAppServer,DC=NTNXLAB,DC=local +changetype: add +objectClass: 
top +objectClass: organizationalunit +description: XenApp Server OU +EOF" + +sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ +root@10.21.${MY_HPOC_NUMBER}.40 "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart" + +# Provision local Prism account for XD MCS Plugin +my_log "Create PE user account xd for MCS Plugin" +ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com +ncli user grant-cluster-admin-role user-name=xd + +# Get UUID from cluster +my_log "Get UUIDs from cluster:" +MY_NET_UUID=$(acli net.get ${MY_PRIMARY_NET_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs) +my_log "${MY_PRIMARY_NET_NAME} UUID is ${MY_NET_UUID}" +MY_CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) +my_log "${STORAGE_DEFAULT} UUID is ${MY_CONTAINER_UUID}" + +# Validate EULA on PE +my_log "Validate EULA on PE" +curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST \ + https://127.0.0.1:9440/PrismGateway/services/rest/v1/eulas/accept \ + -d '{ + "username": "SE", + "companyName": "NTNX", + "jobTitle": "SE" +}' + +# Disable Pulse in PE +my_log "Disable Pulse in PE" +curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X PUT \ + https://127.0.0.1:9440/PrismGateway/services/rest/v1/pulse \ + -d '{ + "defaultNutanixEmail": null, + "emailContactList": null, + "enable": false, + "enableDefaultNutanixEmail": false, + "isPulsePromptNeeded": false, + "nosVersion": null, + "remindLater": null, + "verbosityType": null +}' + +# AFS Download +my_log "Download AFS image from ${MY_AFS_SRC_URL}" +wget -nv ${MY_AFS_SRC_URL} +my_log "Download AFS metadata JSON from ${MY_AFS_META_URL}" +wget -nv ${MY_AFS_META_URL} + +# Staging AFS +my_log "Stage AFS" +ncli software upload file-path=/home/nutanix/${MY_AFS_SRC_URL##*/} 
meta-file-path=/home/nutanix/${MY_AFS_META_URL##*/} software-type=FILE_SERVER + +# Freeing up space +my_log "Delete AFS sources to free some space" +rm ${MY_AFS_SRC_URL##*/} ${MY_AFS_META_URL##*/} + +# Prism Central Download +my_log "Download PC tarball from ${MY_PC_SRC_URL}" +wget -nv ${MY_PC_SRC_URL} +my_log "Download PC metadata JSON from ${MY_PC_META_URL}" +wget -nv ${MY_PC_META_URL} + +# Staging Prism Central +my_log "Stage Prism Central" +ncli software upload file-path=/home/nutanix/${MY_PC_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_PC_META_URL##*/} software-type=PRISM_CENTRAL_DEPLOY + +# Freeing up space +my_log "Delete PC sources to free some space" +rm ${MY_PC_SRC_URL##*/} ${MY_PC_META_URL##*/} + +# Deploy Prism Central +my_log "Deploy Prism Central" +MY_DEPLOY_BODY=$(cat <> pcconfig.log 2>&1 &" +my_log "Removing sshpass" +sudo rpm -e sshpass +my_log "PE Configuration complete" diff --git a/scripts/stage_citrixhow_pc.sh b/scripts/stage_citrixhow_pc.sh new file mode 100644 index 0000000..2cacfda --- /dev/null +++ b/scripts/stage_citrixhow_pc.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +#MY_PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz' + +# Script file name +MY_SCRIPT_NAME=`basename "$0"` + +# Source Nutanix environments (for PATH and other things) +. /etc/profile.d/nutanix_env.sh +. 
lib.common.sh # source common routines +Dependencies 'install'; + +# Derive HPOC number from IP 3rd byte +#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1) + MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') + array=(${MY_CVM_IP//./ }) +MY_HPOC_NUMBER=${array[2]} + +CURL_OPTS="${CURL_OPTS} --user admin:${MY_PE_PASSWORD}" #lib.common.sh initialized +#CURL_OPTS="${CURL_OPTS} --verbose" + +# Set Prism Central Password to Prism Element Password +my_log "Setting PC password to PE password" +ncli user reset-password user-name="admin" password="${MY_PE_PASSWORD}" + +# Add NTP Server\ +my_log "Configure NTP on PC" +ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org + +# Accept Prism Central EULA +my_log "Validate EULA on PC" +curl ${CURL_OPTS} \ + https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/eulas/accept \ + -d '{ + "username": "SE", + "companyName": "NTNX", + "jobTitle": "SE" +}' + +# Disable Prism Central Pulse +my_log "Disable Pulse on PC" +curl ${CURL_OPTS} -X PUT \ + https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/pulse \ + -d '{ + "emailContactList":null, + "enable":false, + "verbosityType":null, + "enableDefaultNutanixEmail":false, + "defaultNutanixEmail":null, + "nosVersion":null, + "isPulsePromptNeeded":false, + "remindLater":null +}' + +# Prism Central upgrade +#my_log "Download PC upgrade image: ${MY_PC_UPGRADE_URL##*/}" +#wget -nv ${MY_PC_UPGRADE_URL} + +#my_log "Prepare PC upgrade image" +#tar -xzf ${MY_PC_UPGRADE_URL##*/} +#rm ${MY_PC_UPGRADE_URL##*/} + +#my_log "Upgrade PC" +#cd /home/nutanix/install ; ./bin/cluster -i . 
-p upgrade + +my_log "PC Configuration complete on `$date`" diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index a82c281..a1d5520 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,12 +132,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh index 42a9ede..86f6bf9 100644 --- a/scripts/we-ts2019.sh +++ b/scripts/we-ts2019.sh @@ -130,12 +130,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/stage_workshop.sh b/stage_workshop.sh index 15c62ac..8fdc503 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -7,7 +7,7 @@ DEBUG= # - PC #.# WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ -"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ +#"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ "Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed From 5d84cb7b6f32a6b125433d1076480dd40a604e08 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 09:54:50 -0700 Subject: [PATCH 112/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index 42ec6dd..b68142c 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location 
https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=jncox + ORGANIZATION=nutanixworkshops REPOSITORY=stageworkshop BRANCH=master else From 0d501187730aaf93bccd2b244c3fc5c4e7e58bea Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 10:17:14 -0700 Subject: [PATCH 113/691] Additional Cleanup --- scripts/bootcamp.sh | 9 +- scripts/calm.sh | 8 +- scripts/global.vars.sh | 270 ++++++++++++++++++----------------------- scripts/lib.common.sh | 15 +-- scripts/localhost.sh | 99 +++++++++++++++ scripts/ts2019.sh | 8 +- 6 files changed, 236 insertions(+), 173 deletions(-) create mode 100644 scripts/localhost.sh diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 3f9e024..a57bef7 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -106,8 +106,13 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && seedPC \ + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ diff --git a/scripts/calm.sh b/scripts/calm.sh index 707e15e..bc4840c 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -97,8 +97,12 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 596f1e4..f622f6b 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -1,46 +1,19 @@ #!/usr/bin/env bash # shellcheck disable=SC2034 - RELEASE='release.json' -# Sync the following to lib.common.sh::ntnx_download-Case=PC -# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails -# - Find ${PC_VERSION} in the Additional Releases section on the lower right side -# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL - PC_DEV_VERSION='5.10.2' - PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' - PC_DEV_URL='' - PC_CURRENT_VERSION='5.10.2' - #PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - PC_STABLE_VERSION='5.8.2' - #PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' - #PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' -# Sync the following to lib.common.sh::ntnx_download-Case=FILES -# Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA -# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side -# - Provide "Upgrade Metadata File" URL to FILES_METAURL - FILES_VERSION='3.2.0.1' - #FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' - # 2019-02-15: override until metadata 
URL fixed - # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - # Revert by overriding again... - #FILES_VERSION='3.2.0' - #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' - #FILES_URL= - +RELEASE='release.json' +PC_DEV_VERSION='5.10.3' +PC_CURRENT_VERSION='5.10.3' +PC_STABLE_VERSION='5.8.2' +FILES_VERSION='3.5.0' NTNX_INIT_PASSWORD='nutanix/4u' - PRISM_ADMIN='admin' - SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" - STORAGE_POOL='SP01' - STORAGE_DEFAULT='Default' - STORAGE_IMAGES='Images' - ATTEMPTS=40 - SLEEP=60 +PRISM_ADMIN='admin' +SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" +STORAGE_POOL='SP01' +STORAGE_DEFAULT='Default' +STORAGE_IMAGES='Images' +ATTEMPTS=40 +SLEEP=60 # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' @@ -55,32 +28,18 @@ SSH_OPTS+=' -q' # -v' # ################################## - # Conventions for *_REPOS arrays -- the URL must end with either: - # - trailing slash, which imples _IMAGES argument to function repo_source() - # - or full package filename. 
- - # https://stedolan.github.io/jq/download/#checksums_and_signatures - - #JQ_REPOS=(\ - #'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ - #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ - #) - #QCOW2_REPOS=(\ - #'http://10.42.8.50/images/' \ - #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #) - - QCOW2_IMAGES=(\ +QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + #'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ + #'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) - ISO_IMAGES=(\ + +ISO_IMAGES=(\ CentOS7.iso \ Windows2016.iso \ Windows2012R2.iso \ @@ -89,152 +48,155 @@ SSH_OPTS+=' -q' # -v' SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ ) - # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso - # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 - - # https://pkgs.org/download/sshpass - # https://sourceforge.net/projects/sshpass/files/sshpass/ - #SSHPASS_REPOS=(\ - #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ - #) # shellcheck disable=2206 - OCTET=(${PE_HOST//./ }) # zero index - IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} +OCTET=(${PE_HOST//./ }) # zero index +IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) - PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) - DNS_SERVERS='8.8.8.8' - NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' - NW1_NAME='Primary' - NW1_VLAN=0 -# Assuming HPOC defaults - NW1_SUBNET="${IPV4_PREFIX}.1/25" - NW1_DHCP_START="${IPV4_PREFIX}.50" - NW1_DHCP_END="${IPV4_PREFIX}.125" -# 
https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F -#SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' -SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' - SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' - SMTP_SERVER_PORT=25 +PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) +DNS_SERVERS='8.8.8.8' +NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + +NW1_NAME='Primary' +NW1_VLAN=0 +NW1_SUBNET="${IPV4_PREFIX}.1/25" +NW1_DHCP_START="${IPV4_PREFIX}.50" +NW1_DHCP_END="${IPV4_PREFIX}.125" + +NW2_NAME='Secondary' +NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) +NW2_SUBNET="${IPV4_PREFIX}.129/25" +NW2_DHCP_START="${IPV4_PREFIX}.132" +NW2_DHCP_END="${IPV4_PREFIX}.253" - AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file - AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" - LDAP_PORT=389 - AUTH_FQDN='ntnxlab.local' - AUTH_DOMAIN='NTNXLAB' +SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' +SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' +SMTP_SERVER_PORT=25 + +AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file +AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" +LDAP_PORT=389 +AUTH_FQDN='ntnxlab.local' +AUTH_DOMAIN='NTNXLAB' AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' - #AUTODC_REPOS=(\ - #'http://10.42.8.50/images/AutoDC.qcow2' \ - #'http://10.42.8.50/images/AutoDC2.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ -#) -# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) + +# For Nutanix HPOC/Marketing clusters (RTP 10.55, PHC 10.42, PHX 10.38) # https://sewiki.nutanix.com/index.php/HPOC_IP_Schema case "${OCTET[0]}.${OCTET[1]}" in - 10.20 ) #Marketing: us-west = SV - DNS_SERVERS='10.21.253.10' - ;; - 10.21 ) #HPOC: us-west = 
SV - if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then - log 'GPU cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' - exit 0 - fi - # backup cluster; override relative IP addressing - if (( ${OCTET[2]} == 249 )); then - AUTH_HOST="${IPV4_PREFIX}.118" - PC_HOST="${IPV4_PREFIX}.119" - fi - - DNS_SERVERS='10.21.253.10,10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; 10.55 ) # HPOC us-east = DUR -PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' -PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - 
SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.55.251.10,10.55.251.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' + DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' ;; 10.42 ) # HPOC us-west = PHX -PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' -PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' + 
FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.42.196.10,10.42.194.10 ' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' ;; + 10.38 ) # HPOC us-west = PHX 1-Node Clusters + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + JQ_REPOS=(\ + 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' 
\ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + SSHPASS_REPOS=(\ + 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) + QCOW2_REPOS=(\ + 'http://10.42.194.11/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + ) + AUTODC_REPOS=(\ + 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" + ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - JQ_REPOS=(\ + JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) + + 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" + NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json 
PC_STABLE_METAURL=${PC_CURRENT_METAURL} QCOW2_IMAGES=(\ @@ -249,7 +211,6 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' ;; esac - # Find operating system and set dependencies if [[ -e /etc/lsb-release ]]; then # Linux Standards Base @@ -261,7 +222,10 @@ elif [[ $(uname -s) == 'Darwin' ]]; then OS_NAME='Darwin' fi -WC_ARG='--lines' +WC_ARG='-l' if [[ ${OS_NAME} == 'Darwin' ]]; then WC_ARG='-l' fi +if [[ ${OS_NAME} == 'alpine' ]]; then + WC_ARG='-l' +fi diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index ec573c1..b62137f 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -251,11 +251,6 @@ function fileserver() { remote_exec 'ssh' ${_host} \ "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" - - # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ - # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 - #AutoDC2: pending - #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable popd || exit ;; 'stop' ) @@ -287,11 +282,6 @@ function images() { local _source='source_uri' local _test - #which "$_cli" - #if (( $? > 0 )); then - # _cli='nuclei' - # _source='source_uri' - #fi ####################################### # For doing Disk IMAGES @@ -305,10 +295,7 @@ function images() { && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - #else - # _test=$(source /etc/profile.d/nutanix_env.sh \ - # && ${_cli} image.list 2>&1 \ - # | grep "${_image}") + fi if [[ ! 
-z ${_test} ]]; then diff --git a/scripts/localhost.sh b/scripts/localhost.sh new file mode 100644 index 0000000..a2896cd --- /dev/null +++ b/scripts/localhost.sh @@ -0,0 +1,99 @@ + + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + + AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file + AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" + LDAP_PORT=389 + AUTH_FQDN='ntnxlab.local' + AUTH_DOMAIN='NTNXLAB' +AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} +AUTH_ADMIN_PASS='nutanix/4u' +AUTH_ADMIN_GROUP='SSP Admins' + AUTODC_REPOS=(\ + 'http://10.42.8.50/images/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + +) + +# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) +# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema +case "${OCTET[0]}.${OCTET[1]}" in + 10.20 ) #Marketing: us-west = SV + DNS_SERVERS='10.21.253.10' + ;; + 10.21 ) #HPOC: us-west = SV + if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then + log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' + exit 0 + fi + + # backup cluster; override relative IP addressing + if (( ${OCTET[2]} == 249 )); then + AUTH_HOST="${IPV4_PREFIX}.118" + PC_HOST="${IPV4_PREFIX}.119" + fi + + DNS_SERVERS='10.21.253.10,10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.55 ) # HPOC us-east = DUR + DNS_SERVERS='10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.42 ) # HPOC us-west = PHX + DNS_SERVERS='10.42.196.10' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + NW1_DHCP_START="${IPV4_PREFIX}.100" + NW1_DHCP_END="${IPV4_PREFIX}.250" + # PC deploy file local override, TODO:30 make an PC_URL array and eliminate + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_CURRENT_METAURL} + + QCOW2_IMAGES=(\ + Centos7-Base.qcow2 \ + Centos7-Update.qcow2 \ + Windows2012R2.qcow2 \ + panlm-img-52.qcow2 \ + kx_k8s_01.qcow2 \ + kx_k8s_02.qcow2 \ + kx_k8s_03.qcow2 \ + ) + ;; +esac + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + + ATTEMPTS=40 + SLEEP=60 # pause (in seconds) between ATTEMPTS + + CURL_OPTS='--insecure --silent --show-error' # --verbose' +CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output 
/dev/null" +CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" + SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' + SSH_OPTS+=' -q' # -v' diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index a1d5520..a82c281 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,8 +132,12 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ From 55fe58887f7038723cc3d7def409a19834d2dcda Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 10:18:17 -0700 Subject: [PATCH 114/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index b68142c..42ec6dd 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -7,7 +7,7 @@ # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} if [[ -z ${SOURCE} ]]; then - ORGANIZATION=nutanixworkshops + ORGANIZATION=jncox REPOSITORY=stageworkshop BRANCH=master else From 6efec4f8695fee4024eba5f2f239c2217acb8013 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 11:07:20 -0700 Subject: [PATCH 115/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 8fdc503..264740c 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -22,7 +22,7 @@ function stage_clusters() { local _pe_launch # will be transferred and executed on PE local _pc_launch # will be transferred and executed on PC local _sshkey=${SSH_PUBKEY} - local _wc_arg='--lines' + #local _wc_arg='--lines' local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]} # Map to latest and greatest of each point release From 
5ba1ae742d153c8533578864cbc66c84a6a40375 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 11:33:12 -0700 Subject: [PATCH 116/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index b68142c..0f0b6e3 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -83,7 +83,7 @@ if [[ -z ${EMAIL} ]]; then read -p "REQUIRED: Email address for cluster admin? " EMAIL fi -_WC_ARG='--lines' +_WC_ARG='-l' if [[ $(uname -s) == 'Darwin' ]]; then _WC_ARG='-l' fi From 65babdf179689c38d83ab7bf02abef2fe9470ae4 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 12:00:04 -0700 Subject: [PATCH 117/691] Updates --- bootstrap.sh | 4 ++-- stage_workshop.sh | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index 0f0b6e3..ff14681 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -83,7 +83,7 @@ if [[ -z ${EMAIL} ]]; then read -p "REQUIRED: Email address for cluster admin? " EMAIL fi -_WC_ARG='-l' +_WC_ARG='--list' if [[ $(uname -s) == 'Darwin' ]]; then _WC_ARG='-l' fi @@ -123,7 +123,7 @@ fi PE_HOST=${PE_HOST} \ PRISM_ADMIN=${PRISM_ADMIN} \ PE_PASSWORD=${PE_PASSWORD} \ -./stage_workshop.sh -f - ${MY_WORKSHOP} # \ +bash -x ./stage_workshop.sh -f ${MY_WORKSHOP} # \ # && popd || exit echo -e "\n DONE: ${0} ran for ${SECONDS} seconds." 
diff --git a/stage_workshop.sh b/stage_workshop.sh index 264740c..f2ef491 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -34,12 +34,6 @@ function stage_clusters() { export PC_VERSION="${PC_CURRENT_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.9" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.9.2 - elif (( $(echo ${_workshop} | grep -i "PC 5.7" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.7.1.1 - elif (( $(echo ${_workshop} | grep -i "PC 5.6" | wc ${WC_ARG}) > 0 )); then - export PC_VERSION=5.6.2 fi # Map workshop to staging script(s) and libraries, From ec15e85dd1581a4626f283e4171732cde65b914b Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 12:03:34 -0700 Subject: [PATCH 118/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index ff14681..3b72135 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -123,7 +123,7 @@ fi PE_HOST=${PE_HOST} \ PRISM_ADMIN=${PRISM_ADMIN} \ PE_PASSWORD=${PE_PASSWORD} \ -bash -x ./stage_workshop.sh -f ${MY_WORKSHOP} # \ +bash -x ./stage_workshop.sh -f ${MY_WORKSHOP} > stagedebug.log # \ # && popd || exit echo -e "\n DONE: ${0} ran for ${SECONDS} seconds." From dbe7da2a193f3a55586a2dac7bda8140f6b6a4a8 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 12:05:47 -0700 Subject: [PATCH 119/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index 3b72135..5b10755 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -123,7 +123,7 @@ fi PE_HOST=${PE_HOST} \ PRISM_ADMIN=${PRISM_ADMIN} \ PE_PASSWORD=${PE_PASSWORD} \ -bash -x ./stage_workshop.sh -f ${MY_WORKSHOP} > stagedebug.log # \ +./stage_workshop.sh -f ${MY_WORKSHOP} # \ # && popd || exit echo -e "\n DONE: ${0} ran for ${SECONDS} seconds." 
From 0d27d1675381a97736ef759046f291a1e1572fea Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 12:06:39 -0700 Subject: [PATCH 120/691] Revert "Additional Cleanup" This reverts commit 0d501187730aaf93bccd2b244c3fc5c4e7e58bea. --- scripts/bootcamp.sh | 9 +- scripts/calm.sh | 8 +- scripts/global.vars.sh | 270 +++++++++++++++++++++++------------------ scripts/lib.common.sh | 15 ++- scripts/localhost.sh | 99 --------------- scripts/ts2019.sh | 8 +- 6 files changed, 173 insertions(+), 236 deletions(-) delete mode 100644 scripts/localhost.sh diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index a57bef7..3f9e024 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -106,13 +106,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && seedPC \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/calm.sh b/scripts/calm.sh index bc4840c..707e15e 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -97,12 +97,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f622f6b..596f1e4 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -1,19 +1,46 @@ #!/usr/bin/env bash # shellcheck disable=SC2034 -RELEASE='release.json' -PC_DEV_VERSION='5.10.3' -PC_CURRENT_VERSION='5.10.3' -PC_STABLE_VERSION='5.8.2' -FILES_VERSION='3.5.0' + RELEASE='release.json' +# Sync the following to lib.common.sh::ntnx_download-Case=PC +# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails +# - Find ${PC_VERSION} in the Additional Releases section on the lower right side +# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL + PC_DEV_VERSION='5.10.2' + PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' + PC_DEV_URL='' + PC_CURRENT_VERSION='5.10.2' + #PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + PC_STABLE_VERSION='5.8.2' + #PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' + #PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' +# Sync the following to lib.common.sh::ntnx_download-Case=FILES +# Browse to: https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA +# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side +# - Provide "Upgrade Metadata File" URL to FILES_METAURL + FILES_VERSION='3.2.0.1' + #FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export 
FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' + # 2019-02-15: override until metadata URL fixed + # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + # Revert by overriding again... + #FILES_VERSION='3.2.0' + #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' + #FILES_URL= + NTNX_INIT_PASSWORD='nutanix/4u' -PRISM_ADMIN='admin' -SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" -STORAGE_POOL='SP01' -STORAGE_DEFAULT='Default' -STORAGE_IMAGES='Images' -ATTEMPTS=40 -SLEEP=60 + PRISM_ADMIN='admin' + SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" + STORAGE_POOL='SP01' + STORAGE_DEFAULT='Default' + STORAGE_IMAGES='Images' + ATTEMPTS=40 + SLEEP=60 # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' @@ -28,18 +55,32 @@ SSH_OPTS+=' -q' # -v' # ################################## -QCOW2_IMAGES=(\ + # Conventions for *_REPOS arrays -- the URL must end with either: + # - trailing slash, which imples _IMAGES argument to function repo_source() + # - or full package filename. 
+ + # https://stedolan.github.io/jq/download/#checksums_and_signatures + + #JQ_REPOS=(\ + #'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + #) + #QCOW2_REPOS=(\ + #'http://10.42.8.50/images/' \ + #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #) + + QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - #'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ - #'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ ) - -ISO_IMAGES=(\ + ISO_IMAGES=(\ CentOS7.iso \ Windows2016.iso \ Windows2012R2.iso \ @@ -48,155 +89,152 @@ ISO_IMAGES=(\ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ ) + # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso + # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 + + # https://pkgs.org/download/sshpass + # https://sourceforge.net/projects/sshpass/files/sshpass/ + #SSHPASS_REPOS=(\ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + #) # shellcheck disable=2206 -OCTET=(${PE_HOST//./ }) # zero index -IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} + OCTET=(${PE_HOST//./ }) # zero index + IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) -PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) -DNS_SERVERS='8.8.8.8' -NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' - -NW1_NAME='Primary' -NW1_VLAN=0 -NW1_SUBNET="${IPV4_PREFIX}.1/25" -NW1_DHCP_START="${IPV4_PREFIX}.50" -NW1_DHCP_END="${IPV4_PREFIX}.125" - -NW2_NAME='Secondary' -NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) -NW2_SUBNET="${IPV4_PREFIX}.129/25" 
-NW2_DHCP_START="${IPV4_PREFIX}.132" -NW2_DHCP_END="${IPV4_PREFIX}.253" - + PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) + DNS_SERVERS='8.8.8.8' + NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + NW1_NAME='Primary' + NW1_VLAN=0 +# Assuming HPOC defaults + NW1_SUBNET="${IPV4_PREFIX}.1/25" + NW1_DHCP_START="${IPV4_PREFIX}.50" + NW1_DHCP_END="${IPV4_PREFIX}.125" +# https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F +#SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' -SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' -SMTP_SERVER_PORT=25 - -AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file -AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" -LDAP_PORT=389 -AUTH_FQDN='ntnxlab.local' -AUTH_DOMAIN='NTNXLAB' + SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' + SMTP_SERVER_PORT=25 + + AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file + AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" + LDAP_PORT=389 + AUTH_FQDN='ntnxlab.local' + AUTH_DOMAIN='NTNXLAB' AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' + #AUTODC_REPOS=(\ + #'http://10.42.8.50/images/AutoDC.qcow2' \ + #'http://10.42.8.50/images/AutoDC2.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ +#) - -# For Nutanix HPOC/Marketing clusters (RTP 10.55, PHC 10.42, PHX 10.38) +# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) # https://sewiki.nutanix.com/index.php/HPOC_IP_Schema case "${OCTET[0]}.${OCTET[1]}" in + 10.20 ) #Marketing: us-west = SV + DNS_SERVERS='10.21.253.10' + ;; + 10.21 ) #HPOC: us-west = SV + if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then + log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' + exit 0 + fi + # backup cluster; override relative IP addressing + if (( ${OCTET[2]} == 249 )); then + AUTH_HOST="${IPV4_PREFIX}.118" + PC_HOST="${IPV4_PREFIX}.119" + fi + + DNS_SERVERS='10.21.253.10,10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' +PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' +PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' 
\ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' - DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' + DNS_SERVERS='10.55.251.10,10.55.251.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' +PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' +PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 
'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' + DNS_SERVERS='10.42.196.10,10.42.194.10 ' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" ;; - 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - JQ_REPOS=(\ - 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ - ) - SSHPASS_REPOS=(\ - 
'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ - #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ - ) - QCOW2_REPOS=(\ - 'http://10.42.194.11/workshop_staging/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - ) - AUTODC_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - ) - PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) - DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" - ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - JQ_REPOS=(\ + JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - - 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" + NW1_DHCP_END="${IPV4_PREFIX}.250" # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json PC_STABLE_METAURL=${PC_CURRENT_METAURL} QCOW2_IMAGES=(\ @@ -211,6 +249,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ;; esac + # Find 
operating system and set dependencies if [[ -e /etc/lsb-release ]]; then # Linux Standards Base @@ -222,10 +261,7 @@ elif [[ $(uname -s) == 'Darwin' ]]; then OS_NAME='Darwin' fi -WC_ARG='-l' +WC_ARG='--lines' if [[ ${OS_NAME} == 'Darwin' ]]; then WC_ARG='-l' fi -if [[ ${OS_NAME} == 'alpine' ]]; then - WC_ARG='-l' -fi diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index b62137f..ec573c1 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -251,6 +251,11 @@ function fileserver() { remote_exec 'ssh' ${_host} \ "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" + + # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ + # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 + #AutoDC2: pending + #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable popd || exit ;; 'stop' ) @@ -282,6 +287,11 @@ function images() { local _source='source_uri' local _test + #which "$_cli" + #if (( $? > 0 )); then + # _cli='nuclei' + # _source='source_uri' + #fi ####################################### # For doing Disk IMAGES @@ -295,7 +305,10 @@ function images() { && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - + #else + # _test=$(source /etc/profile.d/nutanix_env.sh \ + # && ${_cli} image.list 2>&1 \ + # | grep "${_image}") fi if [[ ! 
-z ${_test} ]]; then diff --git a/scripts/localhost.sh b/scripts/localhost.sh deleted file mode 100644 index a2896cd..0000000 --- a/scripts/localhost.sh +++ /dev/null @@ -1,99 +0,0 @@ - - -HTTP_CACHE_HOST='localhost' -HTTP_CACHE_PORT=8181 - - AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file - AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" - LDAP_PORT=389 - AUTH_FQDN='ntnxlab.local' - AUTH_DOMAIN='NTNXLAB' -AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} -AUTH_ADMIN_PASS='nutanix/4u' -AUTH_ADMIN_GROUP='SSP Admins' - AUTODC_REPOS=(\ - 'http://10.42.8.50/images/AutoDC.qcow2' \ - 'http://10.42.8.50/images/AutoDC2.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - - #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ - #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ - -) - -# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) -# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema -case "${OCTET[0]}.${OCTET[1]}" in - 10.20 ) #Marketing: us-west = SV - DNS_SERVERS='10.21.253.10' - ;; - 10.21 ) #HPOC: us-west = SV - if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then - log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' - exit 0 - fi - - # backup cluster; override relative IP addressing - if (( ${OCTET[2]} == 249 )); then - AUTH_HOST="${IPV4_PREFIX}.118" - PC_HOST="${IPV4_PREFIX}.119" - fi - - DNS_SERVERS='10.21.253.10,10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.55 ) # HPOC us-east = DUR - DNS_SERVERS='10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.42 ) # HPOC us-west = PHX - DNS_SERVERS='10.42.196.10' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; - 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" - NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" - # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_CURRENT_METAURL} - - QCOW2_IMAGES=(\ - Centos7-Base.qcow2 \ - Centos7-Update.qcow2 \ - Windows2012R2.qcow2 \ - panlm-img-52.qcow2 \ - kx_k8s_01.qcow2 \ - kx_k8s_02.qcow2 \ - kx_k8s_03.qcow2 \ - ) - ;; -esac - -HTTP_CACHE_HOST='localhost' -HTTP_CACHE_PORT=8181 - - ATTEMPTS=40 - SLEEP=60 # pause (in seconds) between ATTEMPTS - - CURL_OPTS='--insecure --silent --show-error' # --verbose' -CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output 
/dev/null" -CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" - SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' - SSH_OPTS+=' -q' # -v' diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index a82c281..a1d5520 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,12 +132,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ From 3c855a45832de03dab0aeeaa757bb4e3dc5538d3 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 12:43:21 -0700 Subject: [PATCH 121/691] Update bootstrap.sh --- bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index 5b10755..3c3fcd6 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -123,7 +123,7 @@ fi PE_HOST=${PE_HOST} \ PRISM_ADMIN=${PRISM_ADMIN} \ PE_PASSWORD=${PE_PASSWORD} \ -./stage_workshop.sh -f ${MY_WORKSHOP} # \ +./stage_workshop.sh -f - ${MY_WORKSHOP} # \ # && popd || exit echo -e "\n DONE: ${0} ran for ${SECONDS} seconds." 
From 3add31bf5e2623bb4089404f22a3c39bf41701f2 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 15:09:43 -0700 Subject: [PATCH 122/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 018d496..60c0fd7 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -22,7 +22,7 @@ function stage_clusters() { local _pe_launch # will be transferred and executed on PE local _pc_launch # will be transferred and executed on PC local _sshkey=${SSH_PUBKEY} - local _wc_arg='--lines' + #local _wc_arg='--lines' local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]} # Map to latest and greatest of each point release From 444bf8442879c37105e4a8a8e8f8c234c120c426 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 15:37:31 -0700 Subject: [PATCH 123/691] Update stage_workshop.sh --- stage_workshop.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 60c0fd7..f2ef491 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -261,10 +261,10 @@ function select_workshop() { get_file break ;; - #"Validate Staged Clusters") - # validate_clusters - # break - # ;; + "Validate Staged Clusters") + validate_clusters + break + ;; "Quit") exit ;; @@ -304,14 +304,14 @@ function select_workshop() { . 
scripts/global.vars.sh begin -# _VALIDATE='Validate Staged Clusters' + _VALIDATE='Validate Staged Clusters' _CLUSTER_FILE='Cluster Input File' CLUSTER_LIST= # NONWORKSHOPS appended to WORKSHOPS WORKSHOP_COUNT=${#WORKSHOPS[@]} WORKSHOPS[${#WORKSHOPS[@]}]="Change ${_CLUSTER_FILE}" -#WORKSHOPS[${#WORKSHOPS[@]}]=${_VALIDATE} +WORKSHOPS[${#WORKSHOPS[@]}]=${_VALIDATE} WORKSHOPS[${#WORKSHOPS[@]}]="Quit" let NONWORKSHOPS=${#WORKSHOPS[@]}-${WORKSHOP_COUNT} From e10723e054881875339736ed167de81369b93e7c Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 15:41:44 -0700 Subject: [PATCH 124/691] WC_ARG Fix --- bootstrap.sh | 2 +- scripts/citrix.sh | 9 +- scripts/lib.common.sh | 15 +- scripts/lib.pe.sh | 57 +----- scripts/localhost.sh | 99 ++++++++++ scripts/stage_citrixhow.sh | 355 ---------------------------------- scripts/stage_citrixhow_pc.sh | 66 ------- scripts/ts2019.sh | 8 +- 8 files changed, 118 insertions(+), 493 deletions(-) create mode 100644 scripts/localhost.sh delete mode 100644 scripts/stage_citrixhow.sh delete mode 100644 scripts/stage_citrixhow_pc.sh diff --git a/bootstrap.sh b/bootstrap.sh index 3c3fcd6..b68142c 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -83,7 +83,7 @@ if [[ -z ${EMAIL} ]]; then read -p "REQUIRED: Email address for cluster admin? " EMAIL fi -_WC_ARG='--list' +_WC_ARG='--lines' if [[ $(uname -s) == 'Darwin' ]]; then _WC_ARG='-l' fi diff --git a/scripts/citrix.sh b/scripts/citrix.sh index f837313..9851f00 100644 --- a/scripts/citrix.sh +++ b/scripts/citrix.sh @@ -148,8 +148,13 @@ EOF" pc_init \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi + ssp_auth \ && calm_enable \ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index ec573c1..b62137f 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -251,11 +251,6 @@ function fileserver() { remote_exec 'ssh' ${_host} \ "python -m SimpleHTTPServer ${_port} || python -m http.server ${_port}" - - # acli image.create AutoDC2 image_type=kDiskImage wait=true container=Images \ - # source_url=http://10.4.150.64:8181/autodc-2.0.qcow2 - #AutoDC2: pending - #AutoDC2: UploadFailure: Could not access the URL, please check the URL and make sure the hostname is resolvable popd || exit ;; 'stop' ) @@ -287,11 +282,6 @@ function images() { local _source='source_uri' local _test - #which "$_cli" - #if (( $? > 0 )); then - # _cli='nuclei' - # _source='source_uri' - #fi ####################################### # For doing Disk IMAGES @@ -305,10 +295,7 @@ function images() { && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - #else - # _test=$(source /etc/profile.d/nutanix_env.sh \ - # && ${_cli} image.list 2>&1 \ - # | grep "${_image}") + fi if [[ ! 
-z ${_test} ]]; then diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 27577f7..25e1359 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -59,14 +59,6 @@ function authentication_source() { _autodc_restart="sleep 2 && service ${_autodc_service} stop && sleep 5 && service ${_autodc_service} start" _autodc_status="service ${_autodc_service} status" _autodc_success=' * status: started' - - # REVIEW: override global.vars - #export AUTODC_REPOS=(\ - #'http://10.42.8.50/images/AutoDC2.qcow2' \ - #'http://10.42.8.50/images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - #) fi dns_check "dc${_autodc_index}.${AUTH_FQDN}" @@ -237,51 +229,10 @@ function cluster_check() { local _test_exit local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - # shellcheck disable=2206 - #_pc_version=(${PC_VERSION//./ }) - - #if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 10 )); then - # log "PC>=5.10, checking multicluster state..." - # while true ; do - # (( _loop++ )) - - # _test=$(ncli --json=true multicluster get-cluster-state | jq -r .data[0].clusterDetails.multicluster) - # _test_exit=$? - # log "Cluster status: |${_test}|, exit: ${_test_exit}." - - # if [[ ${_test} != 'true' ]]; then - # _test=$(ncli multicluster add-to-multicluster \ - # external-ip-address-or-svm-ips=${PC_HOST} \ - # username=${PRISM_ADMIN} password=${PE_PASSWORD}) - # _test_exit=$? - # log "Manual join PE to PC = |${_test}|, exit: ${_test_exit}." - # fi - - # _test=$(ncli --json=true multicluster get-cluster-state | \ - # jq -r .data[0].clusterDetails.multicluster) - # _test_exit=$? - # log "Cluster status: |${_test}|, exit: ${_test_exit}." - - # if [[ ${_test} == 'true' ]]; then - # log "PE to PC = cluster registration: successful." 
- # return 0 - # elif (( ${_loop} > ${_attempts} )); then - # log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - # return ${_error} - # else - # log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep} seconds..." - # sleep ${_sleep} - # fi - # done - #fi - - #if (( ${_pc_version[0]} -ge 5 && ${_pc_version[1]} -eq 8 )); then - log "PC is version 5.8, enabling and checking" - # Enable the PE to PC registration - _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" - _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') - #fi - + log "PC is version 5.8, enabling and checking" + # Enable the PE to PC registration + _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" + _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') } ############################################################################################################################################################################### diff --git a/scripts/localhost.sh b/scripts/localhost.sh new file mode 100644 index 0000000..a2896cd --- /dev/null +++ b/scripts/localhost.sh @@ -0,0 +1,99 @@ + + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + + AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file + AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" + LDAP_PORT=389 + AUTH_FQDN='ntnxlab.local' + AUTH_DOMAIN='NTNXLAB' +AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} +AUTH_ADMIN_PASS='nutanix/4u' +AUTH_ADMIN_GROUP='SSP Admins' + AUTODC_REPOS=(\ + 'http://10.42.8.50/images/AutoDC.qcow2' \ + 'http://10.42.8.50/images/AutoDC2.qcow2' \ + 
'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + + #'nfs://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + # 'smb://pocfs.nutanixdc.local/images/CorpSE_Calm/autodc-2.0.qcow2' \ + #'http://10.59.103.143:8000/autodc-2.0.qcow2' \ + +) + +# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) +# https://sewiki.nutanix.com/index.php/HPOC_IP_Schema +case "${OCTET[0]}.${OCTET[1]}" in + 10.20 ) #Marketing: us-west = SV + DNS_SERVERS='10.21.253.10' + ;; + 10.21 ) #HPOC: us-west = SV + if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then + log 'GPU cluster, aborting! See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' + exit 0 + fi + + # backup cluster; override relative IP addressing + if (( ${OCTET[2]} == 249 )); then + AUTH_HOST="${IPV4_PREFIX}.118" + PC_HOST="${IPV4_PREFIX}.119" + fi + + DNS_SERVERS='10.21.253.10,10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.55 ) # HPOC us-east = DUR + DNS_SERVERS='10.21.253.11' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.42 ) # HPOC us-west = PHX + DNS_SERVERS='10.42.196.10' + NW2_NAME='Secondary' + NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + NW1_DHCP_START="${IPV4_PREFIX}.100" + NW1_DHCP_END="${IPV4_PREFIX}.250" + # PC deploy file local override, TODO:30 make an PC_URL array and eliminate + 
PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_CURRENT_METAURL} + + QCOW2_IMAGES=(\ + Centos7-Base.qcow2 \ + Centos7-Update.qcow2 \ + Windows2012R2.qcow2 \ + panlm-img-52.qcow2 \ + kx_k8s_01.qcow2 \ + kx_k8s_02.qcow2 \ + kx_k8s_03.qcow2 \ + ) + ;; +esac + +HTTP_CACHE_HOST='localhost' +HTTP_CACHE_PORT=8181 + + ATTEMPTS=40 + SLEEP=60 # pause (in seconds) between ATTEMPTS + + CURL_OPTS='--insecure --silent --show-error' # --verbose' +CURL_POST_OPTS="${CURL_OPTS} --max-time 5 --header Content-Type:application/json --header Accept:application/json --output /dev/null" +CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" + SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' + SSH_OPTS+=' -q' # -v' diff --git a/scripts/stage_citrixhow.sh b/scripts/stage_citrixhow.sh deleted file mode 100644 index 6ae85a5..0000000 --- a/scripts/stage_citrixhow.sh +++ /dev/null @@ -1,355 +0,0 @@ -#!/bin/bash -# -# Please configure according to your needs -# -function pc_remote_exec { - sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null nutanix@10.21.${MY_HPOC_NUMBER}.39 "$@" -} -function pc_send_file { - sshpass -p nutanix/4u scp -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null "$1" nutanix@10.21.${MY_HPOC_NUMBER}.39:/home/nutanix/"$1" -} - -# Loging date format -#Never:0 Make logging format configurable -#MY_LOG_DATE='date +%Y-%m-%d %H:%M:%S' -# Script file name -MY_SCRIPT_NAME=`basename "$0"` -# Derive HPOC number from IP 3rd byte -#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1) -MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') -array=(${MY_CVM_IP//./ }) -MY_HPOC_NUMBER=${array[2]} -# 
HPOC Password (if commented, we assume we get that from environment) -#MY_PE_PASSWORD='nx2TechXXX!' -STORAGE_POOL='SP01' -STORAGE_DEFAULT='Default' -STORAGE_IMAGES='Images' -MY_DOMAIN_FQDN='ntnxlab.local' -MY_DOMAIN_NAME='NTNXLAB' -MY_DOMAIN_USER='administrator@ntnxlab.local' -MY_DOMAIN_PASS='nutanix/4u' -MY_DOMAIN_ADMIN_GROUP='SSP Admins' -MY_DOMAIN_URL="ldaps://10.21.${MY_HPOC_NUMBER}.40/" -MY_PRIMARY_NET_NAME='Primary' -MY_PRIMARY_NET_VLAN='0' -MY_SECONDARY_NET_NAME='Secondary' -MY_SECONDARY_NET_VLAN="${MY_HPOC_NUMBER}1" -MY_PC_SRC_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central.tar' -MY_PC_META_URL='http://10.21.249.53/pc-5.7.1-stable-prism_central_metadata.json' -MY_AFS_SRC_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable.qcow2' -MY_AFS_META_URL='http://10.21.250.221/images/ahv/techsummit/nutanix-afs-el7.3-release-afs-3.0.0.1-stable-metadata.json' - -# From this point, we assume: -# IP Range: 10.21.${MY_HPOC_NUMBER}.0/25 -# Gateway: 10.21.${MY_HPOC_NUMBER}.1 -# DNS: 10.21.253.10,10.21.253.11 -# Domain: nutanixdc.local -# DHCP Pool: 10.21.${MY_HPOC_NUMBER}.50 - 10.21.${MY_HPOC_NUMBER}.120 -# -# DO NOT CHANGE ANYTHING BELOW THIS LINE UNLESS YOU KNOW WHAT YOU'RE DOING!! 
-# -# Source Nutanix environments (for PATH and other things) -source /etc/profile.d/nutanix_env.sh -# Logging function -function my_log { - #echo `$MY_LOG_DATE`" $1" - echo $(date "+%Y-%m-%d %H:%M:%S") $1 -} -# Check if we got a password from environment or from the settings above, otherwise exit before doing anything -if [[ -z ${MY_PE_PASSWORD+x} ]]; then - my_log "No password provided, exiting" - exit -1 -fi -my_log "My PID is $$" -my_log "Installing sshpass" -sudo rpm -ivh https://fr2.rpmfind.net/linux/epel/7/x86_64/Packages/s/sshpass-1.06-1.el7.x86_64.rpm -# Configure SMTP -my_log "Configure SMTP" -ncli cluster set-smtp-server address=nutanix-com.mail.protection.outlook.com from-email-address=cluster@nutanix.com port=25 -# Configure NTP -my_log "Configure NTP" -ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org -# Rename default storage container to STORAGE_DEFAULT -my_log "Rename default container to ${STORAGE_DEFAULT}" -default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') -ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" -# Rename default storage pool to STORAGE_POOL -my_log "Rename default storage pool to ${STORAGE_POOL}" -default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) -ncli sp edit name="${default_sp}" new-name="${STORAGE_POOL}" -# Check if there is a container named STORAGE_IMAGES, if not create one -my_log "Check if there is a container named ${STORAGE_IMAGES}, if not create one" -(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^${STORAGE_IMAGES}" 2>&1 > /dev/null) \ - && echo "Container ${STORAGE_IMAGES} already exists" \ - || ncli container create name="${STORAGE_IMAGES}" sp-name="${STORAGE_POOL}" -# Set external IP address: -#ncli cluster edit-params external-ip-address=10.21.${MY_HPOC_NUMBER}.37 
-# Set Data Services IP address: -my_log "Set Data Services IP address to 10.21.${MY_HPOC_NUMBER}.38" -ncli cluster edit-params external-data-services-ip-address=10.21.${MY_HPOC_NUMBER}.38 - -# Importing images -MY_IMAGE="AutoDC" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/AutoDC.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." - sleep 5 -done - -MY_IMAGE="CentOS" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/CentOS7-04282018.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." - sleep 5 -done - -MY_IMAGE="Windows2012" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows2012R2-04282018.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." 
- sleep 5 -done - -MY_IMAGE="Windows10" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kDiskImage source_url=http://10.21.250.221/images/ahv/techsummit/Windows10-1709-04282018.qcow2 wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." - sleep 5 -done - -MY_IMAGE="XenDesktop-7.15.iso" -retries=1 -my_log "Importing ${MY_IMAGE} image" -until [[ $(acli image.create ${MY_IMAGE} container="${STORAGE_IMAGES}" image_type=kIsoImage source_url=http://10.21.250.221/images/ahv/techsummit/XD715.iso wait=true) =~ "complete" ]]; do - let retries++ - if [ $retries -gt 5 ]; then - my_log "${MY_IMAGE} failed to upload after 5 attempts. This cluster may require manual remediation." - acli vm.create STAGING-FAILED-${MY_IMAGE} - break - fi - my_log "acli image.create ${MY_IMAGE} FAILED. Retrying upload (${retries} of 5)..." 
- sleep 5 -done - -# Remove existing VMs, if any -my_log "Removing \"Windows 2012\" VM if it exists" -acli -y vm.delete Windows\ 2012\ VM delete_snapshots=true -my_log "Removing \"Windows 10\" VM if it exists" -acli -y vm.delete Windows\ 10\ VM delete_snapshots=true -my_log "Removing \"CentOS\" VM if it exists" -acli -y vm.delete CentOS\ VM delete_snapshots=true - -# Remove Rx-Automation-Network network -my_log "Removing \"Rx-Automation-Network\" Network if it exists" -acli -y net.delete Rx-Automation-Network - -# Create primary network -my_log "Create primary network:" -my_log "Name: ${MY_PRIMARY_NET_NAME}" -my_log "VLAN: ${MY_PRIMARY_NET_VLAN}" -my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.1/25" -my_log "Domain: ${MY_DOMAIN_NAME}" -my_log "Pool: 10.21.${MY_HPOC_NUMBER}.50 to 10.21.${MY_HPOC_NUMBER}.125" -acli net.create ${MY_PRIMARY_NET_NAME} vlan=${MY_PRIMARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.1/25 -acli net.update_dhcp_dns ${MY_PRIMARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME} -acli net.add_dhcp_pool ${MY_PRIMARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.50 end=10.21.${MY_HPOC_NUMBER}.125 - -# Create secondary network -if [[ ${MY_SECONDARY_NET_NAME} ]]; then - my_log "Create secondary network:" - my_log "Name: ${MY_SECONDARY_NET_NAME}" - my_log "VLAN: ${MY_SECONDARY_NET_VLAN}" - my_log "Subnet: 10.21.${MY_HPOC_NUMBER}.129/25" - my_log "Domain: ${MY_DOMAIN_NAME}" - my_log "Pool: 10.21.${MY_HPOC_NUMBER}.132 to 10.21.${MY_HPOC_NUMBER}.253" - acli net.create ${MY_SECONDARY_NET_NAME} vlan=${MY_SECONDARY_NET_VLAN} ip_config=10.21.${MY_HPOC_NUMBER}.129/25 - acli net.update_dhcp_dns ${MY_SECONDARY_NET_NAME} servers=10.21.${MY_HPOC_NUMBER}.40,10.21.253.10 domains=${MY_DOMAIN_NAME} - acli net.add_dhcp_pool ${MY_SECONDARY_NET_NAME} start=10.21.${MY_HPOC_NUMBER}.132 end=10.21.${MY_HPOC_NUMBER}.253 -fi - -# Create AutoDC & power on -my_log "Create DC VM based on AutoDC image" -acli vm.create DC num_vcpus=2 num_cores_per_vcpu=1 
memory=4G -acli vm.disk_create DC cdrom=true empty=true -acli vm.disk_create DC clone_from_image=AutoDC -acli vm.nic_create DC network=${MY_PRIMARY_NET_NAME} ip=10.21.${MY_HPOC_NUMBER}.40 -my_log "Power on DC VM" -acli vm.on DC - -# Need to wait for AutoDC to be up (30?60secs?) -my_log "Waiting 60sec to give DC VM time to start" -sleep 60 - -# Configure PE external authentication -my_log "Configure PE external authentication" -ncli authconfig add-directory directory-type=ACTIVE_DIRECTORY connection-type=LDAP directory-url="${MY_DOMAIN_URL}" domain="${MY_DOMAIN_FQDN}" name="${MY_DOMAIN_NAME}" service-account-username="${MY_DOMAIN_USER}" service-account-password="${MY_DOMAIN_PASS}" - -# Configure PE role mapping -my_log "Configure PE role mapping" -ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${MY_DOMAIN_NAME}" entity-values="${MY_DOMAIN_ADMIN_GROUP}" - -# Reverse Lookup Zone -my_log "Creating Reverse Lookup Zone on DC VM" -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "samba-tool dns zonecreate dc1 ${MY_HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart" - -# Create custom OUs -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "apt install ldb-tools -y -q" - -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "cat << EOF > ous.ldif -dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: Non-Persistent Desktop OU - -dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: Persistent Desktop OU - -dn: OU=XenAppServer,DC=NTNXLAB,DC=local -changetype: add -objectClass: 
top -objectClass: organizationalunit -description: XenApp Server OU -EOF" - -sshpass -p nutanix/4u ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null \ -root@10.21.${MY_HPOC_NUMBER}.40 "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart" - -# Provision local Prism account for XD MCS Plugin -my_log "Create PE user account xd for MCS Plugin" -ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com -ncli user grant-cluster-admin-role user-name=xd - -# Get UUID from cluster -my_log "Get UUIDs from cluster:" -MY_NET_UUID=$(acli net.get ${MY_PRIMARY_NET_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs) -my_log "${MY_PRIMARY_NET_NAME} UUID is ${MY_NET_UUID}" -MY_CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) -my_log "${STORAGE_DEFAULT} UUID is ${MY_CONTAINER_UUID}" - -# Validate EULA on PE -my_log "Validate EULA on PE" -curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST \ - https://127.0.0.1:9440/PrismGateway/services/rest/v1/eulas/accept \ - -d '{ - "username": "SE", - "companyName": "NTNX", - "jobTitle": "SE" -}' - -# Disable Pulse in PE -my_log "Disable Pulse in PE" -curl -u admin:${MY_PE_PASSWORD} -k -H 'Content-Type: application/json' -X PUT \ - https://127.0.0.1:9440/PrismGateway/services/rest/v1/pulse \ - -d '{ - "defaultNutanixEmail": null, - "emailContactList": null, - "enable": false, - "enableDefaultNutanixEmail": false, - "isPulsePromptNeeded": false, - "nosVersion": null, - "remindLater": null, - "verbosityType": null -}' - -# AFS Download -my_log "Download AFS image from ${MY_AFS_SRC_URL}" -wget -nv ${MY_AFS_SRC_URL} -my_log "Download AFS metadata JSON from ${MY_AFS_META_URL}" -wget -nv ${MY_AFS_META_URL} - -# Staging AFS -my_log "Stage AFS" -ncli software upload file-path=/home/nutanix/${MY_AFS_SRC_URL##*/} 
meta-file-path=/home/nutanix/${MY_AFS_META_URL##*/} software-type=FILE_SERVER - -# Freeing up space -my_log "Delete AFS sources to free some space" -rm ${MY_AFS_SRC_URL##*/} ${MY_AFS_META_URL##*/} - -# Prism Central Download -my_log "Download PC tarball from ${MY_PC_SRC_URL}" -wget -nv ${MY_PC_SRC_URL} -my_log "Download PC metadata JSON from ${MY_PC_META_URL}" -wget -nv ${MY_PC_META_URL} - -# Staging Prism Central -my_log "Stage Prism Central" -ncli software upload file-path=/home/nutanix/${MY_PC_SRC_URL##*/} meta-file-path=/home/nutanix/${MY_PC_META_URL##*/} software-type=PRISM_CENTRAL_DEPLOY - -# Freeing up space -my_log "Delete PC sources to free some space" -rm ${MY_PC_SRC_URL##*/} ${MY_PC_META_URL##*/} - -# Deploy Prism Central -my_log "Deploy Prism Central" -MY_DEPLOY_BODY=$(cat <> pcconfig.log 2>&1 &" -my_log "Removing sshpass" -sudo rpm -e sshpass -my_log "PE Configuration complete" diff --git a/scripts/stage_citrixhow_pc.sh b/scripts/stage_citrixhow_pc.sh deleted file mode 100644 index 2cacfda..0000000 --- a/scripts/stage_citrixhow_pc.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -#MY_PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz' - -# Script file name -MY_SCRIPT_NAME=`basename "$0"` - -# Source Nutanix environments (for PATH and other things) -. /etc/profile.d/nutanix_env.sh -. 
lib.common.sh # source common routines -Dependencies 'install'; - -# Derive HPOC number from IP 3rd byte -#MY_CVM_IP=$(ip addr | grep inet | cut -d ' ' -f 6 | grep ^10.21 | head -n 1) - MY_CVM_IP=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') - array=(${MY_CVM_IP//./ }) -MY_HPOC_NUMBER=${array[2]} - -CURL_OPTS="${CURL_OPTS} --user admin:${MY_PE_PASSWORD}" #lib.common.sh initialized -#CURL_OPTS="${CURL_OPTS} --verbose" - -# Set Prism Central Password to Prism Element Password -my_log "Setting PC password to PE password" -ncli user reset-password user-name="admin" password="${MY_PE_PASSWORD}" - -# Add NTP Server\ -my_log "Configure NTP on PC" -ncli cluster add-to-ntp-servers servers=0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org - -# Accept Prism Central EULA -my_log "Validate EULA on PC" -curl ${CURL_OPTS} \ - https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/eulas/accept \ - -d '{ - "username": "SE", - "companyName": "NTNX", - "jobTitle": "SE" -}' - -# Disable Prism Central Pulse -my_log "Disable Pulse on PC" -curl ${CURL_OPTS} -X PUT \ - https://10.21.${MY_HPOC_NUMBER}.39:9440/PrismGateway/services/rest/v1/pulse \ - -d '{ - "emailContactList":null, - "enable":false, - "verbosityType":null, - "enableDefaultNutanixEmail":false, - "defaultNutanixEmail":null, - "nosVersion":null, - "isPulsePromptNeeded":false, - "remindLater":null -}' - -# Prism Central upgrade -#my_log "Download PC upgrade image: ${MY_PC_UPGRADE_URL##*/}" -#wget -nv ${MY_PC_UPGRADE_URL} - -#my_log "Prepare PC upgrade image" -#tar -xzf ${MY_PC_UPGRADE_URL##*/} -#rm ${MY_PC_UPGRADE_URL##*/} - -#my_log "Upgrade PC" -#cd /home/nutanix/install ; ./bin/cluster -i . 
-p upgrade - -my_log "PC Configuration complete on `$date`" diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index a1d5520..a82c281 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,8 +132,12 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth \ - && pc_smtp + && pc_auth + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ From f693026ddd66a9e34840fc11ffba6cb95c87d01f Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 16:31:15 -0700 Subject: [PATCH 125/691] Cleanup --- scripts/bootcamp.sh | 9 ++------- scripts/global.vars.sh | 26 +++++++++++--------------- scripts/lib.common.sh | 5 ----- scripts/lib.pc.sh | 4 ++-- 4 files changed, 15 insertions(+), 29 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index a57bef7..3f9e024 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -106,13 +106,8 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && seedPC \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f622f6b..074821c 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,10 +35,7 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - #'https://download.nutanix.com/karbon/centos/0.0/centos7-0.0.qcow2' \ - #'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) - +) ISO_IMAGES=(\ CentOS7.iso \ Windows2016.iso \ @@ -189,17 +186,16 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - DNS_SERVERS='10.132.71.40' - NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" - NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" - # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_CURRENT_METAURL} - - QCOW2_IMAGES=(\ + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + NW1_DHCP_START="${IPV4_PREFIX}.100" + NW1_DHCP_END="${IPV4_PREFIX}.250" + # PC deploy file local override, TODO:30 make an PC_URL array and eliminate + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + PC_STABLE_METAURL=${PC_CURRENT_METAURL} + + QCOW2_IMAGES=(\ Centos7-Base.qcow2 \ Centos7-Update.qcow2 \ Windows2012R2.qcow2 \ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 9accf60..b62137f 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -322,11 +322,6 @@ function images() { _name=acs-centos fi - # 
TODO:0 TOFIX: acs-centos ugly override for today... - if (( $(echo "${_image}" | grep -i 'centos7-0.0' | wc --lines ) > 0 )); then - _name=karbon-centos7.5.1804-ntnx-0.0 - fi - if [[ ${_cli} == 'acli' ]]; then _image_type='kDiskImage' _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 40f5d35..0336a55 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -180,10 +180,10 @@ function karbon_enable() { local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" - + # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... 
if [[ $_response -eq 1 ]]; then # Check if Karbon has been enabled From 06b604b18f4f5a43e327a7896caee748144f8621 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 17:06:17 -0700 Subject: [PATCH 126/691] Updates for SMTP Check --- scripts/bootcamp.sh | 6 +++++- scripts/calm.sh | 2 +- scripts/ts2019.sh | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 3f9e024..e0e6a72 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -107,7 +107,11 @@ case ${1} in && pc_dns_add \ && pc_ui \ && pc_auth \ - && pc_smtp + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ diff --git a/scripts/calm.sh b/scripts/calm.sh index bc4840c..7c3622a 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -97,7 +97,7 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth + && pc_auth \ # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh index a82c281..fc1a3a1 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,7 +132,7 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth + && pc_auth \ # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be if [[ ! 
-z ${SMTP_SERVER_ADDRESS} ]]; then From 1682ef4fbbac398c6d71a036bde72e6c785d7378 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 17 May 2019 17:19:25 -0700 Subject: [PATCH 127/691] Cleanup - JNC --- scripts/calm.sh | 6 +- scripts/global.vars.sh | 276 ++++++++++++++++++----------------------- scripts/lib.pc.sh | 21 +++- scripts/ts2019.sh | 2 +- 4 files changed, 144 insertions(+), 161 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index 707e15e..7c3622a 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -98,7 +98,11 @@ case ${1} in && pc_dns_add \ && pc_ui \ && pc_auth \ - && pc_smtp + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 596f1e4..074821c 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -1,46 +1,19 @@ #!/usr/bin/env bash # shellcheck disable=SC2034 - RELEASE='release.json' -# Sync the following to lib.common.sh::ntnx_download-Case=PC -# Browse to: https://portal.nutanix.com/#/page/releases/prismDetails -# - Find ${PC_VERSION} in the Additional Releases section on the lower right side -# - Provide the metadata URL for the "PC 1-click deploy from PE" option to PC_*_METAURL - PC_DEV_VERSION='5.10.2' - PC_DEV_METAURL='http://download.nutanix.com/pc/one-click-pc-deployment/5.10.2/pcdeploy-5.10.2.json' - PC_DEV_URL='' - PC_CURRENT_VERSION='5.10.2' - #PC_CURRENT_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #PC_CURRENT_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - PC_STABLE_VERSION='5.8.2' - #PC_STABLE_METAURL='http://10.42.8.50/images/pc_deploy-5.8.2.json' - #PC_STABLE_URL='http://10.42.8.50/images/euphrates-5.8.2-stable-prism_central.tar' -# Sync the following to lib.common.sh::ntnx_download-Case=FILES -# Browse to: 
https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA -# - Find ${FILES_VERSION} in the Additional Releases section on the lower right side -# - Provide "Upgrade Metadata File" URL to FILES_METAURL - FILES_VERSION='3.2.0.1' - #FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' - # 2019-02-15: override until metadata URL fixed - # http://download.nutanix.com/afs/7.3/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - # Revert by overriding again... - #FILES_VERSION='3.2.0' - #FILES_METAURL='http://download.nutanix.com/afs/3.2.0/v1/afs-3.2.0.json' - #FILES_URL= - +RELEASE='release.json' +PC_DEV_VERSION='5.10.3' +PC_CURRENT_VERSION='5.10.3' +PC_STABLE_VERSION='5.8.2' +FILES_VERSION='3.5.0' NTNX_INIT_PASSWORD='nutanix/4u' - PRISM_ADMIN='admin' - SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" - STORAGE_POOL='SP01' - STORAGE_DEFAULT='Default' - STORAGE_IMAGES='Images' - ATTEMPTS=40 - SLEEP=60 +PRISM_ADMIN='admin' +SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" +STORAGE_POOL='SP01' +STORAGE_DEFAULT='Default' +STORAGE_IMAGES='Images' +ATTEMPTS=40 +SLEEP=60 # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' @@ -55,32 +28,15 @@ SSH_OPTS+=' -q' # -v' # ################################## - # Conventions for *_REPOS arrays -- the URL must end with either: - # - trailing slash, which imples _IMAGES argument to function repo_source() - # - or full package filename. 
- - # https://stedolan.github.io/jq/download/#checksums_and_signatures - - #JQ_REPOS=(\ - #'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ - #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ - #) - #QCOW2_REPOS=(\ - #'http://10.42.8.50/images/' \ - #'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ - #) - - QCOW2_IMAGES=(\ +QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ - ) - ISO_IMAGES=(\ +) +ISO_IMAGES=(\ CentOS7.iso \ Windows2016.iso \ Windows2012R2.iso \ @@ -89,155 +45,157 @@ SSH_OPTS+=' -q' # -v' SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ ) - # "XenDesktop-7.15.iso" http://10.21.250.221/images/ahv/techsummit/XD715.iso - # http://download.nutanix.com/era/1.0.0/ERA-Server-build-1.0.0-bae7ca0d653e1af2bcb9826d1320e88d8c4713cc.qcow2 - - # https://pkgs.org/download/sshpass - # https://sourceforge.net/projects/sshpass/files/sshpass/ - #SSHPASS_REPOS=(\ - #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ - #) # shellcheck disable=2206 - OCTET=(${PE_HOST//./ }) # zero index - IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} +OCTET=(${PE_HOST//./ }) # zero index +IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) - PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) - DNS_SERVERS='8.8.8.8' - NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' - NW1_NAME='Primary' - NW1_VLAN=0 -# Assuming HPOC defaults - NW1_SUBNET="${IPV4_PREFIX}.1/25" - NW1_DHCP_START="${IPV4_PREFIX}.50" - NW1_DHCP_END="${IPV4_PREFIX}.125" -# https://sewiki.nutanix.com/index.php/Hosted_POC_FAQ#I.27d_like_to_test_email_alert_functionality.2C_what_SMTP_server_can_I_use_on_Hosted_POC_clusters.3F 
-#SMTP_SERVER_ADDRESS='nutanix-com.mail.protection.outlook.com' -SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' - SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' - SMTP_SERVER_PORT=25 +PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) +DNS_SERVERS='8.8.8.8' +NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + +NW1_NAME='Primary' +NW1_VLAN=0 +NW1_SUBNET="${IPV4_PREFIX}.1/25" +NW1_DHCP_START="${IPV4_PREFIX}.50" +NW1_DHCP_END="${IPV4_PREFIX}.125" + +NW2_NAME='Secondary' +NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) +NW2_SUBNET="${IPV4_PREFIX}.129/25" +NW2_DHCP_START="${IPV4_PREFIX}.132" +NW2_DHCP_END="${IPV4_PREFIX}.253" - AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file - AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" - LDAP_PORT=389 - AUTH_FQDN='ntnxlab.local' - AUTH_DOMAIN='NTNXLAB' +SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' +SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' +SMTP_SERVER_PORT=25 + +AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file +AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" +LDAP_PORT=389 +AUTH_FQDN='ntnxlab.local' +AUTH_DOMAIN='NTNXLAB' AUTH_ADMIN_USER='administrator@'${AUTH_FQDN} AUTH_ADMIN_PASS='nutanix/4u' AUTH_ADMIN_GROUP='SSP Admins' - #AUTODC_REPOS=(\ - #'http://10.42.8.50/images/AutoDC.qcow2' \ - #'http://10.42.8.50/images/AutoDC2.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC.qcow2' \ - #'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ -#) -# For Nutanix HPOC/Marketing clusters (10.20, 10.21, 10.55, 10.42) + +# For Nutanix HPOC/Marketing clusters (RTP 10.55, PHC 10.42, PHX 10.38) # https://sewiki.nutanix.com/index.php/HPOC_IP_Schema case "${OCTET[0]}.${OCTET[1]}" in - 10.20 ) #Marketing: us-west = SV - DNS_SERVERS='10.21.253.10' - ;; - 10.21 ) #HPOC: us-west = SV - if (( ${OCTET[2]} == 60 )) || (( ${OCTET[2]} == 77 )); then - log 'GPU cluster, aborting! 
See https://sewiki.nutanix.com/index.php/Hosted_Proof_of_Concept_(HPOC)#GPU_Clusters' - exit 0 - fi - - # backup cluster; override relative IP addressing - if (( ${OCTET[2]} == 249 )); then - AUTH_HOST="${IPV4_PREFIX}.118" - PC_HOST="${IPV4_PREFIX}.119" - fi - DNS_SERVERS='10.21.253.10,10.21.253.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" - ;; 10.55 ) # HPOC us-east = DUR -PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' -PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.55.251.38/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' 
\ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.55.251.10,10.55.251.11' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' + DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' ;; 10.42 ) # HPOC us-west = PHX -PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.2-stable-prism_central.tar' -PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - JQ_REPOS=(\ + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 
'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - SSHPASS_REPOS=(\ + SSHPASS_REPOS=(\ 'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - DNS_SERVERS='10.42.196.10,10.42.194.10 ' - NW2_NAME='Secondary' - NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) - NW2_SUBNET="${IPV4_PREFIX}.129/25" - NW2_DHCP_START="${IPV4_PREFIX}.132" - NW2_DHCP_END="${IPV4_PREFIX}.253" + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' ;; + 10.38 ) # HPOC us-west = PHX 1-Node Clusters + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + JQ_REPOS=(\ + 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + SSHPASS_REPOS=(\ + 
'http://10.42.194.11/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) + QCOW2_REPOS=(\ + 'http://10.42.194.11/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + ) + AUTODC_REPOS=(\ + 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" + ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR - JQ_REPOS=(\ + JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) - QCOW2_REPOS=(\ + QCOW2_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ ) - AUTODC_REPOS=(\ + AUTODC_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) - NW1_DHCP_START="${IPV4_PREFIX}.100" - NW1_DHCP_END="${IPV4_PREFIX}.250" - # PC deploy file local override, TODO:30 make an PC_URL array and eliminate - PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar - PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json - PC_STABLE_METAURL=${PC_CURRENT_METAURL} - QCOW2_IMAGES=(\ + DNS_SERVERS='10.132.71.40' + NW1_SUBNET="${IPV4_PREFIX%.*}.128.4/17" + NW1_DHCP_START="${IPV4_PREFIX}.100" + NW1_DHCP_END="${IPV4_PREFIX}.250" + # PC deploy file local override, TODO:30 make an PC_URL array and eliminate + PC_CURRENT_URL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy.tar + PC_CURRENT_METAURL=http://10.132.128.50/E%3A/share/Nutanix/PrismCentral/pc-${PC_VERSION}-deploy-metadata.json + 
PC_STABLE_METAURL=${PC_CURRENT_METAURL} + + QCOW2_IMAGES=(\ Centos7-Base.qcow2 \ Centos7-Update.qcow2 \ Windows2012R2.qcow2 \ @@ -249,7 +207,6 @@ PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' ;; esac - # Find operating system and set dependencies if [[ -e /etc/lsb-release ]]; then # Linux Standards Base @@ -261,7 +218,10 @@ elif [[ $(uname -s) == 'Darwin' ]]; then OS_NAME='Darwin' fi -WC_ARG='--lines' +WC_ARG='-l' if [[ ${OS_NAME} == 'Darwin' ]]; then WC_ARG='-l' fi +if [[ ${OS_NAME} == 'alpine' ]]; then + WC_ARG='-l' +fi diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7a95718..97b2c90 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -428,7 +428,26 @@ function pc_passwd() { } ############################################################################################################################################################################### -# Routine to setp up the SSP authentication to use the AutoDC1 or 2 server +# Seed PC data for Prism Pro Labs +############################################################################################################################################################################### + +function seedPC() { + local _test + local _setup + + _test=$(curl -L ${PC_DATA} -o /home/nutanix/seedPC.zip) + log "Pulling Prism Data| PC_DATA ${PC_DATA}|${_test}" + unzip /home/nutanix/seedPC.zip + pushd /home/nutanix/lab/ + + _setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) + log "Running Setup Script|$_setup" + + popd +} + +############################################################################################################################################################################### +# Routine to setp up the SSP authentication to use the AutoDC server ############################################################################################################################################################################### function ssp_auth() { diff 
--git a/scripts/ts2019.sh b/scripts/ts2019.sh index a82c281..fc1a3a1 100755 --- a/scripts/ts2019.sh +++ b/scripts/ts2019.sh @@ -132,7 +132,7 @@ case ${1} in pc_init \ && pc_dns_add \ && pc_ui \ - && pc_auth + && pc_auth \ # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then From 46ec1d424a76ce3f9c8bfde631a7763dfbed8050 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 22 May 2019 08:08:36 -0400 Subject: [PATCH 128/691] Update global.vars.sh DNS Fix --- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 074821c..7f89723 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -111,7 +111,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' - DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' + DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' @@ -140,7 +140,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' + DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' @@ -172,7 +172,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) - DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" + DNS_SERVERS="10.42.196.10,10.42.194.10" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ From 02916d6525a1721427ff99fd3acbb8e482efec71 Mon Sep 17 
00:00:00 2001 From: Nathan C Date: Wed, 22 May 2019 11:00:53 -0400 Subject: [PATCH 129/691] Update global.vars.sh --- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 074821c..231ff2f 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -111,7 +111,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' - DNS_SERVERS='10.55.251.10,10.55.251.11,${AUTH_HOST}' + DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' @@ -140,7 +140,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - DNS_SERVERS='10.42.196.10,10.42.194.10,${AUTH_HOST}' + DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' @@ -172,7 +172,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) - DNS_SERVERS="10.42.196.10,10.42.194.10,${AUTH_HOST}" + DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ From 171c163b0eef5f04606a00ca9f2f626514a41b18 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 22 May 2019 13:48:33 -0400 Subject: [PATCH 130/691] Update global.vars.sh --- scripts/global.vars.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7f89723..6148736 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,6 +35,7 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.qcow2 \ + 
hycu-3.5.0-6253.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ From a61ae6ff87f09f76a6d1e3fde1c9df8de03507c9 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 22 May 2019 18:06:04 -0400 Subject: [PATCH 131/691] Updates for Files and File Analytics --- scripts/bootcamp.sh | 6 +- scripts/global.vars.sh | 7 ++ scripts/lib.common.sh | 18 ++++++ scripts/lib.pe.sh | 144 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 174 insertions(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index e0e6a72..8d41210 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -55,7 +55,11 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6148736..5cfdcd3 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -6,6 +6,7 @@ PC_DEV_VERSION='5.10.3' PC_CURRENT_VERSION='5.10.3' PC_STABLE_VERSION='5.8.2' FILES_VERSION='3.5.0' +FILE_ANALYTICS_VERSION='1.0.1' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -94,6 +95,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' JQ_REPOS=(\ 
'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -123,6 +126,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -152,6 +157,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index b62137f..c8e368d 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -569,6 +569,24 @@ function ntnx_download() { _source_url="${FILES_URL}" fi ;; + FILE ANALYTICS | file analytics ) + args_required 'FILE_ANALYTICS_VERSION' + _meta_url="${FILE_ANALYTICS_METAURL}" + + if [[ -z ${_meta_url} ]]; then + _error=22 + log "Error ${_error}: unsupported FILES_VERSION=${FILE_ANALYTICS_VERSION}!" + log 'Sync the following to global.var.sh...' 
+ log 'Browse to https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA' + log " - Find ${FILE_ANALYTICS_VERSION} in the Additional Releases section on the lower right side" + log ' - Provide the metadata URL option to this function, both case stanzas.' + exit ${_error} + fi + + if [[ ! -z ${FILE_ANALYTICS_URL} ]]; then + _source_url="${FILE_ANALYTICS_URL}" + fi + ;; * ) _error=88 log "Error ${_error}:: couldn't determine software-type ${_ncli_softwaretype}!" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 7e52b44..8c7c565 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -181,6 +181,150 @@ function files_install() { fi } +############################################################################################################################################################################### +# Routine to get the Nutanix File Analytics injected +############################################################################################################################################################################### + +function file_analytics_install() { + local _ncli_softwaretype='FILE_ANALYTICS' + local _ncli_software_type='file_analytics' + local _test + + dependencies 'install' 'jq' || exit 13 + + log "IDEMPOTENCY: checking for ${_ncli_software_type} completed..." + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true software list \ + | jq -r \ + '.data[] | select(.softwareType == "'${_ncli_softwaretype}'") | select(.status == "COMPLETED") | .version') + + if [[ ${_test} != "${FILE_ANALYTICS_VERSION}" ]]; then + log "Files ${FILE_ANALYTICS_VERSION} not completed. ${_test}" + ntnx_download "${_ncli_software_type}" + else + log "IDEMPOTENCY: Files ${FILE_ANALYTICS_VERSION} already completed." 
+ fi +} + +############################################################################################################################################################################### +# Create File Server +############################################################################################################################################################################### + +function create_file_server() { + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --insecure ' + local _loop=0 + local + local _fileserver_name="Bootcamp-FS" + local _internal_nw_name="${1}" + local _internal_nw_uuid + local _external_nw_name="${2}" + local _external_nw_uuid + local _test + + log "IDEMPOTENCY: Checking PC API responds, curl failures are acceptable..." + prism_check 'PC' 2 0 + + if (( $? == 0 )) ; then + log "IDEMPOTENCY: PC API responds, skip." + else + log "Get cluster network and storage container UUIDs..." + _internal_nw_uuid=$(acli "net.get ${_internal_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _external_nw_uuid=$(acli "net.get ${_external_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ + | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) + log "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" + log "${_external_nw_name} network UUID: ${_external_nw_uuid}" + log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" + + local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" + + HTTP_JSON_BODY=$(cat < Date: Wed, 22 May 2019 18:11:04 -0400 Subject: [PATCH 132/691] Update lib.common.sh --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index c8e368d..df5ed67 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -569,7 +569,7 @@ function ntnx_download() { _source_url="${FILES_URL}" fi ;; - FILE 
ANALYTICS | file analytics ) + FILE_ANALYTICS | file_analytics ) args_required 'FILE_ANALYTICS_VERSION' _meta_url="${FILE_ANALYTICS_METAURL}" From b0f2dba6743705b1a895b1999917683ed2e83886 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 22 May 2019 18:16:42 -0400 Subject: [PATCH 133/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 8c7c565..d5fa6fa 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -237,7 +237,7 @@ function create_file_server() { log "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" log "${_external_nw_name} network UUID: ${_external_nw_uuid}" log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" - + fi local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" HTTP_JSON_BODY=$(cat < Date: Wed, 22 May 2019 19:10:45 -0400 Subject: [PATCH 134/691] Update lib.pe.sh --- scripts/lib.pe.sh | 35 ++++++++++++++--------------------- 1 file changed, 14 insertions(+), 21 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d5fa6fa..fe11452 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -220,26 +220,19 @@ function create_file_server() { local _external_nw_name="${2}" local _external_nw_uuid local _test - - log "IDEMPOTENCY: Checking PC API responds, curl failures are acceptable..." - prism_check 'PC' 2 0 - - if (( $? == 0 )) ; then - log "IDEMPOTENCY: PC API responds, skip." - else - log "Get cluster network and storage container UUIDs..." 
- _internal_nw_uuid=$(acli "net.get ${_internal_nw_name}" \ - | grep "uuid" | cut -f 2 -d ':' | xargs) - _external_nw_uuid=$(acli "net.get ${_external_nw_name}" \ - | grep "uuid" | cut -f 2 -d ':' | xargs) - _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ - | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) - log "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" - log "${_external_nw_name} network UUID: ${_external_nw_uuid}" - log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" - fi local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" + log "Get cluster network and storage container UUIDs..." + _internal_nw_uuid=$(acli "net.get ${_internal_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _external_nw_uuid=$(acli "net.get ${_external_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ + | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) + log "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" + log "${_external_nw_name} network UUID: ${_external_nw_uuid}" + log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" + HTTP_JSON_BODY=$(cat < Date: Wed, 22 May 2019 19:20:20 -0400 Subject: [PATCH 135/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index fe11452..6e7e05a 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -211,7 +211,7 @@ function file_analytics_install() { ############################################################################################################################################################################### function create_file_server() { - local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --insecure ' + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --insecure " local 
_loop=0 local local _fileserver_name="Bootcamp-FS" From 9544d62924b779e040cac370d62d24a29cb7292e Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 22 May 2019 19:31:39 -0400 Subject: [PATCH 136/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6e7e05a..2bb0661 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -210,7 +210,7 @@ function file_analytics_install() { # Create File Server ############################################################################################################################################################################### -function create_file_server() { +function create_file_server(){ local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --insecure " local _loop=0 local From c007ddbf5ac95f4e2244a9627a4dc173b7ecf280 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 22 May 2019 19:33:22 -0400 Subject: [PATCH 137/691] Some small changes in the location of files --- .gitignore | 2 + .../__pycache__/webserver.cpython-34.pyc | Bin 1390 -> 0 bytes .../__pycache__/webserver.cpython-36.pyc | Bin 1283 -> 0 bytes nodemanager/package.json | 11 -- {logserver => test/logserver}/curl_sim.sh | 0 {logserver => test/logserver}/curl_sim1.sh | 0 {logserver => test/logserver}/curl_sim2.sh | 0 {logserver => test/logserver}/curl_sim3.sh | 0 {logserver => test/logserver}/hpoc_deploy.sql | 0 {logserver => test/logserver}/logserver.py | 0 {logserver => test/logserver}/nagios_setup.sh | 0 {logserver => test/logserver}/pelist.txt | 0 .../logserver}/templates/index.html | 0 {logserver => test/logserver}/webserver.py | 0 test/restapi.txt | 114 ++++++++++++++++++ 15 files changed, 116 insertions(+), 11 deletions(-) delete mode 100644 logserver/__pycache__/webserver.cpython-34.pyc delete mode 100644 logserver/__pycache__/webserver.cpython-36.pyc delete mode 100644 nodemanager/package.json rename {logserver => 
test/logserver}/curl_sim.sh (100%) rename {logserver => test/logserver}/curl_sim1.sh (100%) rename {logserver => test/logserver}/curl_sim2.sh (100%) rename {logserver => test/logserver}/curl_sim3.sh (100%) rename {logserver => test/logserver}/hpoc_deploy.sql (100%) rename {logserver => test/logserver}/logserver.py (100%) rename {logserver => test/logserver}/nagios_setup.sh (100%) rename {logserver => test/logserver}/pelist.txt (100%) rename {logserver => test/logserver}/templates/index.html (100%) rename {logserver => test/logserver}/webserver.py (100%) create mode 100644 test/restapi.txt diff --git a/.gitignore b/.gitignore index 7ccc567..6d66c12 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,5 @@ logs/ master.zip original.release.json stageworkshop-master/ +venv/ +.idea/ \ No newline at end of file diff --git a/logserver/__pycache__/webserver.cpython-34.pyc b/logserver/__pycache__/webserver.cpython-34.pyc deleted file mode 100644 index 59a0656f45cd875f1932073de40bb00136429059..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1390 zcmZ8hO^+Kz5Url?@p|!wK=|0pN?aHv*zAJDC5j@2gbNr6a~dIy#xuRF&Db;P?jf;d zpOyoEg}>BSPF(l_Iq_;{*ICkbm#e#9SG|5+?SF^IgT?8eznr^7f76|YF+D}s1x$&r zpo(ZEC>6AFD0N8itV5McOOH~Yq#zm4H4q_vSGrVs^al_gT>~DG3~3ot=`*4Xs0?|P z(F2Y$OQL^}h!V&8FREPsW=Be{OQ|v=S51{0Ib|Y;=D>T3u0My7#H>IEtiTiD5IO~e zTN0_qiDyY*qi=JQ&vPvcfsNb(^dt0V=sJf%gnbuek7y3}LQFXbh&?`26CV@t63Vr@<>ZZ?#H~u9~{Y zO4(HPCiN~ZF21(jRjrNnS}m20nq2EwwJOu7yg7sQnp&B}x8Z7|UsmPZhDE(z%feU} zP8t-g(zU{%RXDq+Wp$~198vM1RS099a;_pce~}vD=gp@RRo7;s3$nLmTW+VolqE(38hc33Ou4|!*a zGu?hQd;b0NXFs2@66<^=AFb-Lt>hyGoi;^X%Ci?g{rJOKFI%QfZd#qVYKRPJC|}E> zH8PDZr75oRs!AMnj1?OwsoTnQNAArDPajtcZL*crI{#H_p3plUU^tg%XDEhoH}ghJ3XHqP>M^2I z@FAQ-sqZ^Ad)-iBM#fE7K@?Sp7Q&xH&W{HUy0BR!+I=IPWbjSsGh8TA2-b-|_6!uqO4QFY<&B=0V@tD@*}Uja{8 PWzCQ4ffzYM=lgyh diff --git a/logserver/__pycache__/webserver.cpython-36.pyc b/logserver/__pycache__/webserver.cpython-36.pyc deleted file mode 100644 index 
6190576affa1452c52ab22921d1cc5c7308b7e55..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1283 zcmZ9MOK%e~5P)s(Bb(6DmPbJ?hh8I92~ClZ5JE^mxj>L8Cktt{S-UjBiMO^FNR*t? z0~dY>zlOW7ocIepF|$jf(p}AD){mL-=b0R?ueW}^`~I;P5b~Q`dElEo2eC(>1QAq` zG39j3IHUN@R4@v8B&m#-ls8@z5s2`dh;YexA{(Qoh?Z2uqH#`Oz9rZ(X(xZc7ilve z9jVNI_8Ue{gfyv><3eRl_H&%Sd>9zdLF@xil5i>rXMzfLPIyp9pQPzrIRAVSw@i_W%Jhx%_>AGiY$OfF4*D^2SR!Cp(#vocfljM)N5<~AzidqrU!6N186 z7L9}^&1&R=c2~uV(D2SGa9F%`(JlZQ0&~zRb1I;8e*{%={kIY8_ZY<90_AW6vZgE2 zv=3-7bd@34c&R*u4zkxW^@$k3v80*aLw_}&m?m_#i~XjjQrgZ_rSnYLXPwKnv(^xL z@>}QBjn!;3D+)EtGdI+e?#Sv1#KXM&8WgLO6PLPaA=}ghFbH$^aV~%>yPisO74hyW z7nVdZHM_X;hL2A0AHg8H${F9N8l>~DJG9}=m1&x|>JBI$K!Ozw9~lh=RJpqnyt=@u WVv?uzZ9hZ67W-$DwphY$Zu|uSyDKdK diff --git a/nodemanager/package.json b/nodemanager/package.json deleted file mode 100644 index 92c5c4c..0000000 --- a/nodemanager/package.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "name": "nodemanager", - "version": "1.0.0", - "description": "Central Log server", - "main": "app.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "author": "", - "license": "ISC" -} diff --git a/logserver/curl_sim.sh b/test/logserver/curl_sim.sh similarity index 100% rename from logserver/curl_sim.sh rename to test/logserver/curl_sim.sh diff --git a/logserver/curl_sim1.sh b/test/logserver/curl_sim1.sh similarity index 100% rename from logserver/curl_sim1.sh rename to test/logserver/curl_sim1.sh diff --git a/logserver/curl_sim2.sh b/test/logserver/curl_sim2.sh similarity index 100% rename from logserver/curl_sim2.sh rename to test/logserver/curl_sim2.sh diff --git a/logserver/curl_sim3.sh b/test/logserver/curl_sim3.sh similarity index 100% rename from logserver/curl_sim3.sh rename to test/logserver/curl_sim3.sh diff --git a/logserver/hpoc_deploy.sql b/test/logserver/hpoc_deploy.sql similarity index 100% rename from logserver/hpoc_deploy.sql rename to test/logserver/hpoc_deploy.sql diff --git 
a/logserver/logserver.py b/test/logserver/logserver.py similarity index 100% rename from logserver/logserver.py rename to test/logserver/logserver.py diff --git a/logserver/nagios_setup.sh b/test/logserver/nagios_setup.sh similarity index 100% rename from logserver/nagios_setup.sh rename to test/logserver/nagios_setup.sh diff --git a/logserver/pelist.txt b/test/logserver/pelist.txt similarity index 100% rename from logserver/pelist.txt rename to test/logserver/pelist.txt diff --git a/logserver/templates/index.html b/test/logserver/templates/index.html similarity index 100% rename from logserver/templates/index.html rename to test/logserver/templates/index.html diff --git a/logserver/webserver.py b/test/logserver/webserver.py similarity index 100% rename from logserver/webserver.py rename to test/logserver/webserver.py diff --git a/test/restapi.txt b/test/restapi.txt new file mode 100644 index 0000000..180ca9c --- /dev/null +++ b/test/restapi.txt @@ -0,0 +1,114 @@ +####################################################################################################### +# EULA acceptance +####################################################################################################### + curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{ + "username": "SE with $(basename ${0})", + "companyName": "Nutanix", + "jobTitle": "SE" + }' https://localhost:9440/PrismGateway/services/rest/v1/eulas/accept + + +####################################################################################################### +# Disable Pulse +####################################################################################################### +curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data '{ + "defaultNutanixEmail": null, + "emailContactList": null, + "enable": false, + "enableDefaultNutanixEmail": false, + "isPulsePromptNeeded": false, + "nosVersion": null, + "remindLater": null, + "verbosityType": null + }' 
https://localhost:9440/PrismGateway/services/rest/v1/pulse + + + + +####################################################################################################### +# Set Dataservices - PE +####################################################################################################### + + + +####################################################################################################### +# Set NTP servers - PE +####################################################################################################### + + + +####################################################################################################### +# Rename the default storage pool to SP01 +####################################################################################################### + + + +####################################################################################################### +# Create a Storage Container Images if not exist +####################################################################################################### + + + +####################################################################################################### +# Create two networks (Primary and Secondary) +####################################################################################################### +curl -X POST \ + https://10.42.9.37:9440/api/nutanix/v0.8/networks \ + -H 'Content-Type: application/json' \ + -H 'Postman-Token: 836ee60b-0398-456b-967b-7221c3355545' \ + -H 'cache-control: no-cache' \ + -d '{"name":"VLAN30","vlanId":"30","ipConfig":{"dhcpOptions":{"domainNameServers":"8.8.8.8","domainSearch":"ntx-demo.local","domainName":"ntnx-demo.local"},"networkAddress":"10.10.30.0","prefixLength":"24","defaultGateway":"10.10.30.254","pool":[{"range":"10.10.30.100 10.10.30.200"}]}} +' + + +####################################################################################################### +# Upload the right AutoDC version 
+####################################################################################################### + + + +####################################################################################################### +# Install, Configure and start the AutoDC +####################################################################################################### + + + +####################################################################################################### +# Configure the AutoDC's DNS server +####################################################################################################### + + + +####################################################################################################### +# Authentication PE to the installed DC +####################################################################################################### + + + +####################################################################################################### +# Create role mapping +####################################################################################################### + + + +####################################################################################################### +# Upload PC files to be used - PE +####################################################################################################### + + + +####################################################################################################### +# Deploy PC +####################################################################################################### + + + +####################################################################################################### +# Register PE to PC +####################################################################################################### + + + +####################################################################################################### +# Upload Nutanix Files files 
+####################################################################################################### From ab4ded754ca0aa9027f13a6ff0898e30b8e886cf Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 23 May 2019 08:24:59 -0400 Subject: [PATCH 138/691] Update lib.pe.sh --- scripts/lib.pe.sh | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 2bb0661..94af07f 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -210,10 +210,10 @@ function file_analytics_install() { # Create File Server ############################################################################################################################################################################### -function create_file_server(){ - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --insecure " - local _loop=0 - local +function create_file_server() { + #local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --insecure " + #local _loop=0 + #local local _fileserver_name="Bootcamp-FS" local _internal_nw_name="${1}" local _internal_nw_uuid @@ -302,19 +302,21 @@ EOF ) # Start the create process - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) +# _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) + +curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL} # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... 
- if [[ $_response -lt 1 ]]; then - # Check if Karbon has been enabled - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) - while [ $_response -ne 1 ]; do - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) - done - log "File Server has been created." - else - log "File Server is not being created, check the logs." - fi +# if [[ $_response -lt 1 ]]; then +# # Check if Karbon has been enabled +# _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) +# while [ $_response -ne 1 ]; do +# _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) +# done +# log "File Server has been created." +# else +# log "File Server is not being created, check the logs." 
+# fi } From 58582a0ddbf8baf8ede417c52a2871980e794455 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 23 May 2019 09:47:48 -0400 Subject: [PATCH 139/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 94af07f..d649ec8 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -214,6 +214,7 @@ function create_file_server() { #local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --insecure " #local _loop=0 #local + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _fileserver_name="Bootcamp-FS" local _internal_nw_name="${1}" local _internal_nw_uuid @@ -304,6 +305,8 @@ EOF # Start the create process # _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) +echo $HTTP_JSON_BODY + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL} # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... 
From c20925c04988f17a3aa7077f22c623e3f7bf7c57 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 23 May 2019 10:09:12 -0400 Subject: [PATCH 140/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d649ec8..a01638d 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -215,7 +215,7 @@ function create_file_server() { #local _loop=0 #local local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - local _fileserver_name="Bootcamp-FS" + local _fileserver_name="BootcampFS" local _internal_nw_name="${1}" local _internal_nw_uuid local _external_nw_name="${2}" From f8dbc75d56b49d9bfe235c4e730fe0f3ede7efed Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 23 May 2019 10:14:42 -0400 Subject: [PATCH 141/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index a01638d..fbfe0a1 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -211,9 +211,6 @@ function file_analytics_install() { ############################################################################################################################################################################### function create_file_server() { - #local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --insecure " - #local _loop=0 - #local local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _fileserver_name="BootcampFS" local _internal_nw_name="${1}" From 4bee02bf3f11675a1096211cb1a89dcaaf2d4483 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 23 May 2019 13:22:17 -0400 Subject: [PATCH 142/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 
fbfe0a1..09e3ef6 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -232,7 +232,7 @@ function create_file_server() { log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" HTTP_JSON_BODY=$(cat < Date: Thu, 23 May 2019 15:31:08 -0400 Subject: [PATCH 143/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 09e3ef6..5e9d940 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -304,7 +304,7 @@ EOF echo $HTTP_JSON_BODY -curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST ${HTTP_JSON_BODY} ${_httpURL} +curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST $HTTP_JSON_BODY $_httpURL # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... # if [[ $_response -lt 1 ]]; then From aba4c508860e16b8a9f2cfb0eba3f38440b75e7d Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 24 May 2019 14:22:48 -0400 Subject: [PATCH 144/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 5e9d940..4bff596 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -304,7 +304,7 @@ EOF echo $HTTP_JSON_BODY -curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST $HTTP_JSON_BODY $_httpURL +#curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST $HTTP_JSON_BODY $_httpURL # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... 
# if [[ $_response -lt 1 ]]; then From 0ec388493389106c1e4d25a62a930e3f98da015f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 28 May 2019 17:35:33 +0200 Subject: [PATCH 145/691] Start of the Karbon stuff --- scripts/lib.pc.sh | 34 +++++++++------------------------- 1 file changed, 9 insertions(+), 25 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0336a55..1e26fb6 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -213,33 +213,17 @@ function karbon_enable() { function karbon_image_download() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _loop=0 - local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" - local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " - local _httpURL="https://localhost:7050/acs/image/download" + local _startDownload="https://localhost:7050/acs/image/download" + local _getuuidDownload="https://localhost:7050/acs/image/list" + + # Create the Basic Authentication using base6 commands + basicauth=$(echo "admin:${PE_PASSWORD}" | base64) + + # Call the UUID URL so we have the right UUID for the image + $UUID_Centos=$(curl ) + # Use the UUID to download the image - # Start the enablement process - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... 
- if [[ $_response -eq 1 ]]; then - # Check if Karbon has been enabled - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) - while [ $_response -ne 1 ]; do - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) - done - log "Karbon has been enabled." - else - log "Retrying to enable Karbon one more time." - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - if [[ $_response -eq 1 ]]; then - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) - if [ $_response -lt 1 ]; then - log "Karbon CentOS image has not been downloaded." - else - log "Karbon CentOS image has been downloaded." - fi - fi - fi } ############################################################################################################################################################################### From a2f0e00ec2c4a4291c598d34d12bb1c9b3a19582 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 29 May 2019 14:07:35 -0700 Subject: [PATCH 146/691] Updated to newer Era Version in staging --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 5cfdcd3..7c81714 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,7 +35,7 @@ QCOW2_IMAGES=(\ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ + ERA-Server-build-1.0.1.4.qcow2 \ hycu-3.5.0-6253.qcow2 \ ) ISO_IMAGES=(\ From 5db5ac4fc4fd16d91705d3159bc51f36339e03bf Mon Sep 17 00:00:00 2001 From: Nathan C Date: Wed, 29 May 2019 20:22:31 -0700 Subject: [PATCH 147/691] Update global.vars.sh Adding the Era MSSQL QCOW2 to staging. 
--- scripts/global.vars.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7c81714..d38c60c 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -36,6 +36,7 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ ERA-Server-build-1.0.1.4.qcow2 \ + MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ ) ISO_IMAGES=(\ From ea56d68253eb28396f0089b048639489c94e7a0e Mon Sep 17 00:00:00 2001 From: Nathan C Date: Thu, 30 May 2019 17:40:37 -0700 Subject: [PATCH 148/691] Updated for Era Bootcamp --- scripts/bootcamp.sh | 6 +- scripts/global.vars.sh | 13 +++- scripts/lib.common.sh | 18 ++++++ scripts/lib.pe.sh | 139 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 173 insertions(+), 3 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 3f9e024..ccbc075 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -55,7 +55,11 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 231ff2f..d38c60c 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -6,6 +6,7 @@ PC_DEV_VERSION='5.10.3' PC_CURRENT_VERSION='5.10.3' PC_STABLE_VERSION='5.8.2' FILES_VERSION='3.5.0' +FILE_ANALYTICS_VERSION='1.0.1' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -34,7 +35,9 @@ QCOW2_IMAGES=(\ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ + ERA-Server-build-1.0.1.4.qcow2 \ + MSSQL-2016-VM.qcow2 \ + hycu-3.5.0-6253.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ @@ -93,6 +96,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -122,6 +127,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -151,6 +158,8 @@ 
case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -172,7 +181,7 @@ case "${OCTET[0]}.${OCTET[1]}" in NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) - DNS_SERVERS='10.42.196.10,10.42.194.10' + DNS_SERVERS="10.42.196.10,10.42.194.10" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index b62137f..df5ed67 100644 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -569,6 +569,24 @@ function ntnx_download() { _source_url="${FILES_URL}" fi ;; + FILE_ANALYTICS | file_analytics ) + args_required 'FILE_ANALYTICS_VERSION' + _meta_url="${FILE_ANALYTICS_METAURL}" + + if [[ -z ${_meta_url} ]]; then + _error=22 + log "Error ${_error}: unsupported FILES_VERSION=${FILE_ANALYTICS_VERSION}!" + log 'Sync the following to global.var.sh...' + log 'Browse to https://portal.nutanix.com/#/page/releases/afsDetails?targetVal=GA' + log " - Find ${FILE_ANALYTICS_VERSION} in the Additional Releases section on the lower right side" + log ' - Provide the metadata URL option to this function, both case stanzas.' + exit ${_error} + fi + + if [[ ! -z ${FILE_ANALYTICS_URL} ]]; then + _source_url="${FILE_ANALYTICS_URL}" + fi + ;; * ) _error=88 log "Error ${_error}:: couldn't determine software-type ${_ncli_softwaretype}!" 
diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 25e1359..b1f27c9 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -181,6 +181,145 @@ function files_install() { fi } +############################################################################################################################################################################### +# Routine to get the Nutanix File Analytics injected +############################################################################################################################################################################### + +function file_analytics_install() { + local _ncli_softwaretype='FILE_ANALYTICS' + local _ncli_software_type='file_analytics' + local _test + + dependencies 'install' 'jq' || exit 13 + + log "IDEMPOTENCY: checking for ${_ncli_software_type} completed..." + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true software list \ + | jq -r \ + '.data[] | select(.softwareType == "'${_ncli_softwaretype}'") | select(.status == "COMPLETED") | .version') + + if [[ ${_test} != "${FILE_ANALYTICS_VERSION}" ]]; then + log "Files ${FILE_ANALYTICS_VERSION} not completed. ${_test}" + ntnx_download "${_ncli_software_type}" + else + log "IDEMPOTENCY: Files ${FILE_ANALYTICS_VERSION} already completed." 
+ fi +} + +############################################################################################################################################################################### +# Create File Server +############################################################################################################################################################################### + +function create_file_server() { + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _fileserver_name="BootcampFS" + local _internal_nw_name="${1}" + local _internal_nw_uuid + local _external_nw_name="${2}" + local _external_nw_uuid + local _test + local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" + + log "Get cluster network and storage container UUIDs..." + _internal_nw_uuid=$(acli "net.get ${_internal_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _external_nw_uuid=$(acli "net.get ${_external_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ + | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) + log "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" + log "${_external_nw_name} network UUID: ${_external_nw_uuid}" + log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" + + HTTP_JSON_BODY=$(cat < Date: Fri, 31 May 2019 14:12:44 +0200 Subject: [PATCH 149/691] Updated Karbon CentOS image download --- scripts/lib.pc.sh | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1e26fb6..68d5036 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -6,6 +6,9 @@ # 12th of April 2019 - Willem Essenstam # Added a "-d" character in the flow_enable so the command would run. # Changed the Karbon Eanable function so it also checks that Karbon has been enabled. 
Some small typos changed so the Karbon part should work +# +# 31-05-2019 - Willem Essenstam +# Added the download bits for the Centos Image for Karbon ############################################################################################################################################################################### @@ -220,8 +223,19 @@ function karbon_image_download() { basicauth=$(echo "admin:${PE_PASSWORD}" | base64) # Call the UUID URL so we have the right UUID for the image - $UUID_Centos=$(curl ) + uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic ${_auth}" https://localhost:7050/acs/image/list $CURL_HTTP_OPTS | jq '.[].uuid' | tr -d \/\") + # Use the UUID to download the image + response=$(curl -X POST $_startDownload -d "{\"uuid\":\"$uuid\"}" -H "X-NTNX-AUTH: Basic $_auth" $CURL_HTTP_OPTS) + if [ -z $response ]; then + log "Downlaod of the CenOS image for Karbon has been started. Trying one more time..." + response=$(curl -X POST $_startDownload -d "{\"uuid\":\"$uuid\"}" -H "X-NTNX-AUTH: Basic $_auth" $CURL_HTTP_OPTS) + if [ -z $response ]; then + log "Download of CentOS image for Karbon failed... Please run manually." + fi + else + log "Download of CentOS image for Karbon has started..." 
+ fi } From 3b728353ab853942dbc1c4133de471c342460636 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 31 May 2019 14:35:41 +0200 Subject: [PATCH 150/691] Update bootstrap.sh --- bootstrap.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bootstrap.sh b/bootstrap.sh index 42ec6dd..5101ba9 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -3,6 +3,9 @@ # Example use from a Nutanix CVM: # curl --remote-name --location https://raw.githubusercontent.com/nutanixworkshops/stageworkshop/master/bootstrap.sh && sh ${_##*/} # +# Development/Beta version +# curl --remote-name --location https://raw.githubusercontent.com/jncox/stageworkshop/master/bootstrap.sh && sh ${_##*/} +# # For testing: # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} From ee87162a1c5c5deb90a36e496e12d47b383b6dfa Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 4 Jun 2019 13:29:05 -0700 Subject: [PATCH 151/691] Update global.vars.sh --- scripts/global.vars.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d38c60c..4ece3df 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -38,6 +38,7 @@ QCOW2_IMAGES=(\ ERA-Server-build-1.0.1.4.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ ) ISO_IMAGES=(\ CentOS7.iso \ @@ -47,6 +48,7 @@ ISO_IMAGES=(\ Nutanix-VirtIO-1.1.3.iso \ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ + VeeamBR_9.5.4.2615.Update4.iso \ ) # shellcheck disable=2206 From a958c6d13e6811c721ec03700c0bec0da5a553f5 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Tue, 4 Jun 2019 13:29:19 -0700 Subject: [PATCH 152/691] Update global.vars.sh --- scripts/global.vars.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d38c60c..4ece3df 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -38,6 +38,7 @@ QCOW2_IMAGES=(\ 
ERA-Server-build-1.0.1.4.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ ) ISO_IMAGES=(\ CentOS7.iso \ @@ -47,6 +48,7 @@ ISO_IMAGES=(\ Nutanix-VirtIO-1.1.3.iso \ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ + VeeamBR_9.5.4.2615.Update4.iso \ ) # shellcheck disable=2206 From 240212bff00fefb1bc50d8978157878e02b4ee59 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 5 Jun 2019 11:41:28 +0200 Subject: [PATCH 153/691] Karbon Download added Start the Download of the CentOS Karbon image --- scripts/bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 8d41210..fa8f718 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -121,6 +121,7 @@ case ${1} in && calm_enable \ && karbon_enable \ && lcm \ + && karbon_image_download \ && images \ && flow_enable \ && pc_cluster_img_import \ From a88a4239fb110a8709bdcbd5fd3eeb364be94998 Mon Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 28 Jun 2019 10:07:34 -0700 Subject: [PATCH 154/691] Update global.vars.sh Updated for 4-node on 1-node VLAN --- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4ece3df..05938ff 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -180,9 +180,9 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + #NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + #NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + #NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) DNS_SERVERS="10.42.196.10,10.42.194.10" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR From a7ff2cbec17212b4a6271cd180998cf037f284f7 Mon 
Sep 17 00:00:00 2001 From: Nathan C Date: Fri, 28 Jun 2019 10:11:27 -0700 Subject: [PATCH 155/691] Update global.vars.sh Updated for 4-Node to be on 1-Node VLAN --- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4ece3df..05938ff 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -180,9 +180,9 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + #NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + #NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + #NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) DNS_SERVERS="10.42.196.10,10.42.194.10" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR From 1f4e7fa1070aa7dddff0f63fe10f1a794683fbd4 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 2 Jul 2019 13:34:20 +0200 Subject: [PATCH 156/691] Update lib.pc.sh --- scripts/lib.pc.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 68d5036..f0d966f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -218,26 +218,26 @@ function karbon_image_download() { local _loop=0 local _startDownload="https://localhost:7050/acs/image/download" local _getuuidDownload="https://localhost:7050/acs/image/list" - + # Create the Basic Authentication using base6 commands - basicauth=$(echo "admin:${PE_PASSWORD}" | base64) + _auth=$(echo "admin:${PE_PASSWORD}" | base64) # Call the UUID URL so we have the right UUID for the image - uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic ${_auth}" https://localhost:7050/acs/image/list $CURL_HTTP_OPTS | jq '.[].uuid' | tr -d \/\") + uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic 
${_auth}" https://localhost:7050/acs/image/list $CURL_HTTP_OPTS | jq '.[0].uuid' | tr -d \/\") + log "UUID for The Karbon image is: $uuid" # Use the UUID to download the image - response=$(curl -X POST $_startDownload -d "{\"uuid\":\"$uuid\"}" -H "X-NTNX-AUTH: Basic $_auth" $CURL_HTTP_OPTS) + response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS}) + if [ -z $response ]; then - log "Downlaod of the CenOS image for Karbon has been started. Trying one more time..." - response=$(curl -X POST $_startDownload -d "{\"uuid\":\"$uuid\"}" -H "X-NTNX-AUTH: Basic $_auth" $CURL_HTTP_OPTS) + log "Download of the CenOS image for Karbon has not been started. Trying one more time..." + response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS}) if [ -z $response ]; then log "Download of CentOS image for Karbon failed... Please run manually." fi else log "Download of CentOS image for Karbon has started..." fi - - } ############################################################################################################################################################################### From 22c5ca802f19e1c0b53ac289cfa5be53b49af60d Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 8 Jul 2019 14:08:11 +0200 Subject: [PATCH 157/691] Update lib.pc.sh LCM change due to V2 framework. Need to test... --- scripts/lib.pc.sh | 143 +++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 130 insertions(+), 13 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f0d966f..62082b0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -50,6 +50,38 @@ function flow_enable() { ############################################################################################################################################################################### # Routine to be run/loop till yes we are ok. 
############################################################################################################################################################################### + +##TODO! Needs rewriting as we need to change the url to https://localhost:9440/api/nutanix/v3/tasks/ +## This will have a result in the following way: +# { +# "status": "RUNNING", +# "last_update_time": "2019-07-08T08:38:04Z", +# "logical_timestamp": 2, +# "entity_reference_list": [], +# "creation_time": "2019-07-08T08:36:01Z", +# "cluster_reference": { +# "kind": "cluster", +# "uuid": "5be7fd77-5161-49b5-8be6-b6280c62721f" +# }, +# "subtask_reference_list": [ +# { +# "kind": "task", +# "uuid": "656210bb-b24d-495b-946a-390566b1c269" +# }, +# { +# "kind": "task", +# "uuid": "796a55c4-7662-4155-be35-8526c4db042c" +# } +# ], +# "progress_message": "LCM operations", +# "creation_time_usecs": 1562574961570658, +# "operation_type": "kLcmRootTask", +# "percentage_complete": 20, +# "api_version": "3.1", +# "uuid": "bf672a00-13e0-42e0-bbb6-dba47a9a1cd4" +#} +# Need to grab the percentage_complete value including the status to make disissions + function loop(){ local _attempts=30 @@ -106,17 +138,100 @@ function lcm() { # Need loop so we can create the full json more dynamical - # Run the Curl command and save the oputput in a temp file - curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json - - # Fill the uuid array with the correct values - uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) - - # Grabbing the versions of the UUID 
and put them in a versions array - for uuid in "${uuid_arr[@]}" - do - version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | tr -d \")) - done + # Issue is taht after the LCM inventory the LCM will be updated to a version 2.0 and the API call needs to change!!! + # We need to figure out if we are running V1 or V2! + lcm_version=$(curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"get_config\"}}"}' -H 'Content-Type: application/json' ${_url_lcm} | jq '.value' | tr -d \\ | sed 's/^"\(.*\)"$/\1/' | sed 's/.return/return/g' | jq '.return.lcm_cpdb_table_def_list.entity' | tr -d \"| grep "lcm_entity_v2" | wc -l) + + if [ $lcm_version -lt 1]; then + # V1: Run the Curl command and save the oputput in a temp file + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json + + # Fill the uuid array with the correct values + uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) + + # Grabbing the versions of the UUID and put them in a versions array + for uuid in "${uuid_arr[@]}" + do + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | 
tr -d \")) + done + else + #''_V2: run the other V2 API call to get the UUIDs of the to be updated software parts + # Grab the installed version of the software first UUIDs + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{ + "entity_type": "lcm_entity_v2", + "group_member_count": 500, + "group_member_attributes": [{ + "attribute": "id" + }, { + "attribute": "uuid" + }, { + "attribute": "entity_model" + }, { + "attribute": "version" + }, { + "attribute": "location_id" + }, { + "attribute": "entity_class" + }, { + "attribute": "description" + }, { + "attribute": "last_updated_time_usecs" + }, { + "attribute": "request_version" + }, { + "attribute": "_master_cluster_uuid_" + }, { + "attribute": "entity_type" + }, { + "attribute": "single_group_uuid" + }], + "query_name": "lcm:EntityGroupModel", + "grouping_attribute": "location_id", + "filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]" + }' $_url_groups > reply_json_uuid.json + + # Fill the uuid array with the correct values + uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="uuid") | .values[0].values[0]' reply_json_uuid.json | sort -u | tr "\"" " " | tr -s " ")) + + # Grab the available updates from the PC after LCMm has run + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{ + "entity_type": "lcm_available_version_v2", + "group_member_count": 500, + "group_member_attributes": [{ + "attribute": "uuid" + }, { + "attribute": "entity_uuid" + }, { + "attribute": "entity_class" + }, { + "attribute": "status" + }, { + "attribute": "version" + }, { + "attribute": "dependencies" + }, { + "attribute": "single_group_uuid" + }, { + "attribute": "_master_cluster_uuid_" + }, { + "attribute": "order" + }], + "query_name": "lcm:VersionModel", + "filter_criteria": "_master_cluster_uuid_==[no_val]" + }' $_url_groups > reply_json_ver.json + + # Grabbing the versions of the UUID and put them in a versions array + for 
uuid in "${uuid_arr[@]}" + do + # Get the latest version from the to be updated uuid + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | tail -1 | tr -d \")) + + # Get the UUID corresponding with the version found earlier + uuid_arr_new+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="uuid") .values[].values[]' reply_json_ver.json | tail -1 | tr -d \")) + done + # Copy the right info into the to be used array + uuid_arr=("${uuid_arr_new[@]}") + fi # Set the parameter to create the ugrade plan # Create the curl json string '-d blablablablabla' so we can call the string and not the full json data line @@ -142,9 +257,11 @@ function lcm() { # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) - # Remove the temp json file as we don't need it anymore + # Remove the temp json files as we don't need it anymore rm -rf reply_json.json - + rm -rf reply_json_ver.json + rm -rf reply_json_uuid.json + # Notify the log server that the LCM has created a plan log "LCM Inventory has created a plan" From 0efb936588b841316932336d1954a11f9978485b Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 8 Jul 2019 16:30:42 +0200 Subject: [PATCH 158/691] Update global.vars.sh Small issue in the script using 5.10.2 as the AOS --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 05938ff..8a54893 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -66,7 +66,7 @@ NW1_DHCP_START="${IPV4_PREFIX}.50" NW1_DHCP_END="${IPV4_PREFIX}.125" NW2_NAME='Secondary' -NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) +NW2_VLAN=$((OCTET[2]*10+1)) NW2_SUBNET="${IPV4_PREFIX}.129/25" 
NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" From f6b05cc44780ebc85b45e6e3b6bcf3f60db169b6 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 9 Jul 2019 14:36:10 +0200 Subject: [PATCH 159/691] Update lib.pc.sh Rewritten Loop and LCM modules --- scripts/lib.pc.sh | 105 +++++----------------------------------------- 1 file changed, 11 insertions(+), 94 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 62082b0..f953e26 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -50,36 +50,6 @@ function flow_enable() { ############################################################################################################################################################################### # Routine to be run/loop till yes we are ok. ############################################################################################################################################################################### - -##TODO! Needs rewriting as we need to change the url to https://localhost:9440/api/nutanix/v3/tasks/ -## This will have a result in the following way: -# { -# "status": "RUNNING", -# "last_update_time": "2019-07-08T08:38:04Z", -# "logical_timestamp": 2, -# "entity_reference_list": [], -# "creation_time": "2019-07-08T08:36:01Z", -# "cluster_reference": { -# "kind": "cluster", -# "uuid": "5be7fd77-5161-49b5-8be6-b6280c62721f" -# }, -# "subtask_reference_list": [ -# { -# "kind": "task", -# "uuid": "656210bb-b24d-495b-946a-390566b1c269" -# }, -# { -# "kind": "task", -# "uuid": "796a55c4-7662-4155-be35-8526c4db042c" -# } -# ], -# "progress_message": "LCM operations", -# "creation_time_usecs": 1562574961570658, -# "operation_type": "kLcmRootTask", -# "percentage_complete": 20, -# "api_version": "3.1", -# "uuid": "bf672a00-13e0-42e0-bbb6-dba47a9a1cd4" -#} # Need to grab the percentage_complete value including the status to make disissions function loop(){ @@ -87,21 +57,23 @@ function loop(){ local _attempts=30 local 
_loops=0 local _sleep=60 - local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local CURL_HTTP_OPTS=" --max-time 25 --silent -H 'Content-Type:application/json' -H 'Accept:application/json' --insecure " # What is the progress of the taskid?? while true; do (( _loops++ )) # Get the progress of the task - _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}?filterCriteria=parent_task_uuid%3D%3D${_task_id} | jq '.entities[0].percentageCompleted' 2>nul | tr -d \") + _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}/${_task_id} | jq '.percentage_complete' 2>nul | tr -d \") + echo "Progress of the TASK with task id ${_task_id}: ${_progress}" + if (( ${_progress} == 100 )); then - log "The step has been succesfuly run" + echo "The step has been succesfuly run" break; elif (( ${_loops} > ${_attempts} )); then - log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + echo "Warning ${_error} @${1}: Giving up after ${_loop} tries." return ${_error} else - log "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" + echo "Still running... loop $_loops/$_attempts. 
Step is at ${_progress}% ...Sleeping ${_sleep} seconds" sleep ${_sleep} fi done @@ -114,7 +86,7 @@ function loop(){ function lcm() { local _url_lcm='https://localhost:9440/PrismGateway/services/rest/v1/genesis' - local _url_progress='https://localhost:9440/PrismGateway/services/rest/v1/progress_monitors' + local _url_progress='https://localhost:9440/api/nutanix/v3/tasks' local _url_groups='https://localhost:9440/api/nutanix/v3/groups' local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' @@ -157,68 +129,13 @@ function lcm() { else #''_V2: run the other V2 API call to get the UUIDs of the to be updated software parts # Grab the installed version of the software first UUIDs - curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{ - "entity_type": "lcm_entity_v2", - "group_member_count": 500, - "group_member_attributes": [{ - "attribute": "id" - }, { - "attribute": "uuid" - }, { - "attribute": "entity_model" - }, { - "attribute": "version" - }, { - "attribute": "location_id" - }, { - "attribute": "entity_class" - }, { - "attribute": "description" - }, { - "attribute": "last_updated_time_usecs" - }, { - "attribute": "request_version" - }, { - "attribute": "_master_cluster_uuid_" - }, { - "attribute": "entity_type" - }, { - "attribute": "single_group_uuid" - }], - "query_name": "lcm:EntityGroupModel", - "grouping_attribute": "location_id", - "filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]" - }' $_url_groups > reply_json_uuid.json + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_entity_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "id"}, {"attribute": "uuid"}, {"attribute": "entity_model"}, {"attribute": "version"},{"attribute": "location_id"}, {"attribute": "entity_class"}, {"attribute": "description"}, {"attribute": "last_updated_time_usecs"},{"attribute": 
"request_version"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "entity_type"}, {"attribute": "single_group_uuid"}],"query_name": "lcm:EntityGroupModel","grouping_attribute": "location_id","filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_uuid.json # Fill the uuid array with the correct values uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="uuid") | .values[0].values[0]' reply_json_uuid.json | sort -u | tr "\"" " " | tr -s " ")) # Grab the available updates from the PC after LCMm has run - curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{ - "entity_type": "lcm_available_version_v2", - "group_member_count": 500, - "group_member_attributes": [{ - "attribute": "uuid" - }, { - "attribute": "entity_uuid" - }, { - "attribute": "entity_class" - }, { - "attribute": "status" - }, { - "attribute": "version" - }, { - "attribute": "dependencies" - }, { - "attribute": "single_group_uuid" - }, { - "attribute": "_master_cluster_uuid_" - }, { - "attribute": "order" - }], - "query_name": "lcm:VersionModel", - "filter_criteria": "_master_cluster_uuid_==[no_val]" - }' $_url_groups > reply_json_ver.json + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"}, {"attribute": "entity_class"}, {"attribute": "status"}, {"attribute": "version"}, {"attribute": "dependencies"},{"attribute": "single_group_uuid"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "order"}],"query_name": "lcm:VersionModel","filter_criteria": "_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_ver.json # Grabbing the versions of the UUID and put them in a versions array for uuid in "${uuid_arr[@]}" @@ -244,8 +161,8 @@ function lcm() { while [ $count -lt ${#uuid_arr[@]} ] do 
_json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," + echo "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" let count=count+1 - log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" done # Remove the last "," as we don't need it. From 2396bd4acda8673cad6b801f917d30d373e8d29e Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 10 Jul 2019 10:36:57 +0200 Subject: [PATCH 160/691] Update of the LCM module for LCM v2 Framework --- scripts/lib.pc.sh | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f953e26..985cbc0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -57,7 +57,7 @@ function loop(){ local _attempts=30 local _loops=0 local _sleep=60 - local CURL_HTTP_OPTS=" --max-time 25 --silent -H 'Content-Type:application/json' -H 'Accept:application/json' --insecure " + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # What is the progress of the taskid?? while true; do @@ -112,9 +112,9 @@ function lcm() { # Issue is taht after the LCM inventory the LCM will be updated to a version 2.0 and the API call needs to change!!! # We need to figure out if we are running V1 or V2! 
- lcm_version=$(curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"get_config\"}}"}' -H 'Content-Type: application/json' ${_url_lcm} | jq '.value' | tr -d \\ | sed 's/^"\(.*\)"$/\1/' | sed 's/.return/return/g' | jq '.return.lcm_cpdb_table_def_list.entity' | tr -d \"| grep "lcm_entity_v2" | wc -l) + lcm_version=$(curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"get_config\"}}"}' ${_url_lcm} | jq '.value' | tr -d \\ | sed 's/^"\(.*\)"$/\1/' | sed 's/.return/return/g' | jq '.return.lcm_cpdb_table_def_list.entity' | tr -d \"| grep "lcm_entity_v2" | wc -l) - if [ $lcm_version -lt 1]; then + if [ $lcm_version -lt 1 ]; then # V1: Run the Curl command and save the oputput in a temp file curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json @@ -129,7 +129,7 @@ function lcm() { else #''_V2: run the other V2 API call to get the UUIDs of the to be updated software parts # Grab the installed version of the software first UUIDs - curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_entity_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "id"}, {"attribute": "uuid"}, {"attribute": "entity_model"}, {"attribute": "version"},{"attribute": "location_id"}, {"attribute": "entity_class"}, {"attribute": "description"}, {"attribute": "last_updated_time_usecs"},{"attribute": 
"request_version"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "entity_type"}, {"attribute": "single_group_uuid"}],"query_name": "lcm:EntityGroupModel","grouping_attribute": "location_id","filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_uuid.json + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_entity_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "id"}, {"attribute": "uuid"}, {"attribute": "entity_model"}, {"attribute": "version"}, {"attribute": "location_id"}, {"attribute": "entity_class"}, {"attribute": "description"}, {"attribute": "last_updated_time_usecs"}, {"attribute": "request_version"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "entity_type"}, {"attribute": "single_group_uuid"}],"query_name": "lcm:EntityGroupModel","grouping_attribute": "location_id","filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_uuid.json # Fill the uuid array with the correct values uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="uuid") | .values[0].values[0]' reply_json_uuid.json | sort -u | tr "\"" " " | tr -s " ")) @@ -142,12 +142,8 @@ function lcm() { do # Get the latest version from the to be updated uuid version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | tail -1 | tr -d \")) - - # Get the UUID corresponding with the version found earlier - uuid_arr_new+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="uuid") .values[].values[]' reply_json_ver.json | tail -1 | tr -d \")) done # Copy the right info into the to be used array - uuid_arr=("${uuid_arr_new[@]}") fi # Set the parameter 
to create the ugrade plan @@ -161,7 +157,7 @@ function lcm() { while [ $count -lt ${#uuid_arr[@]} ] do _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," - echo "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" + log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" let count=count+1 done @@ -173,11 +169,6 @@ function lcm() { # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) - - # Remove the temp json files as we don't need it anymore - rm -rf reply_json.json - rm -rf reply_json_ver.json - rm -rf reply_json_uuid.json # Notify the log server that the LCM has created a plan log "LCM Inventory has created a plan" @@ -198,13 +189,18 @@ function lcm() { log "LCM Upgrade has encountered an error!!!!" else # Notify the logserver that we are starting the LCM Upgrade - log "LCM Upgrade starting..." + log "LCM Upgrade starting...Process may take up to 30 minutes!!!" 
# Run the progess checker loop fi fi + # Remove the temp json files as we don't need it anymore + rm -rf reply_json.json + rm -rf reply_json_ver.json + rm -rf reply_json_uuid.json + } ############################################################################################################################################################################### From 30902ca908658b63fa696fdf165d8a990a88536a Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 10 Jul 2019 10:43:08 +0200 Subject: [PATCH 161/691] Timing update on lib.pc.sh for LCM --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 985cbc0..6ead8bd 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -54,7 +54,7 @@ function flow_enable() { function loop(){ - local _attempts=30 + local _attempts=40 local _loops=0 local _sleep=60 local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " @@ -189,7 +189,7 @@ function lcm() { log "LCM Upgrade has encountered an error!!!!" else # Notify the logserver that we are starting the LCM Upgrade - log "LCM Upgrade starting...Process may take up to 30 minutes!!!" + log "LCM Upgrade starting...Process may take up to 40 minutes!!!" 
# Run the progess checker loop From 7cef44567bcb2a208b22306c005feaaeb7fd2a55 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 10 Jul 2019 12:30:04 +0200 Subject: [PATCH 162/691] Update lib.pc.sh --- scripts/lib.pc.sh | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6ead8bd..1e1500f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -64,16 +64,15 @@ function loop(){ (( _loops++ )) # Get the progress of the task _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}/${_task_id} | jq '.percentage_complete' 2>nul | tr -d \") - echo "Progress of the TASK with task id ${_task_id}: ${_progress}" if (( ${_progress} == 100 )); then - echo "The step has been succesfuly run" + log "The step has been succesfuly run" break; elif (( ${_loops} > ${_attempts} )); then - echo "Warning ${_error} @${1}: Giving up after ${_loop} tries." + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." return ${_error} else - echo "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" + log "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" sleep ${_sleep} fi done From ec0944e2f4adc422de9f7b6e632c9aaee706973b Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 10 Jul 2019 12:31:24 +0200 Subject: [PATCH 163/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1e1500f..f47047d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -52,6 +52,8 @@ function flow_enable() { ############################################################################################################################################################################### # Need to grab the percentage_complete value including the status to make disissions +# TODO: Also look at the status!! 
+ function loop(){ local _attempts=40 From 2f0ec1900f6e4df2f7827bdf272e58cc0b8b6b3d Mon Sep 17 00:00:00 2001 From: Nathan C Date: Mon, 15 Jul 2019 08:00:36 -0700 Subject: [PATCH 164/691] LCM Update fixes --- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 109 ++++++++++++++++++++++++++++++++--------- 2 files changed, 88 insertions(+), 23 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 05938ff..8a54893 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -66,7 +66,7 @@ NW1_DHCP_START="${IPV4_PREFIX}.50" NW1_DHCP_END="${IPV4_PREFIX}.125" NW2_NAME='Secondary' -NW2_VLAN=$(( ${OCTET[2]} * 10 + 1 )) +NW2_VLAN=$((OCTET[2]*10+1)) NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 97b2c90..f47047d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -6,6 +6,9 @@ # 12th of April 2019 - Willem Essenstam # Added a "-d" character in the flow_enable so the command would run. # Changed the Karbon Eanable function so it also checks that Karbon has been enabled. Some small typos changed so the Karbon part should work +# +# 31-05-2019 - Willem Essenstam +# Added the download bits for the Centos Image for Karbon ############################################################################################################################################################################### @@ -47,18 +50,23 @@ function flow_enable() { ############################################################################################################################################################################### # Routine to be run/loop till yes we are ok. 
############################################################################################################################################################################### +# Need to grab the percentage_complete value including the status to make disissions + +# TODO: Also look at the status!! + function loop(){ - local _attempts=30 + local _attempts=40 local _loops=0 local _sleep=60 - local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # What is the progress of the taskid?? while true; do (( _loops++ )) # Get the progress of the task - _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}?filterCriteria=parent_task_uuid%3D%3D${_task_id} | jq '.entities[0].percentageCompleted' 2>nul | tr -d \") + _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}/${_task_id} | jq '.percentage_complete' 2>nul | tr -d \") + if (( ${_progress} == 100 )); then log "The step has been succesfuly run" break; @@ -79,7 +87,7 @@ function loop(){ function lcm() { local _url_lcm='https://localhost:9440/PrismGateway/services/rest/v1/genesis' - local _url_progress='https://localhost:9440/PrismGateway/services/rest/v1/progress_monitors' + local _url_progress='https://localhost:9440/api/nutanix/v3/tasks' local _url_groups='https://localhost:9440/api/nutanix/v3/groups' local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' @@ -103,17 +111,41 @@ function lcm() { # Need loop so we can create the full json more dynamical - # Run the Curl command and save the oputput in a temp file - curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 
1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json - - # Fill the uuid array with the correct values - uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) - - # Grabbing the versions of the UUID and put them in a versions array - for uuid in "${uuid_arr[@]}" - do - version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | tr -d \")) - done + # Issue is taht after the LCM inventory the LCM will be updated to a version 2.0 and the API call needs to change!!! + # We need to figure out if we are running V1 or V2! + lcm_version=$(curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"get_config\"}}"}' ${_url_lcm} | jq '.value' | tr -d \\ | sed 's/^"\(.*\)"$/\1/' | sed 's/.return/return/g' | jq '.return.lcm_cpdb_table_def_list.entity' | tr -d \"| grep "lcm_entity_v2" | wc -l) + + if [ $lcm_version -lt 1 ]; then + # V1: Run the Curl command and save the oputput in a temp file + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json + + # Fill the uuid array with the correct values + uuid_arr=($(jq 
'.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) + + # Grabbing the versions of the UUID and put them in a versions array + for uuid in "${uuid_arr[@]}" + do + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | tr -d \")) + done + else + #''_V2: run the other V2 API call to get the UUIDs of the to be updated software parts + # Grab the installed version of the software first UUIDs + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_entity_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "id"}, {"attribute": "uuid"}, {"attribute": "entity_model"}, {"attribute": "version"}, {"attribute": "location_id"}, {"attribute": "entity_class"}, {"attribute": "description"}, {"attribute": "last_updated_time_usecs"}, {"attribute": "request_version"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "entity_type"}, {"attribute": "single_group_uuid"}],"query_name": "lcm:EntityGroupModel","grouping_attribute": "location_id","filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_uuid.json + + # Fill the uuid array with the correct values + uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="uuid") | .values[0].values[0]' reply_json_uuid.json | sort -u | tr "\"" " " | tr -s " ")) + + # Grab the available updates from the PC after LCMm has run + curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"}, {"attribute": "entity_class"}, {"attribute": "status"}, {"attribute": "version"}, {"attribute": 
"dependencies"},{"attribute": "single_group_uuid"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "order"}],"query_name": "lcm:VersionModel","filter_criteria": "_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_ver.json + + # Grabbing the versions of the UUID and put them in a versions array + for uuid in "${uuid_arr[@]}" + do + # Get the latest version from the to be updated uuid + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | tail -1 | tr -d \")) + done + # Copy the right info into the to be used array + fi # Set the parameter to create the ugrade plan # Create the curl json string '-d blablablablabla' so we can call the string and not the full json data line @@ -126,8 +158,8 @@ function lcm() { while [ $count -lt ${#uuid_arr[@]} ] do _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," - let count=count+1 log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" + let count=count+1 done # Remove the last "," as we don't need it. @@ -138,10 +170,7 @@ function lcm() { # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) - - # Remove the temp json file as we don't need it anymore - rm -rf reply_json.json - + # Notify the log server that the LCM has created a plan log "LCM Inventory has created a plan" @@ -161,13 +190,18 @@ function lcm() { log "LCM Upgrade has encountered an error!!!!" else # Notify the logserver that we are starting the LCM Upgrade - log "LCM Upgrade starting..." + log "LCM Upgrade starting...Process may take up to 40 minutes!!!" 
# Run the progess checker loop fi fi + # Remove the temp json files as we don't need it anymore + rm -rf reply_json.json + rm -rf reply_json_ver.json + rm -rf reply_json_uuid.json + } ############################################################################################################################################################################### @@ -203,7 +237,38 @@ function karbon_enable() { log "Karbon has been enabled." fi fi - fi + fi +} + +############################################################################################################################################################################### +# Download Karbon CentOS Image +############################################################################################################################################################################### + +function karbon_image_download() { + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _loop=0 + local _startDownload="https://localhost:7050/acs/image/download" + local _getuuidDownload="https://localhost:7050/acs/image/list" + + # Create the Basic Authentication using base6 commands + _auth=$(echo "admin:${PE_PASSWORD}" | base64) + + # Call the UUID URL so we have the right UUID for the image + uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic ${_auth}" https://localhost:7050/acs/image/list $CURL_HTTP_OPTS | jq '.[0].uuid' | tr -d \/\") + log "UUID for The Karbon image is: $uuid" + + # Use the UUID to download the image + response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS}) + + if [ -z $response ]; then + log "Download of the CenOS image for Karbon has not been started. Trying one more time..." 
+ response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS}) + if [ -z $response ]; then + log "Download of CentOS image for Karbon failed... Please run manually." + fi + else + log "Download of CentOS image for Karbon has started..." + fi } ############################################################################################################################################################################### From 4ead7d754baa6c76130242a3ec1dd059169b12c9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 22 Jul 2019 09:10:06 -0700 Subject: [PATCH 165/691] Updates with mlavi stuff --- bootstrap.sh | 12 +++++++++++- scripts/files.sh | 27 +++++++++++++++++++++++++++ stage_workshop.sh | 15 ++++++++++----- 3 files changed, 48 insertions(+), 6 deletions(-) create mode 100644 scripts/files.sh diff --git a/bootstrap.sh b/bootstrap.sh index 5101ba9..d94931a 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -51,6 +51,16 @@ _ERROR=0 if (( ${_ERROR} == 1 )); then echo "Error ${_ERROR}: This script should be run on a Nutanix CVM!" #echo RESTORE: + +######################################################################### +### Added to verify user is Nutanix 7/22/2019 from mlavi version of file. +######################################################################### + + exit ${_ERROR} +elif [[ $(whoami) != 'nutanix' ]]; then + _ERROR=50 + echo "PBC-50: This guardrail can be relaxed with proper testing for the future." + echo "Error ${_ERROR}: This script should be run as user nutanix!" exit ${_ERROR} fi @@ -134,7 +144,7 @@ cat < 0 )); then + _libraries+='lib.pe.sh' + _pe_launch='files.sh' + fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='ts2019.sh' @@ -299,11 +309,6 @@ function select_workshop() { #__main__ -# Source Workshop common routines + global variables -. scripts/lib.common.sh -. 
scripts/global.vars.sh -begin - _VALIDATE='Validate Staged Clusters' _CLUSTER_FILE='Cluster Input File' CLUSTER_LIST= From 7b785a1e76d66f92eca7a8ca766ce879ed63fbcc Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 22 Jul 2019 09:36:15 -0700 Subject: [PATCH 166/691] Updates for 5.10.5 --- scripts/global.vars.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 8a54893..6153b59 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,7 +3,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='5.10.3' -PC_CURRENT_VERSION='5.10.3' +PC_CURRENT_VERSION='5.10.5' PC_STABLE_VERSION='5.8.2' FILES_VERSION='3.5.0' FILE_ANALYTICS_VERSION='1.0.1' @@ -92,8 +92,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' @@ -123,8 +123,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - 
PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' @@ -154,8 +154,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' From bea2e33ebf0e0c768c86bfc58310eef7c1d133aa Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 22 Jul 2019 12:49:45 -0700 Subject: [PATCH 167/691] updated for 5.10.5 --- bootstrap.sh | 15 ++++++++++++++- scripts/global.vars.sh | 14 +++++++------- stage_workshop.sh | 15 ++++++++++----- 3 files changed, 31 insertions(+), 13 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index b68142c..9a438fa 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -3,6 +3,9 @@ # Example use from a Nutanix CVM: # curl --remote-name --location https://raw.githubusercontent.com/nutanixworkshops/stageworkshop/master/bootstrap.sh && 
sh ${_##*/} # +# Development/Beta version +# curl --remote-name --location https://raw.githubusercontent.com/jncox/stageworkshop/master/bootstrap.sh && sh ${_##*/} +# # For testing: # curl --remote-name --location https://raw.githubusercontent.com/mlavi/stageworkshop/master/bootstrap.sh && SOURCE=${_} sh ${_##*/} @@ -48,6 +51,16 @@ _ERROR=0 if (( ${_ERROR} == 1 )); then echo "Error ${_ERROR}: This script should be run on a Nutanix CVM!" #echo RESTORE: + +######################################################################### +### Added to verify user is Nutanix 7/22/2019 from mlavi version of file. +######################################################################### + + exit ${_ERROR} +elif [[ $(whoami) != 'nutanix' ]]; then + _ERROR=50 + echo "PBC-50: This guardrail can be relaxed with proper testing for the future." + echo "Error ${_ERROR}: This script should be run as user nutanix!" exit ${_ERROR} fi @@ -131,7 +144,7 @@ cat < 0 )); then + _libraries+='lib.pe.sh' + _pe_launch='files.sh' + fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='ts2019.sh' @@ -299,11 +309,6 @@ function select_workshop() { #__main__ -# Source Workshop common routines + global variables -. scripts/lib.common.sh -. 
scripts/global.vars.sh -begin - _VALIDATE='Validate Staged Clusters' _CLUSTER_FILE='Cluster Input File' CLUSTER_LIST= From 599c1c0c6fdbee06467a402ea217cd19bbd5eaf0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 22 Jul 2019 13:27:40 -0700 Subject: [PATCH 168/691] Updated for 5.11 testing --- scripts/global.vars.sh | 14 +++++++------- stage_workshop.sh | 15 +++++++++------ 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6153b59..7206549 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.10.3' +PC_DEV_VERSION='5.11' PC_CURRENT_VERSION='5.10.5' PC_STABLE_VERSION='5.8.2' FILES_VERSION='3.5.0' @@ -90,8 +90,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' @@ -121,8 +121,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' 
PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' @@ -152,8 +152,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index 4d4ba3e..2ba81f9 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,10 +11,11 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current (AutoDC2)" \ -#"Citrix Desktop on AHV Workshop (AOS 5.10+/AHV PC 5.10+) = Development" \ -"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Development" \ -"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable (AutoDC2)" \ +"Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current" \ +"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Current" \ +"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ +"Development Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Citrix Desktop on AHV Workshop (AOS 5.11+/AHV PC 5.11+) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -55,12 +56,14 @@ function stage_clusters() { _pc_launch=${_pe_launch} fi if (( $(echo 
${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then - _pe_launch='stage_citrixhow.sh' - _pc_launch='stage_citrixhow_pc.sh' + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='citrix.sh' + _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i Files | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh' _pe_launch='files.sh' + _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' From 024c01672eb421ff96c4af23dee5b7a8662cb4a1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 23 Jul 2019 08:32:47 -0700 Subject: [PATCH 169/691] Updated for 5.11 - Files and Era --- scripts/global.vars.sh | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7206549..198b029 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -5,8 +5,8 @@ RELEASE='release.json' PC_DEV_VERSION='5.11' PC_CURRENT_VERSION='5.10.5' PC_STABLE_VERSION='5.8.2' -FILES_VERSION='3.5.0' -FILE_ANALYTICS_VERSION='1.0.1' +FILES_VERSION='3.5.1' +FILE_ANALYTICS_VERSION='1.1.0' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -35,7 +35,7 @@ QCOW2_IMAGES=(\ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - ERA-Server-build-1.0.1.4.qcow2 \ + ERA-Server-build-1.1.0.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ @@ -96,10 +96,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - 
FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.1.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/fileanalytics-1.1.0.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -127,10 +127,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -158,10 +158,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 
PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From 6e6100008c587d103f7ed681ee6ce6e52f322265 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 24 Jul 2019 09:02:04 -0700 Subject: [PATCH 170/691] Updates for bootcamp specific staging --- stage_workshop.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 2ba81f9..559543d 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -15,7 +15,9 @@ WORKSHOPS=(\ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Current" \ "Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ "Development Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ -#"Citrix Desktop on AHV Workshop (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ ) # Adjust 
function stage_clusters, below, for file/script mappings as needed function stage_clusters() { From 35c97206e555f7e9414b437a713312449796be17 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 24 Jul 2019 10:25:21 -0700 Subject: [PATCH 171/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 559543d..e4f1e13 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -14,7 +14,7 @@ WORKSHOPS=(\ "Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Current" \ "Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ -"Development Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +"Development Bootcamp (AOS 5.10+/AHV PC 5.11+) = Development" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ From 88c22839e4f2e8c871313d63dcda80f68312f3c1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 24 Jul 2019 16:38:44 -0700 Subject: [PATCH 172/691] updates for Prosm Pro Lab --- scripts/lib.pc.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f47047d..6206e5f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -121,7 +121,7 @@ function lcm() { # Fill the uuid array with the correct values uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) - + # Grabbing the versions of the UUID and put them in a versions array for uuid in "${uuid_arr[@]}" do @@ -170,7 +170,7 @@ function lcm() { # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) - + # Notify the log server that the LCM has created a plan log "LCM Inventory has created a plan" @@ -214,10 
+214,10 @@ function karbon_enable() { local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" - + # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... if [[ $_response -eq 1 ]]; then # Check if Karbon has been enabled @@ -505,7 +505,8 @@ function seedPC() { unzip /home/nutanix/seedPC.zip pushd /home/nutanix/lab/ - _setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) + #_setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) + _setup=$(/home/nutanix/lab/initialize_lab.sh ${PC_HOST} > /dev/null 2>&1) log "Running Setup Script|$_setup" popd From 7e1696a38a1543afbc8d9a8d4f016feb3754f70f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 25 Jul 2019 19:08:58 +0200 Subject: [PATCH 173/691] Small update --- scripts/bootcamp.sh | 8 ------- scripts/global.vars.sh | 48 ------------------------------------------ scripts/lib.pc.sh | 12 ++--------- 3 files changed, 2 insertions(+), 66 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index cb40810..5e645b8 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -111,15 +111,7 @@ case ${1} in && pc_dns_add \ && pc_ui \ && pc_auth \ -<<<<<<< HEAD - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - 
if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi -======= && pc_smtp ->>>>>>> upstream/master ssp_auth \ && calm_enable \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d54547c..198b029 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,19 +2,11 @@ # shellcheck disable=SC2034 RELEASE='release.json' -<<<<<<< HEAD PC_DEV_VERSION='5.11' PC_CURRENT_VERSION='5.10.5' PC_STABLE_VERSION='5.8.2' FILES_VERSION='3.5.1' FILE_ANALYTICS_VERSION='1.1.0' -======= -PC_DEV_VERSION='5.10.3' -PC_CURRENT_VERSION='5.10.5' -PC_STABLE_VERSION='5.8.2' -FILES_VERSION='3.5.0' -FILE_ANALYTICS_VERSION='1.0.1' ->>>>>>> upstream/master NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -43,11 +35,7 @@ QCOW2_IMAGES=(\ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ -<<<<<<< HEAD ERA-Server-build-1.1.0.qcow2 \ -======= - ERA-Server-build-1.0.1.4.qcow2 \ ->>>>>>> upstream/master MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ @@ -102,28 +90,16 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR -<<<<<<< HEAD PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' -======= - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' ->>>>>>> upstream/master PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' -<<<<<<< HEAD FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.1.json' 
FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/fileanalytics-1.1.0.json' FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' -======= - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' ->>>>>>> upstream/master JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -145,28 +121,16 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX -<<<<<<< HEAD PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' -======= - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' ->>>>>>> upstream/master PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' -<<<<<<< HEAD FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' 
FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' -======= - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' ->>>>>>> upstream/master JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -188,28 +152,16 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters -<<<<<<< HEAD PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' -======= - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.3-stable-prism_central.tar' ->>>>>>> upstream/master PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' -<<<<<<< HEAD FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' -======= - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.0.json' - 
FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file-analytics-1.0.1.qcow2' ->>>>>>> upstream/master JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0733acc..6206e5f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -121,11 +121,7 @@ function lcm() { # Fill the uuid array with the correct values uuid_arr=($(jq '.group_results[].entity_results[].data[] | select (.name=="entity_uuid") | .values[0].values[0]' reply_json.json | sort -u | tr "\"" " " | tr -s " ")) -<<<<<<< HEAD -======= - ->>>>>>> upstream/master # Grabbing the versions of the UUID and put them in a versions array for uuid in "${uuid_arr[@]}" do @@ -174,11 +170,7 @@ function lcm() { # Run the generate plan task _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST $_json_data ${_url_lcm}) -<<<<<<< HEAD -======= - ->>>>>>> upstream/master # Notify the log server that the LCM has created a plan log "LCM Inventory has created a plan" @@ -222,10 +214,10 @@ function karbon_enable() { local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"KarbonUIService\\\\\\\",\\\\\\\"KarbonCoreService\\\\\\\"]}\\\"}}\"}" local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"KarbonUIService\\\"}}\"} " local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" - + # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user 
${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) - + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... if [[ $_response -eq 1 ]]; then # Check if Karbon has been enabled From d6f30ffc8898391101f19bffd68129cc328ace61 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 6 Aug 2019 10:12:10 -0400 Subject: [PATCH 174/691] Updates for standalone labs --- scripts/{citrix.sh => citrix_bootcamp.sh} | 0 scripts/era_bootcamp.sh | 147 ++++++++++++++++++++++ scripts/{files.sh => files_bootcamp.sh} | 0 stage_workshop.sh | 9 +- 4 files changed, 152 insertions(+), 4 deletions(-) rename scripts/{citrix.sh => citrix_bootcamp.sh} (100%) create mode 100644 scripts/era_bootcamp.sh rename scripts/{files.sh => files_bootcamp.sh} (100%) diff --git a/scripts/citrix.sh b/scripts/citrix_bootcamp.sh similarity index 100% rename from scripts/citrix.sh rename to scripts/citrix_bootcamp.sh diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh new file mode 100644 index 0000000..5e645b8 --- /dev/null +++ b/scripts/era_bootcamp.sh @@ -0,0 +1,147 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? 
== 0 )) ; then + ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter + # if [ ! -z DEBUG ]; then + # bash_cmd='bash' + # else + # bash_cmd='bash -x' + # fi + # _command="EMAIL=${EMAIL} \ + # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . 
global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && lcm \ + && karbon_image_download \ + && images \ + && flow_enable \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/files.sh b/scripts/files_bootcamp.sh similarity index 100% rename from scripts/files.sh rename to scripts/files_bootcamp.sh diff --git a/stage_workshop.sh b/stage_workshop.sh index fcc73d9..172d2cd 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -59,17 +59,18 @@ function stage_clusters() { fi if (( $(echo ${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='citrix.sh' + _pe_launch='citrix_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Files | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i Era | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh' - _pe_launch='files.sh' + _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i Files | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh' - _pe_launch='files.sh' + _pe_launch='files_bootcamp.sh' + _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' From 8f503e17df6db220e6999422d33f1e6c9b5ebd44 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 13 Aug 2019 17:33:55 -0700 Subject: [PATCH 175/691] Updates for AutoDC IP --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 198b029..5b02a6d 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -76,7 +76,7 @@ SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' SMTP_SERVER_PORT=25 AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file -AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 3))" +AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 4))" LDAP_PORT=389 AUTH_FQDN='ntnxlab.local' AUTH_DOMAIN='NTNXLAB' From 6e2fe8d5f4ca1219a1f38b599b46df38398803ed Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 14 Aug 2019 13:09:32 -0700 
Subject: [PATCH 176/691] enable seedPC --- scripts/bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 5e645b8..582411c 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -119,6 +119,7 @@ case ${1} in && lcm \ && karbon_image_download \ && images \ + && seedPC \ && flow_enable \ && pc_cluster_img_import \ && prism_check 'PC' From fd296f2e0200f6db40da5caf4723915f145fb984 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 15 Aug 2019 17:27:02 -0700 Subject: [PATCH 177/691] Updated to Enable Objects --- scripts/lib.pc.sh | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6206e5f..9000057 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -271,6 +271,42 @@ function karbon_image_download() { fi } +############################################################################################################################################################################### +# Routine to enable Objects +############################################################################################################################################################################### + +function objects_enable() { + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _loop=0 + local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"ossService\\\\\\\"]}\\\"}}\"}" + local _json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"ossService\\\"}}\"} " + local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" + + # Start the enablement process + _response=$(curl 
${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + + # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... + if [[ $_response -eq 1 ]]; then + # Check if Karbon has been enabled + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + while [ $_response -ne 1 ]; do + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + done + log "Objects has been enabled." + else + log "Retrying to enable Objects one more time." + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + if [[ $_response -eq 1 ]]; then + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + if [ $_response -lt 1 ]; then + log "Objects isn't enabled. Please use the UI to enable it." + else + log "Objects has been enabled." 
+ fi + fi + fi +} + ############################################################################################################################################################################### # Routine for PC_Admin ############################################################################################################################################################################### From db6bc35f7799c171efc01cfb5f1ba31951e23afe Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 15 Aug 2019 17:29:20 -0700 Subject: [PATCH 178/691] Updated to enable Objects --- scripts/bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 582411c..9cfc577 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,6 +116,7 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ + && objects_enable \ && lcm \ && karbon_image_download \ && images \ From 700e88356aaeefe1c2c8decb3ee554a1ef8b1812 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 14:57:04 -0700 Subject: [PATCH 179/691] Update lib.pc.sh --- scripts/lib.pc.sh | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6206e5f..e4ae062 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -11,6 +11,41 @@ # Added the download bits for the Centos Image for Karbon ############################################################################################################################################################################### +############################################################################################################################################################################### +# Routine to deploy Era +############################################################################################################################################################################### +
+function era_deploy() { + local _attempts=30 + 
local _loops=0 + local _sleep=60 + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _url_flow='https://localhost:9440/api/nutanix/v3/services/microseg' + + # Create the JSON payload + _json_data='{"state":"ENABLE"}' + + log "Enable Nutanix Flow..." + + # Enabling Flow and put the task id in a variable + _task_id=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow | jq '.task_uuid' | tr -d \") + + # Try one more time then fail, but continue + if [ -z $_task_id ]; then + log "Flow not yet enabled. Will retry...." + _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) + + if [ -z $_task_id ]; then + log "Flow still not enabled.... ***Not retrying. Please enable via UI.***" + fi + else + log "Flow has been Enabled..." + fi + + + +} + ############################################################################################################################################################################### # Routine to enable Flow From d3a5dab6d14f712dfc1c2864061ce9ffd1bb4bf8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 15:09:18 -0700 Subject: [PATCH 180/691] Objects --- scripts/lib.pc.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b8678bf..5bd3b80 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -313,9 +313,8 @@ function karbon_image_download() { function objects_enable() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _loop=0 - local _json_data_set_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"enable_service_with_prechecks\\\",\\\".kwargs\\\":{\\\"service_list_json\\\":\\\"{\\\\\\\"service_list\\\\\\\":[\\\\\\\"ossService\\\\\\\"]}\\\"}}\"}" - local 
_json_is_enable="{\"value\":\"{\\\".oid\\\":\\\"ClusterManager\\\",\\\".method\\\":\\\"is_service_enabled\\\",\\\".kwargs\\\":{\\\"service_name\\\":\\\"ossService\\\"}}\"} " - local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/genesis" + local _json_data_set_enable="{\"state\":\"ENABLE\"}" + local _httpURL="https://localhost:9440/api/nutanix/v3/services/oss" # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) From d24d3d6449ccc90b1934828d8fa73fbe00fde9cd Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 16:14:09 -0700 Subject: [PATCH 181/691] objects testing --- scripts/bootcamp.sh | 2 +- scripts/lib.pc.sh | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 9cfc577..2fa51d0 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,7 +116,7 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && objects_enable \ + #&& objects_enable \ && lcm \ && karbon_image_download \ && images \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 5bd3b80..62d6172 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -317,11 +317,11 @@ function objects_enable() { local _httpURL="https://localhost:9440/api/nutanix/v3/services/oss" # Start the enablement process - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}) - # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... 
- if [[ $_response -eq 1 ]]; then - # Check if Karbon has been enabled + # The response should be a Task UUID + if [[ $_response -z ]]; then + # Check if OSS has been enabled _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) while [ $_response -ne 1 ]; do _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) From f809770a8657c6d90c66ca01106fd0893c238fab Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 16:24:22 -0700 Subject: [PATCH 182/691] Update bootcamp.sh --- scripts/bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 2fa51d0..582411c 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,7 +116,6 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - #&& objects_enable \ && lcm \ && karbon_image_download \ && images \ From 2056b6e833e95de3f8ec224d023ec001970a5037 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 17:00:36 -0700 Subject: [PATCH 183/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 62d6172..67310c9 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -320,7 +320,7 @@ function objects_enable() { _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}) # The response should be a Task UUID - if [[ $_response -z ]]; then + if [[ -z $_response ]]; then # Check if OSS has been enabled _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) while [ $_response -ne 1 ]; do From d6a99be0b3f0e482437d244ff43f4c04b11f94ac Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 17:16:48 -0700 Subject: [PATCH 
184/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 67310c9..cc13b79 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -322,16 +322,16 @@ function objects_enable() { # The response should be a Task UUID if [[ -z $_response ]]; then # Check if OSS has been enabled - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "task_uuid" | wc -l) while [ $_response -ne 1 ]; do - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "task_uuid" | wc -l) done log "Objects has been enabled." else log "Retrying to enable Objects one more time." - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "[true, null]" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "task_uuid" | wc -l) if [[ $_response -eq 1 ]]; then - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "[true, null]" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "task_uuid" | wc -l) if [ $_response -lt 1 ]; then log "Objects isn't enabled. Please use the UI to enable it." 
else From bf95642b196c95307d889cb6d8b18e70ea469037 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 18 Aug 2019 17:18:53 -0700 Subject: [PATCH 185/691] Update bootcamp.sh --- scripts/bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 582411c..9cfc577 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,6 +116,7 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ + && objects_enable \ && lcm \ && karbon_image_download \ && images \ From ac5a5891be03340ad7a94d0aeca4191e76b08790 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Sun, 18 Aug 2019 17:46:03 -0700 Subject: [PATCH 186/691] Update lib.pc.sh Check Objects --- scripts/lib.pc.sh | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index cc13b79..96bd5a2 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -312,32 +312,32 @@ function karbon_image_download() { function objects_enable() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - local _loop=0 + local _loops=0 local _json_data_set_enable="{\"state\":\"ENABLE\"}" + local _json_data_check="{\"entity_type\":\"objectstore\"}" + local _httpURL_check="https://localhost:9440/oss/api/nutanix/v3/groups" local _httpURL="https://localhost:9440/api/nutanix/v3/services/oss" # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}) # The response should be a Task UUID - if [[ -z $_response ]]; then + if [[ ! 
-z $_response ]]; then # Check if OSS has been enabled - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "task_uuid" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_check ${_httpURL_check}| grep "objectstore" | wc -l) while [ $_response -ne 1 ]; do - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "task_uuid" | wc -l) + _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_check ${_httpURL_check}| grep "objectstore" | wc -l) + if [[ $loops -ne 30 ]]; then + sleep 10 + (( _loops++ )) + else + log "Objects isn't enabled. Please use the UI to enable it." + break + fi done log "Objects has been enabled." else - log "Retrying to enable Objects one more time." - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}| grep "task_uuid" | wc -l) - if [[ $_response -eq 1 ]]; then - _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_is_enable ${_httpURL}| grep "task_uuid" | wc -l) - if [ $_response -lt 1 ]; then - log "Objects isn't enabled. Please use the UI to enable it." - else - log "Objects has been enabled." - fi - fi + log "Objects isn't enabled. Please use the UI to enable it." fi } From 6ebfcf93a17b7f941642f23e6191a4933a51b8a4 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 20 Aug 2019 11:59:00 -0700 Subject: [PATCH 187/691] Changed order of LCM and Objects Objects before LCM BLows up LCM!! 
--- release.json | 16 ++++++++-------- scripts/bootcamp.sh | 2 +- test/restapi.txt | 8 ++++---- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/release.json b/release.json index 5ca9477..81e8eb4 100644 --- a/release.json +++ b/release.json @@ -1,18 +1,18 @@ { "Major": 2, "Minor": 0, - "Patch": 6, - "PreReleaseTag": "ci.14", - "PreReleaseTagWithDash": "-ci.14", + "Patch": 7, + "PreReleaseTag": "ci.15", + "PreReleaseTagWithDash": "-ci.15", "PreReleaseLabel": "ci", - "PreReleaseNumber": 14, + "PreReleaseNumber": 15, "BuildMetaData": "", "BuildMetaDataPadded": "", "FullBuildMetaData": "Branch.master.Sha.3d62e775126b97ddac481a1fcc81920d42d998fc", - "MajorMinorPatch": "2.0.6", - "SemVer": "2.0.6-ci.14", - "LegacySemVer": "2.0.6-ci14", - "LegacySemVerPadded": "2.0.6-ci0014", + "MajorMinorPatch": "2.0.7", + "SemVer": "2.0.6-ci.15", + "LegacySemVer": "2.0.7-ci15", + "LegacySemVerPadded": "2.0.7-ci0015", "AssemblySemVer": "2.0.6.0", "AssemblySemFileVer": "2.0.6.0", "FullSemVer": "2.0.6-ci.14", diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 9cfc577..e453e92 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,8 +116,8 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && objects_enable \ && lcm \ + && objects_enable \ && karbon_image_download \ && images \ && seedPC \ diff --git a/test/restapi.txt b/test/restapi.txt index 180ca9c..5b4f867 100644 --- a/test/restapi.txt +++ b/test/restapi.txt @@ -98,15 +98,15 @@ curl -X POST \ ####################################################################################################### -# Deploy PC +# Deploy an Objectstore ####################################################################################################### - +curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/objectstores' --data-binary 
'{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"TEST","description":"TEST","resources":{"domain":"test.local","cluster_reference":{"kind":"cluster","uuid":"0005906e-20db-d253-2dc2-002590ad0daa"},"buckets_infra_network_dns":"10.42.87.42","buckets_infra_network_vip":"10.42.87.43","buckets_infra_network_reference":{"kind":"subnet","uuid":"850e3978-2a93-4c7d-a72f-7b83972af1cf"},"client_access_network_reference":{"kind":"subnet","uuid":"9c9c6cf7-35ce-4453-8d72-ac23e691c625"},"aggregate_resources":{"total_vcpu_count":30,"total_memory_size_mib":98304,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.87.129","ipv4_end":"10.42.87.132"}}}}' --compressed --insecure ####################################################################################################### -# Register PE to PC +# Progress creation of an object store ####################################################################################################### - +curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/groups' --data-binary 
'{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"}]}' --compressed --insecure ####################################################################################################### From 6ac7393a90fa074c25cbd794fdc5cc089f68535f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 20 Aug 2019 15:19:55 -0700 Subject: [PATCH 188/691] Revert to old order of the LCM vs Objects so we can grab the log files for LCM and Objects --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index e453e92..9cfc577 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,8 +116,8 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && lcm \ && objects_enable \ + && lcm \ && karbon_image_download \ && images \ && seedPC \ From 99360bedf23942a3666f3ae76b6c686dc924a967 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 20 Aug 2019 15:27:48 -0700 Subject: [PATCH 189/691] Update restapi.txt --- test/restapi.txt | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 
deletions(-) diff --git a/test/restapi.txt b/test/restapi.txt index 5b4f867..1ab738a 100644 --- a/test/restapi.txt +++ b/test/restapi.txt @@ -98,17 +98,23 @@ curl -X POST \ ####################################################################################################### -# Deploy an Objectstore +# Upload Nutanix Files files ####################################################################################################### -curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/objectstores' --data-binary '{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"TEST","description":"TEST","resources":{"domain":"test.local","cluster_reference":{"kind":"cluster","uuid":"0005906e-20db-d253-2dc2-002590ad0daa"},"buckets_infra_network_dns":"10.42.87.42","buckets_infra_network_vip":"10.42.87.43","buckets_infra_network_reference":{"kind":"subnet","uuid":"850e3978-2a93-4c7d-a72f-7b83972af1cf"},"client_access_network_reference":{"kind":"subnet","uuid":"9c9c6cf7-35ce-4453-8d72-ac23e691c625"},"aggregate_resources":{"total_vcpu_count":30,"total_memory_size_mib":98304,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.87.129","ipv4_end":"10.42.87.132"}}}}' --compressed --insecure + ####################################################################################################### -# Progress creation of an object store +# Deploy an Objectstore ####################################################################################################### -curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/groups' --data-binary 
'{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"}]}' --compressed --insecure +curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/objectstores' --data-binary '{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"TEST","description":"TEST","resources":{"domain":"test.local","cluster_reference":{"kind":"cluster","uuid":"0005906e-20db-d253-2dc2-002590ad0daa"},"buckets_infra_network_dns":"10.42.87.42","buckets_infra_network_vip":"10.42.87.43","buckets_infra_network_reference":{"kind":"subnet","uuid":"850e3978-2a93-4c7d-a72f-7b83972af1cf"},"client_access_network_reference":{"kind":"subnet","uuid":"9c9c6cf7-35ce-4453-8d72-ac23e691c625"},"aggregate_resources":{"total_vcpu_count":30,"total_memory_size_mib":98304,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.87.129","ipv4_end":"10.42.87.132"}}}}' --compressed --insecure +In need of a different amount of UUIDs before this api will work: +1. Subnet for the client_access_network; maybe the secondary network? Where is the Windows VM running? +2. 
Subnet for the infrastructure; the containers need to be able to talk to the CVMs +3. Cluster UUID +These UUIDs need to be found dynamically as they differ PER installation/cluster ####################################################################################################### -# Upload Nutanix Files files +# Progress creation of an object store ####################################################################################################### +curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/groups' --data-binary '{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"}]}' --compressed --insecure From ecdacab7d19f01ea772e64e812812a930078763f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 21 Aug 2019 14:52:42 -0700 Subject: [PATCH 190/691] Updated order of Objects enable and LCM run --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 9cfc577..e453e92 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,8 +116,8 @@ case ${1} in ssp_auth \ && 
calm_enable \ && karbon_enable \ - && objects_enable \ && lcm \ + && objects_enable \ && karbon_image_download \ && images \ && seedPC \ From 89db305ea9292d98a7b18e1cb1908361a58c106c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 23 Aug 2019 06:26:09 -0700 Subject: [PATCH 191/691] Small changes to the Global.var.sh file and some testing files for Objectstore creation --- scripts/global.vars.sh | 1 + test/data.json | 1 + test/restapi.txt | 15 +++++++++++--- test/test_oss.sh | 47 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 3 deletions(-) create mode 100644 test/data.json create mode 100644 test/test_oss.sh diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 5b02a6d..6ef5654 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -56,6 +56,7 @@ OCTET=(${PE_HOST//./ }) # zero index IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) +VLAN=${OCTET[3]} DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' diff --git a/test/data.json b/test/data.json new file mode 100644 index 0000000..3151fd9 --- /dev/null +++ b/test/data.json @@ -0,0 +1 @@ +{"api_version":"3.1","metadata":{"total_matches": 2, "kind": "subnet", "length": 2, "offset": 0},"entities":[{"status": {"state": "COMPLETE", "name": "Primary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.1", "dhcp_server_address": {"ip": "10.42.3.126"}, "pool_list": [{"range": "10.42.3.50 10.42.3.125"}], "prefix_length": 25, "subnet_ip": "10.42.3.0", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 0}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "spec": {"name": "Primary", 
"resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.1", "dhcp_server_address": {"ip": "10.42.3.126"}, "pool_list": [{"range": "10.42.3.50 10.42.3.125"}], "prefix_length": 25, "subnet_ip": "10.42.3.0", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 0}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "metadata": {"last_update_time": "2019-08-21T23:48:27Z", "kind": "subnet", "uuid": "5709b4a1-f481-43f3-9b2e-2bf8e3a855f6", "spec_version": 0, "creation_time": "2019-08-21T23:48:27Z", "categories_mapping": {}, "categories": {}}},{"status": {"state": "COMPLETE", "name": "Secondary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.129", "dhcp_server_address": {"ip": "10.42.3.254"}, "pool_list": [{"range": "10.42.3.132 10.42.3.253"}], "prefix_length": 25, "subnet_ip": "10.42.3.128", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 31}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "spec": {"name": "Secondary", "resources": {"vswitch_name": "br0", "subnet_type": "VLAN", "ip_config": {"default_gateway_ip": "10.42.3.129", "dhcp_server_address": {"ip": "10.42.3.254"}, "pool_list": [{"range": "10.42.3.132 10.42.3.253"}], "prefix_length": 25, "subnet_ip": "10.42.3.128", "dhcp_options": {"domain_name_server_list": ["10.42.3.41", "10.42.196.10", "10.42.194.10"], "domain_search_list": ["NTNXLAB"], "domain_name": "NTNXLAB"}}, "vlan_id": 31}, "cluster_reference": {"kind": "cluster", "name": "PHX-POC003", "uuid": "000590a7-e56b-3415-1a72-0cc47ac3b4a0"}}, "metadata": {"last_update_time": "2019-08-21T23:48:27Z", "kind": 
"subnet", "uuid": "a6bb09cf-bc81-43f0-a4f1-1941a9c6486a", "spec_version": 0, "creation_time": "2019-08-21T23:48:27Z", "categories_mapping": {}, "categories": {}}}]} \ No newline at end of file diff --git a/test/restapi.txt b/test/restapi.txt index 1ab738a..79db559 100644 --- a/test/restapi.txt +++ b/test/restapi.txt @@ -106,15 +106,24 @@ curl -X POST \ ####################################################################################################### # Deploy an Objectstore ####################################################################################################### -curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/objectstores' --data-binary '{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"TEST","description":"TEST","resources":{"domain":"test.local","cluster_reference":{"kind":"cluster","uuid":"0005906e-20db-d253-2dc2-002590ad0daa"},"buckets_infra_network_dns":"10.42.87.42","buckets_infra_network_vip":"10.42.87.43","buckets_infra_network_reference":{"kind":"subnet","uuid":"850e3978-2a93-4c7d-a72f-7b83972af1cf"},"client_access_network_reference":{"kind":"subnet","uuid":"9c9c6cf7-35ce-4453-8d72-ac23e691c625"},"aggregate_resources":{"total_vcpu_count":30,"total_memory_size_mib":98304,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.87.129","ipv4_end":"10.42.87.132"}}}}' --compressed --insecure +curl -X POST https://10.42.87.39:9440/oss/api/nutanix/v3/objectstores -d '{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"","description":"","resources":{"domain":"","cluster_reference":{"kind":"cluster","uuid":""},"buckets_infra_network_dns":"10.42..16","buckets_infra_network_vip":"10.42..17","buckets_infra_network_reference":{"kind":"subnet","uuid":""},"client_access_network_reference":{"kind":"subnet","uuid":".18","ipv4_end":"10.42..21"}}}}' --insecure --silent -H 'Content-Type: application/json' In need of a different amount of UUIDs before this api will work: 1. 
Subnet for the client_access_network; maybe the secondary network? Where is the Windows VM running? 2. Subnet for the infrastructure; the containers need to be able to talk to the CVMs 3. Cluster UUID -These UUIDs need to be found dynamically as they differ PER installation/cluster + +These UUIDs need to be found dynamically as they differ PER installation/cluster and is the PE UUID NOT the PC UUID!!! + +Using: curl -X POST --user : --insecure --silent -d '{"kind":"cluster"}' https://:9440/api/nutanix/v3/subnets/list +we can grab the json that has the corresponding : +- UUID of the cluster +- UUID of the network we need to have to get the command running. As we need to have them run in the primary network, we can use that UUID in the command ####################################################################################################### # Progress creation of an object store ####################################################################################################### -curl 'https://10.42.87.39:9440/oss/api/nutanix/v3/groups' --data-binary 
'{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"}]}' --compressed --insecure +curl https://10.42.87.39:9440/oss/api/nutanix/v3/groups -d + + + 
'{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"}]}' --compressed --insecure diff --git a/test/test_oss.sh b/test/test_oss.sh new file mode 100644 index 0000000..6dab6f1 --- /dev/null +++ b/test/test_oss.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash + +# Run the needed variabkles +./global.vars.sh + +# Test script to get the objects store creation +# Get the UUIDs of: +# - UUID of the cluster +# - UUID of the network we need to have to get the command running. As we need to have them run in the primary network, we can use that UUID in the command + +function object_store() { + local _attempts=30 + local _loops=0 + local _sleep=60 + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list' + local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores' + local PRISM_ADMIN='admin' + local PE_PASSWORD='techX2019!' 
+ + # Payload for the _json_data + _json_data='{"kind":"subnet"}' + + # Get the json data and split into CLUSTER_UUID and Primary_Network_UUID + CLUSTER_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[].spec | select (.name=="Primary") | .cluster_reference.uuid' | tr -d \") + echo ${CLUSTER_UUID} + + PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \") + echo ${PRIM_NETWORK_UUID} + + _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"TEST","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"' + _json_data_oss+=${CLUSTER_UUID} + _json_data_oss+='"},"buckets_infra_network_dns":"10.42.VLANX.16","buckets_infra_network_vip":"10.42.VLANX.17","buckets_infra_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+=${PRIM_NETWORK_UUID} + _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+=${PRIM_NETWORK_UUID} + _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":98304,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.VLANX.18","ipv4_end":"10.42.VLANX.21"}}}}' + + # Set the right VLAN dynamically so we are configuring in the right network + sed "s/VLANX/$VLAN/g" + + echo "curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss" + +} + +object_store + From b5730d5006e762bb23274ca0b531c1ff925ce454 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 23 Aug 2019 07:07:40 -0700 Subject: [PATCH 192/691] Update bootcamp.sh --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index e453e92..9cfc577 100755 --- a/scripts/bootcamp.sh +++ 
b/scripts/bootcamp.sh @@ -116,8 +116,8 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && lcm \ && objects_enable \ + && lcm \ && karbon_image_download \ && images \ && seedPC \ From 41d0080327a376e97fb2d99a5aeb883a03959dd7 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 23 Aug 2019 07:19:35 -0700 Subject: [PATCH 193/691] Update bootcamp.sh --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 9cfc577..e453e92 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,8 +116,8 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && objects_enable \ && lcm \ + && objects_enable \ && karbon_image_download \ && images \ && seedPC \ From 750fd1ac102544161029030fa41fe8eea85f3318 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 23 Aug 2019 08:21:51 -0700 Subject: [PATCH 194/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 96bd5a2..38ca7dc 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -320,7 +320,8 @@ function objects_enable() { # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}) - + log "Enabling Objects....." + # The response should be a Task UUID if [[ ! 
-z $_response ]]; then # Check if OSS has been enabled From dccce595ee049d73ed34d6f41d822b1abffbfaa3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Sep 2019 09:35:20 -0700 Subject: [PATCH 195/691] Updatyes for 5.11 release --- scripts/global.vars.sh | 28 ++++++++++++++-------------- stage_workshop.sh | 9 +++++---- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6ef5654..0dba5b4 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,8 +3,8 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='5.11' -PC_CURRENT_VERSION='5.10.5' -PC_STABLE_VERSION='5.8.2' +PC_CURRENT_VERSION='5.11' +PC_STABLE_VERSION='5.10.5' FILES_VERSION='3.5.1' FILE_ANALYTICS_VERSION='1.1.0' NTNX_INIT_PASSWORD='nutanix/4u' @@ -93,10 +93,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pc_deploy-5.8.2.json' - PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' + PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.1.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' 
FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/fileanalytics-1.1.0.json' @@ -124,10 +124,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' @@ -155,10 +155,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pc_deploy-5.8.2.json' - 
PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.8.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index 172d2cd..00d2c9f 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,13 +11,14 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp (AOS 5.10+/AHV PC 5.10+) = Current" \ -"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Current" \ -"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ -"Development Bootcamp (AOS 5.10+/AHV PC 5.11+) = Development" \ +"Bootcamp (AOS 5.10+/AHV PC 5.11+) = Current" \ +"Previous Bootcamp (AOS 5.10+/AHV PC 5.10+) = Stable" \ +"Development Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { From a07dffab425d80b3c5624b465210652f62ed2bcc Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 6 Sep 2019 18:15:25 -0700 Subject: [PATCH 196/691] Update stage_workshop.sh --- stage_workshop.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff 
--git a/stage_workshop.sh b/stage_workshop.sh index 00d2c9f..0736ed9 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,9 +11,9 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp (AOS 5.10+/AHV PC 5.11+) = Current" \ -"Previous Bootcamp (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"Development Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +"Bootcamp Staging (AOS 5.10+/AHV PC 5.11+) = Current" \ +"Previous Bootcamp Staging (AOS 5.10+/AHV PC 5.10+) = Stable" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ From 80f99bcb1fed95d6c2ea41f20d113166bc2af448 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 9 Sep 2019 10:36:31 +0200 Subject: [PATCH 197/691] Updated global.vasr.sh to add Move qcow2 file --- scripts/global.vars.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 0dba5b4..26286a3 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -39,6 +39,7 @@ QCOW2_IMAGES=(\ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ + move3.2.0.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ @@ -49,6 +50,7 @@ ISO_IMAGES=(\ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ VeeamBR_9.5.4.2615.Update4.iso \ + move3.2.0.qcow2 \ ) # shellcheck disable=2206 From 73d11f35c497c692a87cb20ecee4578362793be8 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 9 Sep 2019 11:05:39 +0200 Subject: [PATCH 198/691] Add object stpore creation Routinde has inserted to creata small Object Srtore. NO checking if it has been successful!!!!! Need to get that in! 
--- scripts/global.vars.sh | 7 ++++++- scripts/lib.pc.sh | 44 +++++++++++++++++++++++++++++++++++++++++- test/test_oss.sh | 15 ++++++-------- 3 files changed, 55 insertions(+), 11 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 26286a3..4485c36 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -58,10 +58,11 @@ OCTET=(${PE_HOST//./ }) # zero index IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) -VLAN=${OCTET[3]} DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' + + NW1_NAME='Primary' NW1_VLAN=0 NW1_SUBNET="${IPV4_PREFIX}.1/25" @@ -74,6 +75,10 @@ NW2_SUBNET="${IPV4_PREFIX}.129/25" NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" +# Stuff needed for object_store +VLAN=${OCTET[2]} +NETWORK="${OCTET[0]}.${OCTET[1]}" + SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' SMTP_SERVER_PORT=25 diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 38ca7dc..76eeff5 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -321,7 +321,7 @@ function objects_enable() { # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}) log "Enabling Objects....." - + # The response should be a Task UUID if [[ ! 
-z $_response ]]; then # Check if OSS has been enabled @@ -342,6 +342,45 @@ function objects_enable() { fi } +############################################################################################################################################################################### +# Create an object store called ntnx_object.ntnxlab.local +############################################################################################################################################################################### + +function object_store() { + local _attempts=30 + local _loops=0 + local _sleep=60 + local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list' + local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores' + + # Payload for the _json_data + _json_data='{"kind":"subnet"}' + + # Get the json data and split into CLUSTER_UUID and Primary_Network_UUID + CLUSTER_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[].spec | select (.name=="Primary") | .cluster_reference.uuid' | tr -d \") + echo ${CLUSTER_UUID} + + PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \") + echo ${PRIM_NETWORK_UUID} + + _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx_objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"' + _json_data_oss+=${CLUSTER_UUID} + _json_data_oss+='"},"buckets_infra_network_dns":"NETWORKX.VLANX.16","buckets_infra_network_vip":"NETWORKX.VLANX.17","buckets_infra_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+=${PRIM_NETWORK_UUID} + 
_json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+=${PRIM_NETWORK_UUID} + _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"NETWORKX.VLANX.18","ipv4_end":"NETWORKX.VLANX.21"}}}}' + + # Set the right VLAN dynamically so we are configuring in the right network + _json_data_oss=${_json_data_oss//VLANX/${VLAN}} + _json_data_oss=${_json_data_oss//NETWORKX/${NETWORK}} + + curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss + +} + + ############################################################################################################################################################################### # Routine for PC_Admin ############################################################################################################################################################################### @@ -563,6 +602,9 @@ function pc_passwd() { # log "cURL reset password _test=${_test}" } + + + ############################################################################################################################################################################### # Seed PC data for Prism Pro Labs ############################################################################################################################################################################### diff --git a/test/test_oss.sh b/test/test_oss.sh index 6dab6f1..a5a27d7 100644 --- a/test/test_oss.sh +++ b/test/test_oss.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash -# Run the needed variabkles -./global.vars.sh +# Run the needed variables +. global.vars.sh # Test script to get the objects store creation -# Get the UUIDs of: +# Get the UUIDs of: # - UUID of the cluster # - UUID of the network we need to have to get the command running. 
As we need to have them run in the primary network, we can use that UUID in the command @@ -15,8 +15,6 @@ function object_store() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list' local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores' - local PRISM_ADMIN='admin' - local PE_PASSWORD='techX2019!' # Payload for the _json_data _json_data='{"kind":"subnet"}' @@ -34,14 +32,13 @@ function object_store() { _json_data_oss+=${PRIM_NETWORK_UUID} _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"' _json_data_oss+=${PRIM_NETWORK_UUID} - _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":98304,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.VLANX.18","ipv4_end":"10.42.VLANX.21"}}}}' + _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"10.42.VLANX.18","ipv4_end":"10.42.VLANX.21"}}}}' # Set the right VLAN dynamically so we are configuring in the right network - sed "s/VLANX/$VLAN/g" + _json_data_oss=${_json_data_oss//VLANX/${VLAN}} - echo "curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss" + curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss } object_store - From a0cc693bbe9282b59c0c7f74f4145a3fca896af7 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 9 Sep 2019 11:25:23 +0200 Subject: [PATCH 199/691] Enabled the object store to be created. 
--- scripts/bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index e453e92..94b64ee 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -118,6 +118,7 @@ case ${1} in && karbon_enable \ && lcm \ && objects_enable \ + && object_store \ && karbon_image_download \ && images \ && seedPC \ From 65f015e7db5453cee4f03c010de60347d746f4b9 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 9 Sep 2019 13:32:49 +0200 Subject: [PATCH 200/691] Chaneg in the name of the object store Underscores "_" are not allowed!!! Changed to ntnx-objects --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 76eeff5..9cc8a22 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -364,7 +364,7 @@ function object_store() { PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \") echo ${PRIM_NETWORK_UUID} - _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx_objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"' + _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx-objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"' _json_data_oss+=${CLUSTER_UUID} _json_data_oss+='"},"buckets_infra_network_dns":"NETWORKX.VLANX.16","buckets_infra_network_vip":"NETWORKX.VLANX.17","buckets_infra_network_reference":{"kind":"subnet","uuid":"' _json_data_oss+=${PRIM_NETWORK_UUID} From 9f0b3db6ddccf4a7918e3b21c2eded0f52e203ed Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 12 Sep 2019 08:32:37 -0700 Subject: [PATCH 201/691] Update global.vars.sh --- scripts/global.vars.sh | 1 - 1 file changed, 1 deletion(-) diff --git 
a/scripts/global.vars.sh b/scripts/global.vars.sh index 4485c36..b826446 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -50,7 +50,6 @@ ISO_IMAGES=(\ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ VeeamBR_9.5.4.2615.Update4.iso \ - move3.2.0.qcow2 \ ) # shellcheck disable=2206 From 9cfe6a24b05df45d727d16f2a3dffc71cf12f6f2 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 17 Sep 2019 17:01:57 -0700 Subject: [PATCH 202/691] Update bootcamp.sh --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 94b64ee..1de05f3 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,8 +116,8 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && lcm \ && objects_enable \ + && lcm \ && object_store \ && karbon_image_download \ && images \ From 7ad9bb94ddbba45f31c38fd0091bea8b7143970a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 17 Sep 2019 17:10:04 -0700 Subject: [PATCH 203/691] Update global.vars.sh --- scripts/global.vars.sh | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index b826446..22351f2 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -5,8 +5,8 @@ RELEASE='release.json' PC_DEV_VERSION='5.11' PC_CURRENT_VERSION='5.11' PC_STABLE_VERSION='5.10.5' -FILES_VERSION='3.5.1' -FILE_ANALYTICS_VERSION='1.1.0' +FILES_VERSION='3.5.2' +FILE_ANALYTICS_VERSION='2.0.0' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -103,10 +103,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - 
FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.1.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/fileanalytics-1.1.0.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.2.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.2-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/fileanalytics-2.0.0.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.0.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -134,10 +134,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.2.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.2-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-2.0.0.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 
'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -165,10 +165,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.1.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.1-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-1.1.0.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-1.1.0.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.2.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.2-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-2.0.0.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From 5682b1588ee115d66813f49917141345b3eb0e2a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 18 Sep 2019 20:40:01 -0700 Subject: [PATCH 204/691] Update bootcamp.sh --- scripts/bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 1de05f3..837505e 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -116,6 +116,7 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ + && lcm \ && objects_enable \ && lcm \ && object_store \ From 8e88b0784b3d6e8a4879cb218cf6d77b6fbbfc1d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 20 Sep 2019 10:11:35 -0700 Subject: [PATCH 205/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 22351f2..beb03b1 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,7 +35,7 @@ QCOW2_IMAGES=(\ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - ERA-Server-build-1.1.0.qcow2 \ + ERA-Server-build-1.1.1.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ From 5e5e4dfe2c6ba8f9a1c6bf230a0a261c367125bb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 23 Sep 2019 13:27:07 -0400 Subject: [PATCH 206/691] Updates for Prism Pro and Updated Images --- scripts/bootcamp.sh | 1 + scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 37 +++++++++++++++++++++++++++++++++++++ 3 files changed, 39 insertions(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 837505e..01f9ad7 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -123,6 +123,7 @@ case ${1} in && karbon_image_download \ && images \ && seedPC \ + && prismproserver_deploy \ && flow_enable \ && pc_cluster_img_import \ && prism_check 'PC' diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index beb03b1..12e78d4 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -40,6 +40,7 @@ QCOW2_IMAGES=(\ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ move3.2.0.qcow2 \ + WindowsToolsVM.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9cc8a22..5403a29 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -46,6 +46,43 @@ function era_deploy() { } +############################################################################################################################################################################### +# Routine to deploy PrismProServer +############################################################################################################################################################################### +function prismproserver_deploy() { +VMNAME='PrismProServer' 
+ +### Import Image ### + +if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${VMNAME} | wc --lines) == 0 )); then + log "Import ${VMNAME} image from ${SOURCE_URL}..." + acli image.create ${VMNAME} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url=${SOURCE_URL} +else + log "Image found, assuming ready. Skipping ${VMNAME} import." +fi + +### Deploy PrismProServer ### + +log "Create ${VMNAME} VM based on ${VMNAME} image" +acli "vm.create ${VMNAME} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" +# vmstat --wide --unit M --active # suggests 2G sufficient, was 4G +#acli "vm.disk_create ${VMNAME} cdrom=true empty=true" +acli "vm.disk_create ${VMNAME} clone_from_image=${VMNAME}" +acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" +#acli "vm.nic_create ${VMNAME} network=${NW1_NAME} ip=${AUTH_HOST}" + +log "Power on ${VMNAME} VM..." +acli "vm.on ${VMNAME}" + +_attempts=20 +_loop=0 +_sleep=10 + +while true ; do + (( _loop++ )) +} ############################################################################################################################################################################### # Routine to enable Flow From 8dbf5b54dcf82073be08c77f2e7a2ca6e6dbe4f9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 08:49:50 -0400 Subject: [PATCH 207/691] pdates for PrismPro Server --- scripts/bootcamp.sh | 2 +- scripts/lib.pc.sh | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 01f9ad7..66a5886 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -123,7 +123,7 @@ case ${1} in && karbon_image_download \ && images \ && seedPC \ - && prismproserver_deploy \ + && prism_pro_server_deploy \ && flow_enable \ && pc_cluster_img_import \ && prism_check 'PC' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 5403a29..e75492e 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -49,7 +49,8 @@ function era_deploy() { 
############################################################################################################################################################################### # Routine to deploy PrismProServer ############################################################################################################################################################################### -function prismproserver_deploy() { +function prism_pro_server_deploy() { + VMNAME='PrismProServer' ### Import Image ### From ebbb6ceb530986fbea7d966ac6261099a6fb1b55 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 08:53:23 -0400 Subject: [PATCH 208/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e75492e..db8fefa 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -83,6 +83,9 @@ _sleep=10 while true ; do (( _loop++ )) + + + } ############################################################################################################################################################################### From b0943785086f8d061fd12d5c4770051f3d10dacb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 10:31:23 -0400 Subject: [PATCH 209/691] Update lib.pc.sh --- scripts/lib.pc.sh | 75 ----------------------------------------------- 1 file changed, 75 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index db8fefa..df23da6 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -11,82 +11,7 @@ # Added the download bits for the Centos Image for Karbon ############################################################################################################################################################################### -############################################################################################################################################################################### -# Routine to deploy Era 
-############################################################################################################################################################################### - -function era_deploy() { - local _attempts=30 - local _loops=0 - local _sleep=60 - local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - local _url_flow='https://localhost:9440/api/nutanix/v3/services/microseg' - - # Create the JSON payload - _json_data='{"state":"ENABLE"}' - - log "Enable Nutanix Flow..." - - # Enabling Flow and put the task id in a variable - _task_id=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow | jq '.task_uuid' | tr -d \") - - # Try one more time then fail, but continue - if [ -z $_task_id ]; then - log "Flow not yet enabled. Will retry...." - _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) - - if [ -z $_task_id ]; then - log "Flow still not enabled.... ***Not retrying. Please enable via UI.***" - fi - else - log "Flow has been Enabled..." - fi - - - -} - -############################################################################################################################################################################### -# Routine to deploy PrismProServer -############################################################################################################################################################################### -function prism_pro_server_deploy() { -VMNAME='PrismProServer' - -### Import Image ### - -if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${VMNAME} | wc --lines) == 0 )); then - log "Import ${VMNAME} image from ${SOURCE_URL}..." - acli image.create ${VMNAME} \ - image_type=kDiskImage wait=true \ - container=${STORAGE_IMAGES} source_url=${SOURCE_URL} -else - log "Image found, assuming ready. Skipping ${VMNAME} import." 
-fi - -### Deploy PrismProServer ### - -log "Create ${VMNAME} VM based on ${VMNAME} image" -acli "vm.create ${VMNAME} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" -# vmstat --wide --unit M --active # suggests 2G sufficient, was 4G -#acli "vm.disk_create ${VMNAME} cdrom=true empty=true" -acli "vm.disk_create ${VMNAME} clone_from_image=${VMNAME}" -acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" -#acli "vm.nic_create ${VMNAME} network=${NW1_NAME} ip=${AUTH_HOST}" - -log "Power on ${VMNAME} VM..." -acli "vm.on ${VMNAME}" - -_attempts=20 -_loop=0 -_sleep=10 - -while true ; do - (( _loop++ )) - - - -} ############################################################################################################################################################################### # Routine to enable Flow From 0920585446e43ce09ab85d4587cde9b55d2b7972 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 10:39:18 -0400 Subject: [PATCH 210/691] Update bootcamp.sh --- scripts/bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 66a5886..837505e 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -123,7 +123,6 @@ case ${1} in && karbon_image_download \ && images \ && seedPC \ - && prism_pro_server_deploy \ && flow_enable \ && pc_cluster_img_import \ && prism_check 'PC' From 3a0c6ca508608fa9efd91ce53736414a6631b207 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 11:06:20 -0400 Subject: [PATCH 211/691] Update bootcamp.sh --- scripts/bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 837505e..94b64ee 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -118,7 +118,6 @@ case ${1} in && karbon_enable \ && lcm \ && objects_enable \ - && lcm \ && object_store \ && karbon_image_download \ && images \ From 29b7c1a3a040adac48789a497131a712636bd13f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 12:56:46 -0400 
Subject: [PATCH 212/691] Updated for Prism Pro --- scripts/bootcamp.sh | 1 + scripts/lib.pe.sh | 42 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 94b64ee..6d6c79e 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -114,6 +114,7 @@ case ${1} in && pc_smtp ssp_auth \ + && prism_pro_server_deploy \ && calm_enable \ && karbon_enable \ && lcm \ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index b1f27c9..6ee788f 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -156,6 +156,48 @@ function authentication_source() { esac } +############################################################################################################################################################################### +# Routine to deploy PrismProServer +############################################################################################################################################################################### +function prism_pro_server_deploy() { + +VMNAME='PrismProServer' + +### Import Image ### + +if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${VMNAME} | wc --lines) == 0 )); then + log "Import ${VMNAME} image from ${SOURCE_URL}..." + acli image.create ${VMNAME} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url=${SOURCE_URL} +else + log "Image found, assuming ready. Skipping ${VMNAME} import." +fi + +### Deploy PrismProServer ### + +log "Create ${VMNAME} VM based on ${VMNAME} image" +acli "vm.create ${VMNAME} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" +# vmstat --wide --unit M --active # suggests 2G sufficient, was 4G +#acli "vm.disk_create ${VMNAME} cdrom=true empty=true" +acli "vm.disk_create ${VMNAME} clone_from_image=${VMNAME}" +acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" +#acli "vm.nic_create ${VMNAME} network=${NW1_NAME} ip=${AUTH_HOST}" + +log "Power on ${VMNAME} VM..." 
+acli "vm.on ${VMNAME}" + +_attempts=20 +_loop=0 +_sleep=10 + +while true ; do + (( _loop++ )) + + + +} + ############################################################################################################################################################################### # Routine to get the Nutanix Files injected ############################################################################################################################################################################### From ade207734da1edee19b9c7adf414883655360345 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 13:52:22 -0400 Subject: [PATCH 213/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6ee788f..81418fa 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -159,6 +159,7 @@ function authentication_source() { ############################################################################################################################################################################### # Routine to deploy PrismProServer ############################################################################################################################################################################### + function prism_pro_server_deploy() { VMNAME='PrismProServer' @@ -193,7 +194,7 @@ _sleep=10 while true ; do (( _loop++ )) - +done } From ba3c270558f20d77a8bf45375729a0914b68b9e8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 14:49:14 -0400 Subject: [PATCH 214/691] Update bootcamp.sh --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 6d6c79e..ac9465b 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -26,6 +26,7 @@ case ${1} in && pe_init \ && network_configure \ && authentication_source \ + && prism_pro_server_deploy \ && pe_auth if (( $? 
== 0 )) ; then @@ -114,7 +115,6 @@ case ${1} in && pc_smtp ssp_auth \ - && prism_pro_server_deploy \ && calm_enable \ && karbon_enable \ && lcm \ From 1b5a8eaee1dc641cfc339cd86be9d8c358732ba3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 15:20:20 -0400 Subject: [PATCH 215/691] updated for PrismPro --- scripts/global.vars.sh | 6 +++--- scripts/lib.pe.sh | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 12e78d4..b84a0a0 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -119,7 +119,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ @@ -150,7 +150,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ @@ -181,7 +181,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ - 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 81418fa..6c67eaf 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -167,10 +167,10 @@ VMNAME='PrismProServer' ### Import Image ### if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${VMNAME} | wc --lines) == 0 )); then - log "Import ${VMNAME} image from ${SOURCE_URL}..." + log "Import ${VMNAME} image from ${QCOW2_REPOS}..." 
acli image.create ${VMNAME} \ image_type=kDiskImage wait=true \ - container=${STORAGE_IMAGES} source_url=${SOURCE_URL} + container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${VMNAME}.qcow2" else log "Image found, assuming ready. Skipping ${VMNAME} import." fi From 17976010d66f21d9f79ba14286c48047b3bb4b7f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 24 Sep 2019 16:12:20 -0400 Subject: [PATCH 216/691] Update lib.pe.sh --- scripts/lib.pe.sh | 7 ------- 1 file changed, 7 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6c67eaf..8c88d52 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -188,13 +188,6 @@ acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" log "Power on ${VMNAME} VM..." acli "vm.on ${VMNAME}" -_attempts=20 -_loop=0 -_sleep=10 - -while true ; do - (( _loop++ )) -done } From befa5c0ed8875e0302b2615c293ec14ce65ae77a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 26 Sep 2019 15:15:53 -0400 Subject: [PATCH 217/691] Update lib.pe.sh --- scripts/lib.pe.sh | 63 +++++++++++++++++++++++++---------------------- 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 8c88d52..44a4eee 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -247,30 +247,34 @@ function file_analytics_install() { ############################################################################################################################################################################### function create_file_server() { - local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + #local CURL_HTTP_OPTS=' --max-time 25 --silent --show-error --header Content-Type:application/json --header Accept:application/json --insecure ' local _fileserver_name="BootcampFS" local _internal_nw_name="${1}" local _internal_nw_uuid local _external_nw_name="${2}" local _external_nw_uuid local _test + local _maxtries=5 + local _tries=0 local 
_httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" + local _ntp_formatted="$(echo $NTP_SERVERS | sed -r 's/[^,]+/'\"'&'\"'/g')" + - log "Get cluster network and storage container UUIDs..." - _internal_nw_uuid=$(acli "net.get ${_internal_nw_name}" \ + echo "Get cluster network and storage container UUIDs..." + _internal_nw_uuid=$(acli net.get ${_internal_nw_name} \ | grep "uuid" | cut -f 2 -d ':' | xargs) - _external_nw_uuid=$(acli "net.get ${_external_nw_name}" \ + _external_nw_uuid=$(acli net.get ${_external_nw_name} \ | grep "uuid" | cut -f 2 -d ':' | xargs) _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) - log "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" - log "${_external_nw_name} network UUID: ${_external_nw_uuid}" - log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" + echo "${_internal_nw_name} network UUID: ${_internal_nw_uuid}" + echo "${_external_nw_name} network UUID: ${_external_nw_uuid}" + echo "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" HTTP_JSON_BODY=$(cat < Date: Mon, 7 Oct 2019 16:13:20 +0200 Subject: [PATCH 218/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 44a4eee..c01a3bb 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -616,7 +616,7 @@ function pe_init() { log "Rename default container to ${STORAGE_DEFAULT}" default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' \ - | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') + | cut -d ':' -f 2 | sed s/' '//g | grep 'default-container-') ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" log "Rename default storage pool to ${STORAGE_POOL}" From 65cc65648b3e3aa5a1e1edceddcba87b980e81ef Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 7 Oct 2019 17:48:40 +0200 Subject: [PATCH 
219/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index c01a3bb..1a9442b 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -483,8 +483,10 @@ function pc_install() { log "Get cluster network and storage container UUIDs..." _nw_uuid=$(acli "net.get ${_nw_name}" \ | grep "uuid" | cut -f 2 -d ':' | xargs) - _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ + _storage_default_uuid=$(ncli container ls name=${STORAGE_IMAGES} \ | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) + #_storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} \ + # | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) log "${_nw_name} network UUID: ${_nw_uuid}" log "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" From 80308fe9dbf59c1cfd54644f1586e9fc717f72c5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 10 Oct 2019 09:56:13 +0200 Subject: [PATCH 220/691] Updates for 5.11 --- scripts/bootcamp.sh | 4 ++-- scripts/global.vars.sh | 14 +++++++------- scripts/lib.pe.sh | 4 ++-- stage_workshop.sh | 10 +++++----- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index ac9465b..71b0cbb 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -26,8 +26,8 @@ case ${1} in && pe_init \ && network_configure \ && authentication_source \ - && prism_pro_server_deploy \ - && pe_auth + && pe_auth \ + && prism_pro_server_deploy if (( $? 
== 0 )) ; then pc_install "${NW1_NAME}" \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index b84a0a0..3e40a6f 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.11' +PC_DEV_VERSION='5.11.1' PC_CURRENT_VERSION='5.11' PC_STABLE_VERSION='5.10.5' FILES_VERSION='3.5.2' @@ -98,8 +98,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.1.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.1-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' @@ -129,8 +129,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.1.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.1-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' @@ -160,8 +160,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 
DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.1.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.1-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 1a9442b..c3c5640 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -162,7 +162,7 @@ function authentication_source() { function prism_pro_server_deploy() { -VMNAME='PrismProServer' +VMNAME='PrismProLabUtilityServer' ### Import Image ### @@ -618,7 +618,7 @@ function pe_init() { log "Rename default container to ${STORAGE_DEFAULT}" default_container=$(ncli container ls | grep -P '^(?!.*VStore Name).*Name' \ - | cut -d ':' -f 2 | sed s/' '//g | grep 'default-container-') + | cut -d ':' -f 2 | sed s/' '//g | grep '^default-container-') ncli container edit name="${default_container}" new-name="${STORAGE_DEFAULT}" log "Rename default storage pool to ${STORAGE_POOL}" diff --git a/stage_workshop.sh b/stage_workshop.sh index 0736ed9..b5340b0 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,9 +11,9 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp Staging (AOS 5.10+/AHV PC 5.11+) = Current" \ +"Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ "Previous Bootcamp Staging (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Development" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 
5.11.1) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ @@ -38,11 +38,11 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? - if (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.11.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.8" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 0a395e336059cb4e93e527f40994dd2b139d03b9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 15 Oct 2019 11:09:56 -0700 Subject: [PATCH 221/691] Updates for PrismProServer Rename --- scripts/lib.pe.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index c3c5640..4a46e03 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -602,10 +602,10 @@ function pe_init() { STORAGE_DEFAULT STORAGE_POOL STORAGE_IMAGES \ SLEEP ATTEMPTS' - if [[ `ncli cluster get-params | grep 'External Data' | \ - awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then - log "IDEMPOTENCY: Data Services IP set, skip." - else + #if [[ `ncli cluster get-params | grep 'External Data' | \ + # awk -F: '{print $2}' | tr -d '[:space:]'` == "${DATA_SERVICE_IP}" ]]; then + # log "IDEMPOTENCY: Data Services IP set, skip." 
+ #else log "Configure SMTP" ncli cluster set-smtp-server port=${SMTP_SERVER_PORT} \ from-email-address=${SMTP_SERVER_FROM} address=${SMTP_SERVER_ADDRESS} @@ -636,7 +636,7 @@ function pe_init() { log "Set Data Services IP address to ${DATA_SERVICE_IP}" ncli cluster edit-params external-data-services-ip-address=${DATA_SERVICE_IP} - fi + #fi } ############################################################################################################################################################################### From 41ea627052f06ce2cdcf2fd747ecdd3207b47c73 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 21 Oct 2019 12:05:37 -0700 Subject: [PATCH 222/691] Updates for Objects --- scripts/bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index 71b0cbb..fdf622a 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -117,8 +117,8 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && lcm \ && objects_enable \ + && lcm \ && object_store \ && karbon_image_download \ && images \ From 45fd50312f15f9b0c85652944751a3d8628037c3 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 29 Oct 2019 17:04:10 +0100 Subject: [PATCH 223/691] Small Updates --- test/minigts_ce.sh | 30 +++++++++++++ test/minigts_ce_list.txt | 50 ++++++++++++++++++++++ test/nht_prep.sh | 91 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 171 insertions(+) create mode 100644 test/minigts_ce.sh create mode 100644 test/minigts_ce_list.txt create mode 100644 test/nht_prep.sh diff --git a/test/minigts_ce.sh b/test/minigts_ce.sh new file mode 100644 index 0000000..f5b604f --- /dev/null +++ b/test/minigts_ce.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# +BIN=/usr/bin +DOMAIN=ntnxlab.local +PASSWD="nutanix/4u" +IFS="," + +while read -u50 Webname IP1 DBName IP2 DRIP1 DRIP2 +do + echo Adding: "$Webname" with IP "$IP1" with DRIP "$DRIP1" and "$DBName" with IP "$IP2" and DRIP "$DRIP2" + OCTETDC1=(${IP1//./,}) + 
OCTETDC2=(${DRIP1//./,}) + DC1="${OCTETDC1[0]}.${OCTETDC1[1]}.${OCTETDC1[2]}.41" + DC2="${OCTETDC2[0]}.${OCTETDC2[1]}.${OCTETDC2[2]}.41" + echo "Using Domain controlers: DC1: $DC1 and DC2: $DC2" + SSHPASS=$PASSWD sshpass -e ssh root@$DC1 samba-tool dns add $DC1 $DOMAIN $Webname A $IP1 -U administrator --password $PASSWD + SSHPASS=$PASSWD sshpass -e ssh root@$DC1 samba-tool dns add $DC1 $DOMAIN $DBName A $IP2 -U administrator --password $PASSWD + echo "Updating the DR side......" + SSHPASS=$PASSWD sshpass -e ssh root@$DC2 samba-tool dns add $DC2 $DOMAIN $Webname A $DRIP1 -U administrator --password $PASSWD + SSHPASS=$PASSWD sshpass -e ssh root@$DC2 samba-tool dns add $DC2 $DOMAIN $DBName A $DRIP2 -U administrator --password $PASSWD + echo "--------------------------------------------------------------------------------------------------------------------------------" + echo "" +done 50< <(cat minigts_ce_list.txt ) + +for i in 104 111 184 110 99 4 96 95 69 86 81 61 +do + DC="10.42.$i.41" + SSHPASS=$PASSWD sshpass -e ssh root@$DC "samba-tool dns query $DC $DOMAIN @ ALL -U administrator --password $PASSWD" + echo "--------------------------------------------------------" +done diff --git a/test/minigts_ce_list.txt b/test/minigts_ce_list.txt new file mode 100644 index 0000000..a1eb21c --- /dev/null +++ b/test/minigts_ce_list.txt @@ -0,0 +1,50 @@ +drweb1,10.42.4.230,drdb1,10.42.4.239,10.42.86.240,10.42.86.249, +drweb1,10.42.69.230,drdb1,10.42.69.233,10.42.111.240,10.42.111.243, +drweb1,10.42.86.230,drdb1,10.42.86.231,10.42.4.240,10.42.4.241, +drweb1,10.42.96.230,drdb1,10.42.96.237,10.42.81.240,10.42.81.247, +drweb1,10.42.110.230,drdb1,10.42.110.231,10.42.61.240,10.42.61.241, +drweb1,10.38.184.230,drdb1,10.38.184.235,10.42.99.240,10.42.99.245, +drweb10,10.42.61.238,drdb10,10.42.61.239,10.38.184.248,10.38.184.249, +drweb2,10.42.4.232,drdb2,10.42.4.237,10.42.86.242,10.42.86.247, +drweb2,10.42.69.232,drdb2,10.42.69.231,10.42.111.242,10.42.111.241, 
+drweb2,10.42.86.232,drdb2,10.42.86.237,10.42.4.242,10.42.4.247, +drweb2,10.42.96.232,drdb2,10.42.96.235,10.42.81.242,10.42.81.245, +drweb2,10.42.110.232,drdb2,10.42.110.233,10.42.61.242,10.42.61.243, +drweb2,10.38.184.232,drdb2,10.38.184.237,10.42.99.242,10.42.99.247, +drweb3,10.42.4.234,drdb3,10.42.4.235,10.42.86.244,10.42.86.245, +drweb3,10.42.69.234,drdb3,10.42.69.235,10.42.111.244,10.42.111.245, +drweb3,10.42.86.234,drdb3,10.42.86.235,10.42.4.244,10.42.4.245, +drweb3,10.42.96.234,drdb3,10.42.96.231,10.42.81.244,10.42.81.241, +drweb3,10.42.110.234,drdb3,10.42.110.237,10.42.61.244,10.42.61.247, +drweb3,10.38.184.234,drdb3,10.38.184.231,10.42.99.244,10.42.99.241, +drweb4,10.42.4.236,drdb4,10.42.4.231,10.42.86.246,10.42.86.241, +drweb4,10.42.69.236,drdb4,10.42.69.237,10.42.111.246,10.42.111.247, +drweb4,10.42.86.236,drdb4,10.42.86.233,10.42.4.246,10.42.4.243, +drweb4,10.42.96.236,drdb4,10.42.96.233,10.42.81.246,10.42.81.243, +drweb4,10.42.110.236,drdb4,10.42.110.235,10.42.61.246,10.42.61.245, +drweb4,10.38.184.236,drdb4,10.38.184.233,10.42.99.246,10.42.99.243, +drweb5,10.42.95.230,drdb5,10.42.95.231,10.42.104.240,10.42.104.241, +drweb5,10.42.81.230,drdb5,10.42.81.235,10.42.95.240,10.42.95.245, +drweb5,10.42.99.230,drdb5,10.42.99.237,10.42.96.240,10.42.96.247, +drweb5,10.42.104.230,drdb5,10.42.104.235,10.42.69.240,10.42.69.245, +drweb5,10.42.111.230,drdb5,10.42.111.233,10.42.110.240,10.42.110.243, +drweb5,10.42.4.238,drdb5,10.42.4.233,10.42.86.248,10.42.86.243, +drweb6,10.42.81.232,drdb6,10.42.81.237,10.42.95.242,10.42.95.247, +drweb6,10.42.95.232,drdb6,10.42.95.233,10.42.104.242,10.42.104.243, +drweb6,10.42.99.232,drdb6,10.42.99.231,10.42.96.242,10.42.96.241, +drweb6,10.42.104.232,drdb6,10.42.104.233,10.42.69.242,10.42.69.243, +drweb6,10.42.111.232,drdb6,10.42.111.231,10.42.110.242,10.42.110.241, +drweb6,10.42.61.230,drdb6,10.42.61.231,10.38.184.240,10.38.184.241, +drweb7,10.42.81.234,drdb7,10.42.81.233,10.42.95.244,10.42.95.243, 
+drweb7,10.42.99.234,drdb7,10.42.99.233,10.42.96.244,10.42.96.243, +drweb7,10.42.104.234,drdb7,10.42.104.231,10.42.69.244,10.42.69.241, +drweb7,10.42.111.234,drdb7,10.42.111.235,10.42.110.244,10.42.110.245, +drweb7,10.42.61.232,drdb7,10.42.61.233,10.38.184.242,10.38.184.243, +drweb7,10.42.95.234,drdb7,10.42.95.237,10.42.104.244,10.42.104.247, +drweb8,10.42.61.234,drdb8,10.42.61.235,10.38.184.244,10.38.184.245, +drweb8,10.42.81.236,drdb8,10.42.81.231,10.42.95.246,10.42.95.241, +drweb8,10.42.95.236,drdb8,10.42.95.235,10.42.104.246,10.42.104.245, +drweb8,10.42.99.236,drdb8,10.42.99.235,10.42.96.246,10.42.96.245, +drweb8,10.42.104.236,drdb8,10.42.104.237,10.42.69.246,10.42.69.247, +drweb8,10.42.111.236,drdb8,10.42.111.237,10.42.110.246,10.42.110.247, +drweb9,10.42.61.236,drdb9,10.42.61.237,10.38.184.246,10.38.184.247, diff --git a/test/nht_prep.sh b/test/nht_prep.sh new file mode 100644 index 0000000..2e14c2b --- /dev/null +++ b/test/nht_prep.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +# SCript to stage the NHT clusters for the fourth node +# Create single node cluster +yes | cluster --cluster_name=NHTLab --dns_servers=10.42.196.10 --ntp_servers=10.42.196.10 --svm_ips=$(/sbin/ifconfig eth0 | grep 'inet ' | awk '{ print $2}') create + +# Give the cluster some time to settle +sleep 60 + +#Reset the admin password +ncli user reset-password user-name='admin' password='nht2EMEA!' + +#Rename the default SP to SP1 +default_sp=$(ncli storagepool ls | grep 'Name' | cut -d ':' -f 2 | sed s/' '//g) +ncli sp edit name="${default_sp}" new-name="SP1" + +# Create an Images container if it doesn't exist +(ncli container ls | grep -P '^(?!.*VStore Name).*Name' | cut -d ':' -f 2 | sed s/' '//g | grep "^Images" 2>&1 > /dev/null) \ + && echo "Container Images already exists" \ + || ncli container create name="Images" sp-name="SP1" + +# Accept the EULA +curl -u admin:'nht2EMEA!' 
-k -H 'Content-Type: application/json' -X POST \ + https://127.0.0.1:9440/PrismGateway/services/rest/v1/eulas/accept \ + -d '{ + "username": "SE", + "companyName": "NTNX", + "jobTitle": "SE" +}' + +# Disable Pulse in PE +curl -u admin:'nht2EMEA!' -k -H 'Content-Type: application/json' -X PUT \ + https://127.0.0.1:9440/PrismGateway/services/rest/v1/pulse \ + -d '{ + "defaultNutanixEmail": null, + "emailContactList": null, + "enable": false, + "enableDefaultNutanixEmail": false, + "isPulsePromptNeeded": false, + "nosVersion": null, + "remindLater": null, + "verbosityType": null +}' + + +# Upload the images +curl -X POST \ + https://127.0.0.1:9440/api/nutanix/v3/batch \ + -H 'Content-Type: application/json' \ + --insecure --user admin:'nht2EMEA!' \ + -d '{ + "action_on_failure":"CONTINUE", + "execution_order":"SEQUENTIAL", + "api_request_list":[ + { + "operation":"POST", + "path_and_params":"/api/nutanix/v3/images", + "body":{ + "spec":{ + "name":"X-Ray.qcow2", + "resources":{ + "image_type":"DISK_IMAGE", + "source_uri":"http://download.nutanix.com/xray/3.5.0/xray.qcow2" + } + }, + "metadata":{ + "kind":"image" + }, + "api_version":"3.1.0" + } + }, + { + "operation":"POST", + "path_and_params":"/api/nutanix/v3/images", + "body":{ + "spec":{ + "name":"Foundation.qcow2", + "resources":{ + "image_type":"DISK_IMAGE", + "source_uri":"http://download.nutanix.com/foundation/foundation-4.4.3/Foundation_VM-4.4.3-disk-0.qcow2" + } + }, + "metadata":{ + "kind":"image" + }, + "api_version":"3.1.0" + } + } + ], + "api_version":"3.0" +}' \ No newline at end of file From f5ff28bf3979e8c22099042ea9c31e97be869544 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 09:30:04 -0800 Subject: [PATCH 224/691] SNC Updates --- scripts/snc_bootcamp.sh | 156 ++++++++++++++++++++++++++++++++++++++++ stage_workshop.sh | 6 ++ 2 files changed, 162 insertions(+) create mode 100644 scripts/snc_bootcamp.sh diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh new file mode 100644 
index 0000000..abb779b --- /dev/null +++ b/scripts/snc_bootcamp.sh @@ -0,0 +1,156 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + ## Export Overrides needed for Single Node Clusters + export NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + export NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) + export NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && prism_pro_server_deploy + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter + # if [ ! -z DEBUG ]; then + # bash_cmd='bash' + # else + # bash_cmd='bash -x' + # fi + # _command="EMAIL=${EMAIL} \ + # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... 
${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && object_store \ + && karbon_image_download \ + && images \ + && seedPC \ + && flow_enable \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index b5340b0..74d4517 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -12,6 +12,7 @@ begin # - PC #.# WORKSHOPS=(\ "Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ +"SNC Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ "Previous Bootcamp Staging (AOS 5.10+/AHV PC 5.10+) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.1) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ @@ -53,6 +54,11 @@ function stage_clusters() { _pe_launch='bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i SNC Bootcamp | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='snc_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm.sh' From bee0b1efb91a09b94215b217eb1e868235958352 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 10:22:33 -0800 Subject: [PATCH 225/691] 
update fixes for SNC cluster staging --- scripts/global.vars.sh | 1 + scripts/lib.pe.sh | 2 +- scripts/snc_bootcamp.sh | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 3e40a6f..79bda41 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -66,6 +66,7 @@ NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp NW1_NAME='Primary' NW1_VLAN=0 NW1_SUBNET="${IPV4_PREFIX}.1/25" +NW1_GATEWAY="${IPV4_PREFIX}.1" NW1_DHCP_START="${IPV4_PREFIX}.50" NW1_DHCP_END="${IPV4_PREFIX}.125" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4a46e03..79ae9ea 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -525,7 +525,7 @@ function pc_install() { "network_configuration":{ "subnet_mask":"255.255.255.128", "network_uuid":"${_nw_uuid}", - "default_gateway":"${IPV4_PREFIX}.1" + "default_gateway":"${NW1_GATEWAY}" }, "ip_list":["${PC_HOST}"] }], diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh index abb779b..347551f 100644 --- a/scripts/snc_bootcamp.sh +++ b/scripts/snc_bootcamp.sh @@ -20,6 +20,7 @@ case ${1} in ## Export Overrides needed for Single Node Clusters export NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + export NW1_GATEWAY="${IPV4_PREFIX}.$((${OCTET[3]} - 5))" export NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) export NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) From 57e6dc6bb75aff9a6f15fc50e29526e2e4a9561f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 14:32:44 -0800 Subject: [PATCH 226/691] additional fixes for SNC --- scripts/global.vars.sh | 2 +- scripts/lib.pe.sh | 2 +- scripts/snc_bootcamp.sh | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 79bda41..d2853c8 100644 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -60,7 +60,7 @@ DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 
2)) DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' - +SUBNET_MASK="255.255.255.128" NW1_NAME='Primary' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 79ae9ea..224acc6 100644 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -523,7 +523,7 @@ function pc_install() { "data_disk_size_bytes":536870912000, "nic_list":[{ "network_configuration":{ - "subnet_mask":"255.255.255.128", + "subnet_mask":"${SUBNET_MASK}", "network_uuid":"${_nw_uuid}", "default_gateway":"${NW1_GATEWAY}" }, diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh index 347551f..c8a8534 100644 --- a/scripts/snc_bootcamp.sh +++ b/scripts/snc_bootcamp.sh @@ -23,6 +23,7 @@ case ${1} in export NW1_GATEWAY="${IPV4_PREFIX}.$((${OCTET[3]} - 5))" export NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) export NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + export SUBNET_MASK="255.255.255.192" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From c3492034432bdf57c89050654c74e62097e48edf Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 15:18:05 -0800 Subject: [PATCH 227/691] updates for SNC Clusters --- scripts/snc_bootcamp.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh index c8a8534..0676c87 100644 --- a/scripts/snc_bootcamp.sh +++ b/scripts/snc_bootcamp.sh @@ -21,8 +21,8 @@ case ${1} in ## Export Overrides needed for Single Node Clusters export NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" export NW1_GATEWAY="${IPV4_PREFIX}.$((${OCTET[3]} - 5))" - export NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - export NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + export NW1_DHCP_START="${IPV4_PREFIX}.$((${OCTET[3]} + 33))" + export NW1_DHCP_END="${IPV4_PREFIX}.$((${OCTET[3]} + 53))" export SUBNET_MASK="255.255.255.192" args_required 'PE_HOST PC_LAUNCH' From 
488c558fcfe04754ab72f5f1ed17ed847dece2f9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 15:23:43 -0800 Subject: [PATCH 228/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 74d4517..30397da 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -54,7 +54,7 @@ function stage_clusters() { _pe_launch='bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i SNC Bootcamp | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i SNC | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='snc_bootcamp.sh' _pc_launch=${_pe_launch} From f05484681c66d80adf6c883ad86d1dc5c605d8c3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 15:32:00 -0800 Subject: [PATCH 229/691] Updates for runtime --- scripts/citrix_bootcamp.sh | 0 scripts/era_bootcamp.sh | 0 scripts/files_bootcamp.sh | 0 scripts/global.vars.sh | 0 scripts/lib.common.sh | 0 scripts/lib.pc.org.sh | 0 scripts/lib.pe.sh | 0 scripts/lib.shell-convenience.sh | 0 scripts/localhost.sh | 0 scripts/snc_bootcamp.sh | 0 scripts/vmdisk2image-pc.sh | 0 scripts/we-lib.common.sh | 0 scripts/we-ts2019.sh | 0 13 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 scripts/citrix_bootcamp.sh mode change 100644 => 100755 scripts/era_bootcamp.sh mode change 100644 => 100755 scripts/files_bootcamp.sh mode change 100644 => 100755 scripts/global.vars.sh mode change 100644 => 100755 scripts/lib.common.sh mode change 100644 => 100755 scripts/lib.pc.org.sh mode change 100644 => 100755 scripts/lib.pe.sh mode change 100644 => 100755 scripts/lib.shell-convenience.sh mode change 100644 => 100755 scripts/localhost.sh mode change 100644 => 100755 scripts/snc_bootcamp.sh mode change 100644 => 100755 scripts/vmdisk2image-pc.sh mode change 100644 => 100755 scripts/we-lib.common.sh mode change 100644 => 100755 scripts/we-ts2019.sh diff 
--git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.pc.org.sh b/scripts/lib.pc.org.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.shell-convenience.sh b/scripts/lib.shell-convenience.sh old mode 100644 new mode 100755 diff --git a/scripts/localhost.sh b/scripts/localhost.sh old mode 100644 new mode 100755 diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/vmdisk2image-pc.sh b/scripts/vmdisk2image-pc.sh old mode 100644 new mode 100755 diff --git a/scripts/we-lib.common.sh b/scripts/we-lib.common.sh old mode 100644 new mode 100755 diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh old mode 100644 new mode 100755 From ba50a2bd385412d6a4338379018871637f05d4d4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 16:27:53 -0800 Subject: [PATCH 230/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 30397da..0af8780 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -12,7 +12,7 @@ begin # - PC #.# WORKSHOPS=(\ "Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ -"SNC Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ +"SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ "Previous Bootcamp Staging (AOS 5.10+/AHV PC 5.10+) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.1) = Development" \ "Tech Summit 
2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ From 23180c70edb125ffaebfaf37f231302ede6666e8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 19 Nov 2019 19:08:30 -0800 Subject: [PATCH 231/691] Update global.vars.sh --- scripts/global.vars.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d2853c8..bbc7d89 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -40,7 +40,6 @@ QCOW2_IMAGES=(\ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ move3.2.0.qcow2 \ - WindowsToolsVM.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ From e15482b9e5207faf95f9d36a133a8ace94414691 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 4 Dec 2019 11:16:47 -0500 Subject: [PATCH 232/691] Update global.vars.sh --- scripts/global.vars.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index bbc7d89..d8eb652 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,6 +35,7 @@ QCOW2_IMAGES=(\ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ ERA-Server-build-1.1.1.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ From 7195dbacf25711ccce10f9140c8cf917cb40b596 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 10 Dec 2019 09:17:24 -0500 Subject: [PATCH 233/691] Updates for 5.11.2 and Updated Files/File Analytics --- scripts/global.vars.sh | 46 +++++++++++++++++++++--------------------- stage_workshop.sh | 8 ++++---- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d8eb652..20f45ad 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,11 +2,11 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.11.1' +PC_DEV_VERSION='5.11.2' PC_CURRENT_VERSION='5.11' PC_STABLE_VERSION='5.10.5' -FILES_VERSION='3.5.2' -FILE_ANALYTICS_VERSION='2.0.0' +FILES_VERSION='3.6.0' +FILE_ANALYTICS_VERSION='2.0.1' NTNX_INIT_PASSWORD='nutanix/4u' 
PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -36,7 +36,7 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - ERA-Server-build-1.1.1.qcow2 \ + ERA-Server-build-1.1.1.3.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ @@ -47,7 +47,7 @@ ISO_IMAGES=(\ Windows2016.iso \ Windows2012R2.iso \ Windows10.iso \ - Nutanix-VirtIO-1.1.3.iso \ + Nutanix-VirtIO-1.1.5.iso \ SQLServer2014SP3.iso \ XenApp_and_XenDesktop_7_18.iso \ VeeamBR_9.5.4.2615.Update4.iso \ @@ -99,16 +99,16 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.1.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.1-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.5.2.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/fileanalytics-2.0.0.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.0.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.0.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' + 
FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -130,16 +130,16 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.1.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.1-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.2.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-2.0.0.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.0.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1.qcow2' JQ_REPOS=(\ 
'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -161,16 +161,16 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.1.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.1-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.5.2.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.5.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/fileanalytics-2.0.0.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.0.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ diff --git a/stage_workshop.sh b/stage_workshop.sh index 0af8780..b25795e 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh 
@@ -11,10 +11,10 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ -"SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11+) = Current" \ +"Bootcamp Staging (AOS 5.11+/AHV PC 5.11) = Current" \ +"SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11) = Current" \ "Previous Bootcamp Staging (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.1) = Development" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2) = Development" \ "Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ @@ -39,7 +39,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? - if (( $(echo ${_workshop} | grep -i "PC 5.11.1" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" From 2b156b57e6a66303906e6b2cabbb46e9863c936f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 16 Dec 2019 15:10:09 -0800 Subject: [PATCH 234/691] 5.11.2 Updates --- scripts/global.vars.sh | 42 +++++++++++++++++++++--------------------- stage_workshop.sh | 17 +++++++++-------- 2 files changed, 30 insertions(+), 29 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 20f45ad..bb8713b 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,9 +2,9 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.11.2' -PC_CURRENT_VERSION='5.11' -PC_STABLE_VERSION='5.10.5' +PC_DEV_VERSION='5.16' +PC_CURRENT_VERSION='5.11.2' +PC_STABLE_VERSION='5.11' 
FILES_VERSION='3.6.0' FILE_ANALYTICS_VERSION='2.0.1' NTNX_INIT_PASSWORD='nutanix/4u' @@ -99,12 +99,12 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.10.5.json' - PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.16-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.16-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.0.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' @@ -130,12 +130,12 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' - 
PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' @@ -161,12 +161,12 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.10.5.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.10.5-stable-prism_central.tar' + 
PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index b25795e..cbc305a 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,11 +11,12 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp Staging (AOS 5.11+/AHV PC 5.11) = Current" \ -"SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11) = Current" \ -"Previous Bootcamp Staging (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2) = Development" \ -"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ +"Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ +"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ +#"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ @@ -39,11 
+40,11 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? - if (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.16" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.10" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 092ac667c27b750d1d87db7881c25bc7dc248b52 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 16 Dec 2019 19:04:09 -0800 Subject: [PATCH 235/691] Update stage_workshop.sh --- stage_workshop.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/stage_workshop.sh b/stage_workshop.sh index cbc305a..09b80ea 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -16,6 +16,7 @@ WORKSHOPS=(\ "Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ From 9e080788e298f601616fce6f700a442db2430280 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 22 Dec 2019 13:30:33 -0800 Subject: [PATCH 236/691] Update global.vars.sh --- scripts/global.vars.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 
bb8713b..ea10259 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -41,6 +41,7 @@ QCOW2_IMAGES=(\ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ move3.2.0.qcow2 \ + AutoXD.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ From 946f93188f34544f8352bc38b2f8c46ac5926d01 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Jan 2020 16:20:29 +0300 Subject: [PATCH 237/691] Debugging LCM issue --- scripts/lib.pc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index df23da6..c4c0eae 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -199,9 +199,9 @@ function lcm() { fi # Remove the temp json files as we don't need it anymore - rm -rf reply_json.json - rm -rf reply_json_ver.json - rm -rf reply_json_uuid.json + #rm -rf reply_json.json + #rm -rf reply_json_ver.json + #rm -rf reply_json_uuid.json } From 923c028592caabf614a62f2b5b318fb6cb3ac9ed Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Jan 2020 17:08:28 +0300 Subject: [PATCH 238/691] Small Debug added --- scripts/lib.pc.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c4c0eae..7aedb13 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -117,6 +117,7 @@ function lcm() { lcm_version=$(curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"get_config\"}}"}' ${_url_lcm} | jq '.value' | tr -d \\ | sed 's/^"\(.*\)"$/\1/' | sed 's/.return/return/g' | jq '.return.lcm_cpdb_table_def_list.entity' | tr -d \"| grep "lcm_entity_v2" | wc -l) if [ $lcm_version -lt 1 ]; then + log "LCM Version 1 found.." 
# V1: Run the Curl command and save the oputput in a temp file curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_available_version","grouping_attribute": "entity_uuid","group_member_count": 1000,"group_member_attributes": [{"attribute": "uuid"},{"attribute": "entity_uuid"},{"attribute": "entity_class"},{"attribute": "status"},{"attribute": "version"},{"attribute": "dependencies"},{"attribute": "order"}]}' $_url_groups > reply_json.json @@ -129,6 +130,8 @@ function lcm() { version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[0]==$uuid) | select (.data[].name=="version") | .data[].values[].values[0]' reply_json.json | tail -4 | head -n 1 | tr -d \")) done else + log "LCM Version 2 found.." + #''_V2: run the other V2 API call to get the UUIDs of the to be updated software parts # Grab the installed version of the software first UUIDs curl $CURL_HTTP_OPTS --user $PRISM_ADMIN:$PE_PASSWORD -X POST -d '{"entity_type": "lcm_entity_v2","group_member_count": 500,"group_member_attributes": [{"attribute": "id"}, {"attribute": "uuid"}, {"attribute": "entity_model"}, {"attribute": "version"}, {"attribute": "location_id"}, {"attribute": "entity_class"}, {"attribute": "description"}, {"attribute": "last_updated_time_usecs"}, {"attribute": "request_version"}, {"attribute": "_master_cluster_uuid_"}, {"attribute": "entity_type"}, {"attribute": "single_group_uuid"}],"query_name": "lcm:EntityGroupModel","grouping_attribute": "location_id","filter_criteria": "entity_model!=AOS;entity_model!=NCC;entity_model!=PC;_master_cluster_uuid_==[no_val]"}' $_url_groups > reply_json_uuid.json @@ -158,8 +161,10 @@ function lcm() { count=0 while [ $count -lt ${#uuid_arr[@]} ] do - _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," - log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" + if [[ ! 
-z ${version_ar[$count]} ]] + _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," + log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" + fi let count=count+1 done From 5b665c9ad5a3a808e634d04ad66fa46e62389f94 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Jan 2020 18:31:29 +0300 Subject: [PATCH 239/691] Added Frame and Debug data --- scripts/frame.sh | 149 ++++++++++++++++++++++++++++++++++++++++++++++ stage_workshop.sh | 7 +++ 2 files changed, 156 insertions(+) create mode 100644 scripts/frame.sh diff --git a/scripts/frame.sh b/scripts/frame.sh new file mode 100644 index 0000000..8fac0f9 --- /dev/null +++ b/scripts/frame.sh @@ -0,0 +1,149 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && prism_pro_server_deploy + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter + # if [ ! 
-z DEBUG ]; then + # bash_cmd='bash' + # else + # bash_cmd='bash -x' + # fi + # _command="EMAIL=${EMAIL} \ + # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . 
global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && object_store \ + && karbon_image_download \ + && images \ + && flow_enable \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 09b80ea..ccc25a4 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -13,6 +13,7 @@ begin WORKSHOPS=(\ "Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ "SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ "Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ @@ -87,6 +88,12 @@ function stage_clusters() { _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i Frame | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='frame.sh' + _pc_launch=${_pe_launch} + fi + dependencies 'install' 'sshpass' if [[ -z ${PC_VERSION} ]]; then From ec4ef43c8b6e003fc66aa56b53b9ce180ec2d73a Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Jan 2020 18:55:22 +0300 Subject: [PATCH 240/691] Added the +x --- scripts/frame.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 scripts/frame.sh diff --git a/scripts/frame.sh b/scripts/frame.sh old mode 100644 new mode 100755 From 2835a1a26e0a04fb97e696bf43401c94c8b45553 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Jan 2020 20:35:30 +0300 Subject: [PATCH 241/691] Typo taken out --- scripts/frame.sh | 1 - scripts/lib.pc.sh | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/frame.sh b/scripts/frame.sh index 8fac0f9..fbcc522 100755 --- a/scripts/frame.sh +++ b/scripts/frame.sh @@ -27,7 +27,6 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - && prism_pro_server_deploy if (( $? 
== 0 )) ; then pc_install "${NW1_NAME}" \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7aedb13..26caf28 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -161,7 +161,7 @@ function lcm() { count=0 while [ $count -lt ${#uuid_arr[@]} ] do - if [[ ! -z ${version_ar[$count]} ]] + if [ ! -z ${version_ar[$count]} ]; then _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" fi From 810e938492ee50f3c66fbfdccbd92ea3f5fce783 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Jan 2020 20:57:14 +0300 Subject: [PATCH 242/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 26caf28..e110e82 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -57,7 +57,7 @@ function flow_enable() { function loop(){ - local _attempts=40 + local _attempts=45 local _loops=0 local _sleep=60 local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " @@ -196,7 +196,7 @@ function lcm() { log "LCM Upgrade has encountered an error!!!!" else # Notify the logserver that we are starting the LCM Upgrade - log "LCM Upgrade starting...Process may take up to 40 minutes!!!" + log "LCM Upgrade starting...Process may take up to 45 minutes!!!" 
# Run the progess checker loop From 1976a82bc4077ad4bf2ac5a073b758dd451ec668 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 9 Jan 2020 18:35:15 -0800 Subject: [PATCH 243/691] Calm BP Upload --- scripts/global.vars.sh | 10 +- scripts/lib.pc.sh | 346 +++++++++++++++++++++++++++++++++++++++++ scripts/ts2020.sh | 176 +++++++++++++++++++++ 3 files changed, 529 insertions(+), 3 deletions(-) create mode 100644 scripts/ts2020.sh diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ea10259..1495f94 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -15,6 +15,8 @@ STORAGE_DEFAULT='Default' STORAGE_IMAGES='Images' ATTEMPTS=40 SLEEP=60 +ERA_Blueprint='EraServerDeployment.json' +Citrix_Blueprint='' # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' @@ -59,6 +61,8 @@ OCTET=(${PE_HOST//./ }) # zero index IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) +ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) +CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' SUBNET_MASK="255.255.255.128" @@ -128,6 +132,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' + BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX @@ -159,6 +164,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters @@ -190,9 +196,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 
'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' - #NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - #NW1_DHCP_START=${IPV4_PREFIX}.$((${OCTET[3]} + 33)) - #NW1_DHCP_END=${IPV4_PREFIX}.$((${OCTET[3]} + 53)) + BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index df23da6..ac67337 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -779,6 +779,352 @@ function calm_enable() { done } +############################################################################################################################################################################### +# Routine to upload Citrix Calm Blueprint and set variables +############################################################################################################################################################################### + +function upload_citrix_calm_blueprint() { + local DIRECTORY="/home/nutanix/" + local CALM_PROJECT="default" + local DOMAIN=${AUTH_FQDN} + local AD_IP=${AUTH_HOST} + local PE_IP=${PE_HOST} + local NutanixAcropolisPlugin="none" + local CVM_NETWORK=${NW1_NAME} + local BPG_RKTOOLS_URL="none" + local DDC_IP=${CITRIX_DDC_HOST} + local NutanixAcropolis_Installed_Path="none" + + local VLAN_NAME=${NW1_VLAN} + local DOWNLOAD_BLUEPRINTS + + # download the blueprint + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint) + log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + + # ensure the directory that contains the blueprints to be imported is not empty + if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then + echo "There are no .json files found in the directory provided." 
+ exit 0 + fi + + # create a list to store all bluprints found in the directory provided by user + declare -a LIST_OF_BLUEPRINTS=() + + # circle thru all of the files in the provided directory and add file names to a list of blueprints array + # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) + for FILE in "$DIRECTORY"/*.json; do + BASENAM="$(basename ${FILE})" + FILENAME="${BASENAM%.*}" + LIST_OF_BLUEPRINTS+=("$BASENAM") + done + + # echo $LIST_OF_BLUEPRINTS + # if the list of blueprints is not empty then: + if ((${#LIST_OF_BLUEPRINTS[@]})); then + # first check if the user has specified a project for the imported blueprints + # if they did, we need to make sure the project exists before assigning it to the BPs + + if [ $CALM_PROJECT != 'none' ]; then + + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + + # formulate the curl to check for project + _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" + + # make API call and store project_uuid + project_uuid=$(curl -s -k -X POST $_url_pc -H 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} -d "{\"kind\": \"project\", \"filter\": \"name==$CALM_PROJECT\"}" | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "\nProject $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "\nProject $CALM_PROJECT exists..." + fi + fi + else + echo '\nNo JSON files found in' + $DIRECTORY +' ... nothing to import!' + fi + + # update the user with script progress... 
+ _num_of_files=${#LIST_OF_BLUEPRINTS[@]} + echo "\nNumber of .json files found: ${_num_of_files}" + echo "\nStarting blueprint updates and then exporting to Calm one file at a time...\n\n" + + # go through the blueprint JSON files list found in the specified directory + for elem in "${LIST_OF_BLUEPRINTS[@]}"; do + # read the entire JSON file from the directory + JSONFile=${DIRECTORY}/"$elem" + + echo "\nCurrently updating blueprint $JSONFile..." + + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") + if [ "$elem" == "${NAME}" ]; then + if [ "$DOMAIN" != "none" ]; then + tmp_DOMAIN=$(mktemp) + # add the new variable to the json file and save it + $(jq --arg var_name $DOMAIN'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DOMAIN")).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) + fi + if [ "$AD_IP" != "none" ]; then + tmp_AD_IP=$(mktemp) + $(jq --arg var_name $AD_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="AD_IP")).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) + fi + if [ "$PE_IP" != "none" ]; then + tmp_PE_IP=$(mktemp) + $(jq --arg var_name 
$PE_IP'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_IP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) + fi + if [ "$NutanixAcropolisPlugin" != "none" ]; then + tmp_NutanixAcropolisPluginE=$(mktemp) + $(jq --arg var_name $NutanixAcropolisPlugin '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolisPlugin")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolisPlugin" && mv "$tmp_NutanixAcropolisPlugin" $JSONFile) + fi + if [ "$CVM_NETWORK" != "none" ]; then + tmp_CVM_NETWORK=$(mktemp) + $(jq --arg var_name $CVM_NETWORK '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CVM_NETWORK")).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) + fi + if [ "$BPG_RKTOOLS_URL" != "none" ]; then + tmp_BPG_RKTOOLS_URL=$(mktemp) + $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="BPG_RKTOOLS_URL")).value=$var_name' $JSONFile >"$tmp_BPG_RKTOOLS_URL" && mv "$tmp_BPG_RKTOOLS_URL" $JSONFile) + fi + if [ "$DDC_IP" != "none" ]; then + tmp_DDC_IP=$(mktemp) + $(jq --arg var_name $DDC_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DDC_IP")).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) + fi + if [ "$NutanixAcropolis_Installed_Path" != "none" ]; then + tmp_NutanixAcropolis_Installed_Path=$(mktemp) + $(jq --arg var_name $NutanixAcropolis_Installed_Path '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolis_Installed_PathL")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolis_Installed_Path" && mv "$tmp_NutanixAcropolis_Installed_Path" $JSONFile) + fi + fi + + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " + + if [ blueprint_name == 'null' ]; then + echo "\nUnprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?\n" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "\nUploading the updated blueprint: $blueprint_name...\n" + + # Example curl call from the console: + # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" + # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" + # bp_name="EraServerDeployment" + # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" + # password='techX2019!' + # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" + + url="https://localhost:9440/api/nutanix/v3/blueprints/import_file" + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + password=$PE_PASSWORD + upload_result=$(curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password") + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "\nUpload for $bp_name did not finish." + else + echo "\nUpload for $bp_name finished." 
+ echo "-----------------------------------------" + # echo "Result: $upload_result" + fi + fi + + echo "\nFinished uploading Citrix Blueprint and setting Variables!\n" + +} + +############################################################################################################################################################################### +# Routine to upload Era Calm Blueprint and set variables +############################################################################################################################################################################### + +function upload_era_calm_blueprint() { + local DIRECTORY="/home/nutanix/" + local CALM_PROJECT="default" + local ERA_IP=${ERA_HOST} + local PE_IP=${PE_HOST} + local CLSTR_NAME="none" + local CTR_UUID=${_storage_default_uuid} + local CTR_NAME=${STORAGE_DEFAULT} + local NETWORK_NAME=${NW1_NAME} + local VLAN_NAME=${NW1_VLAN} + local DOWNLOAD_BLUEPRINTS + + + + # download the blueprint + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${ERA_Blueprint} -o ${DIRECTORY}${ERA_Blueprint}) + log "Downloading ${ERA_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + + # ensure the directory that contains the blueprints to be imported is not empty + if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then + echo "There are no .json files found in the directory provided." 
+ exit 0 + fi + + # create a list to store all bluprints found in the directory provided by user + declare -a LIST_OF_BLUEPRINTS=() + + # circle thru all of the files in the provided directory and add file names to a list of blueprints array + # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) + for FILE in "$DIRECTORY"/*.json; do + BASENAM="$(basename ${FILE})" + FILENAME="${BASENAM%.*}" + LIST_OF_BLUEPRINTS+=("$BASENAM") + done + + # echo $LIST_OF_BLUEPRINTS + # if the list of blueprints is not empty then: + if ((${#LIST_OF_BLUEPRINTS[@]})); then + # first check if the user has specified a project for the imported blueprints + # if they did, we need to make sure the project exists before assigning it to the BPs + + if [ $CALM_PROJECT != 'none' ]; then + + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + + # formulate the curl to check for project + _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" + + # make API call and store project_uuid + project_uuid=$(curl -s -k -X POST $_url_pc -H 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} -d "{\"kind\": \"project\", \"filter\": \"name==$CALM_PROJECT\"}" | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "\nProject $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "\nProject $CALM_PROJECT exists..." + fi + fi + else + echo '\nNo JSON files found in' + $DIRECTORY +' ... nothing to import!' + fi + + # update the user with script progress... 
+ _num_of_files=${#LIST_OF_BLUEPRINTS[@]} + echo "\nNumber of .json files found: ${_num_of_files}" + echo "\nStarting blueprint updates and then exporting to Calm one file at a time...\n\n" + + # go through the blueprint JSON files list found in the specified directory + for elem in "${LIST_OF_BLUEPRINTS[@]}"; do + # read the entire JSON file from the directory + JSONFile=${DIRECTORY}/"$elem" + + echo "\nCurrently updating blueprint $JSONFile..." + + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") + if [ "$elem" == "${NAME}" ]; then + if [ "$ERA_IP" != "none" ]; then + tmp_ERA_IP=$(mktemp) + # add the new variable to the json file and save it + $(jq --arg var_name $ERA_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="ERA_IP")).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) + # result="$(jq --arg newOBJ "${obj_with_replaced_variable}" '.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_VIP") | .+=$newOBJ' $JSONFile )" + fi + if [ "$PE_IP" != "none" ]; then + tmp_PE_IP=$(mktemp) + # add the new variable to the json file and save it + $(jq --arg var_name $PE_IP '(.spec.resources.service_definition_list[0].variable_list[] | select 
(.name=="PE_VIP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) + # result="$(jq --arg newOBJ "${obj_with_replaced_variable}" '.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_VIP") | .+=$newOBJ' $JSONFile )" + fi + if [ "$CLSTR_NAME" != "none" ]; then + tmp_CLSTR_NAME=$(mktemp) + $(jq --arg var_name $CLSTR_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CLSTR_NAME")).value=$var_name' $JSONFile >"$tmp_CLSTR_NAME" && mv "$tmp_CLSTR_NAME" $JSONFile) + fi + if [ "$CTR_UUID" != "none" ]; then + tmp_CTR_UUID=$(mktemp) + $(jq --arg var_name $CTR_UUID '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CTR_UUID")).value=$var_name' $JSONFile >"$tmp_CTR_UUID" && mv "$tmp_CTR_UUID" $JSONFile) + fi + if [ "$CTR_NAME" != "none" ]; then + tmp_CTR_NAME=$(mktemp) + $(jq --arg var_name $CTR_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CTR_NAME")).value=$var_name' $JSONFile >"$tmp_CTR_NAME" && mv "$tmp_CTR_NAME" $JSONFile) + fi + if [ "$NETWORK_NAME" != "none" ]; then + tmp_NETWORK_NAME=$(mktemp) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + fi + if [ "$VLAN_NAME" != "none" ]; then + tmp_VLAN_NAME=$(mktemp) + $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="VLAN_NAME")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + fi + fi + + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " + + if [ blueprint_name == 'null' ]; then + echo "\nUnprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?\n" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "\nUploading the updated blueprint: $blueprint_name...\n" + + # Example curl call from the console: + # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" + # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" + # bp_name="EraServerDeployment" + # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" + # password='techX2019!' + # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" + + url="https://localhost:9440/api/nutanix/v3/blueprints/import_file" + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + password=$PE_PASSWORD + upload_result=$(curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password") + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "\nUpload for $bp_name did not finish." + else + echo "\nUpload for $bp_name finished." 
+ echo "-----------------------------------------" + # echo "Result: $upload_result" + fi + fi + + echo "\nFinished uploading Era Blueprint and setting Variables!\n" + +} + ############################################################################################################################################################################### # Routine to make changes to the PC UI; Colors, naming and the Welcome Banner ############################################################################################################################################################################### diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh new file mode 100644 index 0000000..fc1a3a1 --- /dev/null +++ b/scripts/ts2020.sh @@ -0,0 +1,176 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + #export PC_DEV_VERSION='5.10.2' + #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + #export FILES_VERSION='3.2.0.1' + #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.229" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + #export QCOW2_REPOS=(\ + #'http://10.42.8.50/images/' \ + #'https://s3.amazonaws.com/get-ahv-images/' \ + #) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + move-3.0.1.qcow2 \ + ERA-Server-build-1.0.1.qcow2 \ + sherlock-k8s-base-image_403.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + ) + export ISO_IMAGES=(\ + Windows2012R2.iso \ + SQLServer2014SP3.iso \ + Nutanix-VirtIO-1.1.3.iso \ + VeeamBR_9.5.4.2615.Update4.iso \ + ) + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi + + ssp_auth \ + && calm_enable \ + && lcm \ + && images \ + && karbon_enable \ + && flow_enable \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; + #IMAGES | images ) + # . lib.pc.sh + #ts_images + #;; +esac From e5805eea481f2a7c1895e8b84d04be584eef4f02 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 13 Jan 2020 15:49:27 -0800 Subject: [PATCH 244/691] updates for Calm uploads --- scripts/ts2020.sh | 47 ++++++++++++++++++++++++++--------------------- stage_workshop.sh | 4 ++-- 2 files changed, 28 insertions(+), 23 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index fc1a3a1..c067152 100644 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -39,7 +39,8 @@ case ${1} in && pe_init \ && network_configure \ && authentication_source \ - && pe_auth + && pe_auth \ + && prism_pro_server_deploy if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -59,7 +60,11 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi @@ -73,30 +78,34 @@ case ${1} in PC | pc ) . lib.pc.sh - #export QCOW2_REPOS=(\ - #'http://10.42.8.50/images/' \ - #'https://s3.amazonaws.com/get-ahv-images/' \ - #) # talk to Nathan.C to populate S3, Sharon.S to populate Daisy File Share export QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ ToolsVM.qcow2 \ - move-3.0.1.qcow2 \ - ERA-Server-build-1.0.1.qcow2 \ - sherlock-k8s-base-image_403.qcow2 \ + Linux_ToolsVM.qcow2 \ + ERA-Server-build-1.1.1.3.qcow2 \ + MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ - 'http://download.nutanix.com/karbon/0.8/acs-centos7.qcow2' \ + move3.2.0.qcow2 \ + AutoXD.qcow2 \ ) export ISO_IMAGES=(\ + CentOS7.iso \ + Windows2016.iso \ Windows2012R2.iso \ + Windows10.iso \ + Nutanix-VirtIO-1.1.5.iso \ SQLServer2014SP3.iso \ - Nutanix-VirtIO-1.1.3.iso \ + XenApp_and_XenDesktop_7_18.iso \ VeeamBR_9.5.4.2615.Update4.iso \ ) + + run_once + dependencies 'install' 'jq' || exit 13 ssh_pubkey & # non-blocking, parallel suitable @@ -133,17 +142,17 @@ case ${1} in && pc_dns_add \ && pc_ui \ && pc_auth \ - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi + && pc_smtp ssp_auth \ && calm_enable \ + && karbon_enable \ + && objects_enable \ && lcm \ + && object_store \ + && karbon_image_download \ && images \ - && karbon_enable \ + && seedPC \ && flow_enable \ && pc_cluster_img_import \ && prism_check 'PC' @@ -169,8 +178,4 @@ case ${1} in FILES | files | afs ) files_install ;; - #IMAGES | images ) - # . 
lib.pc.sh - #ts_images - #;; esac diff --git a/stage_workshop.sh b/stage_workshop.sh index ccc25a4..cee15a0 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -18,7 +18,7 @@ WORKSHOPS=(\ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ -#"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ +"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ @@ -84,7 +84,7 @@ function stage_clusters() { fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='ts2019.sh' + _pe_launch='ts2020.sh' _pc_launch=${_pe_launch} fi From e7931a57a62c15ad3266d14fea1f21409d80585e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 13 Jan 2020 16:39:11 -0800 Subject: [PATCH 245/691] Update ts2020.sh --- scripts/ts2020.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 scripts/ts2020.sh diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh old mode 100644 new mode 100755 From f4a38c353b02e35c8e53eee14bd431dc12e76e4e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 14 Jan 2020 08:54:44 -0800 Subject: [PATCH 246/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 39a8337..9e02765 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -954,7 +954,9 @@ function upload_citrix_calm_blueprint() { fi fi - echo "\nFinished uploading Citrix Blueprint and setting Variables!\n" + done + + echo "\nFinished uploading Citrix Blueprint and setting Variables!\n" } @@ -1126,7 +1128,9 @@ function upload_era_calm_blueprint() { 
fi fi - echo "\nFinished uploading Era Blueprint and setting Variables!\n" + done + + echo "\nFinished uploading Era Blueprint and setting Variables!\n" } From 583fd3767450cc3ea32dbc8f8caf56f46eb9e8d3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 16 Jan 2020 10:13:27 -0800 Subject: [PATCH 247/691] Updates for AutoAD deployment --- scripts/global.vars.sh | 20 +++++++++++++---- scripts/lib.pe.sh | 50 ++++++++++++++++++++++++++++++++++++++++-- scripts/ts2020.sh | 6 +++-- 3 files changed, 68 insertions(+), 8 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1495f94..d67965d 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -16,7 +16,7 @@ STORAGE_IMAGES='Images' ATTEMPTS=40 SLEEP=60 ERA_Blueprint='EraServerDeployment.json' -Citrix_Blueprint='' +Citrix_Blueprint='CitrixBootcampInfra.json' # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' @@ -130,7 +130,11 @@ case "${OCTET[0]}.${OCTET[1]}" in AUTODC_REPOS=(\ 'http://10.55.251.38/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - ) + ) + AUTOAD_REPOS=(\ + 'http://10.55.251.38/workshop_staging/AutoAD.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ + ) PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' @@ -162,7 +166,11 @@ case "${OCTET[0]}.${OCTET[1]}" in AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - ) + ) + AUTOAD_REPOS=(\ + 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ + ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' @@ -194,7 +202,11 @@ case "${OCTET[0]}.${OCTET[1]}" in AUTODC_REPOS=(\ 
'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ - ) + ) + AUTOAD_REPOS=(\ + 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ + ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 224acc6..8886897 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -36,8 +36,54 @@ function authentication_source() { _pc_version=(${PC_VERSION//./ }) case "${AUTH_SERVER}" in - 'ActiveDirectory') - log "Manual setup = https://github.com/nutanixworkshops/labs/blob/master/setup/active_directory/active_directory_setup.rst" + 'AutoAD') + local _autoad_auth + local _autoad_index=1 + local _autoad_release=1 + local _autoad_service='samba-ad-dc' + local _autoad_restart="service ${_autoad_service} restart" + local _autoad_status="systemctl show ${_autoad_service} --property=SubState" + local _autoad_success='SubState=running' + + + dns_check "dc.${AUTH_FQDN}" + _result=$? + + if (( ${_result} == 0 )); then + log "${AUTH_SERVER}.IDEMPOTENCY: dc.${AUTH_FQDN} set, skip. ${_result}" + else + log "${AUTH_SERVER}.IDEMPOTENCY failed, no DNS record dc.${AUTH_FQDN}" + + _error=12 + _loop=0 + _sleep=${SLEEP} + + repo_source AUTOAD_REPOS[@] + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${AUTH_SERVER}| wc --lines) == 0 )); then + log "Import ${AUTH_SERVER} image from ${SOURCE_URL}..." + acli image.create ${AUTH_SERVER} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url=${SOURCE_URL} + else + log "Image found, assuming ready. Skipping ${AUTH_SERVER} import." 
+ fi + + log "Create ${AUTH_SERVER} VM based on ${AUTH_SERVER} image" + acli "vm.create ${AUTH_SERVER} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" + # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G + #acli "vm.disk_create ${AUTH_SERVER}${_autodc_release} cdrom=true empty=true" + acli "vm.disk_create ${AUTH_SERVER} clone_from_image=${AUTH_SERVER}" + acli "vm.nic_create ${AUTH_SERVER} network=${NW1_NAME} ip=${AUTH_HOST}" + + log "Power on ${AUTH_SERVER} VM..." + acli "vm.on ${AUTH_SERVER}" + + _attempts=20 + _loop=0 + _sleep=10 + + fi ;; 'AutoDC') local _autodc_auth diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index c067152..74c48de 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -31,6 +31,8 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.229" + export AUTH_SERVER='AutoAD' + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -85,7 +87,7 @@ case ${1} in Windows10-1709.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - ERA-Server-build-1.1.1.3.qcow2 \ + ERA-Server-build-1.2.0.1.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ @@ -99,7 +101,7 @@ case ${1} in Windows10.iso \ Nutanix-VirtIO-1.1.5.iso \ SQLServer2014SP3.iso \ - XenApp_and_XenDesktop_7_18.iso \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ VeeamBR_9.5.4.2615.Update4.iso \ ) From 07e1c6db58ef522da0d07b23023425ac8f026e00 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Jan 2020 15:48:54 -0800 Subject: [PATCH 248/691] Bringing in Matt's updates --- scripts/bootcamp.sh | 7 ++++++- scripts/lib.pc.sh | 41 ++++++++++++++++++++++++++++++++++++++--- scripts/snc_bootcamp.sh | 10 ++++++++++ scripts/ts2020.sh | 2 +- stage_workshop.sh | 6 +++--- 5 files changed, 58 insertions(+), 8 deletions(-) diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh index fdf622a..b7fd3e6 100755 --- a/scripts/bootcamp.sh +++ b/scripts/bootcamp.sh @@ -74,6 +74,11 @@ case ${1} in PC | 
pc ) . lib.pc.sh + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + run_once dependencies 'install' 'jq' || exit 13 @@ -122,9 +127,9 @@ case ${1} in && object_store \ && karbon_image_download \ && images \ - && seedPC \ && flow_enable \ && pc_cluster_img_import \ + && seedPC \ && prism_check 'PC' log "Non-blocking functions (in development) follow." diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9e02765..6a6c402 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -324,6 +324,8 @@ function object_store() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list' local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores' + local _url_oss_check='https://localhost:9440/oss/api/nutanix/v3/objectstores/list' + # Payload for the _json_data _json_data='{"kind":"subnet"}' @@ -335,19 +337,52 @@ function object_store() { PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \") echo ${PRIM_NETWORK_UUID} + echo "BUCKETS_DNS_IP: ${BUCKETS_DNS_IP}, BUCKETS_VIP: ${BUCKETS_VIP}, OBJECTS_NW_START: ${OBJECTS_NW_START}, OBJECTS_NW_END: ${OBJECTS_NW_END}" + sleep 5 _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx-objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"' _json_data_oss+=${CLUSTER_UUID} - _json_data_oss+='"},"buckets_infra_network_dns":"NETWORKX.VLANX.16","buckets_infra_network_vip":"NETWORKX.VLANX.17","buckets_infra_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+='"},"buckets_infra_network_dns":"' + 
_json_data_oss+=${BUCKETS_DNS_IP} + _json_data_oss+='","buckets_infra_network_vip":"' + _json_data_oss+=${BUCKETS_VIP} + _json_data_oss+='","buckets_infra_network_reference":{"kind":"subnet","uuid":"' _json_data_oss+=${PRIM_NETWORK_UUID} _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"' _json_data_oss+=${PRIM_NETWORK_UUID} - _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"NETWORKX.VLANX.18","ipv4_end":"NETWORKX.VLANX.21"}}}}' + _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"' + _json_data_oss+=${OBJECTS_NW_START} + _json_data_oss+='","ipv4_end":"' + _json_data_oss+=${OBJECTS_NW_END} + _json_data_oss+='"}}}}' # Set the right VLAN dynamically so we are configuring in the right network _json_data_oss=${_json_data_oss//VLANX/${VLAN}} _json_data_oss=${_json_data_oss//NETWORKX/${NETWORK}} - curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss + #curl -X POST -d $_json_data_oss $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_oss + _createresponse=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_oss ${_url_oss}) + log "Creating Object Store....." + + # The response should be a Task UUID + if [[ ! -z $_createresponse ]]; then + # Check if Object store is deployed + _response=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_url_oss_check}| grep "ntnx-objects" | wc -l) + while [ $_response -ne 1 ]; do + log "Object Store not yet created. $_loops/$_attempts... 
sleeping 10 seconds" + _response=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_url_oss_check}| grep "ntnx-objects" | wc -l) + if [[ $_loops -ne 30 ]]; then + _createresponse=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_oss ${_url_oss}) + sleep 10 + (( _loops++ )) + else + log "Objects store ntnx-objects not created. Please use the UI to create it." + break + fi + done + log "Objects store been created." + else + log "Objects store could not be created. Please use the UI to create it." + fi } diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh index 0676c87..a5a554f 100755 --- a/scripts/snc_bootcamp.sh +++ b/scripts/snc_bootcamp.sh @@ -24,6 +24,16 @@ case ${1} in export NW1_DHCP_START="${IPV4_PREFIX}.$((${OCTET[3]} + 33))" export NW1_DHCP_END="${IPV4_PREFIX}.$((${OCTET[3]} + 53))" export SUBNET_MASK="255.255.255.192" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" + #export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" + #export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" + #export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" + + export NW2_NAME='' + export NW2_VLAN='' + export NW2_SUBNET='' + export NW2_DHCP_START='' + export NW2_DHCP_END='' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 74c48de..95ad1e4 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -154,9 +154,9 @@ case ${1} in && object_store \ && karbon_image_download \ && images \ - && seedPC \ && flow_enable \ && pc_cluster_img_import \ + && seedPC \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
diff --git a/stage_workshop.sh b/stage_workshop.sh index cee15a0..a93e621 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -52,12 +52,12 @@ function stage_clusters() { # Map workshop to staging script(s) and libraries, # _pe_launch will be executed on PE - if (( $(echo ${_workshop} | grep -i Bootcamp | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i SNC | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^SNC" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='snc_bootcamp.sh' _pc_launch=${_pe_launch} @@ -88,7 +88,7 @@ function stage_clusters() { _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Frame | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Frame" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='frame.sh' _pc_launch=${_pe_launch} From 3a1301061767b0f2f87e542d4618ceceb52bc71e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Jan 2020 15:57:23 -0800 Subject: [PATCH 249/691] Update stage_workshop.sh --- stage_workshop.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/stage_workshop.sh b/stage_workshop.sh index a93e621..5445ca9 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -19,6 +19,7 @@ WORKSHOPS=(\ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +#"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ From fe4182563d254e21306ed2aaf6e83948af56428f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: 
Wed, 22 Jan 2020 18:36:48 -0800 Subject: [PATCH 250/691] Updates for AutoAD Add checks for completion --- scripts/lib.pe.sh | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 8886897..39905d5 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -42,8 +42,8 @@ function authentication_source() { local _autoad_release=1 local _autoad_service='samba-ad-dc' local _autoad_restart="service ${_autoad_service} restart" - local _autoad_status="systemctl show ${_autoad_service} --property=SubState" - local _autoad_success='SubState=running' + local _autoad_status="AD Is Running" + local _autoad_success='AD Is Running' dns_check "dc.${AUTH_FQDN}" @@ -81,7 +81,30 @@ function authentication_source() { _attempts=20 _loop=0 - _sleep=10 + _sleep=60 + + while true ; do + (( _loop++ )) + + _test=$(curl ${CURL_POST_OPTS} \ + -X GET \ + https://${AUTH_HOST}:8000/) + if [[ "${_test}" == "${_autoad_success}" ]]; then + log "${AUTH_SERVER} is ready." + sleep ${_sleep} + break + elif (( ${_loop} > ${_attempts} )); then + log "Error ${_error}: ${AUTH_SERVER} VM running: giving up after ${_loop} tries." + _result=$(source /etc/profile.d/nutanix_env.sh \ + && for _vm in $(source /etc/profile.d/nutanix_env.sh && acli vm.list | grep ${AUTH_SERVER}) ; do acli -y vm.delete $_vm; done) + # acli image.delete ${AUTH_SERVER}${_autodc_release} + log "Remediate by deleting the ${AUTH_SERVER} VM from PE (just attempted by this script: ${_result}) and then running acli $_" + exit ${_error} + else + log "_test ${_loop}/${_attempts}=|${_test}|: sleep ${_sleep} seconds..." 
+ sleep ${_sleep} + fi + done fi ;; From 35c337c74810d9ec200cf93de7b43a22748b6d9a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 23 Jan 2020 08:39:28 -0800 Subject: [PATCH 251/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 39905d5..dd94f2a 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -79,7 +79,7 @@ function authentication_source() { log "Power on ${AUTH_SERVER} VM..." acli "vm.on ${AUTH_SERVER}" - _attempts=20 + _attempts=25 _loop=0 _sleep=60 @@ -429,7 +429,7 @@ echo $HTTP_JSON_BODY ############################################################################################################################################################################### -# Routine to crerate the networks +# Routine to create the networks ############################################################################################################################################################################### function network_configure() { local _network_name="${NW1_NAME}" From 336f8b92652ec1fd100326f9f13c16ec971a6ae7 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 23 Jan 2020 22:19:43 -0800 Subject: [PATCH 252/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index dd94f2a..6afcde2 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -88,7 +88,7 @@ function authentication_source() { _test=$(curl ${CURL_POST_OPTS} \ -X GET \ - https://${AUTH_HOST}:8000/) + http://${AUTH_HOST}:8000/) if [[ "${_test}" == "${_autoad_success}" ]]; then log "${AUTH_SERVER} is ready." 
sleep ${_sleep} From aeff6e1705b49fbe12ecaa5615d4df35d6f07790 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 24 Jan 2020 08:29:50 -0800 Subject: [PATCH 253/691] Update lib.pe.sh --- scripts/lib.pe.sh | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6afcde2..bdb8b66 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -43,7 +43,7 @@ function authentication_source() { local _autoad_service='samba-ad-dc' local _autoad_restart="service ${_autoad_service} restart" local _autoad_status="AD Is Running" - local _autoad_success='AD Is Running' + local _autoad_success="AD Is Running" dns_check "dc.${AUTH_FQDN}" @@ -86,19 +86,17 @@ function authentication_source() { while true ; do (( _loop++ )) - _test=$(curl ${CURL_POST_OPTS} \ - -X GET \ - http://${AUTH_HOST}:8000/) + _test=$(curl ${CURL_OPTS} -X GET http://${AUTH_HOST}:8000/ | grep "${_autoad_success}") if [[ "${_test}" == "${_autoad_success}" ]]; then log "${AUTH_SERVER} is ready." sleep ${_sleep} break elif (( ${_loop} > ${_attempts} )); then log "Error ${_error}: ${AUTH_SERVER} VM running: giving up after ${_loop} tries." - _result=$(source /etc/profile.d/nutanix_env.sh \ - && for _vm in $(source /etc/profile.d/nutanix_env.sh && acli vm.list | grep ${AUTH_SERVER}) ; do acli -y vm.delete $_vm; done) + #_result=$(source /etc/profile.d/nutanix_env.sh \ + # && for _vm in $(source /etc/profile.d/nutanix_env.sh && acli vm.list | grep ${AUTH_SERVER}) ; do acli -y vm.delete $_vm; done) # acli image.delete ${AUTH_SERVER}${_autodc_release} - log "Remediate by deleting the ${AUTH_SERVER} VM from PE (just attempted by this script: ${_result}) and then running acli $_" + #log "Remediate by deleting the ${AUTH_SERVER} VM from PE (just attempted by this script: ${_result}) and then running acli $_" exit ${_error} else log "_test ${_loop}/${_attempts}=|${_test}|: sleep ${_sleep} seconds..." 
From 4bb0019eb9ff15ba481948c1e9d5ee6e4d158a2e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 24 Jan 2020 16:58:56 -0800 Subject: [PATCH 254/691] Update ts2020.sh --- scripts/ts2020.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 95ad1e4..c2bb809 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -33,6 +33,8 @@ case ${1} in export AUTH_SERVER='AutoAD' + export _external_nw_name="${1}" + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -157,6 +159,8 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && seedPC \ + && upload_citrix_calm_blueprint \ + && upload_era_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 7c083eb17db0cfcf2ca6b4a445d33a6b6d87f760 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 27 Jan 2020 11:44:49 -0800 Subject: [PATCH 255/691] Updates for Single Node GTS2020 --- scripts/snc_ts2020.sh | 206 ++++++++++++++++++++++++++++++++++++++++++ stage_workshop.sh | 7 +- 2 files changed, 212 insertions(+), 1 deletion(-) create mode 100755 scripts/snc_ts2020.sh diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh new file mode 100755 index 0000000..a79b2c1 --- /dev/null +++ b/scripts/snc_ts2020.sh @@ -0,0 +1,206 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + #export PC_DEV_VERSION='5.10.2' + #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + #export FILES_VERSION='3.2.0.1' + #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + + # Single Node Cluster Options + + export NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" + export NW1_GATEWAY="${IPV4_PREFIX}.$((${OCTET[3]} - 5))" + export NW1_DHCP_START="${IPV4_PREFIX}.$((${OCTET[3]} + 33))" + export NW1_DHCP_END="${IPV4_PREFIX}.$((${OCTET[3]} + 53))" + export SUBNET_MASK="255.255.255.192" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" + #export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" + #export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" + #export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" + + export NW2_NAME='' + export NW2_VLAN='' + export NW2_SUBNET='' + export NW2_DHCP_START='' + export NW2_DHCP_END='' + + #export NW2_DHCP_START="${IPV4_PREFIX}.132" + #export NW2_DHCP_END="${IPV4_PREFIX}.229" + + export AUTH_SERVER='AutoAD' + + export _external_nw_name="${1}" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && 
prism_pro_server_deploy + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + files_install && sleep 30 + + create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + + file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Windows2016.qcow2 \ + Windows2012R2.qcow2 \ + Windows10-1709.qcow2 \ + ToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ERA-Server-build-1.2.0.1.qcow2 \ + MSSQL-2016-VM.qcow2 \ + hycu-3.5.0-6253.qcow2 \ + VeeamAvailability_1.0.457.vmdk \ + move3.2.0.qcow2 \ + AutoXD.qcow2 \ + ) + export ISO_IMAGES=(\ + CentOS7.iso \ + Windows2016.iso \ + Windows2012R2.iso \ + Windows10.iso \ + Nutanix-VirtIO-1.1.5.iso \ + SQLServer2014SP3.iso \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + VeeamBR_9.5.4.2615.Update4.iso \ + ) + + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && object_store \ + && karbon_image_download \ + && images \ + && flow_enable \ + && pc_cluster_img_import \ + && seedPC \ + && upload_citrix_calm_blueprint \ + && upload_era_calm_blueprint \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 5445ca9..a644514 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -19,6 +19,7 @@ WORKSHOPS=(\ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ @@ -88,7 +89,11 @@ function stage_clusters() { _pe_launch='ts2020.sh' _pc_launch=${_pe_launch} fi - + if (( $(echo ${_workshop} | grep -i "^SNC_GTS" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='snc_ts2020.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Frame" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='frame.sh' From 0e98dcdd3896c1b8f08d9c73bab981d01b469aa8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 27 Jan 2020 11:50:07 -0800 Subject: [PATCH 256/691] Updates for Calm BP Upload --- scripts/snc_ts2020.sh | 1 - scripts/ts2020.sh | 1 - 2 files changed, 2 deletions(-) diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh index a79b2c1..83d9cbc 100755 --- a/scripts/snc_ts2020.sh +++ b/scripts/snc_ts2020.sh @@ -178,7 +178,6 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && seedPC \ - && upload_citrix_calm_blueprint \ && upload_era_calm_blueprint \ && prism_check 'PC' diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index c2bb809..a163ab0 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -159,7 +159,6 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && seedPC \ - && upload_citrix_calm_blueprint \ && upload_era_calm_blueprint \ && prism_check 'PC' From 
cf1c0317f8d1eab8ffd4c712745b9cd304641c39 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 28 Jan 2020 14:02:11 -0600 Subject: [PATCH 257/691] GTS20 Image Updates --- scripts/snc_ts2020.sh | 4 ++-- scripts/ts2020.sh | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh index 83d9cbc..35d914e 100755 --- a/scripts/snc_ts2020.sh +++ b/scripts/snc_ts2020.sh @@ -110,9 +110,9 @@ case ${1} in Linux_ToolsVM.qcow2 \ ERA-Server-build-1.2.0.1.qcow2 \ MSSQL-2016-VM.qcow2 \ - hycu-3.5.0-6253.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ VeeamAvailability_1.0.457.vmdk \ - move3.2.0.qcow2 \ + move-3.4.1.qcow2 \ AutoXD.qcow2 \ ) export ISO_IMAGES=(\ diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index a163ab0..68ad405 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -91,9 +91,9 @@ case ${1} in Linux_ToolsVM.qcow2 \ ERA-Server-build-1.2.0.1.qcow2 \ MSSQL-2016-VM.qcow2 \ - hycu-3.5.0-6253.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ VeeamAvailability_1.0.457.vmdk \ - move3.2.0.qcow2 \ + move-3.4.1.qcow2 \ AutoXD.qcow2 \ ) export ISO_IMAGES=(\ From 5b183403e356b09e4b77b52aa80e99744e3a1ec5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 31 Jan 2020 15:49:22 -0800 Subject: [PATCH 258/691] Updates for Objects --- scripts/lib.pc.sh | 8 ++++---- scripts/snc_bootcamp.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6a6c402..aafbd94 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -325,7 +325,7 @@ function object_store() { local _url_network='https://localhost:9440/api/nutanix/v3/subnets/list' local _url_oss='https://localhost:9440/oss/api/nutanix/v3/objectstores' local _url_oss_check='https://localhost:9440/oss/api/nutanix/v3/objectstores/list' - + # Payload for the _json_data _json_data='{"kind":"subnet"}' @@ -394,13 +394,13 @@ function object_store() { function pc_admin() { local _http_body local _test - local _admin_user='marklavi' 
+ local _admin_user='nathan' _http_body=$(cat < Date: Mon, 3 Feb 2020 13:34:41 -0800 Subject: [PATCH 259/691] Updates for POPs --- scripts/global.vars.sh | 3 +++ scripts/lib.pc.sh | 4 ++-- scripts/lib.pe.sh | 24 +++++++++++------------- scripts/snc_ts2020.sh | 2 ++ scripts/ts2020.sh | 2 ++ 5 files changed, 20 insertions(+), 15 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d67965d..cb6e8dd 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -15,8 +15,11 @@ STORAGE_DEFAULT='Default' STORAGE_IMAGES='Images' ATTEMPTS=40 SLEEP=60 +PrismOpsServer='PrismProLabUtilityServer' +SeedPC='seedPC.zp' ERA_Blueprint='EraServerDeployment.json' Citrix_Blueprint='CitrixBootcampInfra.json' +Beam_Blueprint='' # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index aafbd94..979f444 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -619,9 +619,9 @@ function seedPC() { local _test local _setup - _test=$(curl -L ${PC_DATA} -o /home/nutanix/seedPC.zip) + _test=$(curl -L ${PC_DATA} -o /home/nutanix/${SeedPC}) log "Pulling Prism Data| PC_DATA ${PC_DATA}|${_test}" - unzip /home/nutanix/seedPC.zip + unzip /home/nutanix/${SeedPC} pushd /home/nutanix/lab/ #_setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index bdb8b66..eecc229 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -229,31 +229,29 @@ function authentication_source() { function prism_pro_server_deploy() { -VMNAME='PrismProLabUtilityServer' - ### Import Image ### -if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${VMNAME} | wc --lines) == 0 )); then - log "Import ${VMNAME} image from ${QCOW2_REPOS}..." 
- acli image.create ${VMNAME} \ +if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PrismOpsServer} | wc --lines) == 0 )); then + log "Import ${PrismOpsServer} image from ${QCOW2_REPOS}..." + acli image.create ${PrismOpsServer} \ image_type=kDiskImage wait=true \ - container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${VMNAME}.qcow2" + container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${PrismOpsServer}.qcow2" else - log "Image found, assuming ready. Skipping ${VMNAME} import." + log "Image found, assuming ready. Skipping ${PrismOpsServer} import." fi ### Deploy PrismProServer ### -log "Create ${VMNAME} VM based on ${VMNAME} image" -acli "vm.create ${VMNAME} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" +log "Create ${PrismOpsServer} VM based on ${PrismOpsServer} image" +acli "vm.create ${PrismOpsServer} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G #acli "vm.disk_create ${VMNAME} cdrom=true empty=true" -acli "vm.disk_create ${VMNAME} clone_from_image=${VMNAME}" -acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" +acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}" +acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" #acli "vm.nic_create ${VMNAME} network=${NW1_NAME} ip=${AUTH_HOST}" -log "Power on ${PrismOpsServer} VM..." 
+acli "vm.on ${PrismOpsServer}" diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh index 35d914e..2dc7c26 100755 --- a/scripts/snc_ts2020.sh +++ b/scripts/snc_ts2020.sh @@ -51,6 +51,8 @@ case ${1} in #export NW2_DHCP_END="${IPV4_PREFIX}.229" export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' export _external_nw_name="${1}" diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 68ad405..0115034 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -32,6 +32,8 @@ case ${1} in export NW2_DHCP_END="${IPV4_PREFIX}.229" export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' export _external_nw_name="${1}" From 977e0d71fe03ea89ccb20ce5e573415e57d5c41e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 4 Feb 2020 10:22:32 -0800 Subject: [PATCH 260/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index eecc229..f365753 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -447,7 +447,7 @@ function network_configure() { log "Create primary network: Name: ${NW1_NAME}, VLAN: ${NW1_VLAN}, Subnet: ${NW1_SUBNET}, Domain: ${AUTH_DOMAIN}, Pool: ${NW1_DHCP_START} to ${NW1_DHCP_END}" acli "net.create ${NW1_NAME} vlan=${NW1_VLAN} ip_config=${NW1_SUBNET}" - acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_DOMAIN}" + acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" if [[ ! 
-z "${NW2_NAME}" ]]; then From 25997cd20554098737013c46e2fb652f251fa489 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 4 Feb 2020 16:39:19 -0800 Subject: [PATCH 261/691] Updates for Project Creation --- scripts/lib.pc.sh | 130 ++++++++++++++++++++++++++++++++---------- scripts/lib.pe.sh | 2 +- scripts/snc_ts2020.sh | 3 +- scripts/ts2020.sh | 3 +- 4 files changed, 106 insertions(+), 32 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 979f444..6c4e3a3 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -825,7 +825,7 @@ function calm_enable() { function upload_citrix_calm_blueprint() { local DIRECTORY="/home/nutanix/" - local CALM_PROJECT="default" + local CALM_PROJECT="BootcampInfra" local DOMAIN=${AUTH_FQDN} local AD_IP=${AUTH_HOST} local PE_IP=${PE_HOST} @@ -1001,7 +1001,7 @@ function upload_citrix_calm_blueprint() { function upload_era_calm_blueprint() { local DIRECTORY="/home/nutanix/" - local CALM_PROJECT="default" + local CALM_PROJECT="BootcampInfra" local ERA_IP=${ERA_HOST} local PE_IP=${PE_HOST} local CLSTR_NAME="none" @@ -1235,34 +1235,106 @@ EOF ############################################################################################################################################################################### function pc_project() { - local _name + local _name local _count - local _uuid + local _pc_account_uuid + local _nw_name="${1}" + local _nw_uuid + +log "Get cluster network and PC Account UUIDs..." +_nw_uuid=$(acli "net.get ${_nw_name}" \ + | grep "uuid" | cut -f 2 -d ':' | xargs) + +_pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{}' "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.pc_account_uuid') + +log "Create BootcampInfra Project ..." 
+_http_body=$(cat </dev/null | grep ${_name} | wc --lines) +# if (( ${_count} > 0 )); then +# nuclei project.delete ${_name} confirm=false 2>/dev/null +# else +# log "Warning: _count=${_count}" +# fi + +# log "Creating ${_name}..." +# nuclei project.create name=${_name} description='test from NuCLeI!' 2>/dev/null +# _uuid=$(. /etc/profile.d/nutanix_env.sh \ +# && nuclei project.get ${_name} format=json 2>/dev/null \ +# | jq .metadata.project_reference.uuid | tr -d '"') +# log "${_name}.uuid = ${_uuid}" + - _name=${EMAIL%%@nutanix.com}.test - _count=$(. /etc/profile.d/nutanix_env.sh \ - && nuclei project.list 2>/dev/null | grep ${_name} | wc --lines) - if (( ${_count} > 0 )); then - nuclei project.delete ${_name} confirm=false 2>/dev/null - else - log "Warning: _count=${_count}" - fi - log "Creating ${_name}..." - nuclei project.create name=${_name} description='test from NuCLeI!' 2>/dev/null - _uuid=$(. /etc/profile.d/nutanix_env.sh \ - && nuclei project.get ${_name} format=json 2>/dev/null \ - | jq .metadata.project_reference.uuid | tr -d '"') - log "${_name}.uuid = ${_uuid}" - - # - project.get mark.lavi.test - # - project.update mark.lavi.test - # spec.resources.account_reference_list.kind= or .uuid - # spec.resources.default_subnet_reference.kind= - # spec.resources.environment_reference_list.kind= - # spec.resources.external_user_group_reference_list.kind= - # spec.resources.subnet_reference_list.kind= - # spec.resources.user_reference_list.kind= - - # 
{"spec":{"access_control_policy_list":[],"project_detail":{"name":"mark.lavi.test1","resources":{"external_user_group_reference_list":[],"user_reference_list":[],"environment_reference_list":[],"account_reference_list":[],"subnet_reference_list":[{"kind":"subnet","name":"Primary","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"},{"kind":"subnet","name":"Secondary","uuid":"4689bc7f-61dd-4527-bc7a-9d737ae61322"}],"default_subnet_reference":{"kind":"subnet","uuid":"a4000fcd-df41-42d7-9ffe-f1ab964b2796"}},"description":"test from NuCLeI!"},"user_list":[],"user_group_list":[]},"api_version":"3.1","metadata":{"creation_time":"2018-06-22T03:54:59Z","spec_version":0,"kind":"project","last_update_time":"2018-06-22T03:55:00Z","uuid":"1be7f66a-5006-4061-b9d2-76caefedd298","categories":{},"owner_reference":{"kind":"user","name":"admin","uuid":"00000000-0000-0000-0000-000000000000"}}} } diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index f365753..6aae247 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -453,7 +453,7 @@ function network_configure() { if [[ ! -z "${NW2_NAME}" ]]; then log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" - acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_DOMAIN}" + acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" fi fi diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh index 2dc7c26..434d532 100755 --- a/scripts/snc_ts2020.sh +++ b/scripts/snc_ts2020.sh @@ -180,11 +180,12 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && seedPC \ + && pc_project \ && upload_era_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
- pc_project + #pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 0115034..df22a52 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -161,11 +161,12 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && seedPC \ + && pc_project \ && upload_era_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." - pc_project + #pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh From e6a8d19ab25672f13b0237926f86e6a46c5a07f7 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 5 Feb 2020 08:52:08 +0100 Subject: [PATCH 262/691] Update in frame to run the LCM twice --- scripts/frame.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/frame.sh b/scripts/frame.sh index fbcc522..14fc14b 100755 --- a/scripts/frame.sh +++ b/scripts/frame.sh @@ -118,6 +118,7 @@ case ${1} in && karbon_enable \ && objects_enable \ && lcm \ + && lcm \ && object_store \ && karbon_image_download \ && images \ From d36e7a74ddd8d229ec78c716f6f482a1d2a23799 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 5 Feb 2020 08:19:53 -0800 Subject: [PATCH 263/691] Updates for PC 5.11.2.1 --- scripts/global.vars.sh | 14 +++++++------- stage_workshop.sh | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index cb6e8dd..e9264c0 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.16' +PC_DEV_VERSION='5.11.2.1' PC_CURRENT_VERSION='5.11.2' PC_STABLE_VERSION='5.11' FILES_VERSION='3.6.0' @@ -107,8 +107,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.16-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.16-stable-prism_central.tar' + 
PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' @@ -143,8 +143,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.55.251.10,10.55.251.11' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' @@ -179,8 +179,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' 
PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index a644514..6c8ab28 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -16,10 +16,10 @@ WORKSHOPS=(\ "Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ "Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ -"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ -"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ +"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Development" \ +"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ From f049c0de79243eff205c74ce610f1b28fa4a0bee Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 5 Feb 2020 16:52:27 -0800 Subject: [PATCH 264/691] Updates from MattS Files and Objects Updates --- hol_stageworkshop.sh | 237 ++++++++++++++++++++++++++++++++++++++++ scripts/global.vars.sh | 6 +- scripts/lib.pe.sh | 27 +++-- scripts/snc_bootcamp.sh | 11 +- 4 files changed, 267 insertions(+), 14 deletions(-) create mode 100755 hol_stageworkshop.sh diff --git a/hol_stageworkshop.sh b/hol_stageworkshop.sh new file mode 100755 index 0000000..1556bac --- /dev/null +++ b/hol_stageworkshop.sh @@ -0,0 +1,237 @@ +#!/usr/bin/env bash +# use bash -x to debug command substitution and evaluation instead of echo. 
+DEBUG= + +# Source Workshop common routines + global variables +source scripts/lib.common.sh +source scripts/global.vars.sh +begin + +# For WORKSHOPS keyword mappings to scripts and variables, please use: +# - Calm || Bootcamp || Citrix || Summit +# - PC #.# +WORKSHOPS=(\ +"Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ +"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16 RC2) = Development" \ +"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2) = Current" \ +#"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ +#"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ +) # Adjust function stage_clusters, below, for file/script mappings as needed + + +function log() { + local _caller + + _caller=$(echo -n "$(caller 0 | awk '{print $2}')") + echo "$(date '+%Y-%m-%d %H:%M:%S')|$$|${_caller}|${1}" +} + +function checkStagingIsDone +{ + #Set Variables + pcIP=${1} + clusterPW=${2} + local _sleep=20m + local _attempts=7 + local _loop=0 + local _test + local _error=77 + + +#if the snc_bootcamp.sh script is still on the CVM, then the cluster is not yet ready + while true ; do + (( _loop++ )) + _test=$(sshpass -p "nutanix/4u" ssh -o StrictHostKeyChecking=no nutanix@$pcIP [[ -f /home/nutanix/.staging_complete ]] && echo "ready" || echo "notready") + + if [ "$_test" == "ready" ]; then + log "CVM with IP of $nodeIP is ready" + return 0 + elif (( _loop > _attempts )); then + log 
"Warning ${_error} @${pcIP}: Giving up after ${_loop} tries." + return ${_error} + else + log "@${1} ${_loop}/${_attempts}=${_test}: sleep ${_sleep}..." + sleep ${_sleep} + fi + done +} + +function stage_clusters() { + # Adjust map below as needed with $WORKSHOPS + local _cluster + local _container + local _dependency + local _fields + local _libraries='global.vars.sh lib.common.sh ' + local _pe_launch # will be transferred and executed on PE + local _pc_launch # will be transferred and executed on PC + local _sshkey=${SSH_PUBKEY} + #local _wc_arg='--lines' + local _wc_arg=${WC_ARG} + local _workshop=${WORKSHOPS[$((${WORKSHOP_NUM}-1))]} + + # Map to latest and greatest of each point release + # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download + # TODO: make WORKSHOPS and map a JSON configuration file? + + ## Set script vars since we know what versions we want to use + export PC_VERSION="${PC_CURRENT_VERSION}" + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='snc_bootcamp.sh' + _pc_launch=${_pe_launch} + + + dependencies 'install' 'sshpass' + + + # Send configuration scripts to remote clusters and execute Prism Element script + # shellcheck disable=2206 + PE_HOST=${1} + PE_PASSWORD=${2} + EMAIL=${3} + idcluster=${4} + + mysql --login-path=local -sN<<<"Use hol; UPDATE cluster SET fk_idclusterstatus = (SELECT idclusterstatus from clusterstatus WHERE cstatus = \"Staging\") WHERE idcluster = \"${idcluster}\";" 2>&1 + echo "Node $nodeIP with cluster ID of $idcluster marked as staging" + + pe_configuration_args "${_pc_launch}" + + . /opt/scripts/stageworkshop/scripts/global.vars.sh # re-import for relative settings + + prism_check 'PE' 60 + + if [[ -d cache ]]; then + pushd cache || true + for _dependency in ${JQ_PACKAGE} ${SSHPASS_PACKAGE}; do + if [[ -e ${_dependency} ]]; then + log "Sending cached ${_dependency} (optional)..." + remote_exec 'SCP' 'PE' "${_dependency}" 'OPTIONAL' + fi + done + popd || true + fi + + if (( $? 
== 0 )) ; then + log "Sending configuration script(s) to PE@${PE_HOST}" + else + _error=15 + log "Error ${_error}: Can't reach PE@${PE_HOST}" + exit ${_error} + fi + + if [[ -e ${RELEASE} ]]; then + log "Adding release version file..." + _libraries+=" ../${RELEASE}" + fi + + pushd /opt/scripts/stageworkshop/scripts \ + && remote_exec 'SCP' 'PE' "${_libraries} ${_pe_launch} ${_pc_launch}" \ + && popd || exit + + # For Calm container updates... + if [[ -d cache/pc-${PC_VERSION}/ ]]; then + log "Uploading PC updates in background..." + pushd cache/pc-${PC_VERSION} \ + && pkill scp || true + for _container in epsilon nucalm ; do + if [[ -f ${_container}.tar ]]; then + remote_exec 'SCP' 'PE' ${_container}.tar 'OPTIONAL' & + fi + done + popd || exit + else + log "No PC updates found in cache/pc-${PC_VERSION}/" + fi + + if [[ -f ${_sshkey} ]]; then + log "Sending ${_sshkey} for addition to cluster..." + remote_exec 'SCP' 'PE' ${_sshkey} 'OPTIONAL' + fi + + log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + unset PE_CONFIGURATION + + # shellcheck disable=SC2153 + cat < Gear > Cluster Lockdown, + the following will fail silently, use ssh nutanix@{PE|PC} instead. + + $ SSHPASS='${PE_PASSWORD}' sshpass -e ssh \\ + ${SSH_OPTS} \\ + nutanix@${PE_HOST} 'date; tail -f ${_pe_launch%%.sh}.log' + You can login to PE to see tasks in flight and eventual PC registration: + https://${PRISM_ADMIN}:${PE_PASSWORD}@${PE_HOST}:9440/ + +EOM + + if (( "$(echo ${_libraries} | grep -i lib.pc | wc ${_wc_arg})" > 0 )); then + # shellcheck disable=2153 + cat <&1 + echo "Node $nodeIP with cluster ID of $idcluster marked as ready. 
RC is $rc" + + elif [ $rc -eq 77 ] ; then + #Update Database to mark cluster as Error when the staging script is no longer on the CVM + + mysql --login-path=local -sN<<<"Use hol; UPDATE cluster SET fk_idclusterstatus = (SELECT idclusterstatus from clusterstatus WHERE cstatus = \"Error\") WHERE idcluster = \"${idcluster}\";" 2>&1 + echo "Node $nodeIP with cluster ID of $idcluster marked as ERROR. RC is $rc" + + fi + + finish + exit +} + +function pe_configuration_args() { + local _pc_launch="${1}" + + PE_CONFIGURATION="EMAIL=${EMAIL} PRISM_ADMIN=${PRISM_ADMIN} PE_PASSWORD=${PE_PASSWORD} PE_HOST=${PE_HOST} PC_LAUNCH=${_pc_launch} PC_VERSION=${PC_VERSION}" +} + + +#__main__ + +# Source Workshop common routines + global variables +. /opt/scripts/stageworkshop/scripts/lib.common.sh +. /opt/scripts/stageworkshop/scripts/global.vars.sh +begin + + +# shellcheck disable=SC2213 + + +#stage_clusters "${1}" "${2}" "${3}" + +mysql --login-path=local -sN<<<"Use hol; SELECT idcluster,nodeIP,peIP,dsIP,clusterPW,clustername FROM cluster WHERE fk_idclusterstatus = (SELECT idclusterstatus from clusterstatus WHERE cstatus = \"Created\");" | while read idcluster nodeIP peIP dsIP clusterPW clustername; do + stage_clusters "$peIP" "$clusterPW" "nutanixexpo@gmail.com" "$idcluster" & +done diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e9264c0..a5b04c4 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -16,7 +16,7 @@ STORAGE_IMAGES='Images' ATTEMPTS=40 SLEEP=60 PrismOpsServer='PrismProLabUtilityServer' -SeedPC='seedPC.zp' +SeedPC='seedPC.zip' ERA_Blueprint='EraServerDeployment.json' Citrix_Blueprint='CitrixBootcampInfra.json' Beam_Blueprint='' @@ -85,6 +85,7 @@ NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" # Stuff needed for object_store +# OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} NETWORK="${OCTET[0]}.${OCTET[1]}" @@ -141,6 +142,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 
PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' + OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -177,6 +179,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -213,6 +216,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6aae247..771a849 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -319,7 +319,7 @@ function create_file_server() { local _external_nw_name="${2}" local _external_nw_uuid local _test - local _maxtries=5 + local _maxtries=30 local _tries=0 local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" local _ntp_formatted="$(echo $NTP_SERVERS | sed -r 's/[^,]+/'\"'&'\"'/g')" @@ -404,21 +404,28 @@ EOF #_response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL}| grep "taskUuid" | wc -l) echo $HTTP_JSON_BODY +# execute the API call to create the file server _response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" ${_httpURL} | grep "taskUuid" | wc -l) #curl $CURL_HTTP_OPTS 
--user $PRISM_ADMIN:$PE_PASSWORD -X POST -d $HTTP_JSON_BODY $_httpURL - # Check if we got a "1" back (start sequence received). If not, retry. If yes, check if enabled... - if [[ $_response -lt 1 ]]; then + # Check to ensure we get a response back, then start checking for the file server creation + if [[ ! -z $_response ]]; then # # Check if Files has been enabled - #_response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL} | grep "taskUuid" | wc -l) - #while [[ $_response -ne 1 || $_tries -lt $_maxtries ]]; do - # _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d ${HTTP_JSON_BODY} ${_httpURL} | grep "taskUuid" | wc -l) - # ((_tries=_tries+1)) - #done - echo "File Server has been created." + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_fileserver_name | wc -l) + while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do + log "File Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" + sleep 1m + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_fileserver_name | wc -l) + ((_tries++)) + done + if [[ $_checkresponse -eq 1 ]]; then + echo "File Server has been created." + else + echo "File Server creation failed. Check the staging logs." + fi else - echo "File Server is not being created, check the echos." + echo "File Server is not being created, check the staging logs." fi } diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh index 55047f8..89471b6 100755 --- a/scripts/snc_bootcamp.sh +++ b/scripts/snc_bootcamp.sh @@ -75,7 +75,7 @@ case ${1} in files_install && sleep 30 - create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 + create_file_server "${NW1_NAME}" "${NW1_NAME}" && sleep 30 file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' @@ -103,6 +103,10 @@ case ${1} in export NUCLEI_SERVER='localhost' export NUCLEI_USERNAME="${PRISM_ADMIN}" export NUCLEI_PASSWORD="${PE_PASSWORD}" + export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" + export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" + export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" + export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" # nuclei -debug -username admin -server localhost -password x vm.list if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX @@ -134,15 +138,16 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && objects_enable \ && lcm \ + && objects_enable \ && object_store \ && karbon_image_download \ && images \ && flow_enable \ && pc_cluster_img_import \ && seedPC \ - && prism_check 'PC' + && prism_check 'PC' \ + && finish_staging log "Non-blocking functions (in development) follow." pc_project From 5e2838a9b1f0cd06ac8dfe0560699ac41e221b6a Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 11:14:47 +0100 Subject: [PATCH 265/691] Debug changes --- scripts/frame.sh | 4 ++-- scripts/lib.pc.sh | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/frame.sh b/scripts/frame.sh index 14fc14b..2078333 100755 --- a/scripts/frame.sh +++ b/scripts/frame.sh @@ -117,8 +117,8 @@ case ${1} in && calm_enable \ && karbon_enable \ && objects_enable \ - && lcm \ - && lcm \ + # && lcm \ + # && lcm \ && object_store \ && karbon_image_download \ && images \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6c4e3a3..9c1cdb4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -92,6 +92,10 @@ function lcm() { local _url_groups='https://localhost:9440/api/nutanix/v3/groups' local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + # Reset the variables we use so we're not adding extra values to the arrays + 
unset uuid_arr + unset version_ar + # Inventory download/run _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"value":"{\".oid\":\"LifeCycleManager\",\".method\":\"lcm_framework_rpc\",\".kwargs\":{\"method_class\":\"LcmFramework\",\"method\":\"perform_inventory\",\"args\":[\"http://download.nutanix.com/lcm/2.0\"]}}"}' ${_url_lcm} | jq '.value' 2>nul | cut -d "\\" -f 4 | tr -d \") From 9c6b0afdc8eea845c84d2b17615878ffe15648d6 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 11:17:43 +0100 Subject: [PATCH 266/691] Update frame.sh --- scripts/frame.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/frame.sh b/scripts/frame.sh index 2078333..3773c01 100755 --- a/scripts/frame.sh +++ b/scripts/frame.sh @@ -119,7 +119,7 @@ case ${1} in && objects_enable \ # && lcm \ # && lcm \ - && object_store \ + object_store \ && karbon_image_download \ && images \ && flow_enable \ From 04eefe64b766a7fd563d78b00fdfd7b883ef9cb7 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 12:53:37 +0100 Subject: [PATCH 267/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9c1cdb4..cf2db2a 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -150,7 +150,7 @@ function lcm() { for uuid in "${uuid_arr[@]}" do # Get the latest version from the to be updated uuid - version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | tail -1 | tr -d \")) + version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \")) done # Copy the right info into the to be used array fi From 5f156fec3ee2a37447cbc70b5995ed0d45c30a1f 
Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 12:54:04 +0100 Subject: [PATCH 268/691] Update frame.sh --- scripts/frame.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/frame.sh b/scripts/frame.sh index 3773c01..fbcc522 100755 --- a/scripts/frame.sh +++ b/scripts/frame.sh @@ -117,9 +117,8 @@ case ${1} in && calm_enable \ && karbon_enable \ && objects_enable \ - # && lcm \ - # && lcm \ - object_store \ + && lcm \ + && object_store \ && karbon_image_download \ && images \ && flow_enable \ From 37402e5cec7f943cd7a9a9627655e8506ef77f6c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 13:03:08 +0100 Subject: [PATCH 269/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index cf2db2a..2ec4d46 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -843,7 +843,7 @@ function upload_citrix_calm_blueprint() { local DOWNLOAD_BLUEPRINTS # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint) + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty From 83f1245b6d446646ca8a1dcdec34e31cc087ca65 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 18:01:48 +0100 Subject: [PATCH 270/691] Small updates Small updates due to dynmically getting the AFS version installed in the cluster. Also started the SNC networking part. 
--- scripts/global.vars.sh | 58 +++++++++++++++++++++++++++++++++++------- scripts/lib.pe.sh | 9 ++++++- 2 files changed, 57 insertions(+), 10 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index a5b04c4..f9876ee 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -70,19 +70,59 @@ DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' SUBNET_MASK="255.255.255.128" +# Getting the network ready NW1_NAME='Primary' NW1_VLAN=0 -NW1_SUBNET="${IPV4_PREFIX}.1/25" -NW1_GATEWAY="${IPV4_PREFIX}.1" -NW1_DHCP_START="${IPV4_PREFIX}.50" -NW1_DHCP_END="${IPV4_PREFIX}.125" -NW2_NAME='Secondary' -NW2_VLAN=$((OCTET[2]*10+1)) -NW2_SUBNET="${IPV4_PREFIX}.129/25" -NW2_DHCP_START="${IPV4_PREFIX}.132" -NW2_DHCP_END="${IPV4_PREFIX}.253" +# TODO: Need to make changes to the network configuration if we are running against a single Node Cluster +# https://confluence.eng.nutanix.com:8443/pages/viewpage.action?spaceKey=SEW&title=Bootcamps%3A+Networking+Scheme + +case "${OCTET[4]}" in + + 7 ) # We are in Partition 1 + NW1_SUBNET="${IPV4_PREFIX}.1/26" + NW1_GATEWAY="${IPV4_PREFIX}.1" + NW1_DHCP_START="${IPV4_PREFIX}.38" + NW1_DHCP_END="${IPV4_PREFIX}.58" + ;; + + 71 ) # We are in Partition 2 + NW1_SUBNET="${IPV4_PREFIX}.65/26" + NW1_GATEWAY="${IPV4_PREFIX}.65" + NW1_DHCP_START="${IPV4_PREFIX}.102" + NW1_DHCP_END="${IPV4_PREFIX}.122" + ;; + + 135 ) # We are in Partition 3 + NW1_SUBNET="${IPV4_PREFIX}.129/26" + NW1_GATEWAY="${IPV4_PREFIX}.129" + NW1_DHCP_START="${IPV4_PREFIX}.166" + NW1_DHCP_END="${IPV4_PREFIX}.186" + ;; + + 199 ) # We are in Partition 4 + NW1_SUBNET="${IPV4_PREFIX}.193/26" + NW1_GATEWAY="${IPV4_PREFIX}.193" + NW1_DHCP_START="${IPV4_PREFIX}.230" + NW1_DHCP_END="${IPV4_PREFIX}.250" + ;; + + + * ) # For normal clusters + NW1_SUBNET="${IPV4_PREFIX}.1/25" + NW1_GATEWAY="${IPV4_PREFIX}.1" + NW1_DHCP_START="${IPV4_PREFIX}.50" + NW1_DHCP_END="${IPV4_PREFIX}.125" + + NW2_NAME='Secondary' + 
NW2_VLAN=$((OCTET[2]*10+1)) + NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_DHCP_START="${IPV4_PREFIX}.132" + NW2_DHCP_END="${IPV4_PREFIX}.253" + ;; + +esac # Stuff needed for object_store # OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 771a849..d1f8326 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -322,8 +322,13 @@ function create_file_server() { local _maxtries=30 local _tries=0 local _httpURL="https://localhost:9440/PrismGateway/services/rest/v1/vfilers" + local _grab_afs_version="https://localhost:9440/PrismGateway/services/rest/v1/upgrade/afs/softwares" local _ntp_formatted="$(echo $NTP_SERVERS | sed -r 's/[^,]+/'\"'&'\"'/g')" + # Get dynamically the version of the AFS that has been installed + afs_version=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_grab_afs_version} | jq '.entities[] | select (.status=="COMPLETED") .version' | tr -d \") + + log "Found installed version: $afs_version of Nutanix Files..." echo "Get cluster network and storage container UUIDs..." 
_internal_nw_uuid=$(acli net.get ${_internal_nw_name} \ @@ -370,7 +375,7 @@ function create_file_server() { ${_ntp_formatted} ], "sizeGib":"1024", - "version":"${FILES_VERSION}", + "version":"${afs_version}", "dnsDomainName":"${AUTH_FQDN}", "nameServicesDTO":{ "adDetails":{ @@ -434,6 +439,8 @@ echo $HTTP_JSON_BODY ############################################################################################################################################################################### # Routine to create the networks ############################################################################################################################################################################### + + function network_configure() { local _network_name="${NW1_NAME}" From 48f80742f840011c6470b9c6856990cdc4448e2d Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 6 Feb 2020 18:17:53 +0100 Subject: [PATCH 271/691] Update global.vars.sh Typo in the array OCTET. --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f9876ee..8f5d8c3 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -78,7 +78,7 @@ NW1_VLAN=0 # TODO: Need to make changes to the network configuration if we are running against a single Node Cluster # https://confluence.eng.nutanix.com:8443/pages/viewpage.action?spaceKey=SEW&title=Bootcamps%3A+Networking+Scheme -case "${OCTET[4]}" in +case "${OCTET[3]}" in 7 ) # We are in Partition 1 NW1_SUBNET="${IPV4_PREFIX}.1/26" From 18d458f60c19ca0564726193f99b6764e5e6cf26 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 6 Feb 2020 09:57:58 -0800 Subject: [PATCH 272/691] Updates for Create Project --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 2ec4d46..bee7adb 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1249,7 +1249,7 @@ log "Get cluster network and PC 
Account UUIDs..." _nw_uuid=$(acli "net.get ${_nw_name}" \ | grep "uuid" | cut -f 2 -d ':' | xargs) -_pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{}' "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.pc_account_uuid') +_pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{}' "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.entities[].status.resources | select (.type=="nutanix_pc") .data.cluster_account_reference_list[0].resources.data.pc_account_uuid' | tr -d \") log "Create BootcampInfra Project ..." _http_body=$(cat < Date: Thu, 6 Feb 2020 10:34:56 -0800 Subject: [PATCH 273/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d1f8326..9d2ad94 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -247,7 +247,7 @@ acli "vm.create ${PrismOpsServer} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G #acli "vm.disk_create ${VMNAME} cdrom=true empty=true" acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}" -acli "vm.nic_create ${PrismOpsServer} network=${PrismOpsServer}" +acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" #acli "vm.nic_create ${VMNAME} network=${NW1_NAME} ip=${AUTH_HOST}" log "Power on ${VPrismOpsServer} VM..." 
From 0ea9e41b4fe65ba4c16bceabeaa53c691c81da54 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 8 Feb 2020 14:17:24 -0800 Subject: [PATCH 274/691] Update lib.pc.sh updates for creating the project --- scripts/lib.pc.sh | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index bee7adb..6e5bb08 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1241,17 +1241,24 @@ EOF function pc_project() { local _name local _count + local _role="Project Admin" + local _role_uuid local _pc_account_uuid - local _nw_name="${1}" + local _nw_name="Primary" local _nw_uuid log "Get cluster network and PC Account UUIDs..." -_nw_uuid=$(acli "net.get ${_nw_name}" \ - | grep "uuid" | cut -f 2 -d ':' | xargs) +_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "{\"kind\": \"subnet\", \"filter\": \"name==${_nw_name}\"}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq '.entities[].metadata.uuid' | tr -d \") + +_role_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "{\"kind\": \"role\", \"filter\": \"name==${_role}\"}" "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.entities[].metadata.uuid' | tr -d \") _pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{}' "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.entities[].status.resources | select (.type=="nutanix_pc") .data.cluster_account_reference_list[0].resources.data.pc_account_uuid' | tr -d \") log "Create BootcampInfra Project ..." 
+log "NW UUID = ${_nw_uuid}" +log "Role UUID = ${_role_uuid}" +log "PC Account UUID = ${_pc_account_uuid}" + _http_body=$(cat < Date: Sat, 8 Feb 2020 16:38:55 -0800 Subject: [PATCH 275/691] Updates for File Analytivs and Calm Project --- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 42 +++++++++++++++++----- scripts/lib.pe.sh | 81 +++++++++++++++++++++++++++++++++++++++++- scripts/ts2020.sh | 4 ++- 4 files changed, 117 insertions(+), 11 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 8f5d8c3..6f512e1 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -64,6 +64,7 @@ OCTET=(${PE_HOST//./ }) # zero index IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) +FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22)) ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) DNS_SERVERS='8.8.8.8' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6e5bb08..38fe68d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1239,7 +1239,7 @@ EOF ############################################################################################################################################################################### function pc_project() { - local _name + local _name="BootcampInfra" local _count local _role="Project Admin" local _role_uuid @@ -1247,12 +1247,38 @@ function pc_project() { local _nw_name="Primary" local _nw_uuid -log "Get cluster network and PC Account UUIDs..." 
-_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "{\"kind\": \"subnet\", \"filter\": \"name==${_nw_name}\"}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq '.entities[].metadata.uuid' | tr -d \") +# Get the Network UUIDs +log "Get cluster network UUID" +_http_body=$(cat < Date: Sat, 8 Feb 2020 17:50:20 -0800 Subject: [PATCH 276/691] Update stage_workshop.sh --- stage_workshop.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 6c8ab28..0e77ac5 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -18,8 +18,8 @@ WORKSHOPS=(\ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ -"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2) = Development" \ -"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2) = Development" \ +"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ @@ -44,11 +44,11 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 5.16" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "^PC 5.11.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "^PC 5.11" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 46c9093a0868e48dc6ccaf428ace7cd5de76b302 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 8 Feb 2020 18:12:29 -0800 Subject: [PATCH 277/691] Update lib.pe.sh --- scripts/lib.pe.sh | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4e73edd..a3c161c 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -493,26 +493,26 @@ EOF echo $HTTP_JSON_BODY # execute the API call to create the file analytics server - _response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" ${_httpURL} | grep "taskUuid" | wc -l) +_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" ${_httpURL} | grep "taskUuid" | wc -l) # Check to ensure we get a response back, then start checking for the file server creation - if [[ ! -z $_response ]]; then +# if [[ ! -z $_response ]]; then # # Check if Files has been enabled - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name | wc -l) - while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do - log "File Analytics Server Not yet created. $_tries/$_maxtries... 
sleeping 1 minute" - sleep 1m - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name| wc -l) - ((_tries++)) - done - if [[ $_checkresponse -eq 1 ]]; then - echo "File Analytics has been created." - else - echo "File Analytics creation failed. Check the staging logs." - fi - else - echo "File Analytics is not being created, check the staging logs." - fi +# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name | wc -l) +# while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do +# log "File Analytics Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" +# sleep 1m +# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name| wc -l) +# ((_tries++)) +# done +# if [[ $_checkresponse -eq 1 ]]; then +# echo "File Analytics has been created." +# else +# echo "File Analytics creation failed. Check the staging logs." +# fi +# else +# echo "File Analytics is not being created, check the staging logs." 
+# fi } ############################################################################################################################################################################### From 2d45f83940a2387a256aaa4b11eb618f9dcc3ae4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 8 Feb 2020 22:37:12 -0800 Subject: [PATCH 278/691] File Analytics & Calm Project --- scripts/lib.pc.sh | 6 +++--- scripts/lib.pe.sh | 17 +++++++---------- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 38fe68d..f9f3dd7 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1256,7 +1256,7 @@ _http_body=$(cat < Date: Sat, 8 Feb 2020 23:07:26 -0800 Subject: [PATCH 279/691] Update stage_workshop.sh --- stage_workshop.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 0e77ac5..95cff4d 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -44,11 +44,11 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "^PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "^PC 5.11.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "^PC 5.11" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 301ba127c2eeab0f8b277ed7fede21745f81c4be Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 11:43:20 -0800 Subject: [PATCH 280/691] Updates or File Analytics & Calm Projects --- scripts/lib.pe.sh | 4 ++-- scripts/ts2020.sh | 21 ++++++++++++--------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 1f612f9..7d6033c 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -441,9 +441,9 @@ echo $HTTP_JSON_BODY function create_file_analytics_server() { #local CURL_HTTP_OPTS=' --max-time 25 --silent --show-error --header Content-Type:application/json --header Accept:application/json --insecure ' local _file_analytics_server_name="BootcampFileAnalytics" - local _internal_nw_name="${1}" + local _internal_nw_name="${NW1_NAME}" local _internal_nw_uuid - local _external_nw_name="${2}" + local _external_nw_name="${NW2_NAME}" local _external_nw_uuid local _test local _maxtries=30 diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 2630366..6c67e27 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -46,7 +46,15 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - && prism_pro_server_deploy + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 
30 \ + && create_file_analytics_server \ + && sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -66,13 +74,8 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 - create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 - - file_analytics_install && sleep 30 - - create_file_analytics_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi @@ -157,14 +160,14 @@ case ${1} in && karbon_enable \ && objects_enable \ && lcm \ + && pc_project \ + && upload_era_calm_blueprint \ && object_store \ && karbon_image_download \ && images \ && flow_enable \ && pc_cluster_img_import \ && seedPC \ - && pc_project \ - && upload_era_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 035accecaaefcbedca075e48a99b02c935e7e3d6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 12:25:34 -0800 Subject: [PATCH 281/691] FA * Calm Projects --- scripts/lib.pc.sh | 32 +++++++------------------------- scripts/lib.pe.sh | 30 ++++++++++++++++++------------ 2 files changed, 25 insertions(+), 37 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f9f3dd7..5f298a5 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1244,40 +1244,22 @@ function pc_project() { local _role="Project Admin" local _role_uuid local _pc_account_uuid - local _nw_name="Primary" + local _nw_name="${NW1_NAME}" local _nw_uuid # Get the Network UUIDs log "Get cluster network UUID" -_http_body=$(cat < Date: Sun, 9 Feb 2020 13:42:19 -0800 Subject: [PATCH 282/691] FA & Galm Projects Updates to jq --- scripts/lib.pc.sh | 6 +++--- scripts/lib.pe.sh | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 5f298a5..96728c8 100755 --- 
a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1250,17 +1250,17 @@ function pc_project() { # Get the Network UUIDs log "Get cluster network UUID" _http_body='{"kind":"subnet","filter":"name==${_nw_name}"}' -_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq '.entities[0].metadata.uuid' | tr -d \") +_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq -r '.entities[].metadata.uuid' | tr -d \") # Get the Role UUIDs log "Get Role UUID" _http_body='{"kind":"role","filter":"name==${_role}"0}' -_role_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.entities[0].metadata.uuid' | tr -d \") +_role_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/accounts/list" | jq -r '.entities[].metadata.uuid' | tr -d \") # Get the PC Account UUIDs log "Get PC Account UUID" _http_body='{"kind":"account","filter":"type==nutanix_pc"}' -_pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{}' "https://localhost:9440/api/nutanix/v3/accounts/list" | jq '.entities[0].status.resources.data.cluster_account_reference_list[0].resources.data.pc_account_uuid' | tr -d \") +_pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{}' "https://localhost:9440/api/nutanix/v3/accounts/list" | jq -r '.entities[].status.resources.data.cluster_account_reference_list[0].resources.data.pc_account_uuid' | tr -d \") log "Create BootcampInfra Project ..." 
log "NW UUID = ${_nw_uuid}" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6c57ab8..befeaf7 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -457,7 +457,7 @@ function create_file_analytics_server() { #_internal_nw_uuid=$(acli net.get ${_internal_nw_name} | grep "uuid" | cut -f 2 -d ':' | xargs) log "Get cluster network UUID" _http_body='{"kind":"subnet","filter":"name==${_nw_name}"}' - _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq '.entities[0].metadata.uuid' | tr -d \") + _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq -r '.entities[].metadata.uuid' | tr -d \") # Get the Container UUIDs From 471f6d888ff237f7ee6ef691f5ab6ff2e29c4eaa Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 15:17:50 -0800 Subject: [PATCH 283/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index befeaf7..1c59179 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -457,7 +457,7 @@ function create_file_analytics_server() { #_internal_nw_uuid=$(acli net.get ${_internal_nw_name} | grep "uuid" | cut -f 2 -d ':' | xargs) log "Get cluster network UUID" _http_body='{"kind":"subnet","filter":"name==${_nw_name}"}' - _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq -r '.entities[].metadata.uuid' | tr -d \") + _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/subnets/list | jq -r '.entities[].metadata.uuid' | tr -d \") # Get the Container UUIDs @@ -496,7 +496,7 @@ EOF echo $HTTP_JSON_BODY # execute the API call to create the file analytics 
server -_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" "https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform" | grep "taskUuid" | wc -l) +_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform | grep "taskUuid" | wc -l) # Check to ensure we get a response back, then start checking for the file server creation # if [[ ! -z $_response ]]; then From 0194f1622ffa9860fc069bbdf6d0e986bf5e364c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 19:17:53 -0800 Subject: [PATCH 284/691] GTS Updates for Staging --- scripts/lib.pe.sh | 4 ++-- scripts/ts2020.sh | 6 ++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 1c59179..4ad363f 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -454,10 +454,10 @@ function create_file_analytics_server() { echo "Get cluster network and storage container UUIDs..." 
# Get the Network UUIDs - #_internal_nw_uuid=$(acli net.get ${_internal_nw_name} | grep "uuid" | cut -f 2 -d ':' | xargs) + _nw_uuid=$(acli net.get ${_nw_name} | grep "uuid" | cut -f 2 -d ':' | xargs) log "Get cluster network UUID" _http_body='{"kind":"subnet","filter":"name==${_nw_name}"}' - _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/subnets/list | jq -r '.entities[].metadata.uuid' | tr -d \") + #_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/subnets/list | jq -r '.entities[].metadata.uuid' | tr -d \") # Get the Container UUIDs diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 6c67e27..6e3e850 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -104,13 +104,11 @@ case ${1} in AutoXD.qcow2 \ ) export ISO_IMAGES=(\ - CentOS7.iso \ - Windows2016.iso \ - Windows2012R2.iso \ - Windows10.iso \ Nutanix-VirtIO-1.1.5.iso \ SQLServer2014SP3.iso \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + FrameCCA-2.1.0 \ + FrameGuestAgentInstaller_1.0.2.2_7930 \ VeeamBR_9.5.4.2615.Update4.iso \ ) From a1157fa391b32c5610947e49dcc4829001dae883 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 20:09:58 -0800 Subject: [PATCH 285/691] Update lib.pe.sh --- scripts/lib.pe.sh | 35 ++++++++++++++++------------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4ad363f..02f4ecf 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -475,9 +475,6 @@ function create_file_analytics_server() { "container_name": "${STORAGE_DEFAULT}", "network": { "uuid": "${_nw_uuid}", - "ip": "", - "netmask": "", - "gateway": "" }, "resource": { "memory": "24", @@ -499,23 +496,23 @@ echo $HTTP_JSON_BODY _response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 
https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform | grep "taskUuid" | wc -l) # Check to ensure we get a response back, then start checking for the file server creation -# if [[ ! -z $_response ]]; then + if [[ ! -z $_response ]]; then # # Check if Files has been enabled -# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name | wc -l) -# while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do -# log "File Analytics Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" -# sleep 1m -# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name| wc -l) -# ((_tries++)) -# done -# if [[ $_checkresponse -eq 1 ]]; then -# echo "File Analytics has been created." -# else -# echo "File Analytics creation failed. Check the staging logs." -# fi -# else -# echo "File Analytics is not being created, check the staging logs." -# fi + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name | wc -l) + while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do + log "File Analytics Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" + sleep 1m + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name| wc -l) + ((_tries++)) + done + if [[ $_checkresponse -eq 1 ]]; then + echo "File Analytics has been created." + else + echo "File Analytics creation failed. Check the staging logs." + fi + else + echo "File Analytics is not being created, check the staging logs." 
+ fi } ############################################################################################################################################################################### From e9c102af9f0a32ce470b89cf168ac780226a5a8b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 20:12:03 -0800 Subject: [PATCH 286/691] Update ts2020.sh --- scripts/ts2020.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 6e3e850..20001d1 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -52,8 +52,6 @@ case ${1} in && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ && sleep 30 \ && file_analytics_install \ - && sleep 30 \ - && create_file_analytics_server \ && sleep 30 if (( $? == 0 )) ; then @@ -74,7 +72,8 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - + && create_file_analytics_server \ + && sleep 30 #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish From 8fd63259c61107424271084331c8f801f31005c9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 9 Feb 2020 20:51:58 -0800 Subject: [PATCH 287/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 20001d1..fcbc1c9 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -72,7 +72,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - && create_file_analytics_server \ + create_file_analytics_server \ && sleep 30 #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' From 570b1b0a9e1f17111599d653301c107b76784ce8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 10 Feb 2020 08:33:38 -0800 Subject: [PATCH 288/691] updates --- scripts/lib.pc.sh | 32 +++++++++++++++++++++++++------- scripts/lib.pe.sh | 16 ++++++++++++---- 2 files changed, 37 insertions(+), 11 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 96728c8..7722c10 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1249,18 +1249,36 @@ function pc_project() { # Get the Network UUIDs log "Get cluster network UUID" -_http_body='{"kind":"subnet","filter":"name==${_nw_name}"}' -_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" "https://localhost:9440/api/nutanix/v3/subnets/list" | jq -r '.entities[].metadata.uuid' | tr -d \") +_http_body=$(cat < Date: Mon, 10 Feb 2020 08:34:48 -0800 Subject: [PATCH 289/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 318779e..815303f 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -483,6 +483,9 @@ EOF "container_name": "${STORAGE_DEFAULT}", "network": { "uuid": "${_nw_uuid}", + "ip": "", + "netmask": "", + "gateway": "" }, "resource": { "memory": "24", From 59c8bacadb56eb50dfa7bc0b2f901d29a8b3c299 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 10 Feb 2020 13:33:59 -0800 Subject: [PATCH 290/691] jq updates --- scripts/lib.pc.sh | 10 +++++----- scripts/lib.pe.sh | 3 +-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7722c10..63353d1 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1256,29 +1256,29 @@ _http_body=$(cat < Date: Mon, 10 Feb 2020 16:53:23 -0800 Subject: [PATCH 291/691] JQ Updates --- scripts/lib.pc.sh | 22 +++++----------------- scripts/lib.pe.sh | 10 ++-------- 2 files changed, 7 insertions(+), 25 deletions(-) diff --git 
a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 63353d1..2d4f058 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1256,29 +1256,17 @@ _http_body=$(cat < Date: Mon, 10 Feb 2020 19:13:05 -0800 Subject: [PATCH 292/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 999ac2f..806604b 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -457,9 +457,9 @@ function create_file_analytics_server() { #_nw_uuid=$(acli net.get ${_nw_name} | grep "uuid" | cut -f 2 -d ':' | xargs) log "Get cluster network UUID" - _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Primary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + #_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Primary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - #_nw_uuid=$(curl --location --request POST 'https://10.42.7.37:9440/api/nutanix/v3/subnets/list' --header 'Content-Type: application/json' --user 'admin:techX2019!' --insecure -s --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _nw_uuid=$(curl --location --request POST 'https://10.42.7.37:9440/api/nutanix/v3/subnets/list' --header 'Content-Type: application/json' --user 'admin:techX2019!' 
--insecure -s --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Container UUIDs log "Get ${STORAGE_DEFAULT} Container UUID" From ba884ec57f026de93c739b520fe4c6811284bc61 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 10 Feb 2020 21:19:44 -0800 Subject: [PATCH 293/691] Update lib.pe.sh --- scripts/lib.pe.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 806604b..00ee6bc 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -446,7 +446,6 @@ function create_file_analytics_server() { local _test local _maxtries=30 local _tries=0 - local _httpURL="https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform" local _ntp_formatted="$(echo $NTP_SERVERS | sed -r 's/[^,]+/'\"'&'\"'/g')" log "Installing File Analytics version: ${FILE_ANALYTICS_VERSION}" @@ -459,7 +458,7 @@ function create_file_analytics_server() { #_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Primary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - _nw_uuid=$(curl --location --request POST 'https://10.42.7.37:9440/api/nutanix/v3/subnets/list' --header 'Content-Type: application/json' --user 'admin:techX2019!' 
--insecure -s --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _nw_uuid=$(curl --location --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Container UUIDs log "Get ${STORAGE_DEFAULT} Container UUID" @@ -497,16 +496,18 @@ EOF echo $HTTP_JSON_BODY # execute the API call to create the file analytics server -_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" ${_httpURL} | grep "taskUuid" | wc -l) +#_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) + +_response=$(curl --location --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data "${HTTP_JSON_BODY}" | grep "taskUuid" | wc -l) # Check to ensure we get a response back, then start checking for the file server creation if [[ ! -z $_response ]]; then # # Check if Files has been enabled - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name | wc -l) + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform'v| grep $_file_analytics_server_name | wc -l) while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do log "File Analytics Server Not yet created. $_tries/$_maxtries... 
sleeping 1 minute" sleep 1m - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET ${_httpURL}| grep $_file_analytics_server_name| wc -l) + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $_file_analytics_server_name| wc -l) ((_tries++)) done if [[ $_checkresponse -eq 1 ]]; then From b6b4f8a38dcea777b34c45a631915ec5063e65e1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 10 Feb 2020 21:24:05 -0800 Subject: [PATCH 294/691] JQ --- scripts/lib.pc.sh | 6 +++--- scripts/lib.pe.sh | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 2d4f058..531fa6f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1256,17 +1256,17 @@ _http_body=$(cat < Date: Mon, 10 Feb 2020 23:07:23 -0800 Subject: [PATCH 295/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 0595a31..3a11abe 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -499,12 +499,12 @@ echo $HTTP_JSON_BODY #_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) echo "Creating File Anlytics Server Now" -_response=$(curl --location --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data "${HTTP_JSON_BODY}" | grep "taskUuid" | wc -l) +_response=$(curl --location --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data '{"image_version": 
"${FILE_ANALYTICS_VERSION}","name":"${_file_analytics_server_name}","container_uuid": "${_storage_default_uuid}", "container_name": "${STORAGE_DEFAULT}","network": {"uuid": "${_nw_uuid}","ip": "","netmask": "","gateway": ""},"resource": {"memory": "24","cores": "2","vcpu": "4"},"dns_servers": [${AUTH_HOST}],"ntp_servers": [${_ntp_formatted}],"disk_size": "3"}' | grep "taskUuid" | wc -l) # Check to ensure we get a response back, then start checking for the file server creation if [[ ! -z $_response ]]; then # # Check if Files has been enabled - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform'v| grep $_file_analytics_server_name | wc -l) + _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $_file_analytics_server_name | wc -l) while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do log "File Analytics Server Not yet created. $_tries/$_maxtries... 
sleeping 1 minute" sleep 1m From d84f0d709d7f5630a8dcda09c0b1291a799c4322 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 10 Feb 2020 23:33:41 -0800 Subject: [PATCH 296/691] Update lib.pe.sh --- scripts/lib.pe.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 3a11abe..5e1ff33 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -470,7 +470,7 @@ function create_file_analytics_server() { HTTP_JSON_BODY=$(cat < Date: Tue, 11 Feb 2020 08:29:04 -0800 Subject: [PATCH 297/691] Update ts2020.sh --- scripts/ts2020.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index fcbc1c9..6e3e850 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -52,6 +52,8 @@ case ${1} in && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ && sleep 30 \ && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ && sleep 30 if (( $? == 0 )) ; then @@ -72,8 +74,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - create_file_analytics_server \ - && sleep 30 + #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish From 77830a9bffea64cddc6c19670c4cbaccb6fd1cf4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 09:01:35 -0800 Subject: [PATCH 298/691] Update lib.pe.sh --- scripts/lib.pe.sh | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 5e1ff33..ca755a3 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -467,29 +467,30 @@ function create_file_analytics_server() { echo "${_nw_name} network UUID: ${_nw_uuid}" echo "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" - HTTP_JSON_BODY=$(cat < Date: Tue, 11 Feb 2020 09:21:56 -0800 Subject: [PATCH 299/691] JQ --- scripts/lib.pc.sh | 15 ++++++--------- scripts/ts2020.sh | 2 +- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 531fa6f..9d6ecdf 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1249,13 +1249,7 @@ function pc_project() { # Get the Network UUIDs log "Get cluster network UUID" -_http_body=$(cat < Date: Tue, 11 Feb 2020 11:04:30 -0800 Subject: [PATCH 300/691] JQ Updates --- scripts/lib.common.sh | 33 ++++++++++++++++++++++++++++++++ scripts/lib.pc.sh | 31 ------------------------------ scripts/lib.pe.sh | 44 +++++++++++++++++++++++++++---------------- 3 files changed, 61 insertions(+), 47 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index df5ed67..f5b5318 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -972,3 +972,36 @@ function ssh_pubkey() { log "IDEMPOTENCY: found pubkey ${_name}" fi } + +############################################################################################################################################################################### +# Routine to be run/loop till yes we are ok. 
+############################################################################################################################################################################### +# Need to grab the percentage_complete value including the status to make disissions + +# TODO: Also look at the status!! + +function loop(){ + + local _attempts=45 + local _loops=0 + local _sleep=60 + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + + # What is the progress of the taskid?? + while true; do + (( _loops++ )) + # Get the progress of the task + _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}/${_task_id} | jq '.percentage_complete' 2>nul | tr -d \") + + if (( ${_progress} == 100 )); then + log "The step has been succesfuly run" + break; + elif (( ${_loops} > ${_attempts} )); then + log "Warning ${_error} @${1}: Giving up after ${_loop} tries." + return ${_error} + else + log "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" + sleep ${_sleep} + fi + done +} diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9d6ecdf..b857a19 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -48,38 +48,7 @@ function flow_enable() { } -############################################################################################################################################################################### -# Routine to be run/loop till yes we are ok. -############################################################################################################################################################################### -# Need to grab the percentage_complete value including the status to make disissions - -# TODO: Also look at the status!! 
- -function loop(){ - - local _attempts=45 - local _loops=0 - local _sleep=60 - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " - - # What is the progress of the taskid?? - while true; do - (( _loops++ )) - # Get the progress of the task - _progress=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} ${_url_progress}/${_task_id} | jq '.percentage_complete' 2>nul | tr -d \") - if (( ${_progress} == 100 )); then - log "The step has been succesfuly run" - break; - elif (( ${_loops} > ${_attempts} )); then - log "Warning ${_error} @${1}: Giving up after ${_loop} tries." - return ${_error} - else - log "Still running... loop $_loops/$_attempts. Step is at ${_progress}% ...Sleeping ${_sleep} seconds" - sleep ${_sleep} - fi - done -} ############################################################################################################################################################################### # Routine to start the LCM Inventory and the update. 
diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index ca755a3..5d5a9b0 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -447,6 +447,7 @@ function create_file_analytics_server() { local _maxtries=30 local _tries=0 local _ntp_formatted="$(echo $NTP_SERVERS | sed -r 's/[^,]+/'\"'&'\"'/g')" + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " log "Installing File Analytics version: ${FILE_ANALYTICS_VERSION}" @@ -500,25 +501,36 @@ echo $HTTP_JSON_BODY #_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) echo "Creating File Anlytics Server Now" -_response=$(curl --location --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data "${HTTP_JSON_BODY}") +#curl --location --insecure -s --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" +_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}") + +# If there has been a reply (task_id) then the URL has accepted by PC +# Changed (()) to [] so it works.... +if [ -z "$_task_id" ]; then + log "File Analytics Deploy has encountered an eror..." +else + log "File Analytics Deploy started.." + set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + + # Run the progess checker + loop # Check to ensure we get a response back, then start checking for the file server creation - if [[ ! -z $_response ]]; then +# if [[ ! 
-z $_response ]]; then # # Check if Files has been enabled - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $FILE_ANALYTICS_VERSION | wc -l) - while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do - log "File Analytics Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" - sleep 1m - _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $FILE_ANALYTICS_VERSION | wc -l) - ((_tries++)) - done - if [[ $_checkresponse -eq 1 ]]; then - echo "File Analytics has been created." - else - echo "File Analytics creation failed. Check the staging logs." - fi - else - echo "File Analytics is not being created, check the staging logs." +# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $FILE_ANALYTICS_VERSION | wc -l) +# while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do +# log "File Analytics Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" +# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $FILE_ANALYTICS_VERSION | wc -l) +# ((_tries++)) +# done +# if [[ $_checkresponse -eq 1 ]]; then +# echo "File Analytics has been created." +# else +# echo "File Analytics creation failed. Check the staging logs." +# fi +# else +# echo "File Analytics is not being created, check the staging logs." 
fi } From 7d3824fdce36a1649e5423f585e2680541edd35e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 11:08:31 -0800 Subject: [PATCH 301/691] JQ --- scripts/lib.pe.sh | 2 +- scripts/ts2020.sh | 2 +- stage_workshop.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 5d5a9b0..a4ba06d 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -633,7 +633,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index fa19cae..7a0b799 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# -x + #-x #__main()__________ diff --git a/stage_workshop.sh b/stage_workshop.sh index 95cff4d..04a08de 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -187,7 +187,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From cf84006c00df43730a67cb891e24a37d0e14c9f6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 11:40:20 -0800 Subject: [PATCH 302/691] Update lib.pc.sh --- scripts/lib.pc.sh | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b857a19..e7c5213 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1215,6 +1215,7 @@ function pc_project() { local _pc_account_uuid local _nw_name="${NW1_NAME}" local _nw_uuid + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # Get the Network UUIDs log "Get cluster network UUID" @@ -1302,7 +1303,17 @@ EOF ) - _create_project=$(curl --location --insecure -s --request POST 'https://localhost:9440/api/nutanix/v3/projects_internal' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${_http_body}") + _task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/projects_internal' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${_http_body}" | jq -r '.status.execution_context.task_uuid' | tr -d \") + + if [ -z "$_task_id" ]; then + 
log "Calm Project Create has encountered an error..." + else + log "Calm Project Create started.." + set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + + # Run the progess checker + loop + fi log "_ssp_connect=|${_ssp_connect}|" @@ -1323,7 +1334,3 @@ EOF # && nuclei project.get ${_name} format=json 2>/dev/null \ # | jq .metadata.project_reference.uuid | tr -d '"') # log "${_name}.uuid = ${_uuid}" - - - -} From 6e175e778fceb2b1402afd9255f3fb7bd19389ea Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 12:20:34 -0800 Subject: [PATCH 303/691] Update lib.common.sh --- scripts/lib.common.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index f5b5318..318f251 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -985,8 +985,10 @@ function loop(){ local _attempts=45 local _loops=0 local _sleep=60 + local _url_progress='https://localhost:9440/api/nutanix/v3/tasks' local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + echo ${_task_id} # What is the progress of the taskid?? 
while true; do (( _loops++ )) From 826757e3877f87c1ded564390fbe2b63edb33e8c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 15:23:56 -0800 Subject: [PATCH 304/691] JQ --- scripts/lib.pe.sh | 2 +- scripts/ts2020.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index a4ba06d..d6a65c9 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -502,7 +502,7 @@ echo $HTTP_JSON_BODY echo "Creating File Anlytics Server Now" #curl --location --insecure -s --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" -_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}") +_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") # If there has been a reply (task_id) then the URL has accepted by PC # Changed (()) to [] so it works.... 
diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 7a0b799..8619e00 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -107,7 +107,7 @@ case ${1} in Nutanix-VirtIO-1.1.5.iso \ SQLServer2014SP3.iso \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ - FrameCCA-2.1.0 \ + FrameCCA-2.1.6 \ FrameGuestAgentInstaller_1.0.2.2_7930 \ VeeamBR_9.5.4.2615.Update4.iso \ ) From 791a81a4fd1487c7b2c6706e26b197761e292030 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 15:27:51 -0800 Subject: [PATCH 305/691] Update global.vars.sh --- scripts/global.vars.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6f512e1..154c464 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -6,7 +6,7 @@ PC_DEV_VERSION='5.11.2.1' PC_CURRENT_VERSION='5.11.2' PC_STABLE_VERSION='5.11' FILES_VERSION='3.6.0' -FILE_ANALYTICS_VERSION='2.0.1' +FILE_ANALYTICS_VERSION='2.1.0' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -157,8 +157,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.0.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -194,8 +194,8 @@ case 
"${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -231,8 +231,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.0.1.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From 9e835a34336cbde562b5f672e3574568cd96c447 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 15:37:54 -0800 Subject: [PATCH 306/691] JQ --- scripts/lib.pc.sh | 6 +++--- scripts/lib.pe.sh | 2 +- 2 files 
changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e7c5213..d3897e2 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1220,17 +1220,17 @@ function pc_project() { # Get the Network UUIDs log "Get cluster network UUID" -_nw_uuid=$(curl --location --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") +_nw_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Role UUIDs log "Get Role UUID" -_role_uuid=$(curl --location --request POST 'https://localhost:9440/api/nutanix/v3/roles/list' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data '{"kind":"role","filter":"name==Project Admin"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") +_role_uuid=$(curl ${CURL_HTTP_OPTS}--request POST 'https://localhost:9440/api/nutanix/v3/roles/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"role","filter":"name==Project Admin"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the PC Account UUIDs log "Get PC Account UUID" -_pc_account_uuid=$(curl --location --request POST 'https://localhost:9440/api/nutanix/v3/accounts/list' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data '{"kind":"account","filter":"type==nutanix_pc"}' | jq -r '.entities[] | .status.resources.data.cluster_account_reference_list[0].resources.data.pc_account_uuid' | tr -d \") +_pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/accounts/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data 
'{"kind":"account","filter":"type==nutanix_pc"}' | jq -r '.entities[] | .status.resources.data.cluster_account_reference_list[0].resources.data.pc_account_uuid' | tr -d \") log "Create BootcampInfra Project ..." log "NW UUID = ${_nw_uuid}" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d6a65c9..010312f 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -459,7 +459,7 @@ function create_file_analytics_server() { #_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Primary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - _nw_uuid=$(curl --location --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --insecure -s --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _nw_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Container UUIDs log "Get ${STORAGE_DEFAULT} Container UUID" From c315789b655936c807089e44e673217aa55dcef8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 15:45:19 -0800 Subject: [PATCH 307/691] Update lib.pe.sh --- scripts/lib.pe.sh | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 010312f..668fac7 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -501,11 +501,14 @@ echo $HTTP_JSON_BODY #_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) echo "Creating File Anlytics Server Now" -#curl --location --insecure -s --request 
POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --header 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" -_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") +curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" > reply_json_uuid.json +_task_id=($(jq -r '.task_uuid' reply_json.json | tr -d \")) + +#_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") # If there has been a reply (task_id) then the URL has accepted by PC # Changed (()) to [] so it works.... + if [ -z "$_task_id" ]; then log "File Analytics Deploy has encountered an eror..." else @@ -514,7 +517,7 @@ else # Run the progess checker loop - +fi # Check to ensure we get a response back, then start checking for the file server creation # if [[ ! -z $_response ]]; then # # Check if Files has been enabled @@ -531,7 +534,7 @@ else # fi # else # echo "File Analytics is not being created, check the staging logs." 
- fi + } ############################################################################################################################################################################### From 8172e1ae3ab2ca655e8d880eb1a75911d3bd5e29 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 18:37:22 -0800 Subject: [PATCH 308/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 668fac7..446fe79 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -502,7 +502,7 @@ echo $HTTP_JSON_BODY echo "Creating File Anlytics Server Now" curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" > reply_json_uuid.json -_task_id=($(jq -r '.task_uuid' reply_json.json | tr -d \")) +_task_id=($(jq -r '.task_uuid' reply_json_uuid.json | tr -d \")) #_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") From fc11b3d08530933297d7a3352bb665109ca5d0f1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 19:37:28 -0800 Subject: [PATCH 309/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 446fe79..3228bc2 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -501,7 +501,7 @@ echo $HTTP_JSON_BODY #_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) echo "Creating File Anlytics Server Now" -curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user 
${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" > reply_json_uuid.json +curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" #> reply_json_uuid.json _task_id=($(jq -r '.task_uuid' reply_json_uuid.json | tr -d \")) #_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") From 895f58f443211a288fc3d687ec7ecbb37ec44ec3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 19:41:06 -0800 Subject: [PATCH 310/691] Update lib.pe.sh --- scripts/lib.pe.sh | 35 +++++++++++------------------------ 1 file changed, 11 insertions(+), 24 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 3228bc2..bb268a3 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -502,38 +502,25 @@ echo $HTTP_JSON_BODY echo "Creating File Anlytics Server Now" curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" #> reply_json_uuid.json -_task_id=($(jq -r '.task_uuid' reply_json_uuid.json | tr -d \")) + +sleep 300 + +#_task_id=($(jq -r '.task_uuid' reply_json_uuid.json | tr -d \")) #_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") # If there has been a reply (task_id) then the URL has accepted by PC # Changed (()) to [] so it works.... -if [ -z "$_task_id" ]; then - log "File Analytics Deploy has encountered an eror..." -else - log "File Analytics Deploy started.." 
- set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run +#if [ -z "$_task_id" ]; then +# log "File Analytics Deploy has encountered an eror..." +#else +# log "File Analytics Deploy started.." +# set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run # Run the progess checker - loop -fi - # Check to ensure we get a response back, then start checking for the file server creation -# if [[ ! -z $_response ]]; then -# # Check if Files has been enabled -# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $FILE_ANALYTICS_VERSION | wc -l) -# while [[ $_checkresponse -ne 1 && $_tries -lt $_maxtries ]]; do -# log "File Analytics Server Not yet created. $_tries/$_maxtries... sleeping 1 minute" -# _checkresponse=$(curl ${CURL_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep $FILE_ANALYTICS_VERSION | wc -l) -# ((_tries++)) -# done -# if [[ $_checkresponse -eq 1 ]]; then -# echo "File Analytics has been created." -# else -# echo "File Analytics creation failed. Check the staging logs." -# fi -# else -# echo "File Analytics is not being created, check the staging logs." 
+# loop +#fi } From 707aa69fe5dbdfbf7a9b06b18bb3b2750f30ddcc Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 20:34:54 -0800 Subject: [PATCH 311/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index bb268a3..4a5b333 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -501,7 +501,7 @@ echo $HTTP_JSON_BODY #_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) echo "Creating File Anlytics Server Now" -curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" #> reply_json_uuid.json +curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"image_version": "${FILE_ANALYTICS_VERSION}","vm_name":"${_file_analytics_server_name}","container_uuid": "${_storage_default_uuid}", "container_name": "${STORAGE_DEFAULT}","network": {"uuid": "${_nw_uuid}","ip": "","netmask": "","gateway": ""},"resource": {"memory": "24","cores": "2","vcpu": "4"},"dns_servers": [${AUTH_HOST}],"ntp_servers": [${_ntp_formatted}],"disk_size": "3"}' #> reply_json_uuid.json sleep 300 From 0d6829812dc9dd2aee92ad35fcf48997b34ac034 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 11 Feb 2020 20:40:30 -0800 Subject: [PATCH 312/691] Update lib.pc.sh --- scripts/lib.pc.sh | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index d3897e2..7732f1f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1302,18 +1302,21 @@ _http_body=$(cat < Date: Wed, 12 Feb 2020 11:15:23 +0400 Subject: [PATCH 313/691] For Debug For debug reasons taken file 
analytics out of the script --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 8619e00..210c68f 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -51,7 +51,7 @@ case ${1} in && sleep 30 \ && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ && sleep 30 \ - && file_analytics_install \ + # && file_analytics_install \ && sleep 30 \ && create_file_analytics_server \ && sleep 30 From d3df541ed25d5e3e04aa851c398a58f50275bf48 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 12 Feb 2020 11:16:34 +0400 Subject: [PATCH 314/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 210c68f..7d1faa7 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -52,7 +52,7 @@ case ${1} in && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ && sleep 30 \ # && file_analytics_install \ - && sleep 30 \ + sleep 30 \ && create_file_analytics_server \ && sleep 30 From f8c7904b07e0094ddf0e4395b61694e1551a1924 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 12 Feb 2020 17:20:12 +0400 Subject: [PATCH 315/691] Update lib.pe.sh Small changes to debug the code... --- scripts/lib.pe.sh | 51 +++++++++++++++++++++++------------------------ 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4a5b333..a7325a8 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -486,7 +486,7 @@ HTTP_JSON_BODY=$(cat < reply_json_uuid.json - -sleep 300 - -#_task_id=($(jq -r '.task_uuid' reply_json_uuid.json | tr -d \")) -#_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") - -# If there has been a reply (task_id) then the URL has accepted by PC -# Changed (()) to [] so it works.... 
- -#if [ -z "$_task_id" ]; then -# log "File Analytics Deploy has encountered an eror..." -#else -# log "File Analytics Deploy started.." -# set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run - - # Run the progess checker -# loop -#fi + # execute the API call to create the file analytics server + #_response=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | grep "taskUuid" | wc -l) + echo "Creating File Anlytics Server Now" + echo $HTTP_JSON_BODY + + #curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" + + #sleep 300 + + _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | jq -r '.task_uuid' | tr -d \") + + echo $_task_id + + # If there has been a reply (task_id) then the URL has accepted by PC + # Changed (()) to [] so it works.... + + if [ -z "$_task_id" ]; then + log "File Analytics Deploy has encountered an eror..." + else + log "File Analytics Deploy started.." 
+ set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + # Run the progess checker + loop + fi } From c96afbdac854cdcff7a275c2e470712dceb7c2b2 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 12 Feb 2020 17:22:37 +0400 Subject: [PATCH 316/691] Update ts2020.sh --- scripts/ts2020.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 7d1faa7..8619e00 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -51,8 +51,8 @@ case ${1} in && sleep 30 \ && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ && sleep 30 \ - # && file_analytics_install \ - sleep 30 \ + && file_analytics_install \ + && sleep 30 \ && create_file_analytics_server \ && sleep 30 From 4391a28e9ea84ecf898e77b293e14da43137a0a0 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 12 Feb 2020 19:49:41 +0400 Subject: [PATCH 317/691] Update lib.pe.sh Last change... --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index a7325a8..f37c498 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -505,7 +505,7 @@ EOF #sleep 300 - _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | jq -r '.task_uuid' | tr -d \") + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | jq -r '.task_uuid' | tr -d \") echo $_task_id From 6f3d4ba5adc9b8bdbcbb4af9d0ef77a07619a460 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 12 Feb 2020 19:59:56 +0400 Subject: [PATCH 318/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index f37c498..5d4d185 100755 --- a/scripts/lib.pe.sh +++ 
b/scripts/lib.pe.sh @@ -507,7 +507,7 @@ EOF _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/PrismGateway/services/rest/v2.0/analyticsplatform' | jq -r '.task_uuid' | tr -d \") - echo $_task_id + log "Task uuid for the FileAnalytics server is " $_task_id " ....." # If there has been a reply (task_id) then the URL has accepted by PC # Changed (()) to [] so it works.... From 0a07ec49cb7410931e2d60cee6b3f02bd3442d83 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 12 Feb 2020 08:57:30 -0800 Subject: [PATCH 319/691] Update lib.pc.sh --- scripts/lib.pc.sh | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7732f1f..286826b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1237,7 +1237,7 @@ log "NW UUID = ${_nw_uuid}" log "Role UUID = ${_role_uuid}" log "PC Account UUID = ${_pc_account_uuid}" -_http_body=$(cat < Date: Wed, 12 Feb 2020 14:00:11 -0800 Subject: [PATCH 320/691] JQ --- scripts/lib.pc.sh | 6 +++--- scripts/ts2020.sh | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 286826b..ff5be37 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1251,7 +1251,7 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 12 Feb 2020 20:26:02 -0800 Subject: [PATCH 321/691] JQ --- scripts/global.vars.sh | 14 ++++++------ scripts/lib.pc.sh | 51 ++++++++++++++++++++++++++++++++++++++++-- scripts/lib.pe.sh | 14 ++++++------ 3 files changed, 63 insertions(+), 16 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 154c464..9156f11 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -5,7 +5,7 @@ RELEASE='release.json' PC_DEV_VERSION='5.11.2.1' PC_CURRENT_VERSION='5.11.2' PC_STABLE_VERSION='5.11' -FILES_VERSION='3.6.0' +FILES_VERSION='3.6.1.2' FILE_ANALYTICS_VERSION='2.1.0' NTNX_INIT_PASSWORD='nutanix/4u' 
PRISM_ADMIN='admin' @@ -155,8 +155,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.0.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.1.2.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' JQ_REPOS=(\ @@ -192,8 +192,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' JQ_REPOS=(\ @@ -229,8 +229,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 
PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.0-stable.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' JQ_REPOS=(\ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index ff5be37..964abb0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1210,6 +1210,7 @@ EOF function pc_project() { local _name="BootcampInfra" local _count + local _user_group_uuid local _role="Project Admin" local _role_uuid local _pc_account_uuid @@ -1217,6 +1218,50 @@ function pc_project() { local _nw_uuid local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " +# Creating User Group +log "Creating User Group" + +HTTP_JSON_BODY=$(cat < Date: Wed, 12 Feb 2020 22:57:08 -0800 Subject: [PATCH 322/691] Update lib.pc.sh --- scripts/lib.pc.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 964abb0..fd5dae8 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1238,20 +1238,20 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 12 Feb 2020 22:58:47 -0800 Subject: [PATCH 323/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh 
b/scripts/ts2020.sh index 1cc6ce8..c007f1a 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -93,7 +93,7 @@ case ${1} in CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ - Windows10v1903.qcow2 \ + #Windows10v1903.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ERA-Server-build-1.2.0.1.qcow2 \ From 9502ef27e60a4a80dc06cd30c7be7e8a56ec76ba Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 13 Feb 2020 08:54:37 -0800 Subject: [PATCH 324/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index c007f1a..1cc6ce8 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -93,7 +93,7 @@ case ${1} in CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ - #Windows10v1903.qcow2 \ + Windows10v1903.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ERA-Server-build-1.2.0.1.qcow2 \ From a33f373a28251a9e2624f1a5282c20921f39cf73 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 13 Feb 2020 09:59:48 -0800 Subject: [PATCH 325/691] JQ and PrismOps Host IP --- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 7 ++----- scripts/lib.pe.sh | 4 ++-- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 9156f11..add5fb1 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -65,6 +65,7 @@ IPV4_PREFIX=${OCTET[0]}.${OCTET[1]}.${OCTET[2]} DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22)) +PrismOpsServer_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 5))" ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) DNS_SERVERS='8.8.8.8' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index fd5dae8..c110562 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1241,12 +1241,9 @@ EOF echo "Creating User Group Now" echo $HTTP_JSON_BODY - _task_id=$(curl ${CURL_HTTP_OPTS} 
--user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/api/nutanix/v3/user_groups' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${HTTP_JSON_BODY}" 'https://localhost:9440/api/nutanix/v3/user_groups' | jq -r '.status.execution_context.task_uuid' | tr -d \") - log "Task uuid for the User Group Create is " $_task_id " ....." - #Sleep 60 - - #_task_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/projects_internal' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${_http_body}" | jq -r '.status.execution_context.task_uuid' | tr -d \") + log "Task uuid for the User Group Create is $_task_id ....." if [ -z "$_task_id" ]; then log "User Group Create has encountered an error..." diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 1031ad2..a238faa 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -247,8 +247,8 @@ acli "vm.create ${PrismOpsServer} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" # vmstat --wide --unit M --active # suggests 2G sufficient, was 4G #acli "vm.disk_create ${VMNAME} cdrom=true empty=true" acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}" -acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" -#acli "vm.nic_create ${VMNAME} network=${NW1_NAME} ip=${AUTH_HOST}" +#acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" +acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME} ip=${PrismOpsServer_HOST}" log "Power on ${VPrismOpsServer} VM..." 
acli "vm.on ${PrismOpsServer}" From a15b42b2b6d30330dc25ff32460cadfc37dc02cb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 13 Feb 2020 12:35:51 -0800 Subject: [PATCH 326/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c110562..2d70ec8 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1255,9 +1255,9 @@ EOF fi # Get the User Group UUID -log "Get cluster network UUID" +log "Get User Group UUID" -_user_group_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/accounts/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") +_user_group_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/user_groups/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Network UUIDs log "Get cluster network UUID" From 8951886b3f1d5acc3c8d8f1f92692609760f6e24 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 13 Feb 2020 13:52:36 -0800 Subject: [PATCH 327/691] Update ts2020.sh --- scripts/ts2020.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 1cc6ce8..fa2d884 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -93,13 +93,12 @@ case ${1} in CentOS7.qcow2 \ Windows2016.qcow2 \ Windows2012R2.qcow2 \ - Windows10v1903.qcow2 \ + Win10v1903.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ERA-Server-build-1.2.0.1.qcow2 \ MSSQL-2016-VM.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ move-3.4.1.qcow2 \ AutoXD.qcow2 \ ) @@ -109,7 +108,7 @@ case ${1} in Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ FrameCCA-2.1.6 \ FrameGuestAgentInstaller_1.0.2.2_7930 \ - VeeamBR_9.5.4.2615.Update4.iso \ + VeeamBackup_Replication_10.0.0.4207.BETA2.iso \ ) From 4241a029a398eea7c47149432e87d3b4346874c5 Mon Sep 17 
00:00:00 2001 From: Nathan Cox Date: Fri, 14 Feb 2020 13:27:18 -0800 Subject: [PATCH 328/691] Update ts2020.sh --- scripts/ts2020.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index fa2d884..81038fa 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -97,18 +97,14 @@ case ${1} in ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ERA-Server-build-1.2.0.1.qcow2 \ - MSSQL-2016-VM.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ move-3.4.1.qcow2 \ - AutoXD.qcow2 \ ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ - SQLServer2014SP3.iso \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ FrameCCA-2.1.6 \ FrameGuestAgentInstaller_1.0.2.2_7930 \ - VeeamBackup_Replication_10.0.0.4207.BETA2.iso \ ) From 090cda224ffebbd674142ac28bd09d5a70bb5e5e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 14 Feb 2020 15:15:19 -0800 Subject: [PATCH 329/691] Update ts2020.sh --- scripts/ts2020.sh | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 81038fa..36cd3a2 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -91,20 +91,30 @@ case ${1} in export QCOW2_IMAGES=(\ CentOS7.qcow2 \ - Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Win10v1903.qcow2 \ - ToolsVM.qcow2 \ - Linux_ToolsVM.qcow2 \ - ERA-Server-build-1.2.0.1.qcow2 \ - HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ - move-3.4.1.qcow2 \ + #Windows2016.qcow2 \ + #Windows2012R2.qcow2 \ + #Win10v1903.qcow2 \ + #ToolsVM.qcow2 \ + #Linux_ToolsVM.qcow2 \ + #ERA-Server-build-1.2.0.1.qcow2 \ + #HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + #move-3.4.1.qcow2 \ + #GTSOracle/19c-april/19c-bootdisk.qcow2 \ + #GTSOracle/19c-april/19c-disk1.qcow2 \ + #GTSOracle/19c-april/19c-disk2.qcow2 \ + #GTSOracle/19c-april/19c-disk3.qcow2 \ + #GTSOracle/19c-april/19c-disk4.qcow2 \ + #GTSOracle/19c-april/19c-disk5.qcow2 \ + #GTSOracle/19c-april/19c-disk6.qcow2 \ + #GTSOracle/19c-april/19c-disk7.qcow2 \ + #GTSOracle/19c-april/19c-disk8.qcow2 \ + 
#GTSOracle/19c-april/19c-disk9.qcow2 \ ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ - Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ - FrameCCA-2.1.6 \ - FrameGuestAgentInstaller_1.0.2.2_7930 \ + #Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + #FrameCCA-2.1.6.iso \ + #FrameGuestAgentInstaller_1.0.2.2_7930.iso \ ) From 323a8ab758f90f4c008d3e09064e7f204aa8443a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 14 Feb 2020 15:55:28 -0800 Subject: [PATCH 330/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 36cd3a2..5870655 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -95,7 +95,7 @@ case ${1} in #Windows2012R2.qcow2 \ #Win10v1903.qcow2 \ #ToolsVM.qcow2 \ - #Linux_ToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ #ERA-Server-build-1.2.0.1.qcow2 \ #HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ #move-3.4.1.qcow2 \ From 432f8e08238175ae811efec25a1d9098f60ef59b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 14 Feb 2020 21:25:18 -0800 Subject: [PATCH 331/691] Updates for modifying and uploading ERA Calm BP --- scripts/lib.pc.sh | 731 +++++++++++++++++++++++----------------------- scripts/lib.pe.sh | 4 +- scripts/ts2020.sh | 5 +- 3 files changed, 376 insertions(+), 364 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 2d70ec8..a785089 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -792,355 +792,9 @@ function calm_enable() { done } -############################################################################################################################################################################### -# Routine to upload Citrix Calm Blueprint and set variables -############################################################################################################################################################################### - -function upload_citrix_calm_blueprint() { - local DIRECTORY="/home/nutanix/" - local 
CALM_PROJECT="BootcampInfra" - local DOMAIN=${AUTH_FQDN} - local AD_IP=${AUTH_HOST} - local PE_IP=${PE_HOST} - local NutanixAcropolisPlugin="none" - local CVM_NETWORK=${NW1_NAME} - local BPG_RKTOOLS_URL="none" - local DDC_IP=${CITRIX_DDC_HOST} - local NutanixAcropolis_Installed_Path="none" - - local VLAN_NAME=${NW1_VLAN} - local DOWNLOAD_BLUEPRINTS - - # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) - log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" - - # ensure the directory that contains the blueprints to be imported is not empty - if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then - echo "There are no .json files found in the directory provided." - exit 0 - fi - - # create a list to store all bluprints found in the directory provided by user - declare -a LIST_OF_BLUEPRINTS=() - - # circle thru all of the files in the provided directory and add file names to a list of blueprints array - # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - for FILE in "$DIRECTORY"/*.json; do - BASENAM="$(basename ${FILE})" - FILENAME="${BASENAM%.*}" - LIST_OF_BLUEPRINTS+=("$BASENAM") - done - - # echo $LIST_OF_BLUEPRINTS - # if the list of blueprints is not empty then: - if ((${#LIST_OF_BLUEPRINTS[@]})); then - # first check if the user has specified a project for the imported blueprints - # if they did, we need to make sure the project exists before assigning it to the BPs - - if [ $CALM_PROJECT != 'none' ]; then - - # curl command needed: - # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! 
-d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' - - # formulate the curl to check for project - _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" - - # make API call and store project_uuid - project_uuid=$(curl -s -k -X POST $_url_pc -H 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} -d "{\"kind\": \"project\", \"filter\": \"name==$CALM_PROJECT\"}" | jq -r '.entities[].metadata.uuid') - - if [ -z "$project_uuid" ]; then - # project wasn't found - # exit at this point as we don't want to assume all blueprints should then hit the 'default' project - echo "\nProject $CALM_PROJECT was not found. Please check the name and retry." - exit 0 - else - echo "\nProject $CALM_PROJECT exists..." - fi - fi - else - echo '\nNo JSON files found in' + $DIRECTORY +' ... nothing to import!' - fi - - # update the user with script progress... - _num_of_files=${#LIST_OF_BLUEPRINTS[@]} - echo "\nNumber of .json files found: ${_num_of_files}" - echo "\nStarting blueprint updates and then exporting to Calm one file at a time...\n\n" - - # go through the blueprint JSON files list found in the specified directory - for elem in "${LIST_OF_BLUEPRINTS[@]}"; do - # read the entire JSON file from the directory - JSONFile=${DIRECTORY}/"$elem" - - echo "\nCurrently updating blueprint $JSONFile..." 
- - # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint - tmp=$(mktemp) - - # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data - if [ $CALM_PROJECT != 'none' ]; then - # add the new atributes to the JSON and overwrite the old JSON file with the new one - $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) - fi - - # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ "$elem" == "${NAME}" ]; then - if [ "$DOMAIN" != "none" ]; then - tmp_DOMAIN=$(mktemp) - # add the new variable to the json file and save it - $(jq --arg var_name $DOMAIN'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DOMAIN")).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) - fi - if [ "$AD_IP" != "none" ]; then - tmp_AD_IP=$(mktemp) - $(jq --arg var_name $AD_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="AD_IP")).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) - fi - if [ "$PE_IP" != "none" ]; then - tmp_PE_IP=$(mktemp) - $(jq --arg var_name $PE_IP'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_IP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) - fi - if [ "$NutanixAcropolisPlugin" != "none" ]; then - tmp_NutanixAcropolisPluginE=$(mktemp) - $(jq --arg var_name $NutanixAcropolisPlugin '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolisPlugin")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolisPlugin" && mv "$tmp_NutanixAcropolisPlugin" 
$JSONFile) - fi - if [ "$CVM_NETWORK" != "none" ]; then - tmp_CVM_NETWORK=$(mktemp) - $(jq --arg var_name $CVM_NETWORK '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CVM_NETWORK")).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) - fi - if [ "$BPG_RKTOOLS_URL" != "none" ]; then - tmp_BPG_RKTOOLS_URL=$(mktemp) - $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="BPG_RKTOOLS_URL")).value=$var_name' $JSONFile >"$tmp_BPG_RKTOOLS_URL" && mv "$tmp_BPG_RKTOOLS_URL" $JSONFile) - fi - if [ "$DDC_IP" != "none" ]; then - tmp_DDC_IP=$(mktemp) - $(jq --arg var_name $DDC_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DDC_IP")).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) - fi - if [ "$NutanixAcropolis_Installed_Path" != "none" ]; then - tmp_NutanixAcropolis_Installed_Path=$(mktemp) - $(jq --arg var_name $NutanixAcropolis_Installed_Path '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolis_Installed_PathL")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolis_Installed_Path" && mv "$tmp_NutanixAcropolis_Installed_Path" $JSONFile) - fi - fi - - # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) - tmp_removal=$(mktemp) - $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) - - # GET BP NAME (affects all BPs being imported) - # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all - blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) - blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " - blueprint_name="${blueprint_name#\"}" # will remove the prefix " - - if [ blueprint_name == 'null' ]; then - echo "\nUnprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?\n" - exit 0 - else - # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload - echo "\nUploading the updated blueprint: $blueprint_name...\n" - - # Example curl call from the console: - # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" - # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" - # bp_name="EraServerDeployment" - # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" - # password='techX2019!' - # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" - - url="https://localhost:9440/api/nutanix/v3/blueprints/import_file" - path_to_file=$JSONFile - bp_name=$blueprint_name - project_uuid=$project_uuid - password=$PE_PASSWORD - upload_result=$(curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password") - - #if the upload_result var is not empty then let's say it was succcessful - if [ -z "$upload_result" ]; then - echo "\nUpload for $bp_name did not finish." - else - echo "\nUpload for $bp_name finished." 
- echo "-----------------------------------------" - # echo "Result: $upload_result" - fi - fi - - done - - echo "\nFinished uploading Citrix Blueprint and setting Variables!\n" - -} - -############################################################################################################################################################################### -# Routine to upload Era Calm Blueprint and set variables -############################################################################################################################################################################### - -function upload_era_calm_blueprint() { - local DIRECTORY="/home/nutanix/" - local CALM_PROJECT="BootcampInfra" - local ERA_IP=${ERA_HOST} - local PE_IP=${PE_HOST} - local CLSTR_NAME="none" - local CTR_UUID=${_storage_default_uuid} - local CTR_NAME=${STORAGE_DEFAULT} - local NETWORK_NAME=${NW1_NAME} - local VLAN_NAME=${NW1_VLAN} - local DOWNLOAD_BLUEPRINTS - - - - # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${ERA_Blueprint} -o ${DIRECTORY}${ERA_Blueprint}) - log "Downloading ${ERA_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" - - # ensure the directory that contains the blueprints to be imported is not empty - if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then - echo "There are no .json files found in the directory provided." 
- exit 0 - fi - - # create a list to store all bluprints found in the directory provided by user - declare -a LIST_OF_BLUEPRINTS=() - - # circle thru all of the files in the provided directory and add file names to a list of blueprints array - # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - for FILE in "$DIRECTORY"/*.json; do - BASENAM="$(basename ${FILE})" - FILENAME="${BASENAM%.*}" - LIST_OF_BLUEPRINTS+=("$BASENAM") - done - - # echo $LIST_OF_BLUEPRINTS - # if the list of blueprints is not empty then: - if ((${#LIST_OF_BLUEPRINTS[@]})); then - # first check if the user has specified a project for the imported blueprints - # if they did, we need to make sure the project exists before assigning it to the BPs - - if [ $CALM_PROJECT != 'none' ]; then - - # curl command needed: - # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' - - # formulate the curl to check for project - _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" - - # make API call and store project_uuid - project_uuid=$(curl -s -k -X POST $_url_pc -H 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} -d "{\"kind\": \"project\", \"filter\": \"name==$CALM_PROJECT\"}" | jq -r '.entities[].metadata.uuid') - - if [ -z "$project_uuid" ]; then - # project wasn't found - # exit at this point as we don't want to assume all blueprints should then hit the 'default' project - echo "\nProject $CALM_PROJECT was not found. Please check the name and retry." - exit 0 - else - echo "\nProject $CALM_PROJECT exists..." - fi - fi - else - echo '\nNo JSON files found in' + $DIRECTORY +' ... nothing to import!' - fi - - # update the user with script progress... 
- _num_of_files=${#LIST_OF_BLUEPRINTS[@]} - echo "\nNumber of .json files found: ${_num_of_files}" - echo "\nStarting blueprint updates and then exporting to Calm one file at a time...\n\n" - - # go through the blueprint JSON files list found in the specified directory - for elem in "${LIST_OF_BLUEPRINTS[@]}"; do - # read the entire JSON file from the directory - JSONFile=${DIRECTORY}/"$elem" - - echo "\nCurrently updating blueprint $JSONFile..." - - # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint - tmp=$(mktemp) - - # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data - if [ $CALM_PROJECT != 'none' ]; then - # add the new atributes to the JSON and overwrite the old JSON file with the new one - $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) - fi - - # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ "$elem" == "${NAME}" ]; then - if [ "$ERA_IP" != "none" ]; then - tmp_ERA_IP=$(mktemp) - # add the new variable to the json file and save it - $(jq --arg var_name $ERA_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="ERA_IP")).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) - # result="$(jq --arg newOBJ "${obj_with_replaced_variable}" '.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_VIP") | .+=$newOBJ' $JSONFile )" - fi - if [ "$PE_IP" != "none" ]; then - tmp_PE_IP=$(mktemp) - # add the new variable to the json file and save it - $(jq --arg var_name $PE_IP '(.spec.resources.service_definition_list[0].variable_list[] | select 
(.name=="PE_VIP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) - # result="$(jq --arg newOBJ "${obj_with_replaced_variable}" '.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_VIP") | .+=$newOBJ' $JSONFile )" - fi - if [ "$CLSTR_NAME" != "none" ]; then - tmp_CLSTR_NAME=$(mktemp) - $(jq --arg var_name $CLSTR_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CLSTR_NAME")).value=$var_name' $JSONFile >"$tmp_CLSTR_NAME" && mv "$tmp_CLSTR_NAME" $JSONFile) - fi - if [ "$CTR_UUID" != "none" ]; then - tmp_CTR_UUID=$(mktemp) - $(jq --arg var_name $CTR_UUID '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CTR_UUID")).value=$var_name' $JSONFile >"$tmp_CTR_UUID" && mv "$tmp_CTR_UUID" $JSONFile) - fi - if [ "$CTR_NAME" != "none" ]; then - tmp_CTR_NAME=$(mktemp) - $(jq --arg var_name $CTR_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CTR_NAME")).value=$var_name' $JSONFile >"$tmp_CTR_NAME" && mv "$tmp_CTR_NAME" $JSONFile) - fi - if [ "$NETWORK_NAME" != "none" ]; then - tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) - fi - if [ "$VLAN_NAME" != "none" ]; then - tmp_VLAN_NAME=$(mktemp) - $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="VLAN_NAME")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) - fi - fi - - # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) - tmp_removal=$(mktemp) - $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) - - # GET BP NAME (affects all BPs being imported) - # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all - blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) - blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " - blueprint_name="${blueprint_name#\"}" # will remove the prefix " - - if [ blueprint_name == 'null' ]; then - echo "\nUnprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?\n" - exit 0 - else - # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload - echo "\nUploading the updated blueprint: $blueprint_name...\n" - - # Example curl call from the console: - # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" - # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" - # bp_name="EraServerDeployment" - # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" - # password='techX2019!' - # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" - - url="https://localhost:9440/api/nutanix/v3/blueprints/import_file" - path_to_file=$JSONFile - bp_name=$blueprint_name - project_uuid=$project_uuid - password=$PE_PASSWORD - upload_result=$(curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password") - - #if the upload_result var is not empty then let's say it was succcessful - if [ -z "$upload_result" ]; then - echo "\nUpload for $bp_name did not finish." - else - echo "\nUpload for $bp_name finished." 
- echo "-----------------------------------------" - # echo "Result: $upload_result" - fi - fi - done - echo "\nFinished uploading Era Blueprint and setting Variables!\n" -} ############################################################################################################################################################################### # Routine to make changes to the PC UI; Colors, naming and the Welcome Banner @@ -1369,18 +1023,373 @@ EOF } -# _name=${EMAIL%%@nutanix.com}.test -# _count=$(. /etc/profile.d/nutanix_env.sh \ -# && nuclei project.list 2>/dev/null | grep ${_name} | wc --lines) -# if (( ${_count} > 0 )); then -# nuclei project.delete ${_name} confirm=false 2>/dev/null -# else -# log "Warning: _count=${_count}" -# fi - -# log "Creating ${_name}..." -# nuclei project.create name=${_name} description='test from NuCLeI!' 2>/dev/null -# _uuid=$(. /etc/profile.d/nutanix_env.sh \ -# && nuclei project.get ${_name} format=json 2>/dev/null \ -# | jq .metadata.project_reference.uuid | tr -d '"') -# log "${_name}.uuid = ${_uuid}" +############################################################################################################################################################################### +# Routine to upload Era Calm Blueprint and set variables +############################################################################################################################################################################### + +function upload_era_calm_blueprint() { + local DIRECTORY="/home/nutanix/" + local BLUEPRINT=${ERA_Blueprint} + local CALM_PROJECT="BootcampInfra" + local ERA_IP=${ERA_HOST} + local PE_IP=${PE_HOST} + local CLSTR_NAME="none" + local CTR_UUID=${_storage_default_uuid} + local CTR_NAME=${STORAGE_DEFAULT} + local NETWORK_NAME=${NW1_NAME} + local VLAN_NAME=${NW1_VLAN} + local ERAADMIN_PASSWORD="nutanix/4u" + local PE_CREDS_PASSWORD="${PE_PASSWORD}" + local ERACLI_PASSWORD="-----BEGIN RSA PRIVATE KEY----- 
+MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG +ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK +6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9 +HtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy +hCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR +uz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp +6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0 +MrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c +1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj +8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl +JDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf +h45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk +QVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c +oDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0 +EjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj +uFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M +Ez2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k +7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk +hztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC +kPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME +rECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF +2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z +iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ +dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP +gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF +-----END RSA PRIVATE KEY-----" + local DOWNLOAD_BLUEPRINTS + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" + local ERA_IMAGE_UUID + + #Getting the ERA_IMAGE_UUID -- WHen changing the image make sure to change in the name 
filter + ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "ERA Image UUID = $ERA_IMAGE_UUID" + + # download the blueprint + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}${BLUEPRINT}) + log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + + # ensure the directory that contains the blueprints to be imported is not empty + if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then + echo "There are no .json files found in the directory provided." + exit 0 + fi + + # create a list to store all bluprints found in the directory provided by user + #declare -a LIST_OF_BLUEPRINTS=() + + # circle thru all of the files in the provided directory and add file names to a list of blueprints array + # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) + #for FILE in "$DIRECTORY"/*.json; do + # BASENAM="$(basename ${FILE})" + # FILENAME="${BASENAM%.*}" + # LIST_OF_BLUEPRINTS+=("$BASENAM") + #done + + + if [ $CALM_PROJECT != 'none' ]; then + + # make API call and store project_uuid + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==$CALM_PROJECT"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "Project $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "Project $CALM_PROJECT exists..." + fi + fi + + # update the user with script progress... + + echo "Starting blueprint updates and then Uploading to Calm..." 
+ + # read the entire JSON file from the directory + JSONFile=${DIRECTORY}/${BLUEPRINT} + + echo "Currently updating blueprint $JSONFile..." + + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "ERA_IP=${ERA_HOST}" + echo "PE_IP=${PE_HOST}" + + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT , we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") + if [ ${BLUEPRINT} == "${NAME}" ]; then + if [ "$ERA_IP" != "none" ]; then + tmp_ERA_IP=$(mktemp) + # add the new variable to the json file and save it + $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="ERA_IP")).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) + fi + if [ "$ERA_IMAGE" != "none" ]; then + tmp_ERA_IMAGE=$(mktemp) + $(jq --arg var_name $ERA_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) + fi + if [ "$ERA_IMAGE_UUID" != "none" ]; then + tmp_ERA_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $ERA_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) + fi + if [ "$NETWORK_NAME" != "none" ]; then + tmp_NETWORK_NAME=$(mktemp) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv 
"$tmp_NETWORK_NAME" $JSONFile) + fi + if [ "$VLAN_NAME" != "none" ]; then + tmp_VLAN_NAME=$(mktemp) + $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + fi + if [ "$ERAADMIN_PASSWORD" != "none" ]; then + tmp_ERAADMIN_PASSWORD=$(mktemp) + $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) + fi + if [ "$PE_CREDS_PASSWORD" != "none" ]; then + tmp_PE_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="pe_creds")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + fi + if [ "$ERACLI_PASSWORD" != "none" ]; then + tmp_ERACLI_PASSWORD=$(mktemp) + $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraCLI")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) + fi + fi + + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " + + if [ $blueprint_name == 'null' ]; then + echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "Uploading the updated blueprint: $blueprint_name..." + + # Example curl call from the console: + # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" + # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" + # bp_name="EraServerDeployment" + # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" + # password='techX2019!' + # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" + + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + password=$PE_PASSWORD + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "Upload for $bp_name did not finish." + else + echo "Upload for $bp_name finished." + echo "-----------------------------------------" + # echo "Result: $upload_result" + fi + fi + + echo "Finished uploading ${BLUEPRINT} and setting Variables!" 
+ +} + +############################################################################################################################################################################### +# Routine to upload Citrix Calm Blueprint and set variables +############################################################################################################################################################################### + +function upload_citrix_calm_blueprint() { + local DIRECTORY="/home/nutanix/" + local CALM_PROJECT="BootcampInfra" + local DOMAIN=${AUTH_FQDN} + local AD_IP=${AUTH_HOST} + local PE_IP=${PE_HOST} + local NutanixAcropolisPlugin="none" + local CVM_NETWORK=${NW1_NAME} + local BPG_RKTOOLS_URL="none" + local DDC_IP=${CITRIX_DDC_HOST} + local NutanixAcropolis_Installed_Path="none" + + local VLAN_NAME=${NW1_VLAN} + local DOWNLOAD_BLUEPRINTS + + # download the blueprint + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) + log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + + # ensure the directory that contains the blueprints to be imported is not empty + if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then + echo "There are no .json files found in the directory provided." 
+ exit 0 + fi + + # create a list to store all bluprints found in the directory provided by user + declare -a LIST_OF_BLUEPRINTS=() + + # circle thru all of the files in the provided directory and add file names to a list of blueprints array + # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) + for FILE in "$DIRECTORY"/*.json; do + BASENAM="$(basename ${FILE})" + FILENAME="${BASENAM%.*}" + LIST_OF_BLUEPRINTS+=("$BASENAM") + done + + # echo $LIST_OF_BLUEPRINTS + # if the list of blueprints is not empty then: + if ((${#LIST_OF_BLUEPRINTS[@]})); then + # first check if the user has specified a project for the imported blueprints + # if they did, we need to make sure the project exists before assigning it to the BPs + + if [ $CALM_PROJECT != 'none' ]; then + + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + + # formulate the curl to check for project + _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" + + # make API call and store project_uuid + project_uuid=$(curl -s -k -X POST $_url_pc -H 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} -d "{\"kind\": \"project\", \"filter\": \"name==$CALM_PROJECT\"}" | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "\nProject $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "\nProject $CALM_PROJECT exists..." + fi + fi + else + echo '\nNo JSON files found in' + $DIRECTORY +' ... nothing to import!' + fi + + # update the user with script progress... 
+ _num_of_files=${#LIST_OF_BLUEPRINTS[@]} + echo "\nNumber of .json files found: ${_num_of_files}" + echo "\nStarting blueprint updates and then exporting to Calm one file at a time...\n\n" + + # go through the blueprint JSON files list found in the specified directory + for elem in "${LIST_OF_BLUEPRINTS[@]}"; do + # read the entire JSON file from the directory + JSONFile=${DIRECTORY}/"$elem" + + echo "\nCurrently updating blueprint $JSONFile..." + + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") + if [ "$elem" == "${NAME}" ]; then + if [ "$DOMAIN" != "none" ]; then + tmp_DOMAIN=$(mktemp) + # add the new variable to the json file and save it + $(jq --arg var_name $DOMAIN'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DOMAIN")).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) + fi + if [ "$AD_IP" != "none" ]; then + tmp_AD_IP=$(mktemp) + $(jq --arg var_name $AD_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="AD_IP")).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) + fi + if [ "$PE_IP" != "none" ]; then + tmp_PE_IP=$(mktemp) + $(jq --arg var_name 
$PE_IP'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_IP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) + fi + if [ "$NutanixAcropolisPlugin" != "none" ]; then + tmp_NutanixAcropolisPluginE=$(mktemp) + $(jq --arg var_name $NutanixAcropolisPlugin '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolisPlugin")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolisPlugin" && mv "$tmp_NutanixAcropolisPlugin" $JSONFile) + fi + if [ "$CVM_NETWORK" != "none" ]; then + tmp_CVM_NETWORK=$(mktemp) + $(jq --arg var_name $CVM_NETWORK '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CVM_NETWORK")).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) + fi + if [ "$BPG_RKTOOLS_URL" != "none" ]; then + tmp_BPG_RKTOOLS_URL=$(mktemp) + $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="BPG_RKTOOLS_URL")).value=$var_name' $JSONFile >"$tmp_BPG_RKTOOLS_URL" && mv "$tmp_BPG_RKTOOLS_URL" $JSONFile) + fi + if [ "$DDC_IP" != "none" ]; then + tmp_DDC_IP=$(mktemp) + $(jq --arg var_name $DDC_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DDC_IP")).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) + fi + if [ "$NutanixAcropolis_Installed_Path" != "none" ]; then + tmp_NutanixAcropolis_Installed_Path=$(mktemp) + $(jq --arg var_name $NutanixAcropolis_Installed_Path '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolis_Installed_PathL")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolis_Installed_Path" && mv "$tmp_NutanixAcropolis_Installed_Path" $JSONFile) + fi + fi + + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " + + if [ blueprint_name == 'null' ]; then + echo "\nUnprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?\n" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "\nUploading the updated blueprint: $blueprint_name...\n" + + # Example curl call from the console: + # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" + # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" + # bp_name="EraServerDeployment" + # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" + # password='techX2019!' + # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" + + url="https://localhost:9440/api/nutanix/v3/blueprints/import_file" + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + password=$PE_PASSWORD + upload_result=$(curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password") + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "\nUpload for $bp_name did not finish." + else + echo "\nUpload for $bp_name finished." 
+ echo "-----------------------------------------" + # echo "Result: $upload_result" + fi + fi + + done + + echo "\nFinished uploading Citrix Blueprint and setting Variables!\n" + +} diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index a238faa..40a6e5c 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -459,7 +459,9 @@ function create_file_analytics_server() { #_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Primary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - _nw_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Secondary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + #_nw_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Secondary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Secondary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Container UUIDs log "Get ${STORAGE_DEFAULT} Container UUID" diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 5870655..3d63502 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -90,8 +90,9 @@ case ${1} in . 
lib.pc.sh export QCOW2_IMAGES=(\ + ERA-Server-build-1.2.0.1.qcow2 \ + Windows2016.qcow2 \ CentOS7.qcow2 \ - #Windows2016.qcow2 \ #Windows2012R2.qcow2 \ #Win10v1903.qcow2 \ #ToolsVM.qcow2 \ @@ -111,8 +112,8 @@ case ${1} in #GTSOracle/19c-april/19c-disk9.qcow2 \ ) export ISO_IMAGES=(\ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ - #Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ #FrameCCA-2.1.6.iso \ #FrameGuestAgentInstaller_1.0.2.2_7930.iso \ ) From 5d42949b1a0a3018472ffeeeaba545b62051f3f6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 00:18:16 -0800 Subject: [PATCH 332/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index a785089..0d28385 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1102,7 +1102,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF if [ $CALM_PROJECT != 'none' ]; then # make API call and store project_uuid - project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==$CALM_PROJECT"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') if [ -z "$project_uuid" ]; then # project wasn't found From 6d3849c82e319f9d8a5dd3e1c80b26f3ce2336db Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 14:11:28 -0800 Subject: [PATCH 333/691] Updates for Calm BPs --- scripts/lib.pc.sh | 300 ++++++++++++++++++++++++++-------------------- scripts/ts2020.sh | 1 + 2 files changed, 168 insertions(+), 133 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0d28385..3c34703 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1068,11 
+1068,12 @@ dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF -----END RSA PRIVATE KEY-----" local DOWNLOAD_BLUEPRINTS - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " - #Getting the ERA_IMAGE_UUID -- WHen changing the image make sure to change in the name filter + + #Getting the IMAGE_UUID -- When changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "ERA Image UUID = $ERA_IMAGE_UUID" @@ -1119,13 +1120,13 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Starting blueprint updates and then Uploading to Calm..." # read the entire JSON file from the directory - JSONFile=${DIRECTORY}/${BLUEPRINT} + JSONFile=${DIRECTORY}${BLUEPRINT} echo "Currently updating blueprint $JSONFile..."
echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "ERA_IP=${ERA_HOST}" - echo "PE_IP=${PE_HOST}" + echo "ERA_IP=${ERA_IP}" + echo "PE_IP=${PE_IP}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) @@ -1138,11 +1139,13 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") if [ ${BLUEPRINT} == "${NAME}" ]; then + # Profile Variables if [ "$ERA_IP" != "none" ]; then tmp_ERA_IP=$(mktemp) # add the new variable to the json file and save it $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="ERA_IP")).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) fi + # VM Configuration if [ "$ERA_IMAGE" != "none" ]; then tmp_ERA_IMAGE=$(mktemp) $(jq --arg var_name $ERA_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) @@ -1159,6 +1162,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF tmp_VLAN_NAME=$(mktemp) $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) fi + # Credentials if [ "$ERAADMIN_PASSWORD" != "none" ]; then tmp_ERAADMIN_PASSWORD=$(mktemp) $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) @@ -1201,7 +1205,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF path_to_file=$JSONFile bp_name=$blueprint_name project_uuid=$project_uuid - password=$PE_PASSWORD + upload_result=$(curl ${CURL_HTTP_OPTS} --user 
${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') #if the upload_result var is not empty then let's say it was succcessful @@ -1228,14 +1232,32 @@ function upload_citrix_calm_blueprint() { local DOMAIN=${AUTH_FQDN} local AD_IP=${AUTH_HOST} local PE_IP=${PE_HOST} + local DDC_IP=${CITRIX_DDC_HOST} local NutanixAcropolisPlugin="none" local CVM_NETWORK=${NW1_NAME} + local NETWORK_NAME=${NW1_NAME} + local VLAN_NAME=${NW1_VLAN} local BPG_RKTOOLS_URL="none" - local DDC_IP=${CITRIX_DDC_HOST} local NutanixAcropolis_Installed_Path="none" - - local VLAN_NAME=${NW1_VLAN} + local LOCAL_PASSWORD="nutanix/4u" + local DOMAIN_CREDS_PASSWORD="nutanix/4u" + local PE_CREDS_PASSWORD="${PE_PASSWORD}" + local SQL_CREDS_PASSWORD="nutanix/4u" local DOWNLOAD_BLUEPRINTS + local SERVER_IMAGE="Windows2016.qcow2" + local SERVER_IMAGE_UUID + local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" + local CITRIX_IMAGE_UUID + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + + #Getting the IMAGE_UUID -- When changing the image make sure to change in the name filter + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Server Image UUID = $SERVER_IMAGE_UUID" + + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o
${DIRECTORY}${CALM_Blueprint}) @@ -1247,149 +1269,161 @@ function upload_citrix_calm_blueprint() { exit 0 fi - # create a list to store all bluprints found in the directory provided by user - declare -a LIST_OF_BLUEPRINTS=() - - # circle thru all of the files in the provided directory and add file names to a list of blueprints array - # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - for FILE in "$DIRECTORY"/*.json; do - BASENAM="$(basename ${FILE})" - FILENAME="${BASENAM%.*}" - LIST_OF_BLUEPRINTS+=("$BASENAM") - done - - # echo $LIST_OF_BLUEPRINTS - # if the list of blueprints is not empty then: - if ((${#LIST_OF_BLUEPRINTS[@]})); then - # first check if the user has specified a project for the imported blueprints - # if they did, we need to make sure the project exists before assigning it to the BPs - - if [ $CALM_PROJECT != 'none' ]; then + if [ $CALM_PROJECT != 'none' ]; then - # curl command needed: - # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! 
-d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' - # formulate the curl to check for project - _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" + # formulate the curl to check for project + _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" - # make API call and store project_uuid - project_uuid=$(curl -s -k -X POST $_url_pc -H 'Content-Type: application/json' --user ${PRISM_ADMIN}:${PE_PASSWORD} -d "{\"kind\": \"project\", \"filter\": \"name==$CALM_PROJECT\"}" | jq -r '.entities[].metadata.uuid') + # make API call and store project_uuid + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') - if [ -z "$project_uuid" ]; then - # project wasn't found - # exit at this point as we don't want to assume all blueprints should then hit the 'default' project - echo "\nProject $CALM_PROJECT was not found. Please check the name and retry." - exit 0 - else - echo "\nProject $CALM_PROJECT exists..." - fi + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "Project $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "Project $CALM_PROJECT exists..." fi - else - echo '\nNo JSON files found in' + $DIRECTORY +' ... nothing to import!' fi # update the user with script progress... 
- _num_of_files=${#LIST_OF_BLUEPRINTS[@]} - echo "\nNumber of .json files found: ${_num_of_files}" - echo "\nStarting blueprint updates and then exporting to Calm one file at a time...\n\n" - # go through the blueprint JSON files list found in the specified directory - for elem in "${LIST_OF_BLUEPRINTS[@]}"; do - # read the entire JSON file from the directory - JSONFile=${DIRECTORY}/"$elem" + echo "Starting blueprint updates and then Uploading to Calm..." - echo "\nCurrently updating blueprint $JSONFile..." + # read the entire JSON file from the directory + JSONFile=${DIRECTORY}${BLUEPRINT} - # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint - tmp=$(mktemp) + echo "Currently updating blueprint $JSONFile..." - # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data - if [ $CALM_PROJECT != 'none' ]; then - # add the new atributes to the JSON and overwrite the old JSON file with the new one - $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) - fi + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "DOMAIN=${DOMAIN}" + echo "AD_IP=${AD_IP}" + echo "PE_IP=${PE_IP}" + echo "DDC_IP=${DDC_IP}" + echo "CVM_NETWORK=${CVM_NETWORK}" - # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ "$elem" == "${NAME}" ]; then - if [ "$DOMAIN" != "none" ]; then - tmp_DOMAIN=$(mktemp) - # add the new variable to the json file and save it - $(jq --arg var_name $DOMAIN'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DOMAIN")).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) - fi - if [ 
"$AD_IP" != "none" ]; then - tmp_AD_IP=$(mktemp) - $(jq --arg var_name $AD_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="AD_IP")).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) - fi - if [ "$PE_IP" != "none" ]; then - tmp_PE_IP=$(mktemp) - $(jq --arg var_name $PE_IP'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="PE_IP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) - fi - if [ "$NutanixAcropolisPlugin" != "none" ]; then - tmp_NutanixAcropolisPluginE=$(mktemp) - $(jq --arg var_name $NutanixAcropolisPlugin '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolisPlugin")).value=$var_name' $JSONFile >"$tmp_NutanixAcropolisPlugin" && mv "$tmp_NutanixAcropolisPlugin" $JSONFile) - fi - if [ "$CVM_NETWORK" != "none" ]; then - tmp_CVM_NETWORK=$(mktemp) - $(jq --arg var_name $CVM_NETWORK '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="CVM_NETWORK")).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) - fi - if [ "$BPG_RKTOOLS_URL" != "none" ]; then - tmp_BPG_RKTOOLS_URL=$(mktemp) - $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="BPG_RKTOOLS_URL")).value=$var_name' $JSONFile >"$tmp_BPG_RKTOOLS_URL" && mv "$tmp_BPG_RKTOOLS_URL" $JSONFile) - fi - if [ "$DDC_IP" != "none" ]; then - tmp_DDC_IP=$(mktemp) - $(jq --arg var_name $DDC_IP '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="DDC_IP")).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) - fi - if [ "$NutanixAcropolis_Installed_Path" != "none" ]; then - tmp_NutanixAcropolis_Installed_Path=$(mktemp) - $(jq --arg var_name $NutanixAcropolis_Installed_Path '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NutanixAcropolis_Installed_PathL")).value=$var_name' $JSONFile 
>"$tmp_NutanixAcropolis_Installed_Path" && mv "$tmp_NutanixAcropolis_Installed_Path" $JSONFile) - fi + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") + if [ ${BLUEPRINT} == "${NAME}" ]; then + # Profile Variables + if [ "$DOMAIN" != "none" ]; then + tmp_DOMAIN=$(mktemp) + # add the new variable to the json file and save it + $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="DOMAIN")).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) + fi + if [ "$AD_IP" != "none" ]; then + tmp_AD_IP=$(mktemp) + $(jq --arg var_name $AD_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="AD_IP")).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) + fi + if [ "$PE_IP" != "none" ]; then + tmp_PE_IP=$(mktemp) + $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="PE_IP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) fi + if [ "$DDC_IP" != "none" ]; then + tmp_DDC_IP=$(mktemp) + $(jq --arg var_name $DDC_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="DDC_IP")).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) 
+ fi + if [ "$CVM_NETWORK" != "none" ]; then + tmp_CVM_NETWORK=$(mktemp) + $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="CVM_NETWORK")).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) + fi + # VM Configuration + #if [ "$SERVER_IMAGE" != "none" ]; then + # tmp_SERVER_IMAGE=$(mktemp) + # $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv #"$tmp_SERVER_IMAGE" $JSONFile) + #fi + if [ "$SERVER_IMAGE_UUID" != "none" ]; then + tmp_SERVER_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference | select (.name=="Windows2016.qcow2")).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + fi + #if [ "$CITRIX_IMAGE" != "none" ]; then + # tmp_CITRIX_IMAGE=$(mktemp) + # $(jq --arg var_name $CITRIX_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) + #fi + if [ "$CITRIX_IMAGE_UUID" != "none" ]; then + tmp_CITRIX_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference | select (.name=="Citrix_Virtual_Apps_and_Desktops_7_1912.iso")).uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) + fi + if [ "$NETWORK_NAME" != "none" ]; then + tmp_NETWORK_NAME=$(mktemp) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + fi + if [ "$VLAN_NAME" != "none" ]; then + tmp_VLAN_NAME=$(mktemp) + $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv 
"$tmp_VLAN_NAME" $JSONFile) + fi + # Credentials + if [ "$LOCAL_PASSWORD" != "none" ]; then + tmp_LOCAL_PASSWORD=$(mktemp) + $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) + fi + if [ "$PE_CREDS_PASSWORD" != "none" ]; then + tmp_PE_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="pe_creds")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + fi + if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then + tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + fi + if [ "$SQL_CREDS_PASSWORD" != "none" ]; then + tmp_SQL_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="pe_creds")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + fi + fi - # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) - tmp_removal=$(mktemp) - $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) - # GET BP NAME (affects all BPs being imported) - # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all - blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) - blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " - blueprint_name="${blueprint_name#\"}" # will remove the prefix " + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " - if [ blueprint_name == 'null' ]; then - echo "\nUnprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?\n" - exit 0 + if [ $blueprint_name == 'null' ]; then + echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "Uploading the updated blueprint: $blueprint_name..." + + # Example curl call from the console: + # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" + # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" + # bp_name="EraServerDeployment" + # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" + # password='techX2019!' 
+ # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" + + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "Upload for $bp_name did not finish." else - # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload - echo "\nUploading the updated blueprint: $blueprint_name...\n" - - # Example curl call from the console: - # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" - # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" - # bp_name="EraServerDeployment" - # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" - # password='techX2019!' - # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" - - url="https://localhost:9440/api/nutanix/v3/blueprints/import_file" - path_to_file=$JSONFile - bp_name=$blueprint_name - project_uuid=$project_uuid - password=$PE_PASSWORD - upload_result=$(curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password") - - #if the upload_result var is not empty then let's say it was succcessful - if [ -z "$upload_result" ]; then - echo "\nUpload for $bp_name did not finish." - else - echo "\nUpload for $bp_name finished." - echo "-----------------------------------------" - # echo "Result: $upload_result" - fi + echo "Upload for $bp_name finished." 
+ echo "-----------------------------------------" + # echo "Result: $upload_result" fi + fi - done - - echo "\nFinished uploading Citrix Blueprint and setting Variables!\n" + echo "Finished uploading ${BLUEPRINT} and setting Variables!" } diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 3d63502..73ace0e 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -172,6 +172,7 @@ case ${1} in && pc_cluster_img_import \ && seedPC \ && upload_era_calm_blueprint \ + && upload_citrix_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 578606271f0e07237f07e06fea371dfafc431a87 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 14:57:53 -0800 Subject: [PATCH 334/691] CICSInfra Calm BP --- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index add5fb1..95ca882 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -20,6 +20,7 @@ SeedPC='seedPC.zip' ERA_Blueprint='EraServerDeployment.json' Citrix_Blueprint='CitrixBootcampInfra.json' Beam_Blueprint='' +CICDInfra_Blueprint='CICD_Infra.json' # Curl and SSH settings CURL_OPTS='--insecure --silent --show-error' # --verbose' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3c34703..6fdde3c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1228,6 +1228,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF function upload_citrix_calm_blueprint() { local DIRECTORY="/home/nutanix/" + local BLUEPRINT=${Citrix_Blueprint} local CALM_PROJECT="BootcampInfra" local DOMAIN=${AUTH_FQDN} local AD_IP=${AUTH_HOST} From cd90b87b9333fd5670f0c712a4c6a570d8eb3cb7 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 16:00:16 -0800 Subject: [PATCH 335/691] CICDInfra BP & Citrix Upload BP Updates --- scripts/lib.pc.sh | 188 +++++++++++++++++++++++++++++++++++++++++++++- scripts/ts2020.sh | 1 + 2 files changed, 185 insertions(+), 4 deletions(-) 
diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6fdde3c..082199e 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1368,19 +1368,199 @@ function upload_citrix_calm_blueprint() { # Credentials if [ "$LOCAL_PASSWORD" != "none" ]; then tmp_LOCAL_PASSWORD=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) + $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="LOCAL")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) fi if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="pe_creds")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="PE_CREDS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="DOMAIN_CREDS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) fi if [ 
"$SQL_CREDS_PASSWORD" != "none" ]; then tmp_SQL_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="pe_creds")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="SQL_CREDS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + fi + fi + + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " + + if [ $blueprint_name == 'null' ]; then + echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "Uploading the updated blueprint: $blueprint_name..." + + # Example curl call from the console: + # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" + # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" + # bp_name="EraServerDeployment" + # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" + # password='techX2019!' 
+ # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" + + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "Upload for $bp_name did not finish." + else + echo "Upload for $bp_name finished." + echo "-----------------------------------------" + # echo "Result: $upload_result" + fi + fi + + echo "Finished uploading ${BLUEPRINT} and setting Variables!" + +} + +############################################################################################################################################################################### +# Routine to upload CICDInfra Calm Blueprint and set variables +############################################################################################################################################################################### + +function upload_CICDInfra_calm_blueprint() { + local DIRECTORY="/home/nutanix/" + local BLUEPRINT=${CICDInfra_Blueprint} + local CALM_PROJECT="BootcampInfra" + local ERA_IP=${ERA_HOST} + local PE_IP=${PE_HOST} + local NETWORK_NAME=${NW1_NAME} + local VLAN_NAME=${NW1_VLAN} + local CENTOS_PASSWORD="-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG +ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK +6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9 +HtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy +hCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR +uz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp 
+6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0 +MrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c +1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj +8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl +JDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf +h45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk +QVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c +oDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0 +EjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj +uFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M +Ez2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k +7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk +hztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC +kPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME +rECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF +2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z +iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ +dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP +gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF +-----END RSA PRIVATE KEY-----" + local DOWNLOAD_BLUEPRINTS + local SERVER_IMAGE="CentOS7.qcow2" + local SERVER_IMAGE_UUID + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + + + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Server Image UUID = $SERVER_IMAGE_UUID" + + # download the blueprint + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o 
${DIRECTORY}${BLUEPRINT}) + log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + + # ensure the directory that contains the blueprints to be imported is not empty + if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then + echo "There are no .json files found in the directory provided." + exit 0 + fi + + # create a list to store all bluprints found in the directory provided by user + #declare -a LIST_OF_BLUEPRINTS=() + + # circle thru all of the files in the provided directory and add file names to a list of blueprints array + # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) + #for FILE in "$DIRECTORY"/*.json; do + # BASENAM="$(basename ${FILE})" + # FILENAME="${BASENAM%.*}" + # LIST_OF_BLUEPRINTS+=("$BASENAM") + #done + + + if [ $CALM_PROJECT != 'none' ]; then + + # make API call and store project_uuid + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "Project $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "Project $CALM_PROJECT exists..." + fi + fi + + # update the user with script progress... + + echo "Starting blueprint updates and then Uploading to Calm..." + + # read the entire JSON file from the directory + JSONFile=${DIRECTORY}${BLUEPRINT} + + echo "Currently updating blueprint $JSONFile..." 
+ + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + + + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT , we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") + if [ ${BLUEPRINT} == "${NAME}" ]; then + # Profile Variables + # VM Configuration + if [ "$SERVER_IMAGE" != "none" ]; then + tmp_SERVER_IMAGE=$(mktemp) + $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) + fi + if [ "$SERVER_IMAGE_UUID" != "none" ]; then + tmp_SERVER_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference | select (.name=="CentOS7.qcow2")).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + fi + if [ "$NETWORK_NAME" != "none" ]; then + tmp_NETWORK_NAME=$(mktemp) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + fi + if [ "$VLAN_NAME" != "none" ]; then + tmp_VLAN_NAME=$(mktemp) + $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + fi + # Credentials + if [ "$CENTOS_PASSWORD" != "none" ]; then + 
tmp_CENTOS_PASSWORD=$(mktemp) + $(jq --arg var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="CENTOS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) fi fi diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 73ace0e..14716dc 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -173,6 +173,7 @@ case ${1} in && seedPC \ && upload_era_calm_blueprint \ && upload_citrix_calm_blueprint \ + && upload_CICDInfra_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From d0112fbff0b2e240737fa249dcf0e25370342a54 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 17:34:04 -0800 Subject: [PATCH 336/691] Updates for launch --- scripts/lib.pc.sh | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 082199e..09bf781 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1206,7 +1206,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1220,8 +1220,23 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Finished uploading ${BLUEPRINT} and setting Variables!" 
+ #Getting the Blueprint UUID + ERA_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==EraServerDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "ERA Blueprint UUID = $ERA_BLUEPRINT_UUID" + + # GET The Blueprint payload + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Era Server", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + + # Launch the BLUEPRINT + + echo "Launching the Era Server Application" + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}/launch" + } + ############################################################################################################################################################################### # Routine to upload Citrix Calm Blueprint and set variables ############################################################################################################################################################################### @@ -1413,7 +1428,7 @@ function upload_citrix_calm_blueprint() { bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') #if the upload_result var is not 
empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1427,8 +1442,23 @@ function upload_citrix_calm_blueprint() { echo "Finished uploading ${BLUEPRINT} and setting Variables!" + #Getting the Blueprint UUID + CITRIX_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CitrixBootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID" + + # GET The Blueprint payload + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Citrix Infra", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + + # Launch the BLUEPRINT + + echo "Launching the Era Server Application" + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" + } + ############################################################################################################################################################################### # Routine to upload CICDInfra Calm Blueprint and set variables ############################################################################################################################################################################### @@ -1593,7 +1623,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + upload_result=$(curl 
${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then From e5d56cd72df52564fe61a887eaca650890d873a3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 19:09:54 -0800 Subject: [PATCH 337/691] Updates for Peer and Images --- scripts/global.vars.sh | 18 ++++++++ scripts/lib.pe.sh | 97 ++++++++++++++++++++++++++++++++++++++++++ scripts/ts2020.sh | 10 +++-- 3 files changed, 121 insertions(+), 4 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 95ca882..9cc017b 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -29,6 +29,24 @@ CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' SSH_OPTS+=' -q' # -v' +#################################################### +# +# 3rd Party images used at GTS or Add-On Labs +# +################################################### +#Peer Software +PeerMgmtServer='Windows2016-PeerMgmt-14feb20' +PeerAgentServer='Windows2016-PeerAgent-12feb20' +PMC="PeerMgmt" +AGENTA="PeerAgent-Files" +AGENTB="PeerAgent-Win" + +#Hycu +HycuServer='HYCU-4.0.3-Demo' + +#Veeam +VeeamServer='' + ################################## # # Look for JQ, AutoDC, and QCOW2 Repos in DC specific below. 
diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 40a6e5c..adc9071 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -896,3 +896,100 @@ function pc_destroy() { acli vm.off ${_vm} && acli -y vm.delete ${_vm} done } + +############################################################################################################################################################################### +# Routine to deploy the Peer Management Center +############################################################################################################################################################################### +# MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands +deploy_peer_mgmt_server() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerMgmtServer} | wc --lines) == 0 )); then + log "Import ${PeerMgmtServer} image from ${QCOW2_REPOS}..." + acli image.create ${PeerMgmtServer} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}peer/${PeerMgmtServer}.qcow2" + else + log "Image found, assuming ready. Skipping ${PeerMgmtServer} import." + fi + + echo "Creating temp folder and applying perms..." + mkdir /home/nutanix/peer_staging/ + + VMNAME=$1 + + ### Get sysyprep config file ready ### + + echo "${VMNAME} - Prepping sysprep config..." 
+ # MTM Create a temp folder for sysprep file work as to not clutter up nutanix home + #mkdir /home/nutanix/peer_staging/ + + #MTM todo have unattend.xml staged somewhere else + wget http://10.42.194.11/workshop_staging/peer/unattend.xml -P /home/nutanix/peer_staging/ + mv /home/nutanix/peer_staging/unattend.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml + chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml + sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml + + ### Deploy PMC Server ### + + echo "${VMNAME} - Deploying VM..." + #log "Create ${VMNAME} VM based on ${IMAGENAME} image" + #acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" + # MTM TODO replace net1 with appropriate variable + acli "vm.nic_create ${VMNAME} network=Secondary" + + #log "Power on ${VMNAME} VM..." + echo "${VMNAME} - Powering on..." + acli "vm.on ${VMNAME}" + + echo "${VMNAME} - Deployed." 
+ +} + +############################################################################################################################################################################### +# Routine to deploy a Peer Agent +############################################################################################################################################################################### +# MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands +deploy_peer_agent_server() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerAgentServer} | wc --lines) == 0 )); then + log "Import ${PeerAgentServer} image from ${QCOW2_REPOS}..." + acli image.create ${PeerAgentServer} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}peer/${PeerAgentServer}.qcow2" + else + log "Image found, assuming ready. Skipping ${PeerAgentServer} import." + fi + + VMNAME=$1 + + ### Get sysyprep config file ready ### + + echo "${VMNAME} - Prepping sysprep config..." + # MTM Create a temp folder for sysprep file work as to not clutter up nutanix home + #mkdir /home/nutanix/peer_staging/ + + #MTM todo have unattend.xml staged somewhere else + wget http://10.42.194.11/workshop_staging/peer/unattend.xml -P /home/nutanix/peer_staging/ + mv /home/nutanix/peer_staging/unattend.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml + chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml + sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml + + ### Deploy Agent Server ### + + echo "${VMNAME} - Deploying VM..." 
+ #log "Create ${VMNAME} VM based on ${IMAGENAME} image" + #acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" + # MTM TODO replace net1 with appropriate variable + acli "vm.nic_create ${VMNAME} network=Secondary" + + #log "Power on ${VMNAME} VM..." + echo "${VMNAME} - Powering on..." + acli "vm.on ${VMNAME}" + + echo "${VMNAME} - Deployed." + +} diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 14716dc..d44a2ad 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -74,7 +74,9 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - + && deploy_peer_mgmt_server "${PMC}" \ + && deploy_peer_agent_server "${AGENTA}" \ + && deploy_peer_agent_server "${AGENTB}" #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish @@ -93,13 +95,11 @@ case ${1} in ERA-Server-build-1.2.0.1.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ - #Windows2012R2.qcow2 \ #Win10v1903.qcow2 \ #ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - #ERA-Server-build-1.2.0.1.qcow2 \ - #HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ #move-3.4.1.qcow2 \ + #MSSQL-2016-VM.qcow2 \ #GTSOracle/19c-april/19c-bootdisk.qcow2 \ #GTSOracle/19c-april/19c-disk1.qcow2 \ #GTSOracle/19c-april/19c-disk2.qcow2 \ @@ -110,6 +110,8 @@ case ${1} in #GTSOracle/19c-april/19c-disk7.qcow2 \ #GTSOracle/19c-april/19c-disk8.qcow2 \ #GTSOracle/19c-april/19c-disk9.qcow2 \ + #Windows2012R2.qcow2 \ + #HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ ) export ISO_IMAGES=(\ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ From 4e356eb321089c083007b47620c673c9bc8ce7f1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 19:24:43 -0800 Subject: [PATCH 338/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index d44a2ad..4065f74 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -74,7 +74,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - && deploy_peer_mgmt_server "${PMC}" \ + deploy_peer_mgmt_server "${PMC}" \ && deploy_peer_agent_server "${AGENTA}" \ && deploy_peer_agent_server "${AGENTB}" #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & From 2544e844b839f5b99fa914bb0b95938b5335c727 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 21:30:47 -0800 Subject: [PATCH 339/691] Updates for Calm BP upload --- scripts/lib.pc.sh | 154 ++++++++++++++++++++++++++++++++++++++-------- scripts/ts2020.sh | 32 +++++----- 2 files changed, 143 insertions(+), 43 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 09bf781..21a5002 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1028,7 +1028,7 @@ EOF ############################################################################################################################################################################### function upload_era_calm_blueprint() { - local DIRECTORY="/home/nutanix/" + local DIRECTORY="/home/nutanix/era" local BLUEPRINT=${ERA_Blueprint} local CALM_PROJECT="BootcampInfra" local ERA_IP=${ERA_HOST} @@ -1070,8 +1070,9 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF local DOWNLOAD_BLUEPRINTS local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + local CURL_HTTP_OPTS="--max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure" + mkdir $DIRECTORY #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1079,7 +1080,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "ERA Image UUID = $ERA_IMAGE_UUID" # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}${BLUEPRINT}) + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o 
${DIRECTORY}/${BLUEPRINT}) log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty @@ -1089,15 +1090,48 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # create a list to store all bluprints found in the directory provided by user - #declare -a LIST_OF_BLUEPRINTS=() + declare -a LIST_OF_BLUEPRINTS=() # circle thru all of the files in the provided directory and add file names to a list of blueprints array # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - #for FILE in "$DIRECTORY"/*.json; do - # BASENAM="$(basename ${FILE})" - # FILENAME="${BASENAM%.*}" - # LIST_OF_BLUEPRINTS+=("$BASENAM") - #done + for FILE in "$DIRECTORY"/*.json; do + BASENAM="$(basename ${FILE})" + FILENAME="${BASENAM%.*}" + LIST_OF_BLUEPRINTS+=("$BASENAM") + done + + # echo $LIST_OF_BLUEPRINTS + # if the list of blueprints is not empty then: + if ((${#LIST_OF_BLUEPRINTS[@]})); then + + if [ $CALM_PROJECT != 'none' ]; then + + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + + # formulate the curl to check for project + _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" + + # make API call and store project_uuid + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "Project $CALM_PROJECT was not found. Please check the name and retry." 
+ exit 0 + else + echo "Project $CALM_PROJECT exists..." + fi + fi + else + echo 'No JSON files found in' + $DIRECTORY +' ... nothing to import!' + fi + + # update the user with script progress... + _num_of_files=${#LIST_OF_BLUEPRINTS[@]} + echo "Number of .json files found: ${_num_of_files}" + echo "Starting blueprint updates and then Uploading to Calm..." if [ $CALM_PROJECT != 'none' ]; then @@ -1119,8 +1153,11 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Starting blueprint updates and then Uploading to Calm..." + + for elem in "${LIST_OF_BLUEPRINTS[@]}"; do # read the entire JSON file from the directory - JSONFile=${DIRECTORY}${BLUEPRINT} + #JSONFile=${DIRECTORY}${BLUEPRINT + JSONFile=${DIRECTORY}/"$elem" echo "Currently updating blueprint $JSONFile..." @@ -1138,7 +1175,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ ${BLUEPRINT} == "${NAME}" ]; then + if [ "$elem" == "${NAME}" ]; then # Profile Variables if [ "$ERA_IP" != "none" ]; then tmp_ERA_IP=$(mktemp) @@ -1218,6 +1255,8 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi fi + done + echo "Finished uploading ${BLUEPRINT} and setting Variables!" 
#Getting the Blueprint UUID @@ -1236,13 +1275,12 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF } - ############################################################################################################################################################################### # Routine to upload Citrix Calm Blueprint and set variables ############################################################################################################################################################################### function upload_citrix_calm_blueprint() { - local DIRECTORY="/home/nutanix/" + local DIRECTORY="/home/nutanix/citrix" local BLUEPRINT=${Citrix_Blueprint} local CALM_PROJECT="BootcampInfra" local DOMAIN=${AUTH_FQDN} @@ -1266,6 +1304,8 @@ function upload_citrix_calm_blueprint() { local CITRIX_IMAGE_UUID local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + mkdir $DIRECTORY + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1285,6 +1325,21 @@ function upload_citrix_calm_blueprint() { exit 0 fi + # create a list to store all bluprints found in the directory provided by user + declare -a LIST_OF_BLUEPRINTS=() + + # circle thru all of the files in the provided directory and add file names to a list of blueprints array + # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) + for FILE in "$DIRECTORY"/*.json; do + BASENAM="$(basename ${FILE})" + FILENAME="${BASENAM%.*}" + LIST_OF_BLUEPRINTS+=("$BASENAM") + done + + # echo $LIST_OF_BLUEPRINTS + # if the list of blueprints is not empty then: + if ((${#LIST_OF_BLUEPRINTS[@]})); then + if [ 
$CALM_PROJECT != 'none' ]; then # curl command needed: @@ -1305,13 +1360,19 @@ function upload_citrix_calm_blueprint() { echo "Project $CALM_PROJECT exists..." fi fi + else + echo 'No JSON files found in' + $DIRECTORY +' ... nothing to import!' + fi # update the user with script progress... - + _num_of_files=${#LIST_OF_BLUEPRINTS[@]} + echo "Number of .json files found: ${_num_of_files}" echo "Starting blueprint updates and then Uploading to Calm..." + for elem in "${LIST_OF_BLUEPRINTS[@]}"; do # read the entire JSON file from the directory - JSONFile=${DIRECTORY}${BLUEPRINT} + #JSONFile=${DIRECTORY}${BLUEPRINT} + JSONFile=${DIRECTORY}/"$elem" echo "Currently updating blueprint $JSONFile..." @@ -1332,7 +1393,7 @@ function upload_citrix_calm_blueprint() { fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ ${BLUEPRINT} == "${NAME}" ]; then + if [ "$elem" == "${NAME}" ]; then # Profile Variables if [ "$DOMAIN" != "none" ]; then tmp_DOMAIN=$(mktemp) @@ -1428,7 +1489,7 @@ function upload_citrix_calm_blueprint() { bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid "https://localhost:9440/api/nutanix/v3/blueprints/import_file") #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1440,6 +1501,8 @@ function upload_citrix_calm_blueprint() { fi fi +done + echo "Finished uploading ${BLUEPRINT} and setting Variables!" 
#Getting the Blueprint UUID @@ -1458,13 +1521,12 @@ function upload_citrix_calm_blueprint() { } - ############################################################################################################################################################################### # Routine to upload CICDInfra Calm Blueprint and set variables ############################################################################################################################################################################### function upload_CICDInfra_calm_blueprint() { - local DIRECTORY="/home/nutanix/" + local DIRECTORY="/home/nutanix/cicdinfra" local BLUEPRINT=${CICDInfra_Blueprint} local CALM_PROJECT="BootcampInfra" local ERA_IP=${ERA_HOST} @@ -1503,6 +1565,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF local SERVER_IMAGE_UUID local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + mkdir $DIRECTORY #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1520,15 +1583,48 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # create a list to store all bluprints found in the directory provided by user - #declare -a LIST_OF_BLUEPRINTS=() + declare -a LIST_OF_BLUEPRINTS=() # circle thru all of the files in the provided directory and add file names to a list of blueprints array # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - #for FILE in "$DIRECTORY"/*.json; do - # BASENAM="$(basename ${FILE})" - # FILENAME="${BASENAM%.*}" - # LIST_OF_BLUEPRINTS+=("$BASENAM") - #done + for FILE in "$DIRECTORY"/*.json; do + BASENAM="$(basename ${FILE})" + 
FILENAME="${BASENAM%.*}" + LIST_OF_BLUEPRINTS+=("$BASENAM") + done + + # echo $LIST_OF_BLUEPRINTS + # if the list of blueprints is not empty then: + if ((${#LIST_OF_BLUEPRINTS[@]})); then + + if [ $CALM_PROJECT != 'none' ]; then + + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + + # formulate the curl to check for project + _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" + + # make API call and store project_uuid + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "Project $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "Project $CALM_PROJECT exists..." + fi + fi + else + echo 'No JSON files found in' + $DIRECTORY +' ... nothing to import!' + fi + + # update the user with script progress... + _num_of_files=${#LIST_OF_BLUEPRINTS[@]} + echo "Number of .json files found: ${_num_of_files}" + echo "Starting blueprint updates and then Uploading to Calm..." if [ $CALM_PROJECT != 'none' ]; then @@ -1550,8 +1646,10 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Starting blueprint updates and then Uploading to Calm..." + for elem in "${LIST_OF_BLUEPRINTS[@]}"; do # read the entire JSON file from the directory - JSONFile=${DIRECTORY}${BLUEPRINT} + #JSONFile=${DIRECTORY}${BLUEPRINT} + JSONFile=${DIRECTORY}/"$elem" echo "Currently updating blueprint $JSONFile..." 
@@ -1568,7 +1666,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ ${BLUEPRINT} == "${NAME}" ]; then + if [ "$elem" == "${NAME}" ]; then # Profile Variables # VM Configuration if [ "$SERVER_IMAGE" != "none" ]; then @@ -1635,6 +1733,8 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi fi +done + echo "Finished uploading ${BLUEPRINT} and setting Variables!" } diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 4065f74..3a0f868 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -46,15 +46,15 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - && prism_pro_server_deploy \ - && files_install \ - && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ - && sleep 30 \ - && file_analytics_install \ - && sleep 30 \ - && create_file_analytics_server \ - && sleep 30 + #&& prism_pro_server_deploy \ + #&& files_install \ + #&& sleep 30 \ + #&& create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + #&& sleep 30 \ + #&& file_analytics_install \ + #&& sleep 30 \ + #&& create_file_analytics_server \ + #&& sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -74,9 +74,9 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - deploy_peer_mgmt_server "${PMC}" \ - && deploy_peer_agent_server "${AGENTA}" \ - && deploy_peer_agent_server "${AGENTB}" + #deploy_peer_mgmt_server "${PMC}" \ + #&& deploy_peer_agent_server "${AGENTA}" \ + #&& deploy_peer_agent_server "${AGENTB}" #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish @@ -164,13 +164,13 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - && objects_enable \ + #&& objects_enable \ && lcm \ && pc_project \ - && object_store \ - && karbon_image_download \ + #&& object_store \ + #&& karbon_image_download \ && images \ - && flow_enable \ + #&& flow_enable \ && pc_cluster_img_import \ && seedPC \ && upload_era_calm_blueprint \ From 1501698b1e213ca023e57f73ab549f1410dffc35 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 21:34:24 -0800 Subject: [PATCH 340/691] Update ts2020.sh --- scripts/ts2020.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 3a0f868..081f90c 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -164,13 +164,13 @@ case ${1} in ssp_auth \ && calm_enable \ && karbon_enable \ - #&& objects_enable \ + && objects_enable \ && lcm \ && pc_project \ #&& object_store \ #&& karbon_image_download \ - && images \ - #&& flow_enable \ + images \ + && flow_enable \ && pc_cluster_img_import \ && seedPC \ && upload_era_calm_blueprint \ From 18a1949e0995af3251e100418f0afb82b431890e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 15 Feb 2020 23:46:33 -0800 Subject: [PATCH 341/691] Calm BP --- scripts/lib.pc.sh | 4 ++-- scripts/lib.pe.sh | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 21a5002..242f28b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1243,7 +1243,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST 
https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1489,7 +1489,7 @@ function upload_citrix_calm_blueprint() { bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid "https://localhost:9440/api/nutanix/v3/blueprints/import_file") + upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index adc9071..b5f9277 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -903,6 +903,8 @@ function pc_destroy() { # MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands deploy_peer_mgmt_server() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerMgmtServer} | wc --lines) == 0 )); then log "Import ${PeerMgmtServer} image from ${QCOW2_REPOS}..." 
acli image.create ${PeerMgmtServer} \ From 102f071ea363c673c1c9a0475d402f8cab2bbae4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 16 Feb 2020 02:02:30 -0800 Subject: [PATCH 342/691] Calm BP Test --- scripts/lib.pc.sh | 6 +++--- scripts/ts2020.sh | 28 ++++++++++++++-------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 242f28b..3fc4076 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1243,7 +1243,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1489,7 +1489,7 @@ function upload_citrix_calm_blueprint() { bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1721,7 +1721,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X 
POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid 'https://localhost:9440/api/nutanix/v3/blueprints/import_file') #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 081f90c..f267414 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -46,15 +46,15 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - #&& prism_pro_server_deploy \ - #&& files_install \ - #&& sleep 30 \ - #&& create_file_server "${NW1_NAME}" "${NW2_NAME}" \ - #&& sleep 30 \ - #&& file_analytics_install \ - #&& sleep 30 \ - #&& create_file_analytics_server \ - #&& sleep 30 + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -74,7 +74,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - #deploy_peer_mgmt_server "${PMC}" \ + deploy_peer_mgmt_server "${PMC}" \ #&& deploy_peer_agent_server "${AGENTA}" \ #&& deploy_peer_agent_server "${AGENTB}" #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & @@ -96,7 +96,7 @@ case ${1} in Windows2016.qcow2 \ CentOS7.qcow2 \ #Win10v1903.qcow2 \ - #ToolsVM.qcow2 \ + ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ #move-3.4.1.qcow2 \ #MSSQL-2016-VM.qcow2 \ @@ -167,9 +167,9 @@ case ${1} in && objects_enable \ && lcm \ && pc_project \ - #&& object_store \ - #&& karbon_image_download \ - images \ + && object_store \ + && karbon_image_download \ + && images \ && flow_enable \ && pc_cluster_img_import \ && seedPC \ From 749853e0943b70f4502088b6617a9c605a83d8b2 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 16 Feb 2020 18:02:51 -0800 Subject: [PATCH 343/691] Calm BPs --- scripts/lib.pc.sh | 297 ++++++++++++++++++++-------------------------- scripts/lib.pe.sh | 8 +- scripts/ts2020.sh | 4 +- 3 files changed, 135 insertions(+), 174 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3fc4076..1bcccf0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1039,7 +1039,9 @@ function upload_era_calm_blueprint() { local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} local ERAADMIN_PASSWORD="nutanix/4u" + local ERAADMIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" + local PE_CREDS_PASSWORD_MODIFIED="true" local ERACLI_PASSWORD="-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK @@ -1067,6 +1069,7 @@ iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF -----END RSA PRIVATE KEY-----" + local ERACLI_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID @@ -1079,6 +1082,10 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "ERA Image UUID = $ERA_IMAGE_UUID" + NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' 
--user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "NETWORK UUID = $NETWORK_UUID" + # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" @@ -1089,55 +1096,15 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF exit 0 fi - # create a list to store all bluprints found in the directory provided by user - declare -a LIST_OF_BLUEPRINTS=() - - # circle thru all of the files in the provided directory and add file names to a list of blueprints array - # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - for FILE in "$DIRECTORY"/*.json; do - BASENAM="$(basename ${FILE})" - FILENAME="${BASENAM%.*}" - LIST_OF_BLUEPRINTS+=("$BASENAM") - done - - # echo $LIST_OF_BLUEPRINTS - # if the list of blueprints is not empty then: - if ((${#LIST_OF_BLUEPRINTS[@]})); then - if [ $CALM_PROJECT != 'none' ]; then # curl command needed: # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' - # formulate the curl to check for project - _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" - # make API call and store project_uuid project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') - if [ -z "$project_uuid" ]; then - # project wasn't found - # exit at this point as we don't want to assume all blueprints should then hit the 'default' project - echo "Project $CALM_PROJECT was not found. Please check the name and retry." 
- exit 0 - else - echo "Project $CALM_PROJECT exists..." - fi - fi - else - echo 'No JSON files found in' + $DIRECTORY +' ... nothing to import!' - fi - - # update the user with script progress... - _num_of_files=${#LIST_OF_BLUEPRINTS[@]} - echo "Number of .json files found: ${_num_of_files}" - echo "Starting blueprint updates and then Uploading to Calm..." - - - if [ $CALM_PROJECT != 'none' ]; then - - # make API call and store project_uuid - project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + echo "Projet UUID = $project_uuid" if [ -z "$project_uuid" ]; then # project wasn't found @@ -1153,17 +1120,17 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Starting blueprint updates and then Uploading to Calm..." - - for elem in "${LIST_OF_BLUEPRINTS[@]}"; do # read the entire JSON file from the directory - #JSONFile=${DIRECTORY}${BLUEPRINT - JSONFile=${DIRECTORY}/"$elem" + JSONFile="${DIRECTORY}${BLUEPRINT}" echo "Currently updating blueprint $JSONFile..." 
echo "${CALM_PROJECT} network UUID: ${project_uuid}" echo "ERA_IP=${ERA_IP}" echo "PE_IP=${PE_IP}" + echo "ERA_IMAGE=${ERA_IMAGE}" + echo "ERA_IMAGE_UUID=${ERA_IMAGE_UUID}" + echo "NETWORK_UUID=${NETWORK_UUID}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) @@ -1175,42 +1142,62 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ "$elem" == "${NAME}" ]; then + if [ ${BLUEPRINT} == "${NAME}" ]; then # Profile Variables if [ "$ERA_IP" != "none" ]; then tmp_ERA_IP=$(mktemp) # add the new variable to the json file and save it - $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="ERA_IP")).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) + $(jq --arg var_name $ERA_IP '.spec.resources.app_profile_list[0].variable_list[0].value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) fi # VM Configuration if [ "$ERA_IMAGE" != "none" ]; then tmp_ERA_IMAGE=$(mktemp) - $(jq --arg var_name $ERA_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) + $(jq --arg var_name $ERA_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) fi if [ "$ERA_IMAGE_UUID" != "none" ]; then tmp_ERA_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $ERA_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $ERA_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile 
>"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) fi if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + $(jq --arg var_name $NETWORK_NAME '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi - if [ "$VLAN_NAME" != "none" ]; then - tmp_VLAN_NAME=$(mktemp) - $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + if [ "$NETWORK_UUID" != "none" ]; then + tmp_NETWORK_UUID=$(mktemp) + $(jq --arg var_name $NETWORK_UUID '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi + #if [ "$NETWORK_NAME" != "none" ]; then + # tmp_NETWORK_NAME=$(mktemp) + # $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + #fi + #if [ "$VLAN_NAME" != "none" ]; then + # tmp_VLAN_NAME=$(mktemp) + # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + #fi # Credentials if [ "$ERAADMIN_PASSWORD" != "none" ]; then tmp_ERAADMIN_PASSWORD=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraAdmin")).secret.attrs.secret_reference=$var_name' $JSONFile 
>"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) + $(jq --arg var_name $ERAADMIN_PASSWORD '.spec.resources.credential_definition_list[0].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) + fi + if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then + tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) fi if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="pe_creds")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD '.spec.resources.credential_definition_list[1].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + fi + if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi if [ "$ERACLI_PASSWORD" != "none" ]; then tmp_ERACLI_PASSWORD=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="EraCLI")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) + $(jq --arg var_name $ERACLI_PASSWORD '.spec.resources.credential_definition_list[2].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv 
"$tmp_ERACLI_PASSWORD" $JSONFile) + fi + if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then + tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) fi fi @@ -1255,8 +1242,6 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi fi - done - echo "Finished uploading ${BLUEPRINT} and setting Variables!" #Getting the Blueprint UUID @@ -1294,10 +1279,15 @@ function upload_citrix_calm_blueprint() { local BPG_RKTOOLS_URL="none" local NutanixAcropolis_Installed_Path="none" local LOCAL_PASSWORD="nutanix/4u" + local LOCAL_PASSWORD_MODIFIED="true" local DOMAIN_CREDS_PASSWORD="nutanix/4u" + local DOMAIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" + local PE_CREDS_PASSWORD_MODIFIED="true" local SQL_CREDS_PASSWORD="nutanix/4u" + local SQL_CREDS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS + local NETWORK_UUID local SERVER_IMAGE="Windows2016.qcow2" local SERVER_IMAGE_UUID local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" @@ -1315,6 +1305,10 @@ function upload_citrix_calm_blueprint() { echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" + NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "NETWORK UUID = $NETWORK_UUID" + # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" @@ -1325,32 +1319,16 @@ function upload_citrix_calm_blueprint() { exit 0 fi - # create a list to store all bluprints found in the directory provided by user - declare -a LIST_OF_BLUEPRINTS=() - - # 
circle thru all of the files in the provided directory and add file names to a list of blueprints array - # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - for FILE in "$DIRECTORY"/*.json; do - BASENAM="$(basename ${FILE})" - FILENAME="${BASENAM%.*}" - LIST_OF_BLUEPRINTS+=("$BASENAM") - done - - # echo $LIST_OF_BLUEPRINTS - # if the list of blueprints is not empty then: - if ((${#LIST_OF_BLUEPRINTS[@]})); then - if [ $CALM_PROJECT != 'none' ]; then # curl command needed: # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' - # formulate the curl to check for project - _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" - # make API call and store project_uuid project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + echo "Projet UUID = $project_uuid" + if [ -z "$project_uuid" ]; then # project wasn't found # exit at this point as we don't want to assume all blueprints should then hit the 'default' project @@ -1360,19 +1338,12 @@ function upload_citrix_calm_blueprint() { echo "Project $CALM_PROJECT exists..." fi fi - else - echo 'No JSON files found in' + $DIRECTORY +' ... nothing to import!' - fi # update the user with script progress... - _num_of_files=${#LIST_OF_BLUEPRINTS[@]} - echo "Number of .json files found: ${_num_of_files}" + echo "Starting blueprint updates and then Uploading to Calm..." - for elem in "${LIST_OF_BLUEPRINTS[@]}"; do - # read the entire JSON file from the directory - #JSONFile=${DIRECTORY}${BLUEPRINT} - JSONFile=${DIRECTORY}/"$elem" + JSONFile=${DIRECTORY}${BLUEPRINT} echo "Currently updating blueprint $JSONFile..." 
@@ -1382,6 +1353,11 @@ function upload_citrix_calm_blueprint() { echo "PE_IP=${PE_IP}" echo "DDC_IP=${DDC_IP}" echo "CVM_NETWORK=${CVM_NETWORK}" + echo "SERVER_IMAGE=${SERVER_IMAGE}" + echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" + echo "CITRIX_IMAGE=${CITRIX_IMAGE}" + echo "CITRIX_IMAGE_UUID=${CITRIX_IMAGE_UUID}" + echo "NETWORK_UUID=${NETWORK_UUID}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) @@ -1393,70 +1369,90 @@ function upload_citrix_calm_blueprint() { fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ "$elem" == "${NAME}" ]; then + if [ ${BLUEPRINT} == "${NAME}" ]; then # Profile Variables if [ "$DOMAIN" != "none" ]; then tmp_DOMAIN=$(mktemp) # add the new variable to the json file and save it - $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="DOMAIN")).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) + $(jq --arg var_name $DOMAIN'.spec.resources.app_profile_list[0].variable_list[0].value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) fi if [ "$AD_IP" != "none" ]; then tmp_AD_IP=$(mktemp) - $(jq --arg var_name $AD_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="AD_IP")).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) + $(jq --arg var_name $AD_IP '.spec.resources.app_profile_list[0].variable_list[1].value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) fi if [ "$PE_IP" != "none" ]; then tmp_PE_IP=$(mktemp) - $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="PE_IP")).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) + $(jq --arg var_name $PE_IP'.spec.resources.app_profile_list[0].variable_list[2].value=$var_name' $JSONFile >"$tmp_PE_IP" && 
mv "$tmp_PE_IP" $JSONFile) fi if [ "$DDC_IP" != "none" ]; then tmp_DDC_IP=$(mktemp) - $(jq --arg var_name $DDC_IP '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="DDC_IP")).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) + $(jq --arg var_name $DDC_IP '.spec.resources.app_profile_list[0].variable_list[6].value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) fi if [ "$CVM_NETWORK" != "none" ]; then tmp_CVM_NETWORK=$(mktemp) - $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[] | select (.name=="CVM_NETWORK")).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) + $(jq --arg var_name $CVM_NETWORK '.spec.resources.app_profile_list[0].variable_list[4].value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) fi # VM Configuration - #if [ "$SERVER_IMAGE" != "none" ]; then - # tmp_SERVER_IMAGE=$(mktemp) - # $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv #"$tmp_SERVER_IMAGE" $JSONFile) - #fi + if [ "$SERVER_IMAGE" != "none" ]; then + tmp_SERVER_IMAGE=$(mktemp) + $(jq --arg var_name $SERVER_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) + fi if [ "$SERVER_IMAGE_UUID" != "none" ]; then tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference | select (.name=="Windows2016.qcow2")).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + fi + if [ 
"$CITRIX_IMAGE" != "none" ]; then + tmp_CITRIX_IMAGE=$(mktemp) + $(jq --arg var_name $CITRIX_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) fi - #if [ "$CITRIX_IMAGE" != "none" ]; then - # tmp_CITRIX_IMAGE=$(mktemp) - # $(jq --arg var_name $CITRIX_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) - #fi if [ "$CITRIX_IMAGE_UUID" != "none" ]; then tmp_CITRIX_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference | select (.name=="Citrix_Virtual_Apps_and_Desktops_7_1912.iso")).uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $CITRIX_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) fi if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + $(jq --arg var_name $NETWORK_NAME '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi - if [ "$VLAN_NAME" != "none" ]; then - tmp_VLAN_NAME=$(mktemp) - $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + if [ "$NETWORK_UUID" != "none" ]; then + tmp_NETWORK_UUID=$(mktemp) + $(jq --arg var_name $NETWORK_UUID 
'.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi + #if [ "$VLAN_NAME" != "none" ]; then + # tmp_VLAN_NAME=$(mktemp) + # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + #fi # Credentials if [ "$LOCAL_PASSWORD" != "none" ]; then tmp_LOCAL_PASSWORD=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="LOCAL")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) + $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) fi - if [ "$PE_CREDS_PASSWORD" != "none" ]; then - tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="PE_CREDS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then + tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) fi if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="DOMAIN_CREDS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" 
$JSONFile) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + fi + if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) + fi + if [ "$PE_CREDS_PASSWORD" != "none" ]; then + tmp_PE_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + fi + if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi if [ "$SQL_CREDS_PASSWORD" != "none" ]; then tmp_SQL_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="SQL_CREDS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + fi + if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED 
'.spec.resources.credential_definition_list[3].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) fi fi @@ -1489,7 +1485,7 @@ function upload_citrix_calm_blueprint() { bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid "https://localhost:9440/api/nutanix/v3/blueprints/import_file") #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1501,8 +1497,6 @@ function upload_citrix_calm_blueprint() { fi fi -done - echo "Finished uploading ${BLUEPRINT} and setting Variables!" #Getting the Blueprint UUID @@ -1560,7 +1554,9 @@ iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF -----END RSA PRIVATE KEY-----" + local CENTOS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS + local NETWORK_UUID local SERVER_IMAGE="CentOS7.qcow2" local SERVER_IMAGE_UUID local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " @@ -1572,6 +1568,10 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Server Image UUID = $SERVER_IMAGE_UUID" + NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "NETWORK UUID = $NETWORK_UUID" + # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L 
${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}${BLUEPRINT}) log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" @@ -1582,55 +1582,15 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF exit 0 fi - # create a list to store all bluprints found in the directory provided by user - declare -a LIST_OF_BLUEPRINTS=() - - # circle thru all of the files in the provided directory and add file names to a list of blueprints array - # IMPORTANT NOTE: THE FILES NAMES FOR THE JSON FILES BEING IMPORTED CAN'T HAVE ANY SPACES (IN THIS SCRIPT) - for FILE in "$DIRECTORY"/*.json; do - BASENAM="$(basename ${FILE})" - FILENAME="${BASENAM%.*}" - LIST_OF_BLUEPRINTS+=("$BASENAM") - done - - # echo $LIST_OF_BLUEPRINTS - # if the list of blueprints is not empty then: - if ((${#LIST_OF_BLUEPRINTS[@]})); then - if [ $CALM_PROJECT != 'none' ]; then # curl command needed: # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' - # formulate the curl to check for project - _url_pc="https://localhost:9440/api/nutanix/v3/projects/list" - # make API call and store project_uuid project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') - if [ -z "$project_uuid" ]; then - # project wasn't found - # exit at this point as we don't want to assume all blueprints should then hit the 'default' project - echo "Project $CALM_PROJECT was not found. Please check the name and retry." - exit 0 - else - echo "Project $CALM_PROJECT exists..." - fi - fi - else - echo 'No JSON files found in' + $DIRECTORY +' ... nothing to import!' - fi - - # update the user with script progress... 
- _num_of_files=${#LIST_OF_BLUEPRINTS[@]} - echo "Number of .json files found: ${_num_of_files}" - echo "Starting blueprint updates and then Uploading to Calm..." - - - if [ $CALM_PROJECT != 'none' ]; then - - # make API call and store project_uuid - project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + echo "Projet UUID = $project_uuid" if [ -z "$project_uuid" ]; then # project wasn't found @@ -1643,17 +1603,14 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # update the user with script progress... - echo "Starting blueprint updates and then Uploading to Calm..." - for elem in "${LIST_OF_BLUEPRINTS[@]}"; do - # read the entire JSON file from the directory - #JSONFile=${DIRECTORY}${BLUEPRINT} - JSONFile=${DIRECTORY}/"$elem" + JSONFile=${DIRECTORY}${BLUEPRINT} echo "Currently updating blueprint $JSONFile..." 
echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "NETWORK_UUID=${NETWORK_UUID}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint @@ -1666,29 +1623,33 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ "$elem" == "${NAME}" ]; then + if [ ${BLUEPRINT} == "${NAME}" ]; then # Profile Variables # VM Configuration if [ "$SERVER_IMAGE" != "none" ]; then tmp_SERVER_IMAGE=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) fi if [ "$SERVER_IMAGE_UUID" != "none" ]; then tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.disk_list[0].data_source_reference | select (.name=="CentOS7.qcow2")).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) fi if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + $(jq --arg var_name $NETWORK_NAME '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name=$var_name' $JSONFile 
>"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi - if [ "$VLAN_NAME" != "none" ]; then - tmp_VLAN_NAME=$(mktemp) - $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) + if [ "$NETWORK_UUID" != "none" ]; then + tmp_NETWORK_UUID=$(mktemp) + $(jq --arg var_name $NETWORK_UUID '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi # Credentials if [ "$CENTOS_PASSWORD" != "none" ]; then tmp_CENTOS_PASSWORD=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].variable_list[] | select (.name=="CENTOS")).secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) + $(jq --arg var_name $CENTOS_PASSWORD '.spec.resources.credential_definition_list[0].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) + fi + if [ "$CENTOS_PASSWORD_MODIFIED" != "none" ]; then + tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) fi fi diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index b5f9277..47c9310 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -935,8 +935,8 @@ deploy_peer_mgmt_server() { echo "${VMNAME} - Deploying VM..." 
#log "Create ${VMNAME} VM based on ${IMAGENAME} image" - #acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + #acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=Secondary" @@ -982,8 +982,8 @@ deploy_peer_agent_server() { echo "${VMNAME} - Deploying VM..." #log "Create ${VMNAME} VM based on ${IMAGENAME} image" - #acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" + #acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=Secondary" diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index f267414..c0818b1 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ 
-75,8 +75,8 @@ case ${1} in log "PC = https://${PC_HOST}:9440" deploy_peer_mgmt_server "${PMC}" \ - #&& deploy_peer_agent_server "${AGENTA}" \ - #&& deploy_peer_agent_server "${AGENTB}" + && deploy_peer_agent_server "${AGENTA}" \ + && deploy_peer_agent_server "${AGENTB}" #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish From 33bf069cdd56e7fd7e62c3573169f452e8e13060 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 16 Feb 2020 20:56:51 -0800 Subject: [PATCH 344/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1bcccf0..0682d41 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1121,7 +1121,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Starting blueprint updates and then Uploading to Calm..." # read the entire JSON file from the directory - JSONFile="${DIRECTORY}${BLUEPRINT}" + JSONFile="${DIRECTORY}/${BLUEPRINT}" echo "Currently updating blueprint $JSONFile..." @@ -1343,7 +1343,7 @@ function upload_citrix_calm_blueprint() { echo "Starting blueprint updates and then Uploading to Calm..." - JSONFile=${DIRECTORY}${BLUEPRINT} + JSONFile=${DIRECTORY}/${BLUEPRINT} echo "Currently updating blueprint $JSONFile..." @@ -1605,7 +1605,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # update the user with script progress... echo "Starting blueprint updates and then Uploading to Calm..." - JSONFile=${DIRECTORY}${BLUEPRINT} + JSONFile=${DIRECTORY}/${BLUEPRINT} echo "Currently updating blueprint $JSONFile..." 
From c1144c6c572cdd971f5ecb4f551783275dfb288e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 16 Feb 2020 21:07:17 -0800 Subject: [PATCH 345/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0682d41..470e0a3 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1343,7 +1343,7 @@ function upload_citrix_calm_blueprint() { echo "Starting blueprint updates and then Uploading to Calm..." - JSONFile=${DIRECTORY}/${BLUEPRINT} + JSONFile="${DIRECTORY}/${BLUEPRINT}" echo "Currently updating blueprint $JSONFile..." @@ -1605,7 +1605,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # update the user with script progress... echo "Starting blueprint updates and then Uploading to Calm..." - JSONFile=${DIRECTORY}/${BLUEPRINT} + JSONFile="${DIRECTORY}/${BLUEPRINT}" echo "Currently updating blueprint $JSONFile..." From b84d2e173b7cd40f44e9425299e40ff3a04c7c2b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 16 Feb 2020 23:54:43 -0800 Subject: [PATCH 346/691] Updates for blueprints and images --- scripts/lib.pc.sh | 12 ++++++++---- scripts/ts2020.sh | 36 +++++++++++++++++++----------------- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 470e0a3..75cface 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1250,13 +1250,15 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "ERA Blueprint UUID = $ERA_BLUEPRINT_UUID" # GET The Blueprint payload - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Era Server", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' 
"https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Era Server", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json # Launch the BLUEPRINT echo "Launching the Era Server Application" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}/launch" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}/launch" + + echo "Finished Launching the Era Server Application" } @@ -1505,13 +1507,15 @@ function upload_citrix_calm_blueprint() { echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID" # GET The Blueprint payload - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Citrix Infra", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Citrix Infra", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json # Launch the BLUEPRINT echo "Launching the Era Server Application" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d 
@set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" + + echo "Finished Launching the Era Server Application" } diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index c0818b1..c0b9a03 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -95,29 +95,29 @@ case ${1} in ERA-Server-build-1.2.0.1.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ - #Win10v1903.qcow2 \ + Win10v1903.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - #move-3.4.1.qcow2 \ - #MSSQL-2016-VM.qcow2 \ - #GTSOracle/19c-april/19c-bootdisk.qcow2 \ - #GTSOracle/19c-april/19c-disk1.qcow2 \ - #GTSOracle/19c-april/19c-disk2.qcow2 \ - #GTSOracle/19c-april/19c-disk3.qcow2 \ - #GTSOracle/19c-april/19c-disk4.qcow2 \ - #GTSOracle/19c-april/19c-disk5.qcow2 \ - #GTSOracle/19c-april/19c-disk6.qcow2 \ - #GTSOracle/19c-april/19c-disk7.qcow2 \ - #GTSOracle/19c-april/19c-disk8.qcow2 \ - #GTSOracle/19c-april/19c-disk9.qcow2 \ - #Windows2012R2.qcow2 \ - #HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + move-3.4.1.qcow2 \ + MSSQL-2016-VM.qcow2 \ + GTSOracle/19c-april/19c-bootdisk.qcow2 \ + GTSOracle/19c-april/19c-disk1.qcow2 \ + GTSOracle/19c-april/19c-disk2.qcow2 \ + GTSOracle/19c-april/19c-disk3.qcow2 \ + GTSOracle/19c-april/19c-disk4.qcow2 \ + GTSOracle/19c-april/19c-disk5.qcow2 \ + GTSOracle/19c-april/19c-disk6.qcow2 \ + GTSOracle/19c-april/19c-disk7.qcow2 \ + GTSOracle/19c-april/19c-disk8.qcow2 \ + GTSOracle/19c-april/19c-disk9.qcow2 \ + Windows2012R2.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ ) export ISO_IMAGES=(\ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ - #FrameCCA-2.1.6.iso \ - #FrameGuestAgentInstaller_1.0.2.2_7930.iso \ + FrameCCA-2.1.6.iso \ + FrameGuestAgentInstaller_1.0.2.2_7930.iso \ ) @@ -174,7 +174,9 @@ case ${1} in && pc_cluster_img_import \ && seedPC \ && upload_era_calm_blueprint \ + && sleep 30 \ && upload_citrix_calm_blueprint \ + && sleep 30 \ && upload_CICDInfra_calm_blueprint \ && prism_check 'PC' From 
3d184250c1fa6ff327b3c1f45e5821fec37704d3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 00:07:00 -0800 Subject: [PATCH 347/691] cleanup --- scripts/lib.pe.sh | 2 +- stage_workshop.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 47c9310..6e60b1e 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -624,7 +624,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." diff --git a/stage_workshop.sh b/stage_workshop.sh index 04a08de..95cff4d 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -187,7 +187,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From 3b20d1852e625d14e44b18f74717205d6b3783d3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 00:32:32 -0800 Subject: [PATCH 348/691] Update lib.pc.sh --- scripts/lib.pc.sh | 72 +++++++++++++++++++++++------------------------ 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 75cface..0e67b95 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh 
@@ -1147,24 +1147,24 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF if [ "$ERA_IP" != "none" ]; then tmp_ERA_IP=$(mktemp) # add the new variable to the json file and save it - $(jq --arg var_name $ERA_IP '.spec.resources.app_profile_list[0].variable_list[0].value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) + $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) fi # VM Configuration if [ "$ERA_IMAGE" != "none" ]; then tmp_ERA_IMAGE=$(mktemp) - $(jq --arg var_name $ERA_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) + $(jq --arg var_name $ERA_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) fi if [ "$ERA_IMAGE_UUID" != "none" ]; then tmp_ERA_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $ERA_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $ERA_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) fi if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile 
>"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi if [ "$NETWORK_UUID" != "none" ]; then tmp_NETWORK_UUID=$(mktemp) - $(jq --arg var_name $NETWORK_UUID '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) + $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi #if [ "$NETWORK_NAME" != "none" ]; then # tmp_NETWORK_NAME=$(mktemp) @@ -1177,27 +1177,27 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # Credentials if [ "$ERAADMIN_PASSWORD" != "none" ]; then tmp_ERAADMIN_PASSWORD=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD '.spec.resources.credential_definition_list[0].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) + $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) fi if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) fi if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '.spec.resources.credential_definition_list[1].secret.attrs.secret_reference=$var_name' 
$JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi if [ "$ERACLI_PASSWORD" != "none" ]; then tmp_ERACLI_PASSWORD=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD '.spec.resources.credential_definition_list[2].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) + $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) fi if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) fi fi @@ -1376,48 +1376,48 @@ function upload_citrix_calm_blueprint() { if [ "$DOMAIN" != "none" ]; then 
tmp_DOMAIN=$(mktemp) # add the new variable to the json file and save it - $(jq --arg var_name $DOMAIN'.spec.resources.app_profile_list[0].variable_list[0].value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) + $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) fi if [ "$AD_IP" != "none" ]; then tmp_AD_IP=$(mktemp) - $(jq --arg var_name $AD_IP '.spec.resources.app_profile_list[0].variable_list[1].value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) + $(jq --arg var_name $AD_IP '(.spec.resources.app_profile_list[0].variable_list[1]).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) fi if [ "$PE_IP" != "none" ]; then tmp_PE_IP=$(mktemp) - $(jq --arg var_name $PE_IP'.spec.resources.app_profile_list[0].variable_list[2].value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) + $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[2]).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) fi if [ "$DDC_IP" != "none" ]; then tmp_DDC_IP=$(mktemp) - $(jq --arg var_name $DDC_IP '.spec.resources.app_profile_list[0].variable_list[6].value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) + $(jq --arg var_name $DDC_IP '(.spec.resources.app_profile_list[0].variable_list[6]).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) fi if [ "$CVM_NETWORK" != "none" ]; then tmp_CVM_NETWORK=$(mktemp) - $(jq --arg var_name $CVM_NETWORK '.spec.resources.app_profile_list[0].variable_list[4].value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) + $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[4]).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) fi # VM Configuration if [ "$SERVER_IMAGE" != "none" ]; then tmp_SERVER_IMAGE=$(mktemp) - $(jq 
--arg var_name $SERVER_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) fi if [ "$SERVER_IMAGE_UUID" != "none" ]; then tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) fi if [ "$CITRIX_IMAGE" != "none" ]; then tmp_CITRIX_IMAGE=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) + $(jq --arg var_name $CITRIX_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) fi if [ "$CITRIX_IMAGE_UUID" != "none" ]; then tmp_CITRIX_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).uuid=$var_name' 
$JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) fi if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi if [ "$NETWORK_UUID" != "none" ]; then tmp_NETWORK_UUID=$(mktemp) - $(jq --arg var_name $NETWORK_UUID '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) + $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi #if [ "$VLAN_NAME" != "none" ]; then # tmp_VLAN_NAME=$(mktemp) @@ -1426,35 +1426,35 @@ function upload_citrix_calm_blueprint() { # Credentials if [ "$LOCAL_PASSWORD" != "none" ]; then tmp_LOCAL_PASSWORD=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) + $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) fi if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv 
"$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) fi if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) fi if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) fi if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - 
$(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi if [ "$SQL_CREDS_PASSWORD" != "none" ]; then tmp_SQL_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) fi if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[3].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) fi fi @@ -1632,28 +1632,28 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # VM Configuration if [ "$SERVER_IMAGE" != "none" ]; then tmp_SERVER_IMAGE=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE 
'(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) fi if [ "$SERVER_IMAGE_UUID" != "none" ]; then tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) fi if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi if [ "$NETWORK_UUID" != "none" ]; then tmp_NETWORK_UUID=$(mktemp) - $(jq --arg var_name $NETWORK_UUID '.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) + $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi # Credentials if [ "$CENTOS_PASSWORD" != "none" ]; then tmp_CENTOS_PASSWORD=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD '.spec.resources.credential_definition_list[0].secret.attrs.secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" 
&& mv "$tmp_CENTOS_PASSWORD" $JSONFile) + $(jq --arg var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) fi if [ "$CENTOS_PASSWORD_MODIFIED" != "none" ]; then tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) fi fi From 39a582775ddaaa3fd2129e823b0931ad852db8b3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 11:33:17 -0800 Subject: [PATCH 349/691] Update lib.pc.sh --- scripts/lib.pc.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0e67b95..54f1836 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1142,7 +1142,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ ${BLUEPRINT} == "${NAME}" ]; then + # Profile Variables if [ "$ERA_IP" != "none" ]; then tmp_ERA_IP=$(mktemp) @@ -1199,7 +1199,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) fi - fi + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on 
import. (affects all BPs being imported) tmp_removal=$(mktemp) @@ -1371,7 +1371,7 @@ function upload_citrix_calm_blueprint() { fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ ${BLUEPRINT} == "${NAME}" ]; then + # Profile Variables if [ "$DOMAIN" != "none" ]; then tmp_DOMAIN=$(mktemp) @@ -1456,7 +1456,7 @@ function upload_citrix_calm_blueprint() { tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) fi - fi + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) tmp_removal=$(mktemp) @@ -1627,7 +1627,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - if [ ${BLUEPRINT} == "${NAME}" ]; then + # Profile Variables # VM Configuration if [ "$SERVER_IMAGE" != "none" ]; then @@ -1655,7 +1655,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) fi - fi + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. 
(affects all BPs being imported) tmp_removal=$(mktemp) From cdd0ba1b46c54535dbbb50f4d9728ec8dd90d6b4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 11:39:28 -0800 Subject: [PATCH 350/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 54f1836..eb6b7e2 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1630,27 +1630,33 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # Profile Variables # VM Configuration + echo "Making $SERVER_IMAGE Edits" if [ "$SERVER_IMAGE" != "none" ]; then tmp_SERVER_IMAGE=$(mktemp) $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) fi + echo "Making $SERVER_IMAGE_UUID Edits" if [ "$SERVER_IMAGE_UUID" != "none" ]; then tmp_SERVER_IMAGE_UUID=$(mktemp) $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) fi + echo "Making $NETWORK_NAME Edits" if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi + echo "Making $NETWORK_UUID Edits" if [ "$NETWORK_UUID" != "none" ]; then tmp_NETWORK_UUID=$(mktemp) $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi # Credentials + echo "Making $CENTOS_PASSWORD Edits" if [ "$CENTOS_PASSWORD" != "none" ]; then tmp_CENTOS_PASSWORD=$(mktemp) $(jq --arg 
var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) fi + echo "Making $CENTOS_PASSWORD_MODIFIED Edits" if [ "$CENTOS_PASSWORD_MODIFIED" != "none" ]; then tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) From 405984d4afa6dfbc7529b31d9f02b9cb127e3b5a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 11:56:30 -0800 Subject: [PATCH 351/691] Updates for Objects --- scripts/snc_ts2020.sh | 4 ++++ scripts/ts2020.sh | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh index 434d532..3226e10 100755 --- a/scripts/snc_ts2020.sh +++ b/scripts/snc_ts2020.sh @@ -141,6 +141,10 @@ case ${1} in export NUCLEI_SERVER='localhost' export NUCLEI_USERNAME="${PRISM_ADMIN}" export NUCLEI_PASSWORD="${PE_PASSWORD}" + export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" + export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" + export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" + export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" # nuclei -debug -username admin -server localhost -password x vm.list if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index c0b9a03..10b08be 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -91,6 +91,11 @@ case ${1} in PC | pc ) . 
lib.pc.sh + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + export QCOW2_IMAGES=(\ ERA-Server-build-1.2.0.1.qcow2 \ Windows2016.qcow2 \ From 689ecba687c6b00ecfd27bbe8f89382a43e40fab Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 12:14:12 -0800 Subject: [PATCH 352/691] Added in Echo's for tracking --- scripts/lib.pc.sh | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index eb6b7e2..f81023b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1144,24 +1144,29 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") # Profile Variables + echo "Making ERA_IP Edits" if [ "$ERA_IP" != "none" ]; then tmp_ERA_IP=$(mktemp) # add the new variable to the json file and save it $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) fi # VM Configuration + echo "Making ERA_IMAGE Edits" if [ "$ERA_IMAGE" != "none" ]; then tmp_ERA_IMAGE=$(mktemp) $(jq --arg var_name $ERA_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) fi + echo "Making ERA_IP_UUID Edits" if [ "$ERA_IMAGE_UUID" != "none" ]; then tmp_ERA_IMAGE_UUID=$(mktemp) $(jq --arg var_name $ERA_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) fi + echo "Making NETWORK_NAME Edits" if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) $(jq --arg var_name $NETWORK_NAME 
'(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi + echo "Making NETWORK_UUID Edits" if [ "$NETWORK_UUID" != "none" ]; then tmp_NETWORK_UUID=$(mktemp) $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) @@ -1175,26 +1180,32 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) #fi # Credentials + echo "Making ERAADMIN_PASSWORD Edits" if [ "$ERAADMIN_PASSWORD" != "none" ]; then tmp_ERAADMIN_PASSWORD=$(mktemp) $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) fi + echo "Making ERAADMIN_PASSWORD_MODIFIED Edits" if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) fi + echo "Making PE_CREDS_PASSWORD Edits" if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi + echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name 
$PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi + echo "Making ERACLI_PASSWORD Edits" if [ "$ERACLI_PASSWORD" != "none" ]; then tmp_ERACLI_PASSWORD=$(mktemp) $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) fi + echo "Making ERACLI_PASSWORD_MODIFIED Edits" if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) @@ -1373,48 +1384,59 @@ function upload_citrix_calm_blueprint() { # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") # Profile Variables + echo "Making DOMAIN Edits" if [ "$DOMAIN" != "none" ]; then tmp_DOMAIN=$(mktemp) # add the new variable to the json file and save it $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) fi + echo "Making AD_IP Edits" if [ "$AD_IP" != "none" ]; then tmp_AD_IP=$(mktemp) $(jq --arg var_name $AD_IP '(.spec.resources.app_profile_list[0].variable_list[1]).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) fi + echo "Making PE_IP Edits" if [ "$PE_IP" != "none" ]; then tmp_PE_IP=$(mktemp) $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[2]).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) fi + echo "Making DDC_IP Edits" if [ "$DDC_IP" != "none" ]; then tmp_DDC_IP=$(mktemp) $(jq --arg var_name $DDC_IP 
'(.spec.resources.app_profile_list[0].variable_list[6]).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) fi + echo "Making CVM_NETWORK Edits" if [ "$CVM_NETWORK" != "none" ]; then tmp_CVM_NETWORK=$(mktemp) $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[4]).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) fi # VM Configuration + echo "Making SERVER_IMAGE Edits" if [ "$SERVER_IMAGE" != "none" ]; then tmp_SERVER_IMAGE=$(mktemp) $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) fi + echo "Making SERVER_IMAGE_UUID Edits" if [ "$SERVER_IMAGE_UUID" != "none" ]; then tmp_SERVER_IMAGE_UUID=$(mktemp) $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) fi + echo "Making CITRIX_IMAGE Edits" if [ "$CITRIX_IMAGE" != "none" ]; then tmp_CITRIX_IMAGE=$(mktemp) $(jq --arg var_name $CITRIX_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) fi + echo "Making CITRIX_IMAGE_UUID Edits" if [ "$CITRIX_IMAGE_UUID" != "none" ]; then tmp_CITRIX_IMAGE_UUID=$(mktemp) $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) fi + echo "Making NETWORK_NAME Edits" if [ "$NETWORK_NAME" != "none" ]; then tmp_NETWORK_NAME=$(mktemp) $(jq --arg var_name $NETWORK_NAME 
'(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) fi + echo "Making NETWORK_UUID Edits" if [ "$NETWORK_UUID" != "none" ]; then tmp_NETWORK_UUID=$(mktemp) $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) @@ -1424,34 +1446,42 @@ function upload_citrix_calm_blueprint() { # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) #fi # Credentials + echo "Making LOCAL_PASSWORD Edits" if [ "$LOCAL_PASSWORD" != "none" ]; then tmp_LOCAL_PASSWORD=$(mktemp) $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) fi + echo "Making LOCAL_PASSWORD_MODIFIED Edits" if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) fi + echo "Making DOMAIN_CREDS_PASSWORD Edits" if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) fi + echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name 
$DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) fi + echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi + echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi + echo "Making SQL_CREDS_PASSWORD Edits" if [ "$SQL_CREDS_PASSWORD" != "none" ]; then tmp_SQL_CREDS_PASSWORD=$(mktemp) $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) fi + echo "Making SQL_CREDS_PASSWORD_MODIFIED Edits" if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) From 54625cc6fd48528c3283d74049fe3ad18b882956 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 17 Feb 2020 19:04:08 -0800 Subject: [PATCH 353/691] Updates --- scripts/calm_rsa_key.env | 27 ++ scripts/lib.pc.sh | 540 ++++++++++++++++++++++----------------- scripts/ts2020.sh | 6 +- 3 files changed, 335 insertions(+), 238 
deletions(-) create mode 100644 scripts/calm_rsa_key.env diff --git a/scripts/calm_rsa_key.env b/scripts/calm_rsa_key.env new file mode 100644 index 0000000..585f21c --- /dev/null +++ b/scripts/calm_rsa_key.env @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG +ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK +6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9 +HtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy +hCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR +uz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp +6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0 +MrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c +1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj +8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl +JDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf +h45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk +QVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c +oDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0 +EjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj +uFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M +Ez2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k +7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk +hztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC +kPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME +rECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF +2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z +iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ +dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP +gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF +-----END RSA PRIVATE KEY----- diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 
f81023b..d96a63f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1024,71 +1024,57 @@ EOF } ############################################################################################################################################################################### -# Routine to upload Era Calm Blueprint and set variables +# Routine to upload Citrix Calm Blueprint and set variables ############################################################################################################################################################################### -function upload_era_calm_blueprint() { - local DIRECTORY="/home/nutanix/era" - local BLUEPRINT=${ERA_Blueprint} +function upload_citrix_calm_blueprint() { + local DIRECTORY="/home/nutanix/citrix" + local BLUEPRINT=${Citrix_Blueprint} local CALM_PROJECT="BootcampInfra" - local ERA_IP=${ERA_HOST} + local DOMAIN=${AUTH_FQDN} + local AD_IP=${AUTH_HOST} local PE_IP=${PE_HOST} - local CLSTR_NAME="none" - local CTR_UUID=${_storage_default_uuid} - local CTR_NAME=${STORAGE_DEFAULT} + local DDC_IP=${CITRIX_DDC_HOST} + local NutanixAcropolisPlugin="none" + local CVM_NETWORK=${NW1_NAME} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} - local ERAADMIN_PASSWORD="nutanix/4u" - local ERAADMIN_PASSWORD_MODIFIED="true" + local BPG_RKTOOLS_URL="none" + local NutanixAcropolis_Installed_Path="none" + local LOCAL_PASSWORD="nutanix/4u" + local LOCAL_PASSWORD_MODIFIED="true" + local DOMAIN_CREDS_PASSWORD="nutanix/4u" + local DOMAIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" local PE_CREDS_PASSWORD_MODIFIED="true" - local ERACLI_PASSWORD="-----BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG -ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK -6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9 -HtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy -hCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR 
-uz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp -6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0 -MrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c -1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj -8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl -JDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf -h45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk -QVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c -oDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0 -EjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj -uFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M -Ez2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k -7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk -hztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC -kPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME -rECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF -2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z -iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ -dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP -gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF ------END RSA PRIVATE KEY-----" - local ERACLI_PASSWORD_MODIFIED="true" + local SQL_CREDS_PASSWORD="nutanix/4u" + local SQL_CREDS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS - local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" - local ERA_IMAGE_UUID - local CURL_HTTP_OPTS="--max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure" + local NETWORK_UUID + local SERVER_IMAGE="Windows2016.qcow2" + local SERVER_IMAGE_UUID + local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" + local CITRIX_IMAGE_UUID + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json 
--insecure " mkdir $DIRECTORY #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "ERA Image UUID = $ERA_IMAGE_UUID" + echo "Server Image UUID = $SERVER_IMAGE_UUID" + + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) - log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) + log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then @@ -1120,22 +1106,26 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Starting 
blueprint updates and then Uploading to Calm..." - # read the entire JSON file from the directory JSONFile="${DIRECTORY}/${BLUEPRINT}" echo "Currently updating blueprint $JSONFile..." echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "ERA_IP=${ERA_IP}" + echo "DOMAIN=${DOMAIN}" + echo "AD_IP=${AD_IP}" echo "PE_IP=${PE_IP}" - echo "ERA_IMAGE=${ERA_IMAGE}" - echo "ERA_IMAGE_UUID=${ERA_IMAGE_UUID}" + echo "DDC_IP=${DDC_IP}" + echo "CVM_NETWORK=${CVM_NETWORK}" + echo "SERVER_IMAGE=${SERVER_IMAGE}" + echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" + echo "CITRIX_IMAGE=${CITRIX_IMAGE}" + echo "CITRIX_IMAGE_UUID=${CITRIX_IMAGE_UUID}" echo "NETWORK_UUID=${NETWORK_UUID}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) - # ADD PROJECT , we need to add it into the JSON data + # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data if [ $CALM_PROJECT != 'none' ]; then # add the new atributes to the JSON and overwrite the old JSON file with the new one $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) @@ -1144,22 +1134,52 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") # Profile Variables - echo "Making ERA_IP Edits" - if [ "$ERA_IP" != "none" ]; then - tmp_ERA_IP=$(mktemp) + echo "Making DOMAIN Edits" + if [ "$DOMAIN" != "none" ]; then + tmp_DOMAIN=$(mktemp) # add the new variable to the json file and save it - $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" 
$JSONFile) + $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) + fi + echo "Making AD_IP Edits" + if [ "$AD_IP" != "none" ]; then + tmp_AD_IP=$(mktemp) + $(jq --arg var_name $AD_IP '(.spec.resources.app_profile_list[0].variable_list[1]).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) + fi + echo "Making PE_IP Edits" + if [ "$PE_IP" != "none" ]; then + tmp_PE_IP=$(mktemp) + $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[2]).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) + fi + echo "Making DDC_IP Edits" + if [ "$DDC_IP" != "none" ]; then + tmp_DDC_IP=$(mktemp) + $(jq --arg var_name $DDC_IP '(.spec.resources.app_profile_list[0].variable_list[6]).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) + fi + echo "Making CVM_NETWORK Edits" + if [ "$CVM_NETWORK" != "none" ]; then + tmp_CVM_NETWORK=$(mktemp) + $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[4]).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) fi # VM Configuration - echo "Making ERA_IMAGE Edits" - if [ "$ERA_IMAGE" != "none" ]; then - tmp_ERA_IMAGE=$(mktemp) - $(jq --arg var_name $ERA_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) + echo "Making SERVER_IMAGE Edits" + if [ "$SERVER_IMAGE" != "none" ]; then + tmp_SERVER_IMAGE=$(mktemp) + $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) fi - echo "Making ERA_IP_UUID Edits" - if [ "$ERA_IMAGE_UUID" != "none" ]; then - tmp_ERA_IMAGE_UUID=$(mktemp) - $(jq --arg var_name 
$ERA_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) + echo "Making SERVER_IMAGE_UUID Edits" + if [ "$SERVER_IMAGE_UUID" != "none" ]; then + tmp_SERVER_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) + fi + echo "Making CITRIX_IMAGE Edits" + if [ "$CITRIX_IMAGE" != "none" ]; then + tmp_CITRIX_IMAGE=$(mktemp) + $(jq --arg var_name $CITRIX_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) + fi + echo "Making CITRIX_IMAGE_UUID Edits" + if [ "$CITRIX_IMAGE_UUID" != "none" ]; then + tmp_CITRIX_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) fi echo "Making NETWORK_NAME Edits" if [ "$NETWORK_NAME" != "none" ]; then @@ -1171,44 +1191,50 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF tmp_NETWORK_UUID=$(mktemp) $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi - #if [ "$NETWORK_NAME" != "none" ]; then - # tmp_NETWORK_NAME=$(mktemp) - # $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) - #fi #if [ "$VLAN_NAME" != "none" ]; then 
# tmp_VLAN_NAME=$(mktemp) # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) #fi # Credentials - echo "Making ERAADMIN_PASSWORD Edits" - if [ "$ERAADMIN_PASSWORD" != "none" ]; then - tmp_ERAADMIN_PASSWORD=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) + echo "Making LOCAL_PASSWORD Edits" + if [ "$LOCAL_PASSWORD" != "none" ]; then + tmp_LOCAL_PASSWORD=$(mktemp) + $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) fi - echo "Making ERAADMIN_PASSWORD_MODIFIED Edits" - if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then - tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) + echo "Making LOCAL_PASSWORD_MODIFIED Edits" + if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then + tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) fi - echo "Making PE_CREDS_PASSWORD Edits" + echo "Making DOMAIN_CREDS_PASSWORD Edits" + if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then + tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && 
mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + fi + echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" + if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) + fi + echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi - echo "Making ERACLI_PASSWORD Edits" - if [ "$ERACLI_PASSWORD" != "none" ]; then - tmp_ERACLI_PASSWORD=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) + echo "Making SQL_CREDS_PASSWORD Edits" + if [ 
"$SQL_CREDS_PASSWORD" != "none" ]; then + tmp_SQL_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) fi - echo "Making ERACLI_PASSWORD_MODIFIED Edits" - if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then - tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) + echo "Making SQL_CREDS_PASSWORD_MODIFIED Edits" + if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) fi @@ -1241,7 +1267,7 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid "https://localhost:9440/api/nutanix/v3/blueprints/import_file") #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1256,75 +1282,116 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF echo "Finished uploading ${BLUEPRINT} and setting Variables!" 
#Getting the Blueprint UUID - ERA_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==EraServerDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + CITRIX_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CitrixBootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "ERA Blueprint UUID = $ERA_BLUEPRINT_UUID" + echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID" + + echo "Set Credentials" + + # GET The Blueprint so it can be updated + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" > set_blueprint_credentials_file.json + + JSONFile="set_blueprint_credentials_file.json" + + # Credentials + echo "Making LOCAL_PASSWORD Edits" + if [ "$LOCAL_PASSWORD" != "none" ]; then + tmp_LOCAL_PASSWORD=$(mktemp) + $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) + fi + echo "Making LOCAL_PASSWORD_MODIFIED Edits" + if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then + tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) + fi + echo "Making DOMAIN_CREDS_PASSWORD Edits" + if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then + tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && 
mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + fi + echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" + if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) + fi + echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" + if [ "$PE_CREDS_PASSWORD" != "none" ]; then + tmp_PE_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + fi + echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" + if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) + fi + echo "Making SQL_CREDS_PASSWORD Edits" + if [ "$SQL_CREDS_PASSWORD" != "none" ]; then + tmp_SQL_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + fi + echo "Making SQL_CREDS_PASSWORD_MODIFIED Edits" + if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) + fi + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} 
-X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" + + echo "Finished Updating Credentials" # GET The Blueprint payload - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Era Server", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Citrix Infra", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json # Launch the BLUEPRINT echo "Launching the Era Server Application" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}/launch" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" echo "Finished Launching the Era Server Application" } ############################################################################################################################################################################### -# Routine to upload Citrix Calm Blueprint and set variables +# Routine to upload Era Calm Blueprint and set variables ############################################################################################################################################################################### -function upload_citrix_calm_blueprint() { - local DIRECTORY="/home/nutanix/citrix" - local 
BLUEPRINT=${Citrix_Blueprint} +function upload_era_calm_blueprint() { + local DIRECTORY="/home/nutanix/era" + local BLUEPRINT=${ERA_Blueprint} local CALM_PROJECT="BootcampInfra" - local DOMAIN=${AUTH_FQDN} - local AD_IP=${AUTH_HOST} + local ERA_IP=${ERA_HOST} local PE_IP=${PE_HOST} - local DDC_IP=${CITRIX_DDC_HOST} - local NutanixAcropolisPlugin="none" - local CVM_NETWORK=${NW1_NAME} + local CLSTR_NAME="none" + local CTR_UUID=${_storage_default_uuid} + local CTR_NAME=${STORAGE_DEFAULT} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} - local BPG_RKTOOLS_URL="none" - local NutanixAcropolis_Installed_Path="none" - local LOCAL_PASSWORD="nutanix/4u" - local LOCAL_PASSWORD_MODIFIED="true" - local DOMAIN_CREDS_PASSWORD="nutanix/4u" - local DOMAIN_PASSWORD_MODIFIED="true" + local ERAADMIN_PASSWORD="nutanix/4u" + local ERAADMIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" local PE_CREDS_PASSWORD_MODIFIED="true" - local SQL_CREDS_PASSWORD="nutanix/4u" - local SQL_CREDS_PASSWORD_MODIFIED="true" + local ERACLI_PASSWORD=`cat /home/nutanix/stageworkshop/scripts/calm_rsa_key.env` + local ERACLI_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS - local NETWORK_UUID - local SERVER_IMAGE="Windows2016.qcow2" - local SERVER_IMAGE_UUID - local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" - local CITRIX_IMAGE_UUID - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" + local ERA_IMAGE_UUID + local CURL_HTTP_OPTS="--max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure" mkdir $DIRECTORY #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 
'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - - echo "Server Image UUID = $SERVER_IMAGE_UUID" - - CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" + echo "ERA Image UUID = $ERA_IMAGE_UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) - log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) + log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then @@ -1356,26 +1423,22 @@ function upload_citrix_calm_blueprint() { echo "Starting blueprint updates and then Uploading to Calm..." + # read the entire JSON file from the directory JSONFile="${DIRECTORY}/${BLUEPRINT}" echo "Currently updating blueprint $JSONFile..." 
echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "DOMAIN=${DOMAIN}" - echo "AD_IP=${AD_IP}" + echo "ERA_IP=${ERA_IP}" echo "PE_IP=${PE_IP}" - echo "DDC_IP=${DDC_IP}" - echo "CVM_NETWORK=${CVM_NETWORK}" - echo "SERVER_IMAGE=${SERVER_IMAGE}" - echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" - echo "CITRIX_IMAGE=${CITRIX_IMAGE}" - echo "CITRIX_IMAGE_UUID=${CITRIX_IMAGE_UUID}" + echo "ERA_IMAGE=${ERA_IMAGE}" + echo "ERA_IMAGE_UUID=${ERA_IMAGE_UUID}" echo "NETWORK_UUID=${NETWORK_UUID}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) - # ADD PROJECT (affects all BPs being imported) if no project was specified on the command line, we've already pre-set the project variable to 'none' if a project was specified, we need to add it into the JSON data + # ADD PROJECT , we need to add it into the JSON data if [ $CALM_PROJECT != 'none' ]; then # add the new atributes to the JSON and overwrite the old JSON file with the new one $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) @@ -1384,52 +1447,22 @@ function upload_citrix_calm_blueprint() { # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") # Profile Variables - echo "Making DOMAIN Edits" - if [ "$DOMAIN" != "none" ]; then - tmp_DOMAIN=$(mktemp) + echo "Making ERA_IP Edits" + if [ "$ERA_IP" != "none" ]; then + tmp_ERA_IP=$(mktemp) # add the new variable to the json file and save it - $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) - fi - echo "Making AD_IP Edits" - if [ "$AD_IP" != "none" ]; then - tmp_AD_IP=$(mktemp) - $(jq --arg var_name $AD_IP 
'(.spec.resources.app_profile_list[0].variable_list[1]).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) - fi - echo "Making PE_IP Edits" - if [ "$PE_IP" != "none" ]; then - tmp_PE_IP=$(mktemp) - $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[2]).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) - fi - echo "Making DDC_IP Edits" - if [ "$DDC_IP" != "none" ]; then - tmp_DDC_IP=$(mktemp) - $(jq --arg var_name $DDC_IP '(.spec.resources.app_profile_list[0].variable_list[6]).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) - fi - echo "Making CVM_NETWORK Edits" - if [ "$CVM_NETWORK" != "none" ]; then - tmp_CVM_NETWORK=$(mktemp) - $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[4]).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) + $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) fi # VM Configuration - echo "Making SERVER_IMAGE Edits" - if [ "$SERVER_IMAGE" != "none" ]; then - tmp_SERVER_IMAGE=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) - fi - echo "Making SERVER_IMAGE_UUID Edits" - if [ "$SERVER_IMAGE_UUID" != "none" ]; then - tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) - fi - echo "Making CITRIX_IMAGE Edits" - if [ "$CITRIX_IMAGE" != "none" ]; then - tmp_CITRIX_IMAGE=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE 
'(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) + echo "Making ERA_IMAGE Edits" + if [ "$ERA_IMAGE" != "none" ]; then + tmp_ERA_IMAGE=$(mktemp) + $(jq --arg var_name $ERA_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) fi - echo "Making CITRIX_IMAGE_UUID Edits" - if [ "$CITRIX_IMAGE_UUID" != "none" ]; then - tmp_CITRIX_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) + echo "Making ERA_IP_UUID Edits" + if [ "$ERA_IMAGE_UUID" != "none" ]; then + tmp_ERA_IMAGE_UUID=$(mktemp) + $(jq --arg var_name $ERA_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) fi echo "Making NETWORK_NAME Edits" if [ "$NETWORK_NAME" != "none" ]; then @@ -1441,50 +1474,44 @@ function upload_citrix_calm_blueprint() { tmp_NETWORK_UUID=$(mktemp) $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) fi + #if [ "$NETWORK_NAME" != "none" ]; then + # tmp_NETWORK_NAME=$(mktemp) + # $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) + #fi #if [ "$VLAN_NAME" != "none" ]; then # tmp_VLAN_NAME=$(mktemp) # $(jq --arg var_name $VLAN_NAME 
'(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) #fi # Credentials - echo "Making LOCAL_PASSWORD Edits" - if [ "$LOCAL_PASSWORD" != "none" ]; then - tmp_LOCAL_PASSWORD=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) - fi - echo "Making LOCAL_PASSWORD_MODIFIED Edits" - if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then - tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making DOMAIN_CREDS_PASSWORD Edits" - if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then - tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) + echo "Making ERAADMIN_PASSWORD Edits" + if [ "$ERAADMIN_PASSWORD" != "none" ]; then + tmp_ERAADMIN_PASSWORD=$(mktemp) + $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) fi - echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" - if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) + echo "Making 
ERAADMIN_PASSWORD_MODIFIED Edits" + if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then + tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) fi - echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" + echo "Making PE_CREDS_PASSWORD Edits" if [ "$PE_CREDS_PASSWORD" != "none" ]; then tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) fi echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) fi - echo "Making SQL_CREDS_PASSWORD Edits" - if [ "$SQL_CREDS_PASSWORD" != "none" ]; then - tmp_SQL_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) + echo "Making ERACLI_PASSWORD Edits" + if [ "$ERACLI_PASSWORD" != "none" ]; then 
+ tmp_ERACLI_PASSWORD=$(mktemp) + $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) fi - echo "Making SQL_CREDS_PASSWORD_MODIFIED Edits" - if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) + echo "Making ERACLI_PASSWORD_MODIFIED Edits" + if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then + tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) fi @@ -1517,7 +1544,7 @@ function upload_citrix_calm_blueprint() { bp_name=$blueprint_name project_uuid=$project_uuid - upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid "https://localhost:9440/api/nutanix/v3/blueprints/import_file") + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) #if the upload_result var is not empty then let's say it was succcessful if [ -z "$upload_result" ]; then @@ -1532,18 +1559,61 @@ function upload_citrix_calm_blueprint() { echo "Finished uploading ${BLUEPRINT} and setting Variables!" 
#Getting the Blueprint UUID - CITRIX_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CitrixBootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + ERA_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==EraServerDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID" + echo "ERA Blueprint UUID = $ERA_BLUEPRINT_UUID" + + echo "Set Credentials" + + # GET The Blueprint so it can be updated + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" > set_blueprint_credentials_file.json + + JSONFile="set_blueprint_credentials_file.json" + + # Credentials + echo "Making ERAADMIN_PASSWORD Edits" + if [ "$ERAADMIN_PASSWORD" != "none" ]; then + tmp_ERAADMIN_PASSWORD=$(mktemp) + $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) + fi + echo "Making ERAADMIN_PASSWORD_MODIFIED Edits" + if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then + tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) + fi + echo "Making PE_CREDS_PASSWORD Edits" + if [ "$PE_CREDS_PASSWORD" != "none" ]; then + tmp_PE_CREDS_PASSWORD=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile 
>"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) + fi + echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" + if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then + tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) + fi + echo "Making ERACLI_PASSWORD Edits" + if [ "$ERACLI_PASSWORD" != "none" ]; then + tmp_ERACLI_PASSWORD=$(mktemp) + $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) + fi + echo "Making ERACLI_PASSWORD_MODIFIED Edits" + if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then + tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) + fi + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" + + echo "Finished Updating Credentials" # GET The Blueprint payload - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Citrix Infra", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": 
"Era Server", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json # Launch the BLUEPRINT echo "Launching the Era Server Application" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}/launch" echo "Finished Launching the Era Server Application" @@ -1561,33 +1631,7 @@ function upload_CICDInfra_calm_blueprint() { local PE_IP=${PE_HOST} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} - local CENTOS_PASSWORD="-----BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG -ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK -6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9 -HtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy -hCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR -uz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp -6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0 -MrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c -1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj -8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl -JDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf -h45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk -QVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c -oDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0 -EjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj -uFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M -Ez2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k 
-7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk -hztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC -kPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME -rECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF -2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z -iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ -dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP -gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF ------END RSA PRIVATE KEY-----" + local CENTOS_PASSWORD=`cat /home/nutanix/stageworkshop/scripts/calm_rsa_key.env` local CENTOS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local NETWORK_UUID @@ -1734,8 +1778,34 @@ gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF fi fi -done - echo "Finished uploading ${BLUEPRINT} and setting Variables!" + #Getting the Blueprint UUID + CICDInfra_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CICD_Infra.json"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "ERA Blueprint UUID = $CICDInfra_BLUEPRINT_UUID" + + echo "Set Credentials" + + # GET The Blueprint so it can be updated + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" > set_blueprint_credentials_file.json + + JSONFile="set_blueprint_credentials_file.json" + + # Credentials + echo "Making $CENTOS_PASSWORD Edits" + if [ "$CENTOS_PASSWORD" != "none" ]; then + tmp_CENTOS_PASSWORD=$(mktemp) + $(jq --arg var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) + fi + echo "Making $CENTOS_PASSWORD_MODIFIED Edits" + if [ "$CENTOS_PASSWORD_MODIFIED" != "none" ]; then + 
tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) + $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) + fi + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" + + echo "Finished Updating Credentials" + } diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 10b08be..f390834 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -95,7 +95,7 @@ case ${1} in export BUCKETS_VIP="${IPV4_PREFIX}.17" export OBJECTS_NW_START="${IPV4_PREFIX}.18" export OBJECTS_NW_END="${IPV4_PREFIX}.21" - + export QCOW2_IMAGES=(\ ERA-Server-build-1.2.0.1.qcow2 \ Windows2016.qcow2 \ @@ -178,10 +178,10 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && seedPC \ - && upload_era_calm_blueprint \ - && sleep 30 \ && upload_citrix_calm_blueprint \ && sleep 30 \ + && upload_era_calm_blueprint \ + && sleep 30 \ && upload_CICDInfra_calm_blueprint \ && prism_check 'PC' From 6a434e7a59c4731e31a6d018209f52dacdf5aef3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 09:25:15 -0800 Subject: [PATCH 354/691] Citrix Updates --- scripts/calm.sh | 25 +++++- scripts/citrix_bootcamp.sh | 176 ++++++++++++++++--------------------- stage_workshop.sh | 4 +- 3 files changed, 103 insertions(+), 102 deletions(-) diff --git a/scripts/calm.sh b/scripts/calm.sh index 7c3622a..afb96ae 100755 --- a/scripts/calm.sh +++ b/scripts/calm.sh @@ -18,6 +18,8 @@ case ${1} in PE | pe ) . lib.pe.sh + export AUTH_SERVER='AutoAD' + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -46,7 +48,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi @@ -60,6 +62,23 @@ case ${1} in PC | pc ) . lib.pc.sh + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + Win10v1903.qcow2 \ + ToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + Nutanix-VirtIO-1.1.5.iso \ + ) + run_once dependencies 'install' 'jq' || exit 13 @@ -106,12 +125,14 @@ case ${1} in ssp_auth \ && calm_enable \ + && lcm \ + && pc_project \ && images \ && pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." - pc_project + #pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 9851f00..21e86ff 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -9,76 +9,19 @@ . global.vars.sh begin -args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' +args_required 'EMAIL PE_PASSWORD PC_VERSION' #dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization - -log "Adding key to ${1} VMs..." -ssh_pubkey & # non-blocking, parallel suitable - # Some parallelization possible to critical path; not much: would require pre-requestite checks to work! case ${1} in PE | pe ) . lib.pe.sh - log "Configure PE role mapping" - ncli authconfig add-role-mapping role=ROLE_CLUSTER_ADMIN entity-type=group name="${DOMAIN_NAME}" entity-values="${DOMAIN_ADMIN_GROUP}" - - log "Creating Reverse Lookup Zone on DC VM" - remote_exec 'ssh' 'AUTH_SERVER' "samba-tool dns zonecreate dc1 ${HPOC_NUMBER}.21.10.in-addr.arpa; service samba-ad-dc restart" - log 'Create custom OUs...' 
- remote_exec 'ssh' 'AUTH_SERVER' "apt install ldb-tools -y -q" - remote_exec 'ssh' 'AUTH_SERVER' "cat << EOF > ous.ldif -dn: OU=Non-PersistentDesktop,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: Non-Persistent Desktop OU - -dn: OU=PersistentDesktop,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: Persistent Desktop OU - -dn: OU=XenAppServer,DC=NTNXLAB,DC=local -changetype: add -objectClass: top -objectClass: organizationalunit -description: XenApp Server OU - -EOF" - remote_exec 'ssh' 'AUTH_SERVER' "ldbmodify -H /var/lib/samba/private/sam.ldb ous.ldif; service samba-ad-dc restart" - - log "Create PE user account XD for MCS Plugin" - ncli user create user-name=xd user-password=nutanix/4u first-name=XenDesktop last-name=Service email-id=no-reply@nutanix.com - ncli user grant-cluster-admin-role user-name=xd - - log "Get UUIDs from cluster:" - NET_UUID=$(acli net.get ${NW1_NAME} | grep "uuid" | cut -f 2 -d ':' | xargs) - log "${NW1_NAME} UUID is ${NET_UUID}" - CONTAINER_UUID=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) - log "${STORAGE_DEFAULT} UUID is ${CONTAINER_UUID}" - - log "Download AFS image from ${AFS_SRC_URL}" - wget -nv ${AFS_SRC_URL} - log "Download AFS metadata JSON from ${AFS_META_URL}" - wget -nv ${AFS_META_URL} - log "Stage AFS" - ncli software upload file-path=/home/nutanix/${AFS_SRC_URL##*/} meta-file-path=/home/nutanix/${AFS_META_URL##*/} software-type=FILE_SERVER - log "Delete AFS sources to free some space" - rm ${AFS_SRC_URL##*/} ${AFS_META_URL##*/} - - curl -u admin:${PE_PASSWORD} -k -H 'Content-Type: application/json' -X POST https://127.0.0.1:9440/api/nutanix/v3/prism_central -d "${DEPLOY_BODY}" - log "Waiting for PC deployment to complete (Sleeping 15m)" - sleep 900 - log "Sending PC configuration script" - pc_send_file stage_citrixhow_pc.sh - - # Execute that file asynchroneously 
remotely (script keeps running on CVM in the background) - log "Launching PC configuration script" - pc_remote_exec "PE_PASSWORD=${PE_PASSWORD} nohup bash /home/nutanix/stage_citrixhow_pc.sh >> pcconfig.log 2>&1 &" + export AUTH_SERVER='AutoAD' + + args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' + ssh_pubkey & # non-blocking, parallel suitable dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ @@ -86,59 +29,87 @@ EOF" && network_configure \ && authentication_source \ && pe_auth \ - && pc_install "${NW1_NAME}" \ - && prism_check 'PC' if (( $? == 0 )) ; then - pc_configure \ - && dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ - log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${PE_HOST}:9440" - log "PC = https://${PC_HOST}:9440" + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" - finish + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi else finish - log "Error 18: in main functional chain, exit!" - exit 18 + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} fi + ;; PC | pc ) . 
lib.pc.sh - #PC_UPGRADE_URL='http://10.21.250.221/images/ahv/techsummit/nutanix_installer_package_pc-release-euphrates-5.5.0.6-stable-14bd63735db09b1c9babdaaf48d062723137fc46.tar.gz' + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" - # Set Prism Central Password to Prism Element Password - # log "Setting PC password to PE password" - # ncli user reset-password user-name="admin" password="${PE_PASSWORD}" + export QCOW2_IMAGES=(\ + ERA-Server-build-1.2.0.1.qcow2 \ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + Win10v1903.qcow2 \ + ToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + Nutanix-VirtIO-1.1.5.iso \ + ) - # Prism Central upgrade - #log "Download PC upgrade image: ${PC_UPGRADE_URL##*/}" - #wget -nv ${PC_UPGRADE_URL} - #log "Prepare PC upgrade image" - #tar -xzf ${PC_UPGRADE_URL##*/} - #rm ${PC_UPGRADE_URL##*/} + run_once - #log "Upgrade PC" - #cd /home/nutanix/install ; ./bin/cluster -i . -p upgrade + dependencies 'install' 'jq' || exit 13 - log "PC Configuration complete on $(date)" - - dependencies 'install' 'sshpass' && dependencies 'install' 'jq' || exit 13 + ssh_pubkey & # non-blocking, parallel suitable pc_passwd + ntnx_cmd # check cli services available? export NUCLEI_SERVER='localhost' export NUCLEI_USERNAME="${PRISM_ADMIN}" export NUCLEI_PASSWORD="${PE_PASSWORD}" - # nuclei -debug -username admin -server localhost -password nx2Tech704\! vm.list + # nuclei -debug -username admin -server localhost -password x vm.list - ntnx_cmd # check cli services available? + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . 
global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi - if [[ ! -z "${2}" ]]; then - # hidden bonus + if [[ ! -z "${2}" ]]; then # hidden bonus log "Don't forget: $0 first.last@nutanixdc.local%password" calm_update && exit 0 fi @@ -147,21 +118,27 @@ EOF" export SLEEP=10 pc_init \ + && pc_dns_add \ && pc_ui \ - && pc_auth - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi - + && pc_auth \ + && pc_smtp ssp_auth \ && calm_enable \ + && lcm \ + && pc_project \ && images \ && flow_enable \ + && pc_cluster_img_import \ + && upload_citrix_calm_blueprint \ + && sleep 30 \ && prism_check 'PC' + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD if (( $? 
== 0 )); then @@ -175,4 +152,7 @@ EOF" exit ${_error} fi ;; + FILES | files | afs ) + files_install + ;; esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 95cff4d..eeba8f8 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -23,7 +23,7 @@ WORKSHOPS=(\ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ #"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ -#"Citrix Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -69,7 +69,7 @@ function stage_clusters() { _pe_launch='calm.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Citrix | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Citrix" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='citrix_bootcamp.sh' _pc_launch=${_pe_launch} From fe0f4e13d760eca76a40dfc40c146447691e7824 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 09:50:57 -0800 Subject: [PATCH 355/691] Calm BPs --- scripts/lib.pc.sh | 6 +++--- scripts/ts2020.sh | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index d96a63f..bbf7a87 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1055,7 +1055,7 @@ function upload_citrix_calm_blueprint() { local SERVER_IMAGE_UUID local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" local CITRIX_IMAGE_UUID - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " mkdir $DIRECTORY @@ -1376,7 +1376,7 @@ function upload_era_calm_blueprint() { local DOWNLOAD_BLUEPRINTS local 
ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID - local CURL_HTTP_OPTS="--max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure" + local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" mkdir $DIRECTORY @@ -1637,7 +1637,7 @@ function upload_CICDInfra_calm_blueprint() { local NETWORK_UUID local SERVER_IMAGE="CentOS7.qcow2" local SERVER_IMAGE_UUID - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " mkdir $DIRECTORY diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index f390834..f7c7ac6 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -123,6 +123,7 @@ case ${1} in Nutanix-VirtIO-1.1.5.iso \ FrameCCA-2.1.6.iso \ FrameGuestAgentInstaller_1.0.2.2_7930.iso \ + VeeamBRv10.iso \ ) From 91d85819c8508b4c90f30ec6a53fb4eb58f16c2f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 10:08:11 -0800 Subject: [PATCH 356/691] Update ts2020.sh --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index f7c7ac6..09373d6 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -101,7 +101,7 @@ case ${1} in Windows2016.qcow2 \ CentOS7.qcow2 \ Win10v1903.qcow2 \ - ToolsVM.qcow2 \ + WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ move-3.4.1.qcow2 \ MSSQL-2016-VM.qcow2 \ From ad26dd4594aa73d440115c5e1c1e0bdaddd7f543 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 10:17:26 -0800 Subject: [PATCH 357/691] Update citrix_bootcamp.sh --- scripts/citrix_bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 21e86ff..f1813b6 100755 --- a/scripts/citrix_bootcamp.sh +++ 
b/scripts/citrix_bootcamp.sh @@ -70,7 +70,6 @@ case ${1} in export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ - ERA-Server-build-1.2.0.1.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ Win10v1903.qcow2 \ From 4b9c66fc1b69385f377a49f30fabdc361e86fbbb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 10:18:32 -0800 Subject: [PATCH 358/691] WinToolsVM --- scripts/citrix_bootcamp.sh | 2 +- scripts/global.vars.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index f1813b6..27aa67d 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -73,7 +73,7 @@ case ${1} in Windows2016.qcow2 \ CentOS7.qcow2 \ Win10v1903.qcow2 \ - ToolsVM.qcow2 \ + WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ) export ISO_IMAGES=(\ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 9cc017b..4b23ede 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -58,7 +58,7 @@ QCOW2_IMAGES=(\ Windows2016.qcow2 \ Windows2012R2.qcow2 \ Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ + WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ERA-Server-build-1.1.1.3.qcow2 \ MSSQL-2016-VM.qcow2 \ From b557570474fdfc5cfa36a76f0647f6eb0d8fcba4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 10:54:34 -0800 Subject: [PATCH 359/691] Update lib.pc.sh --- scripts/lib.pc.sh | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index bbf7a87..8a5af07 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1293,6 +1293,9 @@ function upload_citrix_calm_blueprint() { JSONFile="set_blueprint_credentials_file.json" + # Remove Staatus + echo "Removing Status" + $(jq -c 'del(.status)' $JSONFile) # Credentials echo "Making LOCAL_PASSWORD Edits" if [ "$LOCAL_PASSWORD" != "none" ]; then @@ -1371,7 +1374,7 @@ function upload_era_calm_blueprint() { local ERAADMIN_PASSWORD_MODIFIED="true" local 
PE_CREDS_PASSWORD="${PE_PASSWORD}" local PE_CREDS_PASSWORD_MODIFIED="true" - local ERACLI_PASSWORD=`cat /home/nutanix/stageworkshop/scripts/calm_rsa_key.env` + local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' /home/nutanix/stageworkshop/scripts/calm_rsa_key.env) local ERACLI_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" @@ -1570,6 +1573,9 @@ function upload_era_calm_blueprint() { JSONFile="set_blueprint_credentials_file.json" + # Remove Staatus + echo "Removing Status" + $(jq -c 'del(.status)' $JSONFile) # Credentials echo "Making ERAADMIN_PASSWORD Edits" if [ "$ERAADMIN_PASSWORD" != "none" ]; then @@ -1631,7 +1637,7 @@ function upload_CICDInfra_calm_blueprint() { local PE_IP=${PE_HOST} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} - local CENTOS_PASSWORD=`cat /home/nutanix/stageworkshop/scripts/calm_rsa_key.env` + local CENTOS_PASSWORD=$(awk '{printf "%s\\n", $0}' /home/nutanix/stageworkshop/scripts/calm_rsa_key.env) local CENTOS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local NETWORK_UUID @@ -1792,6 +1798,9 @@ function upload_CICDInfra_calm_blueprint() { JSONFile="set_blueprint_credentials_file.json" + # Remove Staatus + echo "Removing Status" + $(jq -c 'del(.status)' $JSONFile) # Credentials echo "Making $CENTOS_PASSWORD Edits" if [ "$CENTOS_PASSWORD" != "none" ]; then From 5b64f38fbbc7451d41bc7aa20f6bf472f2ae8616 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 12:42:45 -0800 Subject: [PATCH 360/691] Update citrix_bootcamp.sh --- scripts/citrix_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 27aa67d..83363e3 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -123,10 +123,10 @@ case ${1} in && pc_smtp ssp_auth \ + && images \ && calm_enable \ && lcm \ && pc_project \ - && images \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint 
\ From 1ead7f2443af35a9e6ffa38343b0bd2d2e70f7fa Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 12:45:03 -0800 Subject: [PATCH 361/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 8a5af07..88c7881 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1073,8 +1073,8 @@ function upload_citrix_calm_blueprint() { echo "NETWORK UUID = $NETWORK_UUID" # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${CALM_Blueprint} -o ${DIRECTORY}${CALM_Blueprint}) - log "Downloading ${CALM_Blueprint} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}${BLUEPRINT}) + log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then From 6d2c65af36f9558a788280e565c40fc4aad999ce Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 16:40:17 -0800 Subject: [PATCH 362/691] Clean up Calm BP update Code - Citrix --- scripts/calm_rsa_key.env | 27 ----- scripts/citrix_bootcamp.sh | 2 +- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 225 +++++++++---------------------------- 4 files changed, 56 insertions(+), 199 deletions(-) delete mode 100644 scripts/calm_rsa_key.env diff --git a/scripts/calm_rsa_key.env b/scripts/calm_rsa_key.env deleted file mode 100644 index 585f21c..0000000 --- a/scripts/calm_rsa_key.env +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG -ZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK -6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9 -HtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy 
-hCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR -uz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp -6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0 -MrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c -1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj -8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl -JDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf -h45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk -QVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c -oDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0 -EjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj -uFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M -Ez2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k -7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk -hztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC -kPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME -rECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF -2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z -iUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ -dQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP -gmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF ------END RSA PRIVATE KEY----- diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 83363e3..27aa67d 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -123,10 +123,10 @@ case ${1} in && pc_smtp ssp_auth \ - && images \ && calm_enable \ && lcm \ && pc_project \ + && images \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4b23ede..1b42b24 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -17,6 +17,7 @@ ATTEMPTS=40 SLEEP=60 
PrismOpsServer='PrismProLabUtilityServer' SeedPC='seedPC.zip' +CALM_RSA_KEY_FILE='calm_rsa_key.env' ERA_Blueprint='EraServerDeployment.json' Citrix_Blueprint='CitrixBootcampInfra.json' Beam_Blueprint='' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 88c7881..9e76083 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1057,6 +1057,8 @@ function upload_citrix_calm_blueprint() { local CITRIX_IMAGE_UUID local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " + echo "Starting Citrix Blueprint Deployment" + mkdir $DIRECTORY #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter @@ -1073,7 +1075,7 @@ function upload_citrix_calm_blueprint() { echo "NETWORK UUID = $NETWORK_UUID" # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}${BLUEPRINT}) + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty @@ -1131,113 +1133,6 @@ function upload_citrix_calm_blueprint() { $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) fi - # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - - # Profile Variables - echo "Making DOMAIN Edits" - if [ "$DOMAIN" != "none" ]; then - tmp_DOMAIN=$(mktemp) - # add the new variable to the json file and save it - $(jq --arg var_name $DOMAIN'(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_DOMAIN" && mv "$tmp_DOMAIN" $JSONFile) - fi - echo "Making AD_IP Edits" - if [ "$AD_IP" != "none" ]; then - tmp_AD_IP=$(mktemp) - $(jq --arg var_name $AD_IP 
'(.spec.resources.app_profile_list[0].variable_list[1]).value=$var_name' $JSONFile >"$tmp_AD_IP" && mv "$tmp_AD_IP" $JSONFile) - fi - echo "Making PE_IP Edits" - if [ "$PE_IP" != "none" ]; then - tmp_PE_IP=$(mktemp) - $(jq --arg var_name $PE_IP'(.spec.resources.app_profile_list[0].variable_list[2]).value=$var_name' $JSONFile >"$tmp_PE_IP" && mv "$tmp_PE_IP" $JSONFile) - fi - echo "Making DDC_IP Edits" - if [ "$DDC_IP" != "none" ]; then - tmp_DDC_IP=$(mktemp) - $(jq --arg var_name $DDC_IP '(.spec.resources.app_profile_list[0].variable_list[6]).value=$var_name' $JSONFile >"$tmp_DDC_IP" && mv "$tmp_DDC_IP" $JSONFile) - fi - echo "Making CVM_NETWORK Edits" - if [ "$CVM_NETWORK" != "none" ]; then - tmp_CVM_NETWORK=$(mktemp) - $(jq --arg var_name $CVM_NETWORK '(.spec.resources.app_profile_list[0].variable_list[4]).value=$var_name' $JSONFile >"$tmp_CVM_NETWORK" && mv "$tmp_CVM_NETWORK" $JSONFile) - fi - # VM Configuration - echo "Making SERVER_IMAGE Edits" - if [ "$SERVER_IMAGE" != "none" ]; then - tmp_SERVER_IMAGE=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) - fi - echo "Making SERVER_IMAGE_UUID Edits" - if [ "$SERVER_IMAGE_UUID" != "none" ]; then - tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) - fi - echo "Making CITRIX_IMAGE Edits" - if [ "$CITRIX_IMAGE" != "none" ]; then - tmp_CITRIX_IMAGE=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).name=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE" && mv "$tmp_CITRIX_IMAGE" $JSONFile) - fi - echo "Making CITRIX_IMAGE_UUID 
Edits" - if [ "$CITRIX_IMAGE_UUID" != "none" ]; then - tmp_CITRIX_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $CITRIX_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_CITRIX_IMAGE_UUID" && mv "$tmp_CITRIX_IMAGE_UUID" $JSONFile) - fi - echo "Making NETWORK_NAME Edits" - if [ "$NETWORK_NAME" != "none" ]; then - tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) - fi - echo "Making NETWORK_UUID Edits" - if [ "$NETWORK_UUID" != "none" ]; then - tmp_NETWORK_UUID=$(mktemp) - $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) - fi - #if [ "$VLAN_NAME" != "none" ]; then - # tmp_VLAN_NAME=$(mktemp) - # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) - #fi - # Credentials - echo "Making LOCAL_PASSWORD Edits" - if [ "$LOCAL_PASSWORD" != "none" ]; then - tmp_LOCAL_PASSWORD=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) - fi - echo "Making LOCAL_PASSWORD_MODIFIED Edits" - if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then - tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making 
DOMAIN_CREDS_PASSWORD Edits" - if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then - tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) - fi - echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" - if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" - if [ "$PE_CREDS_PASSWORD" != "none" ]; then - tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) - fi - echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" - if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making SQL_CREDS_PASSWORD Edits" - if [ "$SQL_CREDS_PASSWORD" != "none" ]; then - tmp_SQL_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) - fi - echo "Making SQL_CREDS_PASSWORD_MODIFIED Edits" - if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg 
var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - - # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) tmp_removal=$(mktemp) $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) @@ -1255,14 +1150,6 @@ function upload_citrix_calm_blueprint() { # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload echo "Uploading the updated blueprint: $blueprint_name..." - # Example curl call from the console: - # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" - # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" - # bp_name="EraServerDeployment" - # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" - # password='techX2019!' 
- # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" - path_to_file=$JSONFile bp_name=$blueprint_name project_uuid=$project_uuid @@ -1286,59 +1173,40 @@ function upload_citrix_calm_blueprint() { echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID" - echo "Set Credentials" - - # GET The Blueprint so it can be updated - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" > set_blueprint_credentials_file.json + echo "Update Blueprint and writing to temp file" - JSONFile="set_blueprint_credentials_file.json" + DOWNLOADED_JSONFile="${BLUEPRINT}-${CITRIX_BLUEPRINT_UUID}.json" + UPDATED_JSONFile="${BLUEPRINT}-${CITRIX_BLUEPRINT_UUID}-updated.json" - # Remove Staatus - echo "Removing Status" - $(jq -c 'del(.status)' $JSONFile) - # Credentials - echo "Making LOCAL_PASSWORD Edits" - if [ "$LOCAL_PASSWORD" != "none" ]; then - tmp_LOCAL_PASSWORD=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD" && mv "$tmp_LOCAL_PASSWORD" $JSONFile) - fi - echo "Making LOCAL_PASSWORD_MODIFIED Edits" - if [ "$LOCAL_PASSWORD_MODIFIED" != "none" ]; then - tmp_LOCAL_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $LOCAL_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_LOCAL_PASSWORD_MODIFIED" && mv "$tmp_LOCAL_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making DOMAIN_CREDS_PASSWORD Edits" - if [ "$DOMAIN_CREDS_PASSWORD" != "none" ]; then - tmp_DOMAIN_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD" && mv "$tmp_DOMAIN_CREDS_PASSWORD" $JSONFile) - fi - echo "Making 
DOMAIN_CREDS_PASSWORD_MODIFIED Edits" - if [ "$DOMAIN_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_DOMAIN_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $DOMAIN_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" && mv "$tmp_DOMAIN_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making DOMAIN_CREDS_PASSWORD_MODIFIED Edits" - if [ "$PE_CREDS_PASSWORD" != "none" ]; then - tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) - fi - echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" - if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making SQL_CREDS_PASSWORD Edits" - if [ "$SQL_CREDS_PASSWORD" != "none" ]; then - tmp_SQL_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD '(.spec.resources.credential_definition_list[3].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD" && mv "$tmp_SQL_CREDS_PASSWORD" $JSONFile) - fi - echo "Making SQL_CREDS_PASSWORD_MODIFIED Edits" - if [ "$SQL_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_SQL_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $SQL_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[3].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_SQL_CREDS_PASSWORD_MODIFIED" && mv "$tmp_SQL_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json 
"https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" + # GET The Blueprint so it can be updated + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile} + + cat $DOWNLOADED_JSONFile \ + | jq -c 'del(.status)' \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$DOMAIN\")" \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[1].value = \"$AD_IP\")" \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[2].value = \"$PE_IP\")" \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[6].value = \"$DDC_IP\")" \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[4].value = \"$CVM_NETWORK\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.name = \"$CITRIX_IMAGE\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.uuid = \"$CITRIX_IMAGE_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference = \"$LOCAL_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ + | jq -c -r 
"(.spec.resources.credential_definition_list[1].secret.attrs.secret_reference = \"$DOMAIN_CREDS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ + | jq -c -r "(.spec.resources.credential_definition_list[2].secret.attrs.secret_reference = \"$PE_CREDS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ + | jq -c -r "(.spec.resources.credential_definition_list[3].secret.attrs.secret_reference = \"$SQL_CREDS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[3].secret.attrs.is_secret_modified = "true")' \ + > $UPDATED_JSONFile + + echo "Saving Credentials Edits with PUT" + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}" echo "Finished Updating Credentials" @@ -1374,15 +1242,20 @@ function upload_era_calm_blueprint() { local ERAADMIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" local PE_CREDS_PASSWORD_MODIFIED="true" - local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' /home/nutanix/stageworkshop/scripts/calm_rsa_key.env) + local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local ERACLI_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" + echo "Starting Era Blueprint Deployment" + mkdir $DIRECTORY + DOWNLOAD_CALM_RSA_KEY=$(curl -L ${BLUEPRINT_URL}${CALM_RSA_KEY_FILE} -o ${DIRECTORY}/${CALM_RSA_KEY_FILE}) + log "Downloading ${CALM_RSA_KEY_FILE}" + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data 
'{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1575,7 +1448,8 @@ function upload_era_calm_blueprint() { # Remove Staatus echo "Removing Status" - $(jq -c 'del(.status)' $JSONFile) + tmp_REMOVE_STATUS=$(mktemp) + $(jq -c 'del(.status)' $JSONFile) >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_REMOVE_STATUS" $JSONFile # Credentials echo "Making ERAADMIN_PASSWORD Edits" if [ "$ERAADMIN_PASSWORD" != "none" ]; then @@ -1608,6 +1482,8 @@ function upload_era_calm_blueprint() { $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) fi + echo "Saving Credentials Edits with PUT" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" echo "Finished Updating Credentials" @@ -1637,7 +1513,7 @@ function upload_CICDInfra_calm_blueprint() { local PE_IP=${PE_HOST} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} - local CENTOS_PASSWORD=$(awk '{printf "%s\\n", $0}' /home/nutanix/stageworkshop/scripts/calm_rsa_key.env) + local CENTOS_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local CENTOS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local NETWORK_UUID @@ -1645,8 +1521,13 @@ function upload_CICDInfra_calm_blueprint() { local SERVER_IMAGE_UUID local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " + echo "Starting CICDInfra Blueprint Deployment" + mkdir $DIRECTORY + DOWNLOAD_CALM_RSA_KEY=$(curl -L ${BLUEPRINT_URL}${CALM_RSA_KEY_FILE} -o ${DIRECTORY}/${CALM_RSA_KEY_FILE}) + log "Downloading ${CALM_RSA_KEY_FILE}" + #Getting the IMAGE_UUID -- WHen 
changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1657,7 +1538,7 @@ function upload_CICDInfra_calm_blueprint() { echo "NETWORK UUID = $NETWORK_UUID" # download the blueprint - DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}${BLUEPRINT}) + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" # ensure the directory that contains the blueprints to be imported is not empty @@ -1813,6 +1694,8 @@ function upload_CICDInfra_calm_blueprint() { $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) fi + echo "Saving Credentials Edits with PUT" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" echo "Finished Updating Credentials" From 495f5f6a90480b9d09b9c4a4cf28a0b3adfc7ffa Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 16:41:14 -0800 Subject: [PATCH 363/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9e76083..6ed09a2 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1042,13 +1042,9 @@ function upload_citrix_calm_blueprint() { local BPG_RKTOOLS_URL="none" local NutanixAcropolis_Installed_Path="none" local LOCAL_PASSWORD="nutanix/4u" - local LOCAL_PASSWORD_MODIFIED="true" local DOMAIN_CREDS_PASSWORD="nutanix/4u" - local 
DOMAIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" - local PE_CREDS_PASSWORD_MODIFIED="true" local SQL_CREDS_PASSWORD="nutanix/4u" - local SQL_CREDS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local NETWORK_UUID local SERVER_IMAGE="Windows2016.qcow2" From 06047515dddc8d60fc0990a10270473c4b26ea54 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 16:42:14 -0800 Subject: [PATCH 364/691] Update citrix_bootcamp.sh --- scripts/citrix_bootcamp.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 27aa67d..2802d81 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -71,10 +71,8 @@ case ${1} in export QCOW2_IMAGES=(\ Windows2016.qcow2 \ - CentOS7.qcow2 \ Win10v1903.qcow2 \ WinToolsVM.qcow2 \ - Linux_ToolsVM.qcow2 \ ) export ISO_IMAGES=(\ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ @@ -127,6 +125,7 @@ case ${1} in && lcm \ && pc_project \ && images \ + && sleep 60 \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint \ From 353b82d3d3a712753d801430bf52dabc4e6f6be7 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 16:42:42 -0800 Subject: [PATCH 365/691] Update citrix_bootcamp.sh --- scripts/citrix_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 2802d81..183a8af 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -125,7 +125,7 @@ case ${1} in && lcm \ && pc_project \ && images \ - && sleep 60 \ + && sleep 120 \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint \ From ebb4b53cd98ce80dfa863b6496a205a64cb8344b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 16:50:50 -0800 Subject: [PATCH 366/691] Updates --- scripts/citrix_bootcamp.sh | 1 - scripts/era_bootcamp.sh | 49 +++++++++++++++++++++++++------------- stage_workshop.sh | 6 ++--- 3 
files changed, 36 insertions(+), 20 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 183a8af..59b6f6d 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -79,7 +79,6 @@ case ${1} in Nutanix-VirtIO-1.1.5.iso \ ) - run_once dependencies 'install' 'jq' || exit 13 diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 5e645b8..daab47b 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -18,6 +18,8 @@ case ${1} in PE | pe ) . lib.pe.sh + export AUTH_SERVER='AutoAD' + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -33,15 +35,6 @@ case ${1} in && prism_check 'PC' \ if (( $? == 0 )) ; then - ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter - # if [ ! -z DEBUG ]; then - # bash_cmd='bash' - # else - # bash_cmd='bash -x' - # fi - # _command="EMAIL=${EMAIL} \ - # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" @@ -55,11 +48,8 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 - - create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 - file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi @@ -73,6 +63,33 @@ case ${1} in PC | pc ) . 
lib.pc.sh + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + ERA-Server-build-1.2.0.1.qcow2 \ + MSSQL-2016-VM.qcow2 \ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + GTSOracle/19c-april/19c-bootdisk.qcow2 \ + GTSOracle/19c-april/19c-disk1.qcow2 \ + GTSOracle/19c-april/19c-disk2.qcow2 \ + GTSOracle/19c-april/19c-disk3.qcow2 \ + GTSOracle/19c-april/19c-disk4.qcow2 \ + GTSOracle/19c-april/19c-disk5.qcow2 \ + GTSOracle/19c-april/19c-disk6.qcow2 \ + GTSOracle/19c-april/19c-disk7.qcow2 \ + GTSOracle/19c-april/19c-disk8.qcow2 \ + GTSOracle/19c-april/19c-disk9.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + run_once dependencies 'install' 'jq' || exit 13 @@ -115,16 +132,16 @@ case ${1} in ssp_auth \ && calm_enable \ - && karbon_enable \ && lcm \ - && karbon_image_download \ && images \ && flow_enable \ && pc_cluster_img_import \ + && upload_era_calm_blueprint \ + && sleep 30 \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
- pc_project + #pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh diff --git a/stage_workshop.sh b/stage_workshop.sh index eeba8f8..bb725a6 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -21,9 +21,9 @@ WORKSHOPS=(\ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ -#"Era Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ -#"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ "Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +#"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ #"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -74,7 +74,7 @@ function stage_clusters() { _pe_launch='citrix_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Era | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Era" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh' _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} From f66453d28a126fb87f552fba4e9c75bb8b95f9e3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 17:13:28 -0800 Subject: [PATCH 367/691] Update lib.pc.sh --- scripts/lib.pc.sh | 226 ++++++++-------------------------------------- 1 file changed, 38 insertions(+), 188 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6ed09a2..ac99439 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1235,11 +1235,8 @@ function upload_era_calm_blueprint() { local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} local ERAADMIN_PASSWORD="nutanix/4u" - local ERAADMIN_PASSWORD_MODIFIED="true" local PE_CREDS_PASSWORD="${PE_PASSWORD}" - local PE_CREDS_PASSWORD_MODIFIED="true" local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) - local 
ERACLI_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID @@ -1316,77 +1313,6 @@ function upload_era_calm_blueprint() { $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) fi - # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - - # Profile Variables - echo "Making ERA_IP Edits" - if [ "$ERA_IP" != "none" ]; then - tmp_ERA_IP=$(mktemp) - # add the new variable to the json file and save it - $(jq --arg var_name $ERA_IP '(.spec.resources.app_profile_list[0].variable_list[0]).value=$var_name' $JSONFile >"$tmp_ERA_IP" && mv "$tmp_ERA_IP" $JSONFile) - fi - # VM Configuration - echo "Making ERA_IMAGE Edits" - if [ "$ERA_IMAGE" != "none" ]; then - tmp_ERA_IMAGE=$(mktemp) - $(jq --arg var_name $ERA_IMAGE '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_ERA_IMAGE" && mv "$tmp_ERA_IMAGE" $JSONFile) - fi - echo "Making ERA_IP_UUID Edits" - if [ "$ERA_IMAGE_UUID" != "none" ]; then - tmp_ERA_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $ERA_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_ERA_IMAGE_UUID" && mv "$tmp_ERA_IMAGE_UUID" $JSONFile) - fi - echo "Making NETWORK_NAME Edits" - if [ "$NETWORK_NAME" != "none" ]; then - tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) - fi - echo "Making NETWORK_UUID Edits" - if [ "$NETWORK_UUID" != "none" ]; then - tmp_NETWORK_UUID=$(mktemp) - $(jq --arg var_name $NETWORK_UUID 
'(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) - fi - #if [ "$NETWORK_NAME" != "none" ]; then - # tmp_NETWORK_NAME=$(mktemp) - # $(jq --arg var_name $NETWORK_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_NAME")).value=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) - #fi - #if [ "$VLAN_NAME" != "none" ]; then - # tmp_VLAN_NAME=$(mktemp) - # $(jq --arg var_name $VLAN_NAME '(.spec.resources.service_definition_list[0].variable_list[] | select (.name=="NETWORK_VLAN")).value=$var_name' $JSONFile >"$tmp_VLAN_NAME" && mv "$tmp_VLAN_NAME" $JSONFile) - #fi - # Credentials - echo "Making ERAADMIN_PASSWORD Edits" - if [ "$ERAADMIN_PASSWORD" != "none" ]; then - tmp_ERAADMIN_PASSWORD=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) - fi - echo "Making ERAADMIN_PASSWORD_MODIFIED Edits" - if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then - tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making PE_CREDS_PASSWORD Edits" - if [ "$PE_CREDS_PASSWORD" != "none" ]; then - tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) - fi - echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" - if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg 
var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making ERACLI_PASSWORD Edits" - if [ "$ERACLI_PASSWORD" != "none" ]; then - tmp_ERACLI_PASSWORD=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) - fi - echo "Making ERACLI_PASSWORD_MODIFIED Edits" - if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then - tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) - fi - - # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) tmp_removal=$(mktemp) $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) @@ -1404,14 +1330,6 @@ function upload_era_calm_blueprint() { # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload echo "Uploading the updated blueprint: $blueprint_name..." - # Example curl call from the console: - # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" - # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" - # bp_name="EraServerDeployment" - # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" - # password='techX2019!' 
- # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" - path_to_file=$JSONFile bp_name=$blueprint_name project_uuid=$project_uuid @@ -1435,52 +1353,32 @@ function upload_era_calm_blueprint() { echo "ERA Blueprint UUID = $ERA_BLUEPRINT_UUID" - echo "Set Credentials" + echo "Update Blueprint and writing to temp file" + + DOWNLOADED_JSONFile="${BLUEPRINT}-${ERA_BLUEPRINT_UUID}.json" + UPDATED_JSONFile="${BLUEPRINT}-${ERA_BLUEPRINT_UUID}-updated.json" # GET The Blueprint so it can be updated - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" > set_blueprint_credentials_file.json - - JSONFile="set_blueprint_credentials_file.json" - - # Remove Staatus - echo "Removing Status" - tmp_REMOVE_STATUS=$(mktemp) - $(jq -c 'del(.status)' $JSONFile) >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_REMOVE_STATUS" $JSONFile - # Credentials - echo "Making ERAADMIN_PASSWORD Edits" - if [ "$ERAADMIN_PASSWORD" != "none" ]; then - tmp_ERAADMIN_PASSWORD=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD" && mv "$tmp_ERAADMIN_PASSWORD" $JSONFile) - fi - echo "Making ERAADMIN_PASSWORD_MODIFIED Edits" - if [ "$ERAADMIN_PASSWORD_MODIFIED" != "none" ]; then - tmp_ERAADMIN_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERAADMIN_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERAADMIN_PASSWORD_MODIFIED" && mv "$tmp_ERAADMIN_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making PE_CREDS_PASSWORD Edits" - if [ "$PE_CREDS_PASSWORD" != "none" ]; then - tmp_PE_CREDS_PASSWORD=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD '(.spec.resources.credential_definition_list[1].secret.attrs).secret_reference=$var_name' $JSONFile 
>"$tmp_PE_CREDS_PASSWORD" && mv "$tmp_PE_CREDS_PASSWORD" $JSONFile) - fi - echo "Making PE_CREDS_PASSWORD_MODIFIED Edits" - if [ "$PE_CREDS_PASSWORD_MODIFIED" != "none" ]; then - tmp_PE_CREDS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $PE_CREDS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[1].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_PE_CREDS_PASSWORD_MODIFIED" && mv "$tmp_PE_CREDS_PASSWORD_MODIFIED" $JSONFile) - fi - echo "Making ERACLI_PASSWORD Edits" - if [ "$ERACLI_PASSWORD" != "none" ]; then - tmp_ERACLI_PASSWORD=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD '(.spec.resources.credential_definition_list[2].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD" && mv "$tmp_ERACLI_PASSWORD" $JSONFile) - fi - echo "Making ERACLI_PASSWORD_MODIFIED Edits" - if [ "$ERACLI_PASSWORD_MODIFIED" != "none" ]; then - tmp_ERACLI_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $ERACLI_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[2].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_ERACLI_PASSWORD_MODIFIED" && mv "$tmp_ERACLI_PASSWORD_MODIFIED" $JSONFile) - fi + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile} + + cat $DOWNLOADED_JSONFile \ + | jq -c 'del(.status)' \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$ERA_IP\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$ERA_IMAGE\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$ERA_IMAGE_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r 
"(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference = \"$ERAADMIN_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ + | jq -c -r "(.spec.resources.credential_definition_list[1].secret.attrs.secret_reference = \"$PE_CREDS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ + | jq -c -r "(.spec.resources.credential_definition_list[2].secret.attrs.secret_reference = \"$ERACLI_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ + > $UPDATED_JSONFile echo "Saving Credentials Edits with PUT" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${ERA_BLUEPRINT_UUID}" echo "Finished Updating Credentials" @@ -1582,44 +1480,6 @@ function upload_CICDInfra_calm_blueprint() { # add the new atributes to the JSON and overwrite the old JSON file with the new one $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) fi - - # ADD VARIABLES (affects ONLY if the current blueprint being imported MATCHES the name specified earlier "EraServerDeployment.json") - - # Profile Variables - # VM Configuration - echo "Making $SERVER_IMAGE Edits" - if [ "$SERVER_IMAGE" != "none" ]; then - tmp_SERVER_IMAGE=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE 
'(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).name=$var_name' $JSONFile >"$tmp_SERVER_IMAGE" && mv "$tmp_SERVER_IMAGE" $JSONFile) - fi - echo "Making $SERVER_IMAGE_UUID Edits" - if [ "$SERVER_IMAGE_UUID" != "none" ]; then - tmp_SERVER_IMAGE_UUID=$(mktemp) - $(jq --arg var_name $SERVER_IMAGE_UUID '(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference).uuid=$var_name' $JSONFile >"$tmp_SERVER_IMAGE_UUID" && mv "$tmp_SERVER_IMAGE_UUID" $JSONFile) - fi - echo "Making $NETWORK_NAME Edits" - if [ "$NETWORK_NAME" != "none" ]; then - tmp_NETWORK_NAME=$(mktemp) - $(jq --arg var_name $NETWORK_NAME '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).name=$var_name' $JSONFile >"$tmp_NETWORK_NAME" && mv "$tmp_NETWORK_NAME" $JSONFile) - fi - echo "Making $NETWORK_UUID Edits" - if [ "$NETWORK_UUID" != "none" ]; then - tmp_NETWORK_UUID=$(mktemp) - $(jq --arg var_name $NETWORK_UUID '(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference).uuid=$var_name' $JSONFile >"$tmp_NETWORK_UUID" && mv "$tmp_NETWORK_UUID" $JSONFile) - fi - # Credentials - echo "Making $CENTOS_PASSWORD Edits" - if [ "$CENTOS_PASSWORD" != "none" ]; then - tmp_CENTOS_PASSWORD=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" $JSONFile) - fi - echo "Making $CENTOS_PASSWORD_MODIFIED Edits" - if [ "$CENTOS_PASSWORD_MODIFIED" != "none" ]; then - tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) - fi - - # REMOVE the "status" and "product_version" keys (if they 
exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) tmp_removal=$(mktemp) $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) @@ -1637,13 +1497,6 @@ function upload_CICDInfra_calm_blueprint() { # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload echo "Uploading the updated blueprint: $blueprint_name..." - # Example curl call from the console: - # url="https://10.42.7.39:9440/api/nutanix/v3/blueprints/import_file" - # path_to_file="/Users/sharon.santana/Desktop/saved_blueprints/EraServerDeployment.json" - # bp_name="EraServerDeployment" - # project_uuid="a944258a-fd8a-4d02-8646-72c311e03747" - # password='techX2019!' - # curl -s -k -X POST $url -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid --user admin:"$password" path_to_file=$JSONFile bp_name=$blueprint_name @@ -1668,31 +1521,28 @@ function upload_CICDInfra_calm_blueprint() { echo "ERA Blueprint UUID = $CICDInfra_BLUEPRINT_UUID" - echo "Set Credentials" + echo "Update Blueprint and writing to temp file" + + DOWNLOADED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}.json" + UPDATED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}-updated.json" # GET The Blueprint so it can be updated - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" > set_blueprint_credentials_file.json - - JSONFile="set_blueprint_credentials_file.json" - - # Remove Staatus - echo "Removing Status" - $(jq -c 'del(.status)' $JSONFile) - # Credentials - echo "Making $CENTOS_PASSWORD Edits" - if [ "$CENTOS_PASSWORD" != "none" ]; then - tmp_CENTOS_PASSWORD=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD '(.spec.resources.credential_definition_list[0].secret.attrs).secret_reference=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD" && mv "$tmp_CENTOS_PASSWORD" 
$JSONFile) - fi - echo "Making $CENTOS_PASSWORD_MODIFIED Edits" - if [ "$CENTOS_PASSWORD_MODIFIED" != "none" ]; then - tmp_CENTOS_PASSWORD_MODIFIED=$(mktemp) - $(jq --arg var_name $CENTOS_PASSWORD_MODIFIED '(.spec.resources.credential_definition_list[0].secret.attrs).is_secret_modified=$var_name' $JSONFile >"$tmp_CENTOS_PASSWORD_MODIFIED" && mv "$tmp_CENTOS_PASSWORD_MODIFIED" $JSONFile) - fi + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile} + + cat $DOWNLOADED_JSONFile \ + | jq -c 'del(.status)' \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$ERA_IP\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference = \"$CENTOS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ + > $UPDATED_JSONFile echo "Saving Credentials Edits with PUT" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @set_blueprint_credentials_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" echo "Finished Updating Credentials" From 
2af289da86523208f8f6dee064c05f3ed5b162d0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 18:54:07 -0800 Subject: [PATCH 368/691] Update era_bootcamp.sh --- scripts/era_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index daab47b..88af7c1 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -133,6 +133,7 @@ case ${1} in ssp_auth \ && calm_enable \ && lcm \ + && pc_project \ && images \ && flow_enable \ && pc_cluster_img_import \ From 28c7b543701e569531f281f20c631c9022dd7a7f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 23:40:08 -0800 Subject: [PATCH 369/691] Update lib.pc.sh --- scripts/lib.pc.sh | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index ac99439..19a8b8f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1051,7 +1051,7 @@ function upload_citrix_calm_blueprint() { local SERVER_IMAGE_UUID local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" local CITRIX_IMAGE_UUID - local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " + local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" echo "Starting Citrix Blueprint Deployment" @@ -1100,6 +1100,7 @@ function upload_citrix_calm_blueprint() { fi fi + # update the user with script progress... echo "Starting blueprint updates and then Uploading to Calm..." 
@@ -1190,13 +1191,13 @@ function upload_citrix_calm_blueprint() { | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[1].data_source_reference.uuid = \"$CITRIX_IMAGE_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference = \"$LOCAL_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$LOCAL_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[1].secret.attrs.secret_reference = \"$DOMAIN_CREDS_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$DOMAIN_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[2].secret.attrs.secret_reference = \"$PE_CREDS_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[3].secret.attrs.secret_reference = \"$SQL_CREDS_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[3].secret.value = \"$SQL_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[3].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile @@ -1215,7 +1216,7 @@ function upload_citrix_calm_blueprint() { curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json 
"https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" - echo "Finished Launching the Era Server Application" + echo "Finished Launching the Calm Infra Application" } @@ -1368,11 +1369,11 @@ function upload_era_calm_blueprint() { | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$ERA_IMAGE_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference = \"$ERAADMIN_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$ERAADMIN_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[1].secret.attrs.secret_reference = \"$PE_CREDS_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[2].secret.attrs.secret_reference = \"$ERACLI_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value = \"$ERACLI_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile @@ -1536,7 +1537,7 @@ function upload_CICDInfra_calm_blueprint() { | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ | jq -c -r 
"(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.credential_definition_list[0].secret.attrs.secret_reference = \"$CENTOS_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$CENTOS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From b6d1fd9e5729ce0042eafcd668ed448555048cfe Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 18 Feb 2020 23:43:29 -0800 Subject: [PATCH 370/691] Fixes for Calm BPs --- scripts/citrix_bootcamp.sh | 2 +- scripts/era_bootcamp.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 59b6f6d..856aa74 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -124,7 +124,7 @@ case ${1} in && lcm \ && pc_project \ && images \ - && sleep 120 \ + && sleep 420 \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint \ diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 88af7c1..a7ea3f7 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -135,6 +135,7 @@ case ${1} in && lcm \ && pc_project \ && images \ + && sleep 420 \ && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ From 0f64241fe0e7d9d1bc5df964059d909f016e6cf0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 09:11:09 -0800 Subject: [PATCH 371/691] Updates for Image check in calm BP Uploads --- scripts/lib.pc.sh | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 19a8b8f..8f5a248 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1060,10 +1060,52 
@@ function upload_citrix_calm_blueprint() { #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + echo "Getting Server Image UUID" + + # The response should be a Task UUID + if [[ ! -z $SERVER_IMAGE_UUID ]]; then + # Check if Image has been upload to IMage service + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + while [ $SERVER_IMAGE_UUID -ne 1 ]; do + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' grep "Windows2016.qcow2" | wc -l) + if [[ $loops -ne 30 ]]; then + sleep 10 + (( _loops++ )) + else + log "Image is not upload, please check." + break + fi + done + log "Image has been uploaded." + else + log "Image is not upload, please check." + fi + echo "Server Image UUID = $SERVER_IMAGE_UUID" CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + echo "Getting Citrix Image UUID" + + # The response should be a Task UUID + if [[ ! 
-z $CITRIX_IMAGE_UUID ]]; then + # Check if Image has been upload to IMage service + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + while [ $CITRIX_IMAGE_UUID -ne 1 ]; do + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + if [[ $loops -ne 30 ]]; then + sleep 10 + (( _loops++ )) + else + log "Image is not upload, please check." + break + fi + done + log "Image has been uploaded." + else + log "Image is not upload, please check." + fi + echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") From 7a1dcaedb8cb1c958913fcf7c4dfc8f175bfd00e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 09:12:12 -0800 Subject: [PATCH 372/691] remove sleep timers, relace with if statements --- scripts/citrix_bootcamp.sh | 1 - scripts/era_bootcamp.sh | 1 - 2 files changed, 2 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 856aa74..6ee8244 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -124,7 +124,6 @@ case ${1} in && lcm \ && pc_project \ && images \ - && sleep 420 \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint \ diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index a7ea3f7..88af7c1 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -135,7 
+135,6 @@ case ${1} in && lcm \ && pc_project \ && images \ - && sleep 420 \ && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ From f8571833f1e119d198b1f66988026ece3ed4eea9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 09:15:09 -0800 Subject: [PATCH 373/691] Update lib.pc.sh --- scripts/lib.pc.sh | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 8f5a248..6e45cd0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1092,7 +1092,7 @@ function upload_citrix_calm_blueprint() { # Check if Image has been upload to IMage service CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) while [ $CITRIX_IMAGE_UUID -ne 1 ]; do - CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) if [[ $loops -ne 30 ]]; then sleep 10 (( _loops++ )) @@ -1295,6 +1295,27 @@ function upload_era_calm_blueprint() { #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] 
| .metadata.uuid' | tr -d \") + echo "Getting Era Image UUID" + + # The response should be a Task UUID + if [[ ! -z $ERA_IMAGE_UUID ]]; then + # Check if Image has been upload to IMage service + ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + while [ $ERA_IMAGE_UUID -ne 1 ]; do + ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + if [[ $loops -ne 30 ]]; then + sleep 10 + (( _loops++ )) + else + log "Image is not upload, please check." + break + fi + done + log "Image has been uploaded." + else + log "Image is not upload, please check." + fi + echo "ERA Image UUID = $ERA_IMAGE_UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") From 91a59407359f301e77827cb5c2b2b65c221cb1ee Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 09:17:07 -0800 Subject: [PATCH 374/691] Update lib.pc.sh --- scripts/lib.pc.sh | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6e45cd0..42c21d6 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1067,7 +1067,7 @@ function upload_citrix_calm_blueprint() { # Check if Image has been upload to IMage service SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) 
while [ $SERVER_IMAGE_UUID -ne 1 ]; do - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' grep "Windows2016.qcow2" | wc -l) + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) if [[ $loops -ne 30 ]]; then sleep 10 (( _loops++ )) @@ -1489,6 +1489,27 @@ function upload_CICDInfra_calm_blueprint() { #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + echo "Getting Server Image UUID" + + # The response should be a Task UUID + if [[ ! -z $SERVER_IMAGE_UUID ]]; then + # Check if Image has been upload to IMage service + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + while [ $SERVER_IMAGE_UUID -ne 1 ]; do + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + if [[ $loops -ne 30 ]]; then + sleep 10 + (( _loops++ )) + else + log "Image is not upload, please check." + break + fi + done + log "Image has been uploaded." + else + log "Image is not upload, please check." 
+ fi + echo "Server Image UUID = $SERVER_IMAGE_UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") From ba4e5cde001a36d9677f87da676c7aad815cb3e3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 09:27:08 -0800 Subject: [PATCH 375/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 42c21d6..d50d3f1 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1436,7 +1436,7 @@ function upload_era_calm_blueprint() { | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value = \"$ERACLI_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile @@ -1621,7 +1621,7 @@ function upload_CICDInfra_calm_blueprint() { | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = 
\"$SERVER_IMAGE_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$CENTOS_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBE
BGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From f5dcd6f7a02156ddc063f6305d32ad9359835415 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 09:45:01 -0800 Subject: [PATCH 376/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index d50d3f1..9fa8f77 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1292,6 +1292,8 @@ function upload_era_calm_blueprint() { DOWNLOAD_CALM_RSA_KEY=$(curl -L ${BLUEPRINT_URL}${CALM_RSA_KEY_FILE} -o ${DIRECTORY}/${CALM_RSA_KEY_FILE}) log "Downloading ${CALM_RSA_KEY_FILE}" + echo "ERACLI Key - $ERACLI_PASSWORD" + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1436,7 +1438,7 @@ function upload_era_calm_blueprint() { | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value=\"$ERACLI_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile @@ -1621,7 +1623,7 @@ function upload_CICDInfra_calm_blueprint() { | jq -c -r 
"(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.credential_definition_list[].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVG
Fq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r 
'(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From 283ba1c6998f328ab48c30ebb7b29bd8c1e47dc6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 11:19:01 -0800 Subject: [PATCH 377/691] Update If Loop on images for Calm BPs --- scripts/lib.pc.sh | 133 +++++++++++++++++++++++----------------------- 1 file changed, 66 insertions(+), 67 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9fa8f77..998ca83 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1052,58 +1052,57 @@ function upload_citrix_calm_blueprint() { local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" local CITRIX_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" + local _loops="0" + local _maxtries="30" echo "Starting Citrix Blueprint Deployment" mkdir $DIRECTORY - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "Getting Server Image UUID" + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # The response should be a Task UUID - if [[ ! 
-z $SERVER_IMAGE_UUID ]]; then + if [[ -z $SERVER_IMAGE_UUID ]]; then # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) - while [ $SERVER_IMAGE_UUID -ne 1 ]; do - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) - if [[ $loops -ne 30 ]]; then - sleep 10 - (( _loops++ )) - else - log "Image is not upload, please check." - break - fi + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + sleep 10 + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + (( _loops++ )) done - log "Image has been uploaded." - else - log "Image is not upload, please check." + if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]] + log "Image has been uploaded." + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + else + log "Image is not upload, please check." 
+ fi fi echo "Server Image UUID = $SERVER_IMAGE_UUID" - CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "Getting Citrix Image UUID" + #Getting the IMAGE_UUID + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # The response should be a Task UUID - if [[ ! -z $CITRIX_IMAGE_UUID ]]; then + if [[ -z $CITRIX_IMAGE_UUID ]]; then # Check if Image has been upload to IMage service - CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) - while [ $CITRIX_IMAGE_UUID -ne 1 ]; do - CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) - if [[ $loops -ne 30 ]]; then - sleep 10 - (( _loops++ )) - else - log "Image is not upload, please check." - break - fi + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + while [ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 10 seconds" + sleep 10 + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + (( _loops++ )) done - log "Image has been uploaded." - else - log "Image is not upload, please check." + if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]] + log "Image has been uploaded." + CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + else + log "Image is not upload, please check." + fi fi echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" @@ -1284,6 +1283,8 @@ function upload_era_calm_blueprint() { local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" + local _loops="0" + local _maxtries="30" echo "Starting Era Blueprint Deployment" @@ -1294,28 +1295,26 @@ function upload_era_calm_blueprint() { echo "ERACLI Key - $ERACLI_PASSWORD" + echo "Getting Era Image UUID" + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - - echo "Getting Era Image UUID" - # The response should be a Task UUID - if [[ ! 
-z $ERA_IMAGE_UUID ]]; then + if [[ -z $ERA_IMAGE_UUID ]]; then # Check if Image has been upload to IMage service - ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) - while [ $ERA_IMAGE_UUID -ne 1 ]; do - ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) - if [[ $loops -ne 30 ]]; then - sleep 10 - (( _loops++ )) - else - log "Image is not upload, please check." - break - fi + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + sleep 10 + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + (( _loops++ )) done - log "Image has been uploaded." - else - log "Image is not upload, please check." + if [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]] + log "Image has been uploaded." + ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + else + log "Image is not upload, please check." 
+ fi fi echo "ERA Image UUID = $ERA_IMAGE_UUID" @@ -1480,6 +1479,8 @@ function upload_CICDInfra_calm_blueprint() { local SERVER_IMAGE="CentOS7.qcow2" local SERVER_IMAGE_UUID local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " + local _loops="0" + local _maxtries="30" echo "Starting CICDInfra Blueprint Deployment" @@ -1488,28 +1489,26 @@ function upload_CICDInfra_calm_blueprint() { DOWNLOAD_CALM_RSA_KEY=$(curl -L ${BLUEPRINT_URL}${CALM_RSA_KEY_FILE} -o ${DIRECTORY}/${CALM_RSA_KEY_FILE}) log "Downloading ${CALM_RSA_KEY_FILE}" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "Getting Server Image UUID" + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # The response should be a Task UUID - if [[ ! 
-z $SERVER_IMAGE_UUID ]]; then + if [[ -z $SERVER_IMAGE_UUID ]]; then # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) - while [ $SERVER_IMAGE_UUID -ne 1 ]; do - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) - if [[ $loops -ne 30 ]]; then - sleep 10 - (( _loops++ )) - else - log "Image is not upload, please check." - break - fi + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + sleep 10 + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + (( _loops++ )) done - log "Image has been uploaded." - else - log "Image is not upload, please check." + if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]] + log "Image has been uploaded." + SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + else + log "Image is not upload, please check." 
+ fi fi echo "Server Image UUID = $SERVER_IMAGE_UUID" From 117b500ad7dc2c1b6c4512778fa03394ecf27aeb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 13:46:58 -0800 Subject: [PATCH 378/691] Update lib.pc.sh --- scripts/lib.pc.sh | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 998ca83..9d84aba 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1060,7 +1060,6 @@ function upload_citrix_calm_blueprint() { mkdir $DIRECTORY echo "Getting Server Image UUID" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # The response should be a Task UUID @@ -1073,7 +1072,7 @@ function upload_citrix_calm_blueprint() { SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) (( _loops++ )) done - if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]] + if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." 
SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1084,7 +1083,6 @@ function upload_citrix_calm_blueprint() { echo "Server Image UUID = $SERVER_IMAGE_UUID" echo "Getting Citrix Image UUID" - #Getting the IMAGE_UUID CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # The response should be a Task UUID @@ -1097,7 +1095,7 @@ function upload_citrix_calm_blueprint() { CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) (( _loops++ )) done - if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]] + if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." 
CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1296,9 +1294,9 @@ function upload_era_calm_blueprint() { echo "ERACLI Key - $ERACLI_PASSWORD" echo "Getting Era Image UUID" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + # The response should be a Task UUID if [[ -z $ERA_IMAGE_UUID ]]; then # Check if Image has been upload to IMage service @@ -1309,7 +1307,7 @@ function upload_era_calm_blueprint() { ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) (( _loops++ )) done - if [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]] + if [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." 
ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1490,7 +1488,6 @@ function upload_CICDInfra_calm_blueprint() { log "Downloading ${CALM_RSA_KEY_FILE}" echo "Getting Server Image UUID" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # The response should be a Task UUID @@ -1500,10 +1497,10 @@ function upload_CICDInfra_calm_blueprint() { while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" sleep 10 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) (( _loops++ )) done - if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]] + if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." 
SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else From 8ff26511110c5864f641da97d67ea4ae416951fd Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 15:48:44 -0800 Subject: [PATCH 379/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9d84aba..6aa176f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1435,7 +1435,7 @@ function upload_era_calm_blueprint() { | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value=\"$ERACLI_PASSWORD\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From 9adaf2b4cb947df60d9fff2897b5dc2fe44f3e8a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 16:38:49 -0800 Subject: [PATCH 380/691] Update lib.pc.sh --- scripts/lib.pc.sh 
| 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6aa176f..b075515 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -403,9 +403,9 @@ function pc_auth() { # TODO:50 FUTURE: pass AUTH_SERVER argument - log "Add Directory ${AUTH_SERVER}" + log "Add Directory ${AUTH_DOMAIN" _http_body=$(cat < Date: Wed, 19 Feb 2020 17:48:38 -0800 Subject: [PATCH 381/691] Updated Image If Statement --- scripts/lib.pc.sh | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b075515..934e8ca 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -403,7 +403,7 @@ function pc_auth() { # TODO:50 FUTURE: pass AUTH_SERVER argument - log "Add Directory ${AUTH_DOMAIN" + log "Add Directory ${AUTH_DOMAIN}" _http_body=$(cat < Date: Wed, 19 Feb 2020 18:01:48 -0800 Subject: [PATCH 382/691] Peer staging updates --- scripts/global.vars.sh | 4 ++-- scripts/lib.pe.sh | 20 +++++++++----------- 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1b42b24..512d807 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -36,8 +36,8 @@ SSH_OPTS+=' -q' # -v' # ################################################### #Peer Software -PeerMgmtServer='Windows2016-PeerMgmt-14feb20' -PeerAgentServer='Windows2016-PeerAgent-12feb20' +PeerMgmtServer='Windows2016-PeerMgmt-18feb20' +PeerAgentServer='Windows2016-PeerAgent-18feb20' PMC="PeerMgmt" AGENTA="PeerAgent-Files" AGENTB="PeerAgent-Win" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6e60b1e..df00a78 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -927,20 +927,19 @@ deploy_peer_mgmt_server() { #MTM todo have unattend.xml staged somewhere else wget http://10.42.194.11/workshop_staging/peer/unattend.xml -P /home/nutanix/peer_staging/ - mv /home/nutanix/peer_staging/unattend.xml 
/home/nutanix/peer_staging/unattend_${VMNAME}.xml + mv /home/nutanix/peer_staging/unattend-pmc.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml - +​ ### Deploy PMC Server ### echo "${VMNAME} - Deploying VM..." #log "Create ${VMNAME} VM based on ${IMAGENAME} image" acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - #acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" + acli "vm.disk_create ${VMNAME} clone_from_image=${IMAGENAME}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=Secondary" - +​ #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." acli "vm.on ${VMNAME}" @@ -974,24 +973,23 @@ deploy_peer_agent_server() { #MTM todo have unattend.xml staged somewhere else wget http://10.42.194.11/workshop_staging/peer/unattend.xml -P /home/nutanix/peer_staging/ - mv /home/nutanix/peer_staging/unattend.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml + mv /home/nutanix/peer_staging/unattend-agent.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml - +​ ### Deploy Agent Server ### echo "${VMNAME} - Deploying VM..." 
#log "Create ${VMNAME} VM based on ${IMAGENAME} image" acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - #acli "vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" + acli "vm.disk_create ${VMNAME} clone_from_image=${IMAGENAME}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=Secondary" - +​ #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." acli "vm.on ${VMNAME}" - +​ echo "${VMNAME} - Deployed." } From 8f23ba04be7fdf1422803db9bff2bb316f7e0960 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 20:02:05 -0800 Subject: [PATCH 383/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 934e8ca..2c2e6ca 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1068,7 +1068,7 @@ function upload_citrix_calm_blueprint() { # Check if Image has been upload to IMage service SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" sleep 60 SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) (( _loops++ )) @@ -1092,7 +1092,7 @@ function upload_citrix_calm_blueprint() { # Check if Image has been upload to IMage service CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) (( _loops++ )) @@ -1304,7 +1304,7 @@ function upload_era_calm_blueprint() { # Check if Image has been upload to IMage service ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" sleep 60 ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) (( _loops++ )) @@ -1497,7 +1497,7 @@ function upload_CICDInfra_calm_blueprint() { # Check if Image has been upload to IMage service SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 10 seconds" + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) (( _loops++ )) From f3ffe937518e8f1d236efdce4bf03728de2a666e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 20:23:58 -0800 Subject: [PATCH 384/691] Update lib.pc.sh --- scripts/lib.pc.sh | 35 ++++++++++++++++++----------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 2c2e6ca..6317338 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1061,19 +1061,19 @@ function upload_citrix_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - #ERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user 
${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) # The response should be a Task UUID if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) (( _loops++ )) done - if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then + if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." 
SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1085,19 +1085,19 @@ function upload_citrix_calm_blueprint() { echo "Getting Citrix Image UUID" #Getting the IMAGE_UUID - #CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) # The response should be a Task UUID if [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" sleep 60 - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" | wc -l) + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) (( _loops++ )) done - if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then + if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1298,15 +1298,15 @@ function upload_era_calm_blueprint() { echo "Getting Era Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter #ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) # The response should be a Task UUID if [[ $ERA_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 
'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "ERA-Server-build-1.2.0.1.qcow2" | wc -l) + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) (( _loops++ )) done if [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]]; then @@ -1491,15 +1491,16 @@ function upload_CICDInfra_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) # The response should be a Task UUID - if [[ -z $SERVER_IMAGE_UUID ]]; then + if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl 
${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) (( _loops++ )) done if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then From d60fb2d29cdc58a6a261102e20b7f2f893b88304 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 19 Feb 2020 22:42:17 -0800 Subject: [PATCH 385/691] Temp comment out of Oracle Images while DFS catches up --- scripts/era_bootcamp.sh | 20 ++++++++++---------- scripts/ts2020.sh | 20 ++++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 88af7c1..b37592c 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -75,16 +75,16 @@ case ${1} in CentOS7.qcow2 \ WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - GTSOracle/19c-april/19c-bootdisk.qcow2 \ - GTSOracle/19c-april/19c-disk1.qcow2 \ - GTSOracle/19c-april/19c-disk2.qcow2 \ - GTSOracle/19c-april/19c-disk3.qcow2 \ - GTSOracle/19c-april/19c-disk4.qcow2 \ - GTSOracle/19c-april/19c-disk5.qcow2 \ - GTSOracle/19c-april/19c-disk6.qcow2 \ - GTSOracle/19c-april/19c-disk7.qcow2 \ - GTSOracle/19c-april/19c-disk8.qcow2 \ - GTSOracle/19c-april/19c-disk9.qcow2 \ + #GTSOracle/19c-april/19c-bootdisk.qcow2 \ + #GTSOracle/19c-april/19c-disk1.qcow2 \ + #GTSOracle/19c-april/19c-disk2.qcow2 \ + #GTSOracle/19c-april/19c-disk3.qcow2 \ + #GTSOracle/19c-april/19c-disk4.qcow2 \ + 
#GTSOracle/19c-april/19c-disk5.qcow2 \ + #GTSOracle/19c-april/19c-disk6.qcow2 \ + #GTSOracle/19c-april/19c-disk7.qcow2 \ + #GTSOracle/19c-april/19c-disk8.qcow2 \ + #GTSOracle/19c-april/19c-disk9.qcow2 \ ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 09373d6..36bf7d0 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -105,16 +105,16 @@ case ${1} in Linux_ToolsVM.qcow2 \ move-3.4.1.qcow2 \ MSSQL-2016-VM.qcow2 \ - GTSOracle/19c-april/19c-bootdisk.qcow2 \ - GTSOracle/19c-april/19c-disk1.qcow2 \ - GTSOracle/19c-april/19c-disk2.qcow2 \ - GTSOracle/19c-april/19c-disk3.qcow2 \ - GTSOracle/19c-april/19c-disk4.qcow2 \ - GTSOracle/19c-april/19c-disk5.qcow2 \ - GTSOracle/19c-april/19c-disk6.qcow2 \ - GTSOracle/19c-april/19c-disk7.qcow2 \ - GTSOracle/19c-april/19c-disk8.qcow2 \ - GTSOracle/19c-april/19c-disk9.qcow2 \ + #GTSOracle/19c-april/19c-bootdisk.qcow2 \ + #GTSOracle/19c-april/19c-disk1.qcow2 \ + #GTSOracle/19c-april/19c-disk2.qcow2 \ + #GTSOracle/19c-april/19c-disk3.qcow2 \ + #GTSOracle/19c-april/19c-disk4.qcow2 \ + #GTSOracle/19c-april/19c-disk5.qcow2 \ + #GTSOracle/19c-april/19c-disk6.qcow2 \ + #GTSOracle/19c-april/19c-disk7.qcow2 \ + #GTSOracle/19c-april/19c-disk8.qcow2 \ + #GTSOracle/19c-april/19c-disk9.qcow2 \ Windows2012R2.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ ) From b20b97160648288fa783799eb9dff9889699390c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 01:04:31 -0800 Subject: [PATCH 386/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index df00a78..d07c358 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -926,7 +926,7 @@ deploy_peer_mgmt_server() { #mkdir /home/nutanix/peer_staging/ #MTM todo have unattend.xml staged somewhere else - wget http://10.42.194.11/workshop_staging/peer/unattend.xml -P /home/nutanix/peer_staging/ + wget 
http://10.42.194.11/workshop_staging/peer/unattend-pmc.xml -P /home/nutanix/peer_staging/ mv /home/nutanix/peer_staging/unattend-pmc.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml @@ -972,7 +972,7 @@ deploy_peer_agent_server() { #mkdir /home/nutanix/peer_staging/ #MTM todo have unattend.xml staged somewhere else - wget http://10.42.194.11/workshop_staging/peer/unattend.xml -P /home/nutanix/peer_staging/ + wget http://10.42.194.11/workshop_staging/peer/unattend-agent.xml -P /home/nutanix/peer_staging/ mv /home/nutanix/peer_staging/unattend-agent.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml From 9dcc9fe4e6865424b8cf89684461fd3bd4e0949b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 09:21:29 -0800 Subject: [PATCH 387/691] Veeam IMage Update --- scripts/ts2020.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 36bf7d0..5da93da 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -117,13 +117,14 @@ case ${1} in #GTSOracle/19c-april/19c-disk9.qcow2 \ Windows2012R2.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + VeeamAHVProxy2.0.404.qcow2 \ ) export ISO_IMAGES=(\ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ FrameCCA-2.1.6.iso \ FrameGuestAgentInstaller_1.0.2.2_7930.iso \ - VeeamBRv10.iso \ + VBR_10.0.0.4442.iso \ ) From 77fa7e9971197bdd6f93fd97cdcd58d2d78aad8e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 09:52:54 -0800 Subject: [PATCH 388/691] Update citrix_bootcamp.sh --- scripts/citrix_bootcamp.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/scripts/citrix_bootcamp.sh 
b/scripts/citrix_bootcamp.sh index 6ee8244..1390561 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -29,6 +29,15 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -126,6 +135,7 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ + && seedPC \ && upload_citrix_calm_blueprint \ && sleep 30 \ && prism_check 'PC' From ee9c892f07f3a464374ec17fd10aaf5fd7b74ed1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 11:23:56 -0800 Subject: [PATCH 389/691] Update lib.pe.sh --- scripts/lib.pe.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d07c358..bc5111a 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -936,7 +936,7 @@ deploy_peer_mgmt_server() { echo "${VMNAME} - Deploying VM..." #log "Create ${VMNAME} VM based on ${IMAGENAME} image" acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - acli "vm.disk_create ${VMNAME} clone_from_image=${IMAGENAME}" + acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=Secondary" ​ @@ -982,7 +982,7 @@ deploy_peer_agent_server() { echo "${VMNAME} - Deploying VM..." 
#log "Create ${VMNAME} VM based on ${IMAGENAME} image" acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" - acli "vm.disk_create ${VMNAME} clone_from_image=${IMAGENAME}" + acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=Secondary" ​ From 48c14bdeb58eb38e40d21012c9158270cf48e27d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 12:18:43 -0800 Subject: [PATCH 390/691] Update ts2020.sh --- scripts/ts2020.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 5da93da..325aa92 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -117,14 +117,14 @@ case ${1} in #GTSOracle/19c-april/19c-disk9.qcow2 \ Windows2012R2.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ - VeeamAHVProxy2.0.404.qcow2 \ + veeam/VeeamAHVProxy2.0.404.qcow2 \ ) export ISO_IMAGES=(\ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ FrameCCA-2.1.6.iso \ FrameGuestAgentInstaller_1.0.2.2_7930.iso \ - VBR_10.0.0.4442.iso \ + veeam/VBR_10.0.0.4442.iso \ ) From b80122f7f98747bc712f6205db61c1b8af76774c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 15:58:22 -0800 Subject: [PATCH 391/691] peer updates --- scripts/lib.pc.sh | 49 +++++++++++++++++++++++++---------------------- scripts/lib.pe.sh | 14 ++++---------- 2 files changed, 30 insertions(+), 33 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6317338..4e16b02 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1061,16 +1061,18 @@ function upload_citrix_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 
'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + _loops="0" + maxtries="30" + + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) # The response should be a Task UUID if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "Windows2016.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) (( _loops++ )) done if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then @@ -1085,16 +1087,18 @@ function upload_citrix_calm_blueprint() { echo "Getting Citrix Image UUID" #Getting the IMAGE_UUID + _loops="0" + maxtries="30" - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | 
wc -l) # The response should be a Task UUID if [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) (( _loops++ )) done if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then @@ -1107,6 +1111,8 @@ function upload_citrix_calm_blueprint() { echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" + echo "Getting Network UUID" + NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" @@ -1278,7 +1284,7 @@ function upload_era_calm_blueprint() { local VLAN_NAME=${NW1_VLAN} local ERAADMIN_PASSWORD="nutanix/4u" local PE_CREDS_PASSWORD="${PE_PASSWORD}" - local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) + #local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local 
DOWNLOAD_BLUEPRINTS local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID @@ -1290,14 +1296,11 @@ function upload_era_calm_blueprint() { mkdir $DIRECTORY - DOWNLOAD_CALM_RSA_KEY=$(curl -L ${BLUEPRINT_URL}${CALM_RSA_KEY_FILE} -o ${DIRECTORY}/${CALM_RSA_KEY_FILE}) - log "Downloading ${CALM_RSA_KEY_FILE}" - - echo "ERACLI Key - $ERACLI_PASSWORD" - echo "Getting Era Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - #ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _loops="0" + maxtries="30" + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) # The response should be a Task UUID if [[ $ERA_IMAGE_UUID_CHECK -ne 1 ]]; then @@ -1319,6 +1322,8 @@ function upload_era_calm_blueprint() { echo "ERA Image UUID = $ERA_IMAGE_UUID" + echo "Getting NETWORK UUID" + NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" @@ -1437,7 +1442,7 @@ function upload_era_calm_blueprint() { | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ - | jq -c -r "(.spec.resources.credential_definition_list[].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[2].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[2].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile @@ -1472,7 +1477,7 @@ function upload_CICDInfra_calm_blueprint() { local PE_IP=${PE_HOST} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} - local CENTOS_PASSWORD=$(awk '{printf 
"%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) + #local CENTOS_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local CENTOS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local NETWORK_UUID @@ -1486,21 +1491,20 @@ function upload_CICDInfra_calm_blueprint() { mkdir $DIRECTORY - DOWNLOAD_CALM_RSA_KEY=$(curl -L ${BLUEPRINT_URL}${CALM_RSA_KEY_FILE} -o ${DIRECTORY}/${CALM_RSA_KEY_FILE}) - log "Downloading ${CALM_RSA_KEY_FILE}" - echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter + _loops="0" + maxtries="30" - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) # The response should be a Task UUID if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep "CentOS7.qcow2" | wc -l) + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) (( _loops++ )) done if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then @@ -1510,7 +1514,6 @@ function upload_CICDInfra_calm_blueprint() { log "Image is not upload, please check." fi fi - echo "Server Image UUID = $SERVER_IMAGE_UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index bc5111a..f9e0d3a 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -901,7 +901,7 @@ function pc_destroy() { # Routine to deploy the Peer Management Center ############################################################################################################################################################################### # MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands -deploy_peer_mgmt_server() { +function deploy_peer_mgmt_server() { @@ -922,10 +922,7 @@ deploy_peer_mgmt_server() { ### Get sysyprep config file ready ### echo "${VMNAME} - Prepping sysprep config..." 
- # MTM Create a temp folder for sysprep file work as to not clutter up nutanix home - #mkdir /home/nutanix/peer_staging/ - #MTM todo have unattend.xml staged somewhere else wget http://10.42.194.11/workshop_staging/peer/unattend-pmc.xml -P /home/nutanix/peer_staging/ mv /home/nutanix/peer_staging/unattend-pmc.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml @@ -938,7 +935,7 @@ deploy_peer_mgmt_server() { acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" # MTM TODO replace net1 with appropriate variable - acli "vm.nic_create ${VMNAME} network=Secondary" + acli "vm.nic_create ${VMNAME} network=${NW2_NAME}" ​ #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." @@ -952,7 +949,7 @@ deploy_peer_mgmt_server() { # Routine to deploy a Peer Agent ############################################################################################################################################################################### # MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands -deploy_peer_agent_server() { +function deploy_peer_agent_server() { if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerAgentServer} | wc --lines) == 0 )); then log "Import ${PeerAgentServer} image from ${QCOW2_REPOS}..." @@ -968,10 +965,7 @@ deploy_peer_agent_server() { ### Get sysyprep config file ready ### echo "${VMNAME} - Prepping sysprep config..." 
- # MTM Create a temp folder for sysprep file work as to not clutter up nutanix home - #mkdir /home/nutanix/peer_staging/ - #MTM todo have unattend.xml staged somewhere else wget http://10.42.194.11/workshop_staging/peer/unattend-agent.xml -P /home/nutanix/peer_staging/ mv /home/nutanix/peer_staging/unattend-agent.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml @@ -984,7 +978,7 @@ deploy_peer_agent_server() { acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" # MTM TODO replace net1 with appropriate variable - acli "vm.nic_create ${VMNAME} network=Secondary" + acli "vm.nic_create ${VMNAME} network=${NW2_NAME}" ​ #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." From 98b5f31b0d39f39f6a982be379e92c08d06d8d3e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 16:28:04 -0800 Subject: [PATCH 392/691] Peer Staging Cleanup --- scripts/lib.pe.sh | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index f9e0d3a..1eaf991 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -903,8 +903,6 @@ function pc_destroy() { # MTM TODO When integrating with Nutanix scripts, need to change echo to log and put quotes around text after all acli commands function deploy_peer_mgmt_server() { - - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PeerMgmtServer} | wc --lines) == 0 )); then log "Import ${PeerMgmtServer} image from ${QCOW2_REPOS}..." acli image.create ${PeerMgmtServer} \ @@ -913,36 +911,26 @@ function deploy_peer_mgmt_server() { else log "Image found, assuming ready. Skipping ${PeerMgmtServer} import." fi - echo "Creating temp folder and applying perms..." 
mkdir /home/nutanix/peer_staging/ - VMNAME=$1 - ### Get sysyprep config file ready ### - echo "${VMNAME} - Prepping sysprep config..." - wget http://10.42.194.11/workshop_staging/peer/unattend-pmc.xml -P /home/nutanix/peer_staging/ mv /home/nutanix/peer_staging/unattend-pmc.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml -​ ### Deploy PMC Server ### - echo "${VMNAME} - Deploying VM..." #log "Create ${VMNAME} VM based on ${IMAGENAME} image" acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=${NW2_NAME}" -​ #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." acli "vm.on ${VMNAME}" - echo "${VMNAME} - Deployed." - } ############################################################################################################################################################################### @@ -959,31 +947,22 @@ function deploy_peer_agent_server() { else log "Image found, assuming ready. Skipping ${PeerAgentServer} import." fi - VMNAME=$1 - ### Get sysyprep config file ready ### - echo "${VMNAME} - Prepping sysprep config..." - wget http://10.42.194.11/workshop_staging/peer/unattend-agent.xml -P /home/nutanix/peer_staging/ mv /home/nutanix/peer_staging/unattend-agent.xml /home/nutanix/peer_staging/unattend_${VMNAME}.xml chmod 777 /home/nutanix/peer_staging/unattend_${VMNAME}.xml sed -i "s/.*<\/ComputerName>/${VMNAME}<\/ComputerName>/g" /home/nutanix/peer_staging/unattend_${VMNAME}.xml -​ ### Deploy Agent Server ### - echo "${VMNAME} - Deploying VM..." 
#log "Create ${VMNAME} VM based on ${IMAGENAME} image" acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" # MTM TODO replace net1 with appropriate variable acli "vm.nic_create ${VMNAME} network=${NW2_NAME}" -​ #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." acli "vm.on ${VMNAME}" -​ echo "${VMNAME} - Deployed." - } From 0a2a2c2a381ba620ea6f3f4e76d44c48ae521b36 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 20 Feb 2020 20:21:20 -0800 Subject: [PATCH 393/691] moving seedPC to the end of staging. --- scripts/citrix_bootcamp.sh | 2 +- scripts/lib.pc.sh | 8 +++++--- scripts/ts2020.sh | 22 +++++++++++----------- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 1390561..238bf71 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -135,9 +135,9 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - && seedPC \ && upload_citrix_calm_blueprint \ && sleep 30 \ + && seedPC \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 4e16b02..6fd63d7 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1063,7 +1063,7 @@ function upload_citrix_calm_blueprint() { #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" maxtries="30" - + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) # The response should be a Task UUID if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then @@ -1263,7 +1263,7 @@ function upload_citrix_calm_blueprint() { curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${CITRIX_BLUEPRINT_UUID}/launch" - echo "Finished Launching the Calm Infra Application" + echo "Finished Launching the Citrix Infra Application" } @@ -1608,7 +1608,7 @@ function upload_CICDInfra_calm_blueprint() { #Getting the Blueprint UUID CICDInfra_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CICD_Infra.json"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "ERA Blueprint UUID = $CICDInfra_BLUEPRINT_UUID" + echo "CICD Blueprint UUID = $CICDInfra_BLUEPRINT_UUID" echo "Update Blueprint and writing to temp file" @@ -1635,4 +1635,6 @@ function upload_CICDInfra_calm_blueprint() { echo "Finished Updating Credentials" + echo "Finished CICDInfra Blueprint Deployment" + } diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 325aa92..6e276bb 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -105,16 +105,16 @@ case ${1} in Linux_ToolsVM.qcow2 \ move-3.4.1.qcow2 \ MSSQL-2016-VM.qcow2 \ - #GTSOracle/19c-april/19c-bootdisk.qcow2 \ - #GTSOracle/19c-april/19c-disk1.qcow2 \ - #GTSOracle/19c-april/19c-disk2.qcow2 \ - 
#GTSOracle/19c-april/19c-disk3.qcow2 \ - #GTSOracle/19c-april/19c-disk4.qcow2 \ - #GTSOracle/19c-april/19c-disk5.qcow2 \ - #GTSOracle/19c-april/19c-disk6.qcow2 \ - #GTSOracle/19c-april/19c-disk7.qcow2 \ - #GTSOracle/19c-april/19c-disk8.qcow2 \ - #GTSOracle/19c-april/19c-disk9.qcow2 \ + GTSOracle/19c-april/19c-bootdisk.qcow2 \ + GTSOracle/19c-april/19c-disk1.qcow2 \ + GTSOracle/19c-april/19c-disk2.qcow2 \ + GTSOracle/19c-april/19c-disk3.qcow2 \ + GTSOracle/19c-april/19c-disk4.qcow2 \ + GTSOracle/19c-april/19c-disk5.qcow2 \ + GTSOracle/19c-april/19c-disk6.qcow2 \ + GTSOracle/19c-april/19c-disk7.qcow2 \ + GTSOracle/19c-april/19c-disk8.qcow2 \ + GTSOracle/19c-april/19c-disk9.qcow2 \ Windows2012R2.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ veeam/VeeamAHVProxy2.0.404.qcow2 \ @@ -179,12 +179,12 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - && seedPC \ && upload_citrix_calm_blueprint \ && sleep 30 \ && upload_era_calm_blueprint \ && sleep 30 \ && upload_CICDInfra_calm_blueprint \ + && seedPC \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From b5517d9ca4b2acadcd89526bd99cba9859b0e8c4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 01:43:54 -0800 Subject: [PATCH 394/691] Swapping ISOs and QCOWs for DL order --- scripts/lib.common.sh | 144 +++++++++++++++++++++--------------------- 1 file changed, 72 insertions(+), 72 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 318f251..2bc8563 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -282,93 +282,95 @@ function images() { local _source='source_uri' local _test - ####################################### -# For doing Disk IMAGES +# For doing ISO IMAGES ####################################### - for _image in "${QCOW2_IMAGES[@]}" ; do - - # log "DEBUG: ${_image} image.create..." 
- if [[ ${_cli} == 'nuclei' ]]; then - _test=$(source /etc/profile.d/nutanix_env.sh \ - && ${_cli} image.list 2>&1 \ - | grep -i complete \ - | grep "${_image}") +for _image in "${ISO_IMAGES[@]}" ; do + + # log "DEBUG: ${_image} image.create..." + if [[ ${_cli} == 'nuclei' ]]; then + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep -i complete \ + | grep "${_image}") + #else + # _test=$(source /etc/profile.d/nutanix_env.sh \ + # && ${_cli} image.list 2>&1 \ + # | grep "${_image}") + fi - fi + if [[ ! -z ${_test} ]]; then + log "Skip: ${_image} already complete on cluster." + else + _command='' + _name="${_image}" - if [[ ! -z ${_test} ]]; then - log "Skip: ${_image} already complete on cluster." + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then + log 'Bypass multiple repo source checks...' + SOURCE_URL="${_image}" else - _command='' - _name="${_image}" - - if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then - log 'Bypass multiple repo source checks...' - SOURCE_URL="${_image}" - else - repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! - fi + repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! + fi - if [[ -z "${SOURCE_URL}" ]]; then - _error=30 - log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." - # exit ${_error} - fi + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." + # exit ${_error} + fi - # TODO:0 TOFIX: acs-centos ugly override for today... - if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then - _name=acs-centos - fi + # TODO:0 TOFIX: acs-centos ugly override for today... 
+ if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then + _name=acs-centos + fi - if [[ ${_cli} == 'acli' ]]; then - _image_type='kDiskImage' - _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ - container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" - else - _command+=" name=${_name} description=\"${_image}\"" - fi + if [[ ${_cli} == 'acli' ]]; then + _image_type='kIsoImage' + _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ + container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" + else + _command+=" name=${_name} description=\"${_image}\"" + fi - if [[ ${_cli} == 'nuclei' ]]; then - _http_body=$(cat <&1 & - if (( $? != 0 )); then - log "Warning: Image submission: $?. Continuing..." - #exit 10 - fi + ${_cli} "image.create ${_command}" ${_source}=${SOURCE_URL} 2>&1 & + if (( $? != 0 )); then + log "Warning: Image submission: $?. Continuing..." + #exit 10 + fi - if [[ ${_cli} == 'nuclei' ]]; then - log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" - log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" - fi + if [[ ${_cli} == 'nuclei' ]]; then + log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" + log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" fi fi + fi - done +done - ####################################### - # For doing ISO IMAGES - ####################################### +####################################### +# For doing Disk IMAGES +####################################### - for _image in "${ISO_IMAGES[@]}" ; do + for _image in "${QCOW2_IMAGES[@]}" ; do # log "DEBUG: ${_image} image.create..." if [[ ${_cli} == 'nuclei' ]]; then @@ -376,10 +378,7 @@ EOF && ${_cli} image.list 2>&1 \ | grep -i complete \ | grep "${_image}") - #else - # _test=$(source /etc/profile.d/nutanix_env.sh \ - # && ${_cli} image.list 2>&1 \ - # | grep "${_image}") + fi if [[ ! 
-z ${_test} ]]; then @@ -407,7 +406,7 @@ EOF fi if [[ ${_cli} == 'acli' ]]; then - _image_type='kIsoImage' + _image_type='kDiskImage' _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ container=${STORAGE_IMAGES} architecture=kX86_64 wait=true" else @@ -423,7 +422,7 @@ EOF "path_and_params":"/api/nutanix/v3/images", "body":{"spec": {"name":"${_name}","description":"${_image}","resources":{ - "image_type":"ISO_IMAGE", + "image_type":"DISK_IMAGE", "source_uri":"${SOURCE_URL}"}}, "metadata":{"kind":"image"},"api_version":"3.1.0"}}],"api_version":"3.0"} EOF @@ -447,6 +446,7 @@ EOF fi done + } ################################################################################## From e7f76e6c3c0e3416bc5817869ad81b7e1f618e01 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 14:31:04 -0800 Subject: [PATCH 395/691] Staging Update --- scripts/lib.pc.sh | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6fd63d7..45d74b4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1051,9 +1051,7 @@ function upload_citrix_calm_blueprint() { local SERVER_IMAGE_UUID local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" local CITRIX_IMAGE_UUID - local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" - local _loops="0" - local _maxtries="30" + echo "Starting Citrix Blueprint Deployment" @@ -1062,7 +1060,7 @@ function upload_citrix_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - maxtries="30" + _maxtries="30" SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) # The response should be a Task UUID @@ -1085,10 +1083,12 @@ function 
upload_citrix_calm_blueprint() { echo "Server Image UUID = $SERVER_IMAGE_UUID" + sleep 30 + echo "Getting Citrix Image UUID" #Getting the IMAGE_UUID _loops="0" - maxtries="30" + _maxtries="30" CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) # The response should be a Task UUID @@ -1111,6 +1111,8 @@ function upload_citrix_calm_blueprint() { echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" + sleep 30 + echo "Getting Network UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1289,8 +1291,7 @@ function upload_era_calm_blueprint() { local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" - local _loops="0" - local _maxtries="30" + echo "Starting Era Blueprint Deployment" @@ -1299,7 +1300,7 @@ function upload_era_calm_blueprint() { echo "Getting Era Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - maxtries="30" + _maxtries="30" ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) # The response should be a Task UUID @@ -1494,7 +1495,7 @@ function upload_CICDInfra_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - maxtries="30" + _maxtries="30" SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 
'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) # The response should be a Task UUID @@ -1618,16 +1619,16 @@ function upload_CICDInfra_calm_blueprint() { # GET The Blueprint so it can be updated curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${CICDInfra_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile} - cat $DOWNLOADED_JSONFile \ - | jq -c 'del(.status)' \ - | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$ERA_IP\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ - | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ - > $UPDATED_JSONFile + cat $DOWNLOADED_JSONFile \ + | jq -c 'del(.status)' \ + | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$ERA_IP\")" \ + | jq -c -r 
"(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2Z
WjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ + > $UPDATED_JSONFile echo "Saving Credentials Edits with PUT" From 93a695f9b7d5cbfa7845a3f0a61e0825bfba5d01 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 17:19:02 -0800 Subject: [PATCH 396/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 45d74b4..6664b70 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1060,7 +1060,7 @@ function upload_citrix_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - _maxtries="30" + _maxtries="60" SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) # The response should be a Task UUID @@ -1088,7 +1088,7 @@ function upload_citrix_calm_blueprint() { echo "Getting Citrix Image UUID" #Getting the IMAGE_UUID _loops="0" - _maxtries="30" + _maxtries="60" CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) # The response should be a Task UUID @@ -1300,7 +1300,7 @@ function upload_era_calm_blueprint() { echo "Getting Era Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - _maxtries="30" + _maxtries="60" 
ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) # The response should be a Task UUID @@ -1495,7 +1495,7 @@ function upload_CICDInfra_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - _maxtries="30" + _maxtries="60" SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) # The response should be a Task UUID From 95881fc47c293c1e402b6769ddf6b16adb8bf864 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 20:33:38 -0800 Subject: [PATCH 397/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6664b70..cb1494c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1067,7 +1067,7 @@ function upload_citrix_calm_blueprint() { if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) - while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" sleep 60 SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) @@ -1095,7 +1095,7 @@ function upload_citrix_calm_blueprint() { if [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) - while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) @@ -1307,7 +1307,7 @@ function upload_era_calm_blueprint() { if [[ $ERA_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) - while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" sleep 60 ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) @@ -1502,7 +1502,7 @@ function upload_CICDInfra_calm_blueprint() { if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then # Check if Image has been upload to IMage service SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) - while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt 30 ]]; do + while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) From a9e191d14fe5d548cfec312e4f63a0b680547c52 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 21:28:27 -0800 Subject: [PATCH 398/691] Adding FrameCCA-2.1.0.iso --- scripts/ts2020.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 6e276bb..2f22380 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -123,6 +123,7 @@ case ${1} in Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ FrameCCA-2.1.6.iso \ + FrameCCA-2.1.0.iso \ FrameGuestAgentInstaller_1.0.2.2_7930.iso \ veeam/VBR_10.0.0.4442.iso \ ) From 0063cb79ccece645b207f6862b19eb970785473c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 23:52:42 -0800 Subject: [PATCH 399/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index cb1494c..d1ca1b9 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1060,7 +1060,7 @@ function 
upload_citrix_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - _maxtries="60" + _maxtries="75" SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) # The response should be a Task UUID @@ -1088,7 +1088,7 @@ function upload_citrix_calm_blueprint() { echo "Getting Citrix Image UUID" #Getting the IMAGE_UUID _loops="0" - _maxtries="60" + _maxtries="75" CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) # The response should be a Task UUID @@ -1300,7 +1300,7 @@ function upload_era_calm_blueprint() { echo "Getting Era Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - _maxtries="60" + _maxtries="75" ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) # The response should be a Task UUID @@ -1495,7 +1495,7 @@ function upload_CICDInfra_calm_blueprint() { echo "Getting Server Image UUID" #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter _loops="0" - _maxtries="60" + _maxtries="75" SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) # The response should be a Task UUID From 8a8fbaa4dfc912690c3b02ce62e3170db5ec7882 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 21 Feb 2020 23:55:37 -0800 Subject: [PATCH 400/691] Update ts2020.sh --- scripts/ts2020.sh | 1 - 1 file changed, 1 deletion(-) diff 
--git a/scripts/ts2020.sh b/scripts/ts2020.sh index 2f22380..c0ffb68 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -115,7 +115,6 @@ case ${1} in GTSOracle/19c-april/19c-disk7.qcow2 \ GTSOracle/19c-april/19c-disk8.qcow2 \ GTSOracle/19c-april/19c-disk9.qcow2 \ - Windows2012R2.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ veeam/VeeamAHVProxy2.0.404.qcow2 \ ) From 32ab420ab8a87b5c1ff29968d3b37c350029460c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 00:01:18 -0800 Subject: [PATCH 401/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index d1ca1b9..abc5125 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1051,6 +1051,7 @@ function upload_citrix_calm_blueprint() { local SERVER_IMAGE_UUID local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" local CITRIX_IMAGE_UUID + local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" echo "Starting Citrix Blueprint Deployment" @@ -1484,7 +1485,7 @@ function upload_CICDInfra_calm_blueprint() { local NETWORK_UUID local SERVER_IMAGE="CentOS7.qcow2" local SERVER_IMAGE_UUID - local CURL_HTTP_OPTS=" --max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure " + local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" local _loops="0" local _maxtries="30" From 375956747baf2721bf6a37060e5de40cec2e2753 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 09:37:51 -0800 Subject: [PATCH 402/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 1eaf991..cc8407f 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -624,7 +624,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run 
the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." From 01424ff08c186ed73aa6f882c52e8baeabaebdb0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 10:22:47 -0800 Subject: [PATCH 403/691] Update global.vars.sh --- scripts/global.vars.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 512d807..1bd7de9 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -21,6 +21,7 @@ CALM_RSA_KEY_FILE='calm_rsa_key.env' ERA_Blueprint='EraServerDeployment.json' Citrix_Blueprint='CitrixBootcampInfra.json' Beam_Blueprint='' +Karbon_Blueprint='' CICDInfra_Blueprint='CICD_Infra.json' # Curl and SSH settings From bdc950e8bc9148c18cf33815a55b3b474ef1d029 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 13:35:00 -0800 Subject: [PATCH 404/691] Updates to Add karbon BP and image DL --- scripts/lib.pc.sh | 281 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 245 insertions(+), 36 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index abc5125..7b7f2e1 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -226,21 +226,22 @@ function karbon_enable() { function karbon_image_download() { local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' local _loop=0 - local _startDownload="https://localhost:7050/acs/image/download" - local _getuuidDownload="https://localhost:7050/acs/image/list" + local _cookies=''NTNX_IGW_SESSION': 
resp.cookies['NTNX_IGW_SESSION']' + local _startDownload="https://localhost:9440/karbon/acs/image/download" + local _getuuidDownload="https://localhost:9440/karbon/acs/image/list" # Create the Basic Authentication using base6 commands _auth=$(echo "admin:${PE_PASSWORD}" | base64) # Call the UUID URL so we have the right UUID for the image - uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic ${_auth}" https://localhost:7050/acs/image/list $CURL_HTTP_OPTS | jq '.[0].uuid' | tr -d \/\") + uuid=$(curl -X GET -H "X-NTNX-AUTH: Basic ${_auth}" https://localhost:9440/karbon/acs/image/list $CURL_HTTP_OPTS | jq '.[0].uuid' | tr -d \/\") log "UUID for The Karbon image is: $uuid" # Use the UUID to download the image response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS}) if [ -z $response ]; then - log "Download of the CenOS image for Karbon has not been started. Trying one more time..." + log "Download of the CentOS image for Karbon has not been started. Trying one more time..." response=$(curl -X POST ${_startDownload} -d "{\"uuid\":\"${uuid}\"}" -H "X-NTNX-AUTH: Basic ${_auth}" ${CURL_HTTP_OPTS}) if [ -z $response ]; then log "Download of CentOS image for Karbon failed... Please run manually." 
@@ -1052,6 +1053,8 @@ function upload_citrix_calm_blueprint() { local CITRIX_IMAGE="Citrix_Virtual_Apps_and_Desktops_7_1912.iso" local CITRIX_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" + local _loops="0" + local _maxtries="75" echo "Starting Citrix Blueprint Deployment" @@ -1083,6 +1086,7 @@ function upload_citrix_calm_blueprint() { fi echo "Server Image UUID = $SERVER_IMAGE_UUID" + echo "-----------------------------------------" sleep 30 @@ -1111,6 +1115,7 @@ function upload_citrix_calm_blueprint() { fi echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" + echo "-----------------------------------------" sleep 30 @@ -1119,6 +1124,7 @@ function upload_citrix_calm_blueprint() { NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" + echo "-----------------------------------------" # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) @@ -1159,17 +1165,6 @@ function upload_citrix_calm_blueprint() { echo "Currently updating blueprint $JSONFile..." 
- echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "DOMAIN=${DOMAIN}" - echo "AD_IP=${AD_IP}" - echo "PE_IP=${PE_IP}" - echo "DDC_IP=${DDC_IP}" - echo "CVM_NETWORK=${CVM_NETWORK}" - echo "SERVER_IMAGE=${SERVER_IMAGE}" - echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" - echo "CITRIX_IMAGE=${CITRIX_IMAGE}" - echo "CITRIX_IMAGE_UUID=${CITRIX_IMAGE_UUID}" - echo "NETWORK_UUID=${NETWORK_UUID}" # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) @@ -1213,7 +1208,7 @@ function upload_citrix_calm_blueprint() { fi fi - echo "Finished uploading ${BLUEPRINT} and setting Variables!" + echo "Finished uploading ${BLUEPRINT}!" #Getting the Blueprint UUID CITRIX_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CitrixBootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1221,6 +1216,17 @@ function upload_citrix_calm_blueprint() { echo "Citrix Blueprint UUID = $CITRIX_BLUEPRINT_UUID" echo "Update Blueprint and writing to temp file" + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "DOMAIN=${DOMAIN}" + echo "AD_IP=${AD_IP}" + echo "PE_IP=${PE_IP}" + echo "DDC_IP=${DDC_IP}" + echo "CVM_NETWORK=${CVM_NETWORK}" + echo "SERVER_IMAGE=${SERVER_IMAGE}" + echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" + echo "CITRIX_IMAGE=${CITRIX_IMAGE}" + echo "CITRIX_IMAGE_UUID=${CITRIX_IMAGE_UUID}" + echo "NETWORK_UUID=${NETWORK_UUID}" DOWNLOADED_JSONFile="${BLUEPRINT}-${CITRIX_BLUEPRINT_UUID}.json" UPDATED_JSONFile="${BLUEPRINT}-${CITRIX_BLUEPRINT_UUID}-updated.json" @@ -1292,6 +1298,8 @@ function upload_era_calm_blueprint() { local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" local ERA_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" + local 
_loops="0" + local _maxtries="75" echo "Starting Era Blueprint Deployment" @@ -1323,12 +1331,14 @@ function upload_era_calm_blueprint() { fi echo "ERA Image UUID = $ERA_IMAGE_UUID" + echo "-----------------------------------------" echo "Getting NETWORK UUID" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" + echo "-----------------------------------------" # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) @@ -1369,13 +1379,6 @@ function upload_era_calm_blueprint() { echo "Currently updating blueprint $JSONFile..." - echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "ERA_IP=${ERA_IP}" - echo "PE_IP=${PE_IP}" - echo "ERA_IMAGE=${ERA_IMAGE}" - echo "ERA_IMAGE_UUID=${ERA_IMAGE_UUID}" - echo "NETWORK_UUID=${NETWORK_UUID}" - # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) @@ -1418,7 +1421,7 @@ function upload_era_calm_blueprint() { fi fi - echo "Finished uploading ${BLUEPRINT} and setting Variables!" + echo "Finished uploading ${BLUEPRINT}!" 
#Getting the Blueprint UUID ERA_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==EraServerDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1427,6 +1430,13 @@ function upload_era_calm_blueprint() { echo "Update Blueprint and writing to temp file" + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "ERA_IP=${ERA_IP}" + echo "PE_IP=${PE_IP}" + echo "ERA_IMAGE=${ERA_IMAGE}" + echo "ERA_IMAGE_UUID=${ERA_IMAGE_UUID}" + echo "NETWORK_UUID=${NETWORK_UUID}" + DOWNLOADED_JSONFile="${BLUEPRINT}-${ERA_BLUEPRINT_UUID}.json" UPDATED_JSONFile="${BLUEPRINT}-${ERA_BLUEPRINT_UUID}-updated.json" @@ -1467,6 +1477,200 @@ function upload_era_calm_blueprint() { } +############################################################################################################################################################################### +# Routine to upload Karbon Calm Blueprint and set variables +############################################################################################################################################################################### + +function upload_karbon_calm_blueprint() { + local DIRECTORY="/home/nutanix/karbon" + local BLUEPRINT=${Karbon_Blueprint} + local CALM_PROJECT="BootcampInfra" + local KARBON_IMAGE='ntnx-0.2' + local PE_IP=${PE_HOST} + local CLSTR_NAME="none" + local CTR_UUID=${_storage_default_uuid} + local CTR_NAME=${STORAGE_DEFAULT} + local NETWORK_NAME=${NW1_NAME} + local VLAN_NAME=${NW1_VLAN} + local PE_CREDS_PASSWORD="${PE_PASSWORD}" + local PE_CREDS_PASSWORD="${PE_PASSWORD}" + #local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) + local DOWNLOAD_BLUEPRINTS + local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" + local _loops="0" + local _maxtries="75" + + 
+ echo "Starting Karbon Blueprint Deployment" + + mkdir $DIRECTORY + + echo "Getting Karbon Image UUID" + #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter + _loops="0" + _maxtries="75" + + KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) + # The response should be a Task UUID + if [[ $KARBON_IMAGE_UUID_CHECK -ne 1 ]]; then + # Check if Image has been upload to IMage service + KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) + while [[ $KARBON_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" + sleep 60 + KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) + (( _loops++ )) + done + if [[ $KARBON_IMAGE_UUID_CHECK -eq 1 ]]; then + log "Image has been uploaded." + KARBON_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ntnx-0.2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + else + log "Image is not upload, please check." 
+ fi + fi + + echo "Karbon Image UUID = $KARBON_IMAGE_UUID" + echo "-----------------------------------------" + + echo "Getting NETWORK UUID" + + NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "NETWORK UUID = $NETWORK_UUID" + echo "-----------------------------------------" + + # download the blueprint + DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) + log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" + + # ensure the directory that contains the blueprints to be imported is not empty + if [[ $(ls -l "$DIRECTORY"/*.json) == *"No such file or directory"* ]]; then + echo "There are no .json files found in the directory provided." + exit 0 + fi + + if [ $CALM_PROJECT != 'none' ]; then + + # curl command needed: + # curl -s -k -X POST https://10.42.7.39:9440/api/nutanix/v3/projects/list -H 'Content-Type: application/json' --user admin:techX2019! -d '{"kind": "project", "filter": "name==default"}' | jq -r '.entities[].metadata.uuid' + + # make API call and store project_uuid + project_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"project", "filter":"name==BootcampInfra"}' 'https://localhost:9440/api/nutanix/v3/projects/list' | jq -r '.entities[].metadata.uuid') + + echo "Projet UUID = $project_uuid" + + if [ -z "$project_uuid" ]; then + # project wasn't found + # exit at this point as we don't want to assume all blueprints should then hit the 'default' project + echo "Project $CALM_PROJECT was not found. Please check the name and retry." + exit 0 + else + echo "Project $CALM_PROJECT exists..." + fi + fi + + # update the user with script progress... + + echo "Starting blueprint updates and then Uploading to Calm..." 
+ + # read the entire JSON file from the directory + JSONFile="${DIRECTORY}/${BLUEPRINT}" + + echo "Currently updating blueprint $JSONFile..." + + # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint + tmp=$(mktemp) + + # ADD PROJECT , we need to add it into the JSON data + if [ $CALM_PROJECT != 'none' ]; then + # add the new atributes to the JSON and overwrite the old JSON file with the new one + $(jq --arg proj $CALM_PROJECT --arg proj_uuid $project_uuid '.metadata+={"project_reference":{"kind":$proj,"uuid":$proj_uuid}}' $JSONFile >"$tmp" && mv "$tmp" $JSONFile) + fi + + # REMOVE the "status" and "product_version" keys (if they exist) from the JSON data this is included on export but is invalid on import. (affects all BPs being imported) + tmp_removal=$(mktemp) + $(jq 'del(.status) | del(.product_version)' $JSONFile >"$tmp_removal" && mv "$tmp_removal" $JSONFile) + + # GET BP NAME (affects all BPs being imported) + # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all + blueprint_name_quotes=$(jq '(.spec.name)' $JSONFile) + blueprint_name="${blueprint_name_quotes%\"}" # remove the suffix " + blueprint_name="${blueprint_name#\"}" # will remove the prefix " + + if [ $blueprint_name == 'null' ]; then + echo "Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?" + exit 0 + else + # got the blueprint name means it is probably a valid blueprint file, we can now continue the upload + echo "Uploading the updated blueprint: $blueprint_name..." 
+ + path_to_file=$JSONFile + bp_name=$blueprint_name + project_uuid=$project_uuid + + upload_result=$(curl -s -k --insecure --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST https://localhost:9440/api/nutanix/v3/blueprints/import_file -F file=@$path_to_file -F name=$bp_name -F project_uuid=$project_uuid) + + #if the upload_result var is not empty then let's say it was succcessful + if [ -z "$upload_result" ]; then + echo "Upload for $bp_name did not finish." + else + echo "Upload for $bp_name finished." + echo "-----------------------------------------" + # echo "Result: $upload_result" + fi + fi + + echo "Finished uploading ${BLUEPRINT}!" + + #Getting the Blueprint UUID + KARBON_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==KarbonClusterDeployment"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Karbon Blueprint UUID = $KARBON_BLUEPRINT_UUID" + + echo "Update Blueprint and writing to temp file" + + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "ERA_IMAGE=${KARBON_IMAGE}" + echo "ERA_IMAGE_UUID=${KARBON_IMAGE_UUID}" + echo "NETWORK_UUID=${NETWORK_UUID}" + + DOWNLOADED_JSONFile="${BLUEPRINT}-${KARBON_BLUEPRINT_UUID}.json" + UPDATED_JSONFile="${BLUEPRINT}-${KARBON_BLUEPRINT_UUID}-updated.json" + + # GET The Blueprint so it can be updated + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" > ${DOWNLOADED_JSONFile} + + cat $DOWNLOADED_JSONFile \ + | jq -c 'del(.status)' \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$KARBON_IMAGE\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$KARBON_IMAGE_UUID\")" \ + | jq -c -r 
"(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$PE_CREDS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ + | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PC_CREDS_PASSWORD\")" \ + | jq -c -r '(.spec.resources.credential_definition_list[1].secret.attrs.is_secret_modified = "true")' \ + > $UPDATED_JSONFile + + echo "Saving Credentials Edits with PUT" + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT -d @$UPDATED_JSONFile "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" + + echo "Finished Updating Credentials" + + # GET The Blueprint payload + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Karbon Cluster", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + + # Launch the BLUEPRINT + + echo "Launching the Era Server Application" + + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" + + echo "Finished Launching the Karbon Cluster Deployment Blueprint" + +} + ############################################################################################################################################################################### # Routine to upload CICDInfra Calm Blueprint and set variables 
############################################################################################################################################################################### @@ -1475,19 +1679,16 @@ function upload_CICDInfra_calm_blueprint() { local DIRECTORY="/home/nutanix/cicdinfra" local BLUEPRINT=${CICDInfra_Blueprint} local CALM_PROJECT="BootcampInfra" - local ERA_IP=${ERA_HOST} - local PE_IP=${PE_HOST} local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} #local CENTOS_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) - local CENTOS_PASSWORD_MODIFIED="true" local DOWNLOAD_BLUEPRINTS local NETWORK_UUID local SERVER_IMAGE="CentOS7.qcow2" local SERVER_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" local _loops="0" - local _maxtries="30" + local _maxtries="75" echo "Starting CICDInfra Blueprint Deployment" @@ -1517,10 +1718,12 @@ function upload_CICDInfra_calm_blueprint() { fi fi echo "Server Image UUID = $SERVER_IMAGE_UUID" + echo "-----------------------------------------" NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" + echo "-----------------------------------------" # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) @@ -1559,10 +1762,6 @@ function upload_CICDInfra_calm_blueprint() { echo "Currently updating blueprint $JSONFile..." 
- echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "NETWORK_UUID=${NETWORK_UUID}" - - # NOTE: bash doesn't do in place editing so we need to use a temp file and overwrite the old file with new changes for every blueprint tmp=$(mktemp) @@ -1605,7 +1804,7 @@ function upload_CICDInfra_calm_blueprint() { fi fi - echo "Finished uploading ${BLUEPRINT} and setting Variables!" + echo "Finished uploading ${BLUEPRINT}!" #Getting the Blueprint UUID CICDInfra_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CICD_Infra.json"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1614,6 +1813,11 @@ function upload_CICDInfra_calm_blueprint() { echo "Update Blueprint and writing to temp file" + echo "${CALM_PROJECT} network UUID: ${project_uuid}" + echo "SERVER_IMAGE=${SERVER_IMAGE}" + echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" + echo "NETWORK_UUID=${NETWORK_UUID}" + DOWNLOADED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}.json" UPDATED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}-updated.json" @@ -1622,11 +1826,16 @@ function upload_CICDInfra_calm_blueprint() { cat $DOWNLOADED_JSONFile \ | jq -c 'del(.status)' \ - | jq -c -r "(.spec.resources.app_profile_list[0].variable_list[0].value = \"$ERA_IP\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r 
"(.spec.resources.substrate_definition_list[0].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From e562bb9968ca4f37fc4b78971bed4965c6eb7a4f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 14:16:51 -0800 Subject: [PATCH 405/691] Update lib.pc.sh --- scripts/lib.pc.sh 
| 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7b7f2e1..36a0954 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1106,13 +1106,13 @@ function upload_citrix_calm_blueprint() { CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) (( _loops++ )) done - if [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then + elif [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else log "Image is not upload, please check." fi - fi + #fi echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" echo "-----------------------------------------" From 31d5e454d2bbafd6ff656e2359a1dff60e297097 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 14:17:47 -0800 Subject: [PATCH 406/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 36a0954..407e991 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1109,9 +1109,9 @@ function upload_citrix_calm_blueprint() { elif [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else + else log "Image is not upload, please check." 
- fi + fi #fi echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" From 4a64ee9340145e536c37835092f4776e4155881c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 16:51:28 -0800 Subject: [PATCH 407/691] Update lib.pc.sh --- scripts/lib.pc.sh | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 407e991..0c81902 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1077,12 +1077,11 @@ function upload_citrix_calm_blueprint() { SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) (( _loops++ )) done - if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then + elif [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else + else log "Image is not upload, please check." - fi fi echo "Server Image UUID = $SERVER_IMAGE_UUID" @@ -1322,14 +1321,14 @@ function upload_era_calm_blueprint() { ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) (( _loops++ )) done - if [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]]; then + elif [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else + else log "Image is not upload, please check." 
- fi fi + echo "ERA Image UUID = $ERA_IMAGE_UUID" echo "-----------------------------------------" @@ -1521,14 +1520,14 @@ function upload_karbon_calm_blueprint() { KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) (( _loops++ )) done - if [[ $KARBON_IMAGE_UUID_CHECK -eq 1 ]]; then + elif [[ $KARBON_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." KARBON_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ntnx-0.2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else + else log "Image is not upload, please check." - fi fi + echo "Karbon Image UUID = $KARBON_IMAGE_UUID" echo "-----------------------------------------" @@ -1710,13 +1709,13 @@ function upload_CICDInfra_calm_blueprint() { SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) (( _loops++ )) done - if [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then + elif [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then log "Image has been uploaded." SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else + else log "Image is not upload, please check." 
- fi fi + echo "Server Image UUID = $SERVER_IMAGE_UUID" echo "-----------------------------------------" From 6b51498d6fd7d09936330109d4930e441d965949 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 17:27:11 -0800 Subject: [PATCH 408/691] Karbon BP Deployment --- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 44 ++---------------------------------------- scripts/ts2020.sh | 2 ++ 3 files changed, 5 insertions(+), 43 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1bd7de9..1cae582 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -21,7 +21,7 @@ CALM_RSA_KEY_FILE='calm_rsa_key.env' ERA_Blueprint='EraServerDeployment.json' Citrix_Blueprint='CitrixBootcampInfra.json' Beam_Blueprint='' -Karbon_Blueprint='' +Karbon_Blueprint='KarbonClusterDeployment.json' CICDInfra_Blueprint='CICD_Infra.json' # Curl and SSH settings diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0c81902..8aa1e0d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1504,40 +1504,6 @@ function upload_karbon_calm_blueprint() { mkdir $DIRECTORY - echo "Getting Karbon Image UUID" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - _loops="0" - _maxtries="75" - - KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) - # The response should be a Task UUID - if [[ $KARBON_IMAGE_UUID_CHECK -ne 1 ]]; then - # Check if Image has been upload to IMage service - KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) - while [[ $KARBON_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" - sleep 60 - KARBON_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ntnx-0.2' | wc -l) - (( _loops++ )) - done - elif [[ $KARBON_IMAGE_UUID_CHECK -eq 1 ]]; then - log "Image has been uploaded." - KARBON_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ntnx-0.2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else - log "Image is not upload, please check." - fi - - - echo "Karbon Image UUID = $KARBON_IMAGE_UUID" - echo "-----------------------------------------" - - echo "Getting NETWORK UUID" - - NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - - echo "NETWORK UUID = $NETWORK_UUID" - echo "-----------------------------------------" - # download the blueprint DOWNLOAD_BLUEPRINTS=$(curl -L ${BLUEPRINT_URL}${BLUEPRINT} -o ${DIRECTORY}/${BLUEPRINT}) log "Downloading ${BLUEPRINT} | BLUEPRINT_URL ${BLUEPRINT_URL}|${DOWNLOAD_BLUEPRINTS}" @@ -1629,9 +1595,7 @@ function upload_karbon_calm_blueprint() { echo "Update Blueprint and writing to temp file" echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "ERA_IMAGE=${KARBON_IMAGE}" - echo "ERA_IMAGE_UUID=${KARBON_IMAGE_UUID}" - echo "NETWORK_UUID=${NETWORK_UUID}" + echo "KARBON_BLUEPRINT_UUID=${KARBON_BLUEPRINT_UUID}" DOWNLOADED_JSONFile="${BLUEPRINT}-${KARBON_BLUEPRINT_UUID}.json" UPDATED_JSONFile="${BLUEPRINT}-${KARBON_BLUEPRINT_UUID}-updated.json" @@ -1641,10 +1605,6 @@ function upload_karbon_calm_blueprint() { cat $DOWNLOADED_JSONFile \ | jq -c 'del(.status)' \ - | jq -c -r 
"(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$KARBON_IMAGE\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$KARBON_IMAGE_UUID\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value = \"$PE_CREDS_PASSWORD\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ | jq -c -r "(.spec.resources.credential_definition_list[1].secret.value = \"$PC_CREDS_PASSWORD\")" \ @@ -1658,7 +1618,7 @@ function upload_karbon_calm_blueprint() { echo "Finished Updating Credentials" # GET The Blueprint payload - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "Karbon Cluster", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X GET -d '{}' "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}" | jq 'del(.status, .spec.name) | .spec += {"application_name": "KarbonClusterDeployment", "app_profile_reference": {"uuid": .spec.resources.app_profile_list[0].uuid, "kind": "app_profile" }}' > set_blueprint_response_file.json # Launch the BLUEPRINT diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index c0ffb68..8806d7d 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -183,6 +183,8 @@ case ${1} in && sleep 30 \ && upload_era_calm_blueprint \ && sleep 30 \ + && 
upload_karbon_calm_blueprint \ + && sleep 30 \ && upload_CICDInfra_calm_blueprint \ && seedPC \ && prism_check 'PC' From cef99d3eb17214d22235d668c4c43bf4e2ebe374 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 20:22:58 -0800 Subject: [PATCH 409/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 8aa1e0d..3b7a4ac 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1492,7 +1492,7 @@ function upload_karbon_calm_blueprint() { local NETWORK_NAME=${NW1_NAME} local VLAN_NAME=${NW1_VLAN} local PE_CREDS_PASSWORD="${PE_PASSWORD}" - local PE_CREDS_PASSWORD="${PE_PASSWORD}" + local PC_CREDS_PASSWORD="${PE_PASSWORD}" #local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local DOWNLOAD_BLUEPRINTS local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" From cd1fd83d60d7b78a463bdf760a14b4c7a02cf2e9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 20:32:09 -0800 Subject: [PATCH 410/691] Update lib.pc.sh --- scripts/lib.pc.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3b7a4ac..c195700 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1111,7 +1111,6 @@ function upload_citrix_calm_blueprint() { else log "Image is not upload, please check." 
fi - #fi echo "Citrix Image UUID = $CITRIX_IMAGE_UUID" echo "-----------------------------------------" From 76d1a38a7ab7d37e60b86c6b02c4e0a84ac5a4d3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sat, 22 Feb 2020 22:59:44 -0800 Subject: [PATCH 411/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c195700..1f6327f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1062,7 +1062,7 @@ function upload_citrix_calm_blueprint() { mkdir $DIRECTORY echo "Getting Server Image UUID" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter + #Getting the IMAGE_UUID _loops="0" _maxtries="75" From facb70edeb3bf7888a1aca3980100a6d34685287 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Sun, 23 Feb 2020 14:22:34 -0800 Subject: [PATCH 412/691] stage-workshop Updated the Stageworkshop.sh so it moves to the next line in the clusters.txt if there is a failure.... --- cluster.txt | 6 +++++- stage_workshop.sh | 10 ++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cluster.txt b/cluster.txt index 1c28991..063b32f 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1 +1,5 @@ -10.42.86.37|techX2019!|willem@nutanix.com +10.42.3.37|techX2020!|willem@nutanix.com +10.42.5.37|techX2020!|willem@nutanix.com +10.42.25.37|techX2020!|willem@nutanix.com +10.42.67.37|techX2020!|willem@nutanix.com +10.42.70.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/stage_workshop.sh b/stage_workshop.sh index bb725a6..b6b3533 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -135,7 +135,13 @@ ______Warning -- curl time out indicates either: - Foundation and initialization (Cluster IP API response) hasn't completed. EoM - prism_check 'PE' 60 + _error=$(prism_check 'PE' '1') + # If we were unable to connect to the PRISM UI, send a message to the console and move to the next + if [[ ! 
-z ${_error} ]]; then + log "We were unable to connect to the PRISM UI on ${PE_HOST}..." + continue + fi + if [[ -d cache ]]; then pushd cache || true @@ -241,7 +247,7 @@ function validate_clusters() { set -f # shellcheck disable=2206 _fields=(${_cluster//|/ }) - PE_HOST=${_fields[0]} + PE_HOST=${_fields[0]} PE_PASSWORD=${_fields[1]} prism_check 'PE' From 814aa0a6a0e1958afbcd2e398b7a4dc646ed0db6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 23 Feb 2020 15:06:56 -0800 Subject: [PATCH 413/691] Update lib.pc.sh --- scripts/lib.pc.sh | 35 ++++++++++++++--------------------- 1 file changed, 14 insertions(+), 21 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1f6327f..bb6582b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1068,16 +1068,13 @@ function upload_citrix_calm_blueprint() { SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) # The response should be a Task UUID - if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then - # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) - while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" - sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) - (( _loops++ )) - done - elif [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then + while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" + sleep 60 + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Windows2016.qcow2' | wc -l) + (( _loops++ )) + done + if [[ $_loops -lt $_maxtries ]]; then log "Image has been uploaded." SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Windows2016.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1095,17 +1092,13 @@ function upload_citrix_calm_blueprint() { _maxtries="75" CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) - # The response should be a Task UUID - if [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 ]]; then - # Check if Image has been upload to IMage service - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) - while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" - sleep 60 - CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) - (( _loops++ )) - done - elif [[ $CITRIX_IMAGE_UUID_CHECK -eq 1 ]]; then + while [[ $CITRIX_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... 
sleeping 60 seconds" + sleep 60 + CITRIX_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'Citrix_Virtual_Apps_and_Desktops_7_1912.iso' | wc -l) + (( _loops++ )) + done + if [[ $_loops -lt $_maxtries ]]; then log "Image has been uploaded." CITRIX_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==Citrix_Virtual_Apps_and_Desktops_7_1912.iso"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else From cfd20dce4473fe7293372250747cbd256fced4f2 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Sun, 23 Feb 2020 16:04:05 -0800 Subject: [PATCH 414/691] Priority images Added in lib.common.sh and ts2020.sh the priority for images --- scripts/lib.common.sh | 28 ++++++++++++++++++++++++++++ scripts/ts2020.sh | 1 + test/images.json | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 64 insertions(+) create mode 100644 test/images.json diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 2bc8563..b1377a1 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -450,7 +450,35 @@ EOF } ################################################################################## +# Priority Images that need to be uploaded and controlled before we move to the mass upload +function priority_images{ + + local _prio_images_arr=("ERA-Server-build-1.2.0.1.qcow2","Windows2016.qcow2","CentOS7.qcow2","Citrix_Virtual_Apps_and_Desktops_7_1912.iso") + + for _image in "${_prio_images_arr[@]}"; do + _http_body=$(cat < Date: Sun, 23 Feb 2020 16:11:33 -0800 Subject: [PATCH 415/691] Update ts2020.sh Typo --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 016ac54..4dbd6be 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -176,7 +176,7 @@ case ${1} in && pc_project \ 
&& object_store \ && karbon_image_download \ - && priority_images + && priority_images \ && images \ && flow_enable \ && pc_cluster_img_import \ From 8b0dc99d3b24f46611b20ee801532dcad0f2c837 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Sun, 23 Feb 2020 16:24:02 -0800 Subject: [PATCH 416/691] Update lib.common.sh --- scripts/lib.common.sh | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index b1377a1..2564720 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -454,7 +454,12 @@ EOF function priority_images{ - local _prio_images_arr=("ERA-Server-build-1.2.0.1.qcow2","Windows2016.qcow2","CentOS7.qcow2","Citrix_Virtual_Apps_and_Desktops_7_1912.iso") + local _prio_images_arr=(\ + ERA-Server-build-1.2.0.1.qcow2 \ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + ) for _image in "${_prio_images_arr[@]}"; do _http_body=$(cat < Date: Sun, 23 Feb 2020 16:26:46 -0800 Subject: [PATCH 417/691] Update lib.common.sh Had a typo in the function --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 2564720..415abf5 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -452,7 +452,7 @@ EOF ################################################################################## # Priority Images that need to be uploaded and controlled before we move to the mass upload -function priority_images{ +function priority_images(){ local _prio_images_arr=(\ ERA-Server-build-1.2.0.1.qcow2 \ From 01ad7aba8488ce2c403442070f80ca3809870668 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 23 Feb 2020 17:28:51 -0800 Subject: [PATCH 418/691] Update lib.pc.sh --- scripts/lib.pc.sh | 34 ++++++++++++++-------------------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index bb6582b..8ec7d9b 100755 --- a/scripts/lib.pc.sh 
+++ b/scripts/lib.pc.sh @@ -1304,16 +1304,13 @@ function upload_era_calm_blueprint() { ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) # The response should be a Task UUID - if [[ $ERA_IMAGE_UUID_CHECK -ne 1 ]]; then - # Check if Image has been upload to IMage service - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) - while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" - sleep 60 - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) - (( _loops++ )) - done - elif [[ $ERA_IMAGE_UUID_CHECK -eq 1 ]]; then + while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" + sleep 60 + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) + (( _loops++ )) + done + if [[ $_loops -lt $_maxtries ]]; then log "Image has been uploaded." 
ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else @@ -1652,16 +1649,13 @@ function upload_CICDInfra_calm_blueprint() { SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) # The response should be a Task UUID - if [[ $SERVER_IMAGE_UUID_CHECK -ne 1 ]]; then - # Check if Image has been upload to IMage service - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) - while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" - sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) - (( _loops++ )) - done - elif [[ $SERVER_IMAGE_UUID_CHECK -eq 1 ]]; then + while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do + log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" + sleep 60 + SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) + (( _loops++ )) + done + if [[ $_loops -lt $_maxtries ]]; then log "Image has been uploaded." 
SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else From f2845cc48a08245c7b95cd34df77106b2e3fad4f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Sun, 23 Feb 2020 19:04:37 -0800 Subject: [PATCH 419/691] small update on lib.common.sh Changed the image upload --- cluster.txt | 22 +++++++++++++++++----- scripts/lib.common.sh | 3 ++- stage_workshop.sh | 2 +- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/cluster.txt b/cluster.txt index 063b32f..d659aac 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,5 +1,17 @@ -10.42.3.37|techX2020!|willem@nutanix.com -10.42.5.37|techX2020!|willem@nutanix.com -10.42.25.37|techX2020!|willem@nutanix.com -10.42.67.37|techX2020!|willem@nutanix.com -10.42.70.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.55.10.37|techX2020!|willem@nutanix.com +10.55.11.37|techX2020!|willem@nutanix.com +10.55.16.37|techX2020!|willem@nutanix.com +10.55.18.37|techX2020!|willem@nutanix.com +10.55.21.37|techX2020!|willem@nutanix.com +10.55.24.37|techX2020!|willem@nutanix.com +10.55.33.37|techX2020!|willem@nutanix.com +10.55.35.37|techX2020!|willem@nutanix.com +10.55.39.37|techX2020!|willem@nutanix.com +10.55.40.37|techX2020!|willem@nutanix.com +10.55.41.37|techX2020!|willem@nutanix.com +10.55.46.37|techX2020!|willem@nutanix.com +10.55.48.37|techX2020!|willem@nutanix.com +10.55.49.37|techX2020!|willem@nutanix.com +10.55.50.37|techX2020!|willem@nutanix.com +10.55.52.37|techX2020!|willem@nutanix.com +10.55.54.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 415abf5..ce8069e 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -460,7 +460,8 @@ function priority_images(){ CentOS7.qcow2 \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) - + 
local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + for _image in "${_prio_images_arr[@]}"; do _http_body=$(cat < Date: Sun, 23 Feb 2020 23:31:58 -0800 Subject: [PATCH 420/691] Update lib.common.sh To get the right ISO/Disk images controlled due to dependencies on Blueprint uploads --- scripts/lib.common.sh | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index ce8069e..30f286f 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -460,9 +460,20 @@ function priority_images(){ CentOS7.qcow2 \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) - local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " - + local CURL_HTTP_OPTS=" --max-time 25 --header Content-Type:application/json --header Accept:application/json --insecure " + echo ${OCTET[1]} + if [[ ${OCTET[1]} == '42' || ${OCTET[1]} == '38' ]]; then + SOURCE_URL="10.42.194.11" + else + SOURCE_URL="10.55.251.38" + fi + for _image in "${_prio_images_arr[@]}"; do + if [[ ${_image} == *"iso"* ]]; then + DISK_TYPE="ISO_IMAGE" + else + DISK_TYPE="DISK_IMAGE" + fi _http_body=$(cat < Date: Mon, 24 Feb 2020 04:21:55 -0800 Subject: [PATCH 421/691] Updated due to DFS issues Changed the DFS servers for the FS under our control at 10.42.38.10 and 10.55.76.10 --- scripts/lib.common.sh | 13 +++++++------ scripts/ts2020.sh | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 30f286f..917eae4 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -461,13 +461,16 @@ function priority_images(){ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) local CURL_HTTP_OPTS=" --max-time 25 --header Content-Type:application/json --header Accept:application/json --insecure " - echo ${OCTET[1]} - if [[ ${OCTET[1]} == '42' 
|| ${OCTET[1]} == '38' ]]; then - SOURCE_URL="10.42.194.11" + + # Set the correct High Perf FileServer + if [[ ${OCTET[1]} == '42' ]] || [[ ${OCTET[1]} == '38' ]]; then + SOURCE_URL="10.42.38.10" else - SOURCE_URL="10.55.251.38" + SOURCE_URL="10.55.76.10" fi + log "Grabbing the priority files from the ${SOURCE_URL} fileserver..." + for _image in "${_prio_images_arr[@]}"; do if [[ ${_image} == *"iso"* ]]; then DISK_TYPE="ISO_IMAGE" @@ -487,9 +490,7 @@ function priority_images(){ "metadata":{"kind":"image"},"api_version":"3.1.0"}}],"api_version":"3.0"} EOF ) - echo ${_http_body} _task_id=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/batch| jq '.api_response_list[].api_response.status.execution_context.task_uuid' | tr -d \") - echo ${_task_id} loop ${_task_id} done diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index 4dbd6be..cbb1cbd 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -177,7 +177,6 @@ case ${1} in && object_store \ && karbon_image_download \ && priority_images \ - && images \ && flow_enable \ && pc_cluster_img_import \ && upload_citrix_calm_blueprint \ @@ -188,6 +187,7 @@ case ${1} in && sleep 30 \ && upload_CICDInfra_calm_blueprint \ && seedPC \ + && images \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 6494ecdb32b2ce1830dea7dbefcfc32e5734a0f8 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 04:29:08 -0800 Subject: [PATCH 422/691] Update lib.common.sh Remove the debug messages from Curl. 
--- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 917eae4..191607d 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -460,7 +460,7 @@ function priority_images(){ CentOS7.qcow2 \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) - local CURL_HTTP_OPTS=" --max-time 25 --header Content-Type:application/json --header Accept:application/json --insecure " + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # Set the correct High Perf FileServer if [[ ${OCTET[1]} == '42' ]] || [[ ${OCTET[1]} == '38' ]]; then From f654c50b5e4c359437f7589610e064283366f67a Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 05:40:29 -0800 Subject: [PATCH 423/691] Update lib.pc.sh Small add to check that Flow has been enabled via the task_id and checking in a loop --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 8ec7d9b..c28a3d4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -35,12 +35,13 @@ function flow_enable() { # Try one more time then fail, but continue if [ -z $_task_id ]; then log "Flow not yet enabled. Will retry...." - _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow) + _task_id=$(curl -X POST $_json_data $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} $_url_flow | jq '.task_uuid' | tr -d \") if [ -z $_task_id ]; then log "Flow still not enabled.... ***Not retrying. Please enable via UI.***" fi else + loop ${_task_id} log "Flow has been Enabled..." 
fi From 1c5e8f36f5e9f061ceece0274f47fee19287730b Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 08:20:58 -0800 Subject: [PATCH 424/691] Update lib.common.sh Removed old stuff int he priority_images function --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 191607d..001d602 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -486,7 +486,7 @@ function priority_images(){ "body":{"spec": {"name":"${_image}","description":"${_image}","resources":{ "image_type":"${DISK_TYPE}", - "source_uri":"http://${SOURCE_URL}/workshop_staging/${_image}"}}, + "source_uri":"http://${SOURCE_URL}/${_image}"}}, "metadata":{"kind":"image"},"api_version":"3.1.0"}}],"api_version":"3.0"} EOF ) From 156b55da9c9e86dc694f3a020416c3b857fde8b6 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 12:07:50 -0800 Subject: [PATCH 425/691] Added our own web based image servers --- cluster.txt | 18 +----------------- scripts/global.vars.sh | 3 +++ 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/cluster.txt b/cluster.txt index d659aac..fffa8e4 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,17 +1 @@ -10.55.10.37|techX2020!|willem@nutanix.com -10.55.11.37|techX2020!|willem@nutanix.com -10.55.16.37|techX2020!|willem@nutanix.com -10.55.18.37|techX2020!|willem@nutanix.com -10.55.21.37|techX2020!|willem@nutanix.com -10.55.24.37|techX2020!|willem@nutanix.com -10.55.33.37|techX2020!|willem@nutanix.com -10.55.35.37|techX2020!|willem@nutanix.com -10.55.39.37|techX2020!|willem@nutanix.com -10.55.40.37|techX2020!|willem@nutanix.com -10.55.41.37|techX2020!|willem@nutanix.com -10.55.46.37|techX2020!|willem@nutanix.com -10.55.48.37|techX2020!|willem@nutanix.com -10.55.49.37|techX2020!|willem@nutanix.com -10.55.50.37|techX2020!|willem@nutanix.com -10.55.52.37|techX2020!|willem@nutanix.com -10.55.54.37|techX2020!|willem@nutanix.com \ No newline at end of 
file +10.42.71.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1cae582..e9f3d00 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -192,6 +192,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.55.251.38/workshop_staging/' \ + 'http://10.55.76.10/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ @@ -229,6 +230,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ + 'http://10.42.38.10/images' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ @@ -266,6 +268,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ + 'http://10.42.38.10/images' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ From 70d4bc8b7a700acff01cc80cd862a801b0f07025 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 12:42:52 -0800 Subject: [PATCH 426/691] Update lib.common.sh --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 001d602..33b1226 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -464,7 +464,7 @@ function priority_images(){ # Set the correct High Perf FileServer if [[ ${OCTET[1]} == '42' ]] || [[ ${OCTET[1]} == '38' ]]; then - SOURCE_URL="10.42.38.10" + SOURCE_URL="10.42.38.10/images" else SOURCE_URL="10.55.76.10" fi From d5adcf7b70c3df16f4456f1461ab882e0129b75c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 13:13:14 -0800 Subject: [PATCH 427/691] Remove Karbon from Launching Remove Karbon BP from deploying due to missing CentOS image --- cluster.txt | 12 +++++++++++- scripts/lib.pc.sh | 6 +++--- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/cluster.txt b/cluster.txt index fffa8e4..5ce6118 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1 +1,11 @@ 
-10.42.71.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.42.81.37|techX2020!|willem@nutanix.com +10.42.84.37|techX2020!|willem@nutanix.com +10.42.86.37|techX2020!|willem@nutanix.com +10.42.95.37|techX2020!|willem@nutanix.com +10.42.96.37|techX2020!|willem@nutanix.com +10.42.99.37|techX2020!|willem@nutanix.com +10.42.104.37|techX2020!|willem@nutanix.com +10.42.108.37|techX2020!|willem@nutanix.com +10.42.110.37|techX2020!|willem@nutanix.com +10.42.112.37|techX2020!|willem@nutanix.com +10.42.114.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c28a3d4..8d5f05c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1612,11 +1612,11 @@ function upload_karbon_calm_blueprint() { # Launch the BLUEPRINT - echo "Launching the Era Server Application" + #echo "Launching the Era Server Application" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" + #curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" - echo "Finished Launching the Karbon Cluster Deployment Blueprint" + #echo "Finished Launching the Karbon Cluster Deployment Blueprint" } From f71a72b55328072940f572cb61049a9382f619f4 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 13:42:27 -0800 Subject: [PATCH 428/691] Update global.vars.sh Laura's version of seedPC. Has some updates.. 
--- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e9f3d00..e276ab3 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -203,7 +203,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.55.251.38/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) - PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' + PC_DATA='http://10.55.76.10/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' @@ -241,7 +241,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) - PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' @@ -279,7 +279,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) - PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' + PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' From 13d4ce3dd40ada57c02b12cea39ec4a65f4f3a1c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 19:50:28 -0800 Subject: [PATCH 429/691] VLAN incorrect on the .38 network Due to a network on secondary VLAN .38 are not in line with the 55 and 42. 
That way the routing is not valid and needs to be diferent for the 10.38 network --- cluster.txt | 49 ++++++++++++++++++++++++++++++++---------- scripts/global.vars.sh | 3 ++- 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/cluster.txt b/cluster.txt index 5ce6118..a7d937c 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,11 +1,38 @@ -10.42.81.37|techX2020!|willem@nutanix.com -10.42.84.37|techX2020!|willem@nutanix.com -10.42.86.37|techX2020!|willem@nutanix.com -10.42.95.37|techX2020!|willem@nutanix.com -10.42.96.37|techX2020!|willem@nutanix.com -10.42.99.37|techX2020!|willem@nutanix.com -10.42.104.37|techX2020!|willem@nutanix.com -10.42.108.37|techX2020!|willem@nutanix.com -10.42.110.37|techX2020!|willem@nutanix.com -10.42.112.37|techX2020!|willem@nutanix.com -10.42.114.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.42.162.37|techX2020!|willem@nutanix.com +10.42.168.37|techX2020!|willem@nutanix.com +10.38.170.37|techX2020!|willem@nutanix.com +10.38.171.37|techX2020!|willem@nutanix.com +10.38.172.37|techX2020!|willem@nutanix.com +10.38.193.37|techX2020!|willem@nutanix.com +10.38.203.37|techX2020!|willem@nutanix.com +10.38.206.37|techX2020!|willem@nutanix.com +10.38.207.37|techX2020!|willem@nutanix.com +10.38.208.37|techX2020!|willem@nutanix.com +10.38.209.37|techX2020!|willem@nutanix.com +10.38.210.37|techX2020!|willem@nutanix.com +10.38.211.37|techX2020!|willem@nutanix.com +10.38.212.37|techX2020!|willem@nutanix.com +10.38.213.37|techX2020!|willem@nutanix.com +10.38.217.37|techX2020!|willem@nutanix.com +10.38.218.37|techX2020!|willem@nutanix.com +10.55.9.37|techX2020!|willem@nutanix.com +10.55.10.37|techX2020!|willem@nutanix.com +10.55.11.37|techX2020!|willem@nutanix.com +10.55.16.37|techX2020!|willem@nutanix.com +10.55.18.37|techX2020!|willem@nutanix.com +10.55.21.37|techX2020!|willem@nutanix.com +10.55.24.37|techX2020!|willem@nutanix.com +10.55.33.37|techX2020!|willem@nutanix.com +10.55.35.37|techX2020!|willem@nutanix.com 
+10.55.39.37|techX2020!|willem@nutanix.com +10.55.40.37|techX2020!|willem@nutanix.com +10.55.41.37|techX2020!|willem@nutanix.com +10.55.46.37|techX2020!|willem@nutanix.com +10.55.48.37|techX2020!|willem@nutanix.com +10.55.49.37|techX2020!|willem@nutanix.com +10.55.50.37|techX2020!|willem@nutanix.com +10.55.52.37|techX2020!|willem@nutanix.com +10.55.54.37|techX2020!|willem@nutanix.com +10.55.57.37|techX2020!|willem@nutanix.com +10.55.60.37|techX2020!|willem@nutanix.com +10.42.94.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e276ab3..558a443 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -282,7 +282,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" - OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + NW2_VLAN=$((OCTET[2]*10+3)) ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ From b38e041de84575dbf8a7007ec51e1c212c744b1b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 24 Feb 2020 22:34:05 -0600 Subject: [PATCH 430/691] CICD BP Upload Updates --- scripts/lib.pc.sh | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 8d5f05c..5ceb863 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1643,29 +1643,6 @@ function upload_CICDInfra_calm_blueprint() { mkdir $DIRECTORY - echo "Getting Server Image UUID" - #Getting the IMAGE_UUID -- WHen changing the image make sure to change in the name filter - _loops="0" - _maxtries="75" - - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) - # The response should 
be a Task UUID - while [[ $SERVER_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do - log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" - sleep 60 - SERVER_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d {} 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'CentOS7.qcow2' | wc -l) - (( _loops++ )) - done - if [[ $_loops -lt $_maxtries ]]; then - log "Image has been uploaded." - SERVER_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==CentOS7.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - else - log "Image is not upload, please check." - fi - - echo "Server Image UUID = $SERVER_IMAGE_UUID" - echo "-----------------------------------------" - NETWORK_UUID=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "NETWORK UUID = $NETWORK_UUID" @@ -1760,8 +1737,6 @@ function upload_CICDInfra_calm_blueprint() { echo "Update Blueprint and writing to temp file" echo "${CALM_PROJECT} network UUID: ${project_uuid}" - echo "SERVER_IMAGE=${SERVER_IMAGE}" - echo "SERVER_IMAGE_UUID=${SERVER_IMAGE_UUID}" echo "NETWORK_UUID=${NETWORK_UUID}" DOWNLOADED_JSONFile="${BLUEPRINT}-${CICDInfra_BLUEPRINT_UUID}.json" @@ -1772,16 +1747,12 @@ function upload_CICDInfra_calm_blueprint() { cat $DOWNLOADED_JSONFile \ | jq -c 'del(.status)' \ - | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.name = \"$SERVER_IMAGE\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.disk_list[0].data_source_reference.uuid = \"$SERVER_IMAGE_UUID\")" \ | jq -c -r 
"(.spec.resources.substrate_definition_list[0].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[0].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ - | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r "(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE 
KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From e36a8ab031f5f74a83c6b2952b390a7196c03183 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 24 Feb 2020 20:35:54 -0800 Subject: [PATCH 431/691] Update global.vars.sh Update due 
to the network segmentation in 10.38. 170-200 needs VLAN+3. Rest is VLAN+1 --- cluster.txt | 31 +++++++------------------------ scripts/global.vars.sh | 6 +++++- 2 files changed, 12 insertions(+), 25 deletions(-) diff --git a/cluster.txt b/cluster.txt index a7d937c..954a120 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,10 +1,11 @@ -10.42.162.37|techX2020!|willem@nutanix.com -10.42.168.37|techX2020!|willem@nutanix.com 10.38.170.37|techX2020!|willem@nutanix.com 10.38.171.37|techX2020!|willem@nutanix.com 10.38.172.37|techX2020!|willem@nutanix.com 10.38.193.37|techX2020!|willem@nutanix.com +10.38.195.37|techX2020!|willem@nutanix.com 10.38.203.37|techX2020!|willem@nutanix.com +10.38.204.37|techX2020!|willem@nutanix.com +10.38.205.37|techX2020!|willem@nutanix.com 10.38.206.37|techX2020!|willem@nutanix.com 10.38.207.37|techX2020!|willem@nutanix.com 10.38.208.37|techX2020!|willem@nutanix.com @@ -13,26 +14,8 @@ 10.38.211.37|techX2020!|willem@nutanix.com 10.38.212.37|techX2020!|willem@nutanix.com 10.38.213.37|techX2020!|willem@nutanix.com +10.38.214.37|techX2020!|willem@nutanix.com +10.38.215.37|techX2020!|willem@nutanix.com +10.38.216.37|techX2020!|willem@nutanix.com 10.38.217.37|techX2020!|willem@nutanix.com -10.38.218.37|techX2020!|willem@nutanix.com -10.55.9.37|techX2020!|willem@nutanix.com -10.55.10.37|techX2020!|willem@nutanix.com -10.55.11.37|techX2020!|willem@nutanix.com -10.55.16.37|techX2020!|willem@nutanix.com -10.55.18.37|techX2020!|willem@nutanix.com -10.55.21.37|techX2020!|willem@nutanix.com -10.55.24.37|techX2020!|willem@nutanix.com -10.55.33.37|techX2020!|willem@nutanix.com -10.55.35.37|techX2020!|willem@nutanix.com -10.55.39.37|techX2020!|willem@nutanix.com -10.55.40.37|techX2020!|willem@nutanix.com -10.55.41.37|techX2020!|willem@nutanix.com -10.55.46.37|techX2020!|willem@nutanix.com -10.55.48.37|techX2020!|willem@nutanix.com -10.55.49.37|techX2020!|willem@nutanix.com -10.55.50.37|techX2020!|willem@nutanix.com -10.55.52.37|techX2020!|willem@nutanix.com 
-10.55.54.37|techX2020!|willem@nutanix.com -10.55.57.37|techX2020!|willem@nutanix.com -10.55.60.37|techX2020!|willem@nutanix.com -10.42.94.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.38.218.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 558a443..a2f00c0 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -283,7 +283,11 @@ case "${OCTET[0]}.${OCTET[1]}" in BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' - NW2_VLAN=$((OCTET[2]*10+3)) + + # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary + if [[ ${OCTET[2]} -gt 169 ]] && [[ ${OCTET[2]} -lt 200 ]]; then + NW2_VLAN=$((OCTET[2]*10+3)) + fi ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ From 3d9c83bdcab9f1d7066ca0390e53bc4c69b86d01 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 24 Feb 2020 22:42:01 -0600 Subject: [PATCH 432/691] Updates to calm_bootcamp for testing --- scripts/{calm.sh => calm_bootcamp.sh} | 13 ++++++++++--- stage_workshop.sh | 8 ++++---- 2 files changed, 14 insertions(+), 7 deletions(-) rename scripts/{calm.sh => calm_bootcamp.sh} (94%) diff --git a/scripts/calm.sh b/scripts/calm_bootcamp.sh similarity index 94% rename from scripts/calm.sh rename to scripts/calm_bootcamp.sh index afb96ae..5913de2 100755 --- a/scripts/calm.sh +++ b/scripts/calm_bootcamp.sh @@ -70,12 +70,10 @@ case ${1} in export QCOW2_IMAGES=(\ Windows2016.qcow2 \ CentOS7.qcow2 \ - Win10v1903.qcow2 \ ToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ) export ISO_IMAGES=(\ - Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ ) @@ -125,10 +123,19 @@ case ${1} in ssp_auth \ && calm_enable \ + && karbon_enable \ + && objects_enable \ && lcm \ && pc_project \ - && images \ + && object_store \ + && 
karbon_image_download \ + && priority_images \ + && flow_enable \ && pc_cluster_img_import \ + && upload_karbon_calm_blueprint \ + && sleep 30 \ + && upload_CICDInfra_calm_blueprint \ + && images \ && prism_check 'PC' log "Non-blocking functions (in development) follow." diff --git a/stage_workshop.sh b/stage_workshop.sh index 1d50d35..09a7906 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -24,7 +24,7 @@ WORKSHOPS=(\ "Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ -#"Calm Workshop (AOS 5.8.x/AHV PC 5.8.x) = Stable" \ +"Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -64,9 +64,9 @@ function stage_clusters() { _pe_launch='snc_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Calm | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Calm" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='calm.sh' + _pe_launch='calm_bootcamp.sh' _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i "^Citrix" | wc ${WC_ARG}) > 0 )); then @@ -247,7 +247,7 @@ function validate_clusters() { set -f # shellcheck disable=2206 _fields=(${_cluster//|/ }) - PE_HOST=${_fields[0]} + PE_HOST=${_fields[0]} PE_PASSWORD=${_fields[1]} prism_check 'PE' From b94810ed3549f1183758468d1fcaff9332fd41be Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 25 Feb 2020 07:52:28 -0600 Subject: [PATCH 433/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 5ceb863..438ff0f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1612,11 +1612,11 @@ function upload_karbon_calm_blueprint() { # Launch the BLUEPRINT - #echo "Launching the Era Server Application" + 
echo "Launching the Karbon Cluster Blueprint" - #curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" - #echo "Finished Launching the Karbon Cluster Deployment Blueprint" + echo "Finished Launching the Karbon Cluster Deployment Blueprint" } From 577bb0490a40678c2eb8fa14d36ef7f955f084bd Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 25 Feb 2020 06:24:33 -0800 Subject: [PATCH 434/691] small updates Disabled Karbon BP deploy as it depends on the CentOS ACS download... --- cluster.txt | 56 +++++++++++++++++++++++++++++------------------ scripts/lib.pc.sh | 6 ++--- 2 files changed, 38 insertions(+), 24 deletions(-) diff --git a/cluster.txt b/cluster.txt index 954a120..bc0c326 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,21 +1,35 @@ -10.38.170.37|techX2020!|willem@nutanix.com -10.38.171.37|techX2020!|willem@nutanix.com -10.38.172.37|techX2020!|willem@nutanix.com -10.38.193.37|techX2020!|willem@nutanix.com -10.38.195.37|techX2020!|willem@nutanix.com -10.38.203.37|techX2020!|willem@nutanix.com -10.38.204.37|techX2020!|willem@nutanix.com -10.38.205.37|techX2020!|willem@nutanix.com -10.38.206.37|techX2020!|willem@nutanix.com -10.38.207.37|techX2020!|willem@nutanix.com -10.38.208.37|techX2020!|willem@nutanix.com -10.38.209.37|techX2020!|willem@nutanix.com -10.38.210.37|techX2020!|willem@nutanix.com -10.38.211.37|techX2020!|willem@nutanix.com -10.38.212.37|techX2020!|willem@nutanix.com -10.38.213.37|techX2020!|willem@nutanix.com -10.38.214.37|techX2020!|willem@nutanix.com -10.38.215.37|techX2020!|willem@nutanix.com -10.38.216.37|techX2020!|willem@nutanix.com -10.38.217.37|techX2020!|willem@nutanix.com 
-10.38.218.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.42.25.37|techX2020!|willem@nutanix.com +10.42.27.37|techX2020!|willem@nutanix.com +10.42.31.37|techX2020!|willem@nutanix.com +10.42.32.37|techX2020!|willem@nutanix.com +10.42.37.37|techX2020!|willem@nutanix.com +10.42.56.37|techX2020!|willem@nutanix.com +10.42.58.37|techX2020!|willem@nutanix.com +10.42.61.37|techX2020!|willem@nutanix.com +10.42.66.37|techX2020!|willem@nutanix.com +10.42.67.37|techX2020!|willem@nutanix.com +10.42.69.37|techX2020!|willem@nutanix.com +10.42.70.37|techX2020!|willem@nutanix.com +10.42.75.37|techX2020!|willem@nutanix.com +10.42.105.37|techX2020!|willem@nutanix.com +10.42.110.37|techX2020!|willem@nutanix.com +10.42.111.37|techX2020!|willem@nutanix.com +10.42.161.37|techX2020!|willem@nutanix.com +10.38.196.37|techX2020!|willem@nutanix.com +10.38.198.37|techX2020!|willem@nutanix.com +10.55.4.37|techX2020!|willem@nutanix.com +10.55.7.37|techX2020!|willem@nutanix.com +10.55.14.37|techX2020!|willem@nutanix.com +10.55.17.37|techX2020!|willem@nutanix.com +10.55.20.37|techX2020!|willem@nutanix.com +10.55.22.37|techX2020!|willem@nutanix.com +10.55.23.37|techX2020!|willem@nutanix.com +10.55.28.37|techX2020!|willem@nutanix.com +10.55.30.37|techX2020!|willem@nutanix.com +10.55.31.37|techX2020!|willem@nutanix.com +10.55.43.37|techX2020!|willem@nutanix.com +10.55.55.37|techX2020!|willem@nutanix.com +10.55.56.37|techX2020!|willem@nutanix.com +10.55.59.37|techX2020!|willem@nutanix.com +10.55.67.37|techX2020!|willem@nutanix.com +10.55.76.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 438ff0f..de1975b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1612,11 +1612,11 @@ function upload_karbon_calm_blueprint() { # Launch the BLUEPRINT - echo "Launching the Karbon Cluster Blueprint" + #echo "Launching the Karbon Cluster Blueprint" - curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d 
@set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" + #curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" - echo "Finished Launching the Karbon Cluster Deployment Blueprint" + #echo "Finished Launching the Karbon Cluster Deployment Blueprint" } From 65a68806dbef3a705ce87e9c8e7bf4e9449e7337 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 25 Feb 2020 08:55:16 -0600 Subject: [PATCH 435/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index de1975b..1d8f762 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1629,8 +1629,6 @@ function upload_CICDInfra_calm_blueprint() { local BLUEPRINT=${CICDInfra_Blueprint} local CALM_PROJECT="BootcampInfra" local NETWORK_NAME=${NW1_NAME} - local VLAN_NAME=${NW1_VLAN} - #local CENTOS_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local DOWNLOAD_BLUEPRINTS local NETWORK_UUID local SERVER_IMAGE="CentOS7.qcow2" @@ -1753,6 +1751,8 @@ function upload_CICDInfra_calm_blueprint() { | jq -c -r "(.spec.resources.substrate_definition_list[1].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ | jq -c -r "(.spec.resources.substrate_definition_list[2].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.name = \"$NETWORK_NAME\")" \ + | jq -c -r "(.spec.resources.substrate_definition_list[3].create_spec.resources.nic_list[].subnet_reference.uuid = \"$NETWORK_UUID\")" \ | jq -c -r 
"(.spec.resources.credential_definition_list[0].secret.value=\"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAii7qFDhVadLx5lULAG/ooCUTA/ATSmXbArs+GdHxbUWd/bNG\nZCXnaQ2L1mSVVGDxfTbSaTJ3En3tVlMtD2RjZPdhqWESCaoj2kXLYSiNDS9qz3SK\n6h822je/f9O9CzCTrw2XGhnDVwmNraUvO5wmQObCDthTXc72PcBOd6oa4ENsnuY9\nHtiETg29TZXgCYPFXipLBHSZYkBmGgccAeY9dq5ywiywBJLuoSovXkkRJk3cd7Gy\nhCRIwYzqfdgSmiAMYgJLrz/UuLxatPqXts2D8v1xqR9EPNZNzgd4QHK4of1lqsNR\nuz2SxkwqLcXSw0mGcAL8mIwVpzhPzwmENC5OrwIBJQKCAQB++q2WCkCmbtByyrAp\n6ktiukjTL6MGGGhjX/PgYA5IvINX1SvtU0NZnb7FAntiSz7GFrODQyFPQ0jL3bq0\nMrwzRDA6x+cPzMb/7RvBEIGdadfFjbAVaMqfAsul5SpBokKFLxU6lDb2CMdhS67c\n1K2Hv0qKLpHL0vAdEZQ2nFAMWETvVMzl0o1dQmyGzA0GTY8VYdCRsUbwNgvFMvBj\n8T/svzjpASDifa7IXlGaLrXfCH584zt7y+qjJ05O1G0NFslQ9n2wi7F93N8rHxgl\nJDE4OhfyaDyLL1UdBlBpjYPSUbX7D5NExLggWEVFEwx4JRaK6+aDdFDKbSBIidHf\nh45NAoGBANjANRKLBtcxmW4foK5ILTuFkOaowqj+2AIgT1ezCVpErHDFg0bkuvDk\nQVdsAJRX5//luSO30dI0OWWGjgmIUXD7iej0sjAPJjRAv8ai+MYyaLfkdqv1Oj5c\noDC3KjmSdXTuWSYNvarsW+Uf2v7zlZlWesTnpV6gkZH3tX86iuiZAoGBAKM0mKX0\nEjFkJH65Ym7gIED2CUyuFqq4WsCUD2RakpYZyIBKZGr8MRni3I4z6Hqm+rxVW6Dj\nuFGQe5GhgPvO23UG1Y6nm0VkYgZq81TraZc/oMzignSC95w7OsLaLn6qp32Fje1M\nEz2Yn0T3dDcu1twY8OoDuvWx5LFMJ3NoRJaHAoGBAJ4rZP+xj17DVElxBo0EPK7k\n7TKygDYhwDjnJSRSN0HfFg0agmQqXucjGuzEbyAkeN1Um9vLU+xrTHqEyIN/Jqxk\nhztKxzfTtBhK7M84p7M5iq+0jfMau8ykdOVHZAB/odHeXLrnbrr/gVQsAKw1NdDC\nkPCNXP/c9JrzB+c4juEVAoGBAJGPxmp/vTL4c5OebIxnCAKWP6VBUnyWliFhdYME\nrECvNkjoZ2ZWjKhijVw8Il+OAjlFNgwJXzP9Z0qJIAMuHa2QeUfhmFKlo4ku9LOF\n2rdUbNJpKD5m+IRsLX1az4W6zLwPVRHp56WjzFJEfGiRjzMBfOxkMSBSjbLjDm3Z\niUf7AoGBALjvtjapDwlEa5/CFvzOVGFq4L/OJTBEBGx/SA4HUc3TFTtlY2hvTDPZ\ndQr/JBzLBUjCOBVuUuH3uW7hGhW+DnlzrfbfJATaRR8Ht6VU651T+Gbrr8EqNpCP\ngmznERCNf9Kaxl/hlyV5dZBe/2LIK+/jLGNu9EJLoraaCBFshJKF\n-----END RSA PRIVATE KEY-----\n\")" \ | jq -c -r '(.spec.resources.credential_definition_list[0].secret.attrs.is_secret_modified = "true")' \ > $UPDATED_JSONFile From cf6b42bcb3ad18da10dcaad4d3313d5e92ed26ae Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: 
Tue, 25 Feb 2020 08:26:21 -0800 Subject: [PATCH 436/691] Added the trailing "/" --- cluster.txt | 7 ------- scripts/global.vars.sh | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/cluster.txt b/cluster.txt index bc0c326..27464a6 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,10 +1,3 @@ -10.42.25.37|techX2020!|willem@nutanix.com -10.42.27.37|techX2020!|willem@nutanix.com -10.42.31.37|techX2020!|willem@nutanix.com -10.42.32.37|techX2020!|willem@nutanix.com -10.42.37.37|techX2020!|willem@nutanix.com -10.42.56.37|techX2020!|willem@nutanix.com -10.42.58.37|techX2020!|willem@nutanix.com 10.42.61.37|techX2020!|willem@nutanix.com 10.42.66.37|techX2020!|willem@nutanix.com 10.42.67.37|techX2020!|willem@nutanix.com diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index a2f00c0..d294d9b 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -230,7 +230,7 @@ case "${OCTET[0]}.${OCTET[1]}" in ) QCOW2_REPOS=(\ 'http://10.42.194.11/workshop_staging/' \ - 'http://10.42.38.10/images' \ + 'http://10.42.38.10/images/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ From 0f02a76d9f5c892a1cb490ead7629a6f7621283a Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 25 Feb 2020 09:29:50 -0800 Subject: [PATCH 437/691] Updated the Karbon blueprint the network for the new G7 Updated the parameters for the secondary network so all G7 (PHX-POC200+) have the right VLAN ID (*10+3) for the secondary network. 
Also launch of the Karbon blueprint after a short wiat of 30 seconds --- scripts/global.vars.sh | 4 ++-- scripts/lib.common.sh | 23 ++++++++++++----------- scripts/lib.pc.sh | 11 +++++++---- test/repo_source.sh | 30 +++++++++++++++--------------- 4 files changed, 36 insertions(+), 32 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index d294d9b..092ada6 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -285,8 +285,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary - if [[ ${OCTET[2]} -gt 169 ]] && [[ ${OCTET[2]} -lt 200 ]]; then - NW2_VLAN=$((OCTET[2]*10+3)) + if [[ ${OCTET[2]} -gt 169 ]]; then + NW2_VLAN=$((OCTET[2]*10+1)) fi ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 33b1226..3a44e1c 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -306,7 +306,7 @@ for _image in "${ISO_IMAGES[@]}" ; do _command='' _name="${_image}" - if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then log 'Bypass multiple repo source checks...' SOURCE_URL="${_image}" else @@ -320,7 +320,7 @@ for _image in "${ISO_IMAGES[@]}" ; do fi # TODO:0 TOFIX: acs-centos ugly override for today... - if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then + if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then _name=acs-centos fi @@ -387,7 +387,7 @@ done _command='' _name="${_image}" - if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc --lines) )); then + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then log 'Bypass multiple repo source checks...' 
SOURCE_URL="${_image}" else @@ -401,7 +401,7 @@ done fi # TODO:0 TOFIX: acs-centos ugly override for today... - if (( $(echo "${_image}" | grep -i 'acs-centos' | wc --lines ) > 0 )); then + if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then _name=acs-centos fi @@ -522,7 +522,7 @@ function ntnx_cmd() { _hold=$(source /etc/profile ; nuclei cluster.list 2>&1) _status=$? - if (( $(echo "${_hold}" | grep websocket | wc --lines) > 0 )); then + if (( $(echo "${_hold}" | grep websocket | wc -l) > 0 )); then log "Warning: Zookeeper isn't up yet." elif (( ${_status} > 0 )); then log "${_status} = ${_hold}, uh oh!" @@ -653,7 +653,7 @@ function ntnx_download() { _source_url=$(cat ${_meta_url##*/} | jq -r .download_url_cdn) fi - if (( $(pgrep curl | wc --lines | tr -d '[:space:]') > 0 )); then + if (( $(pgrep curl | wc -l | tr -d '[:space:]') > 0 )); then pkill curl fi log "Retrieving Nutanix ${_ncli_softwaretype} bits..." @@ -923,31 +923,32 @@ function repo_source() { if [[ -z ${_package} ]]; then _suffix=${_candidates[0]##*/} - if (( $(echo "${_suffix}" | grep . | wc --lines) > 0)); then + if (( $(echo "${_suffix}" | grep . | wc -l) > 0)); then log "Convenience: omitted package argument, added package=${_package}" _package="${_suffix}" fi fi # Prepend your local HTTP cache... - _candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" ) + #_candidates=( "http://${HTTP_CACHE_HOST}:${HTTP_CACHE_PORT}/" "${_candidates[@]}" ) while (( ${_index} < ${#_candidates[@]} )) do + echo ${_candidates[${_index}]} unset SOURCE_URL # log "DEBUG: ${_index} ${_candidates[${_index}]}, OPTIONAL: _package=${_package}" _url=${_candidates[${_index}]} if [[ -z ${_package} ]]; then - if (( $(echo "${_url}" | grep '/$' | wc --lines) == 0 )); then + if (( $(echo "${_url}" | grep '/$' | wc -l) == 0 )); then log "error ${_error}: ${_url} doesn't end in trailing slash, please correct." 
exit ${_error} fi - elif (( $(echo "${_url}" | grep '/$' | wc --lines) == 1 )); then + elif (( $(echo "${_url}" | grep '/$' | wc -l) == 1 )); then _url+="${_package}" fi - if (( $(echo "${_url}" | grep '^nfs' | wc --lines) == 1 )); then + if (( $(echo "${_url}" | grep '^nfs' | wc -l) == 1 )); then log "warning: TODO: cURL can't test nfs URLs...assuming a pass!" export SOURCE_URL="${_url}" break diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1d8f762..d7de46d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1612,11 +1612,14 @@ function upload_karbon_calm_blueprint() { # Launch the BLUEPRINT - #echo "Launching the Karbon Cluster Blueprint" + log "Sleep 30 seconds so the blueprint can settle in......" + sleep 30 + + log "Launching the Karbon Cluster Blueprint" - #curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" + curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" - #echo "Finished Launching the Karbon Cluster Deployment Blueprint" + log "Finished Launching the Karbon Cluster Deployment Blueprint" } @@ -1728,7 +1731,7 @@ function upload_CICDInfra_calm_blueprint() { echo "Finished uploading ${BLUEPRINT}!" 
#Getting the Blueprint UUID - CICDInfra_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CICD_Infra.json"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + CICDInfra_BLUEPRINT_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"blueprint","filter": "name==CICD_Infra"}' 'https://localhost:9440/api/nutanix/v3/blueprints/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") echo "CICD Blueprint UUID = $CICDInfra_BLUEPRINT_UUID" diff --git a/test/repo_source.sh b/test/repo_source.sh index db96ec1..379b998 100755 --- a/test/repo_source.sh +++ b/test/repo_source.sh @@ -10,22 +10,22 @@ # echo AUTH_HOST=${AUTH_HOST} # exit -log "__AutoDC__" -unset SOURCE_URL -repo_source AUTODC_REPOS[@] -log "SOURCE_URL=${SOURCE_URL}" +#log "__AutoDC__" +#unset SOURCE_URL +#repo_source AUTODC_REPOS[@] +#log "SOURCE_URL=${SOURCE_URL}" -log "__SSHPass__" -unset SOURCE_URL -_sshpass_pkg=${SSHPASS_REPOS[0]##*/} -repo_source SSHPASS_REPOS[@] ${_sshpass_pkg} -log "SOURCE_URL=${SOURCE_URL}" - -log "__jq__" -unset SOURCE_URL -_jq_pkg=${JQ_REPOS[0]##*/} -repo_source JQ_REPOS[@] ${_jq_pkg} -log "SOURCE_URL=${SOURCE_URL}" +#log "__SSHPass__" +#unset SOURCE_URL +#_sshpass_pkg=${SSHPASS_REPOS[0]##*/} +#repo_source SSHPASS_REPOS[@] ${_sshpass_pkg} +#log "SOURCE_URL=${SOURCE_URL}" +# +#log "__jq__" +#unset SOURCE_URL +#_jq_pkg=${JQ_REPOS[0]##*/} +#repo_source JQ_REPOS[@] ${_jq_pkg} +#log "SOURCE_URL=${SOURCE_URL}" log "__qcow2 Images__" for _image in "${QCOW2_IMAGES[@]}" ; do From 0211545dfd69a422571a7299a8eb0e16bfe7c500 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 25 Feb 2020 13:22:49 -0800 Subject: [PATCH 438/691] Typos for 10.38 --- cluster.txt | 44 ++++++--------- cluster_usa.txt | 122 +++++++++++++++++++++++++++++++++++++++++ scripts/global.vars.sh | 2 +- 3 files changed, 139 insertions(+), 29 
deletions(-) create mode 100644 cluster_usa.txt diff --git a/cluster.txt b/cluster.txt index 27464a6..4c5422a 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,28 +1,16 @@ -10.42.61.37|techX2020!|willem@nutanix.com -10.42.66.37|techX2020!|willem@nutanix.com -10.42.67.37|techX2020!|willem@nutanix.com -10.42.69.37|techX2020!|willem@nutanix.com -10.42.70.37|techX2020!|willem@nutanix.com -10.42.75.37|techX2020!|willem@nutanix.com -10.42.105.37|techX2020!|willem@nutanix.com -10.42.110.37|techX2020!|willem@nutanix.com -10.42.111.37|techX2020!|willem@nutanix.com -10.42.161.37|techX2020!|willem@nutanix.com -10.38.196.37|techX2020!|willem@nutanix.com -10.38.198.37|techX2020!|willem@nutanix.com -10.55.4.37|techX2020!|willem@nutanix.com -10.55.7.37|techX2020!|willem@nutanix.com -10.55.14.37|techX2020!|willem@nutanix.com -10.55.17.37|techX2020!|willem@nutanix.com -10.55.20.37|techX2020!|willem@nutanix.com -10.55.22.37|techX2020!|willem@nutanix.com -10.55.23.37|techX2020!|willem@nutanix.com -10.55.28.37|techX2020!|willem@nutanix.com -10.55.30.37|techX2020!|willem@nutanix.com -10.55.31.37|techX2020!|willem@nutanix.com -10.55.43.37|techX2020!|willem@nutanix.com -10.55.55.37|techX2020!|willem@nutanix.com -10.55.56.37|techX2020!|willem@nutanix.com -10.55.59.37|techX2020!|willem@nutanix.com -10.55.67.37|techX2020!|willem@nutanix.com -10.55.76.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.38.203.37|techX2020!|willem@nutanix.com +10.38.204.37|techX2020!|willem@nutanix.com +10.38.205.37|techX2020!|willem@nutanix.com +10.38.206.37|techX2020!|willem@nutanix.com +10.38.207.37|techX2020!|willem@nutanix.com +10.38.208.37|techX2020!|willem@nutanix.com +10.38.209.37|techX2020!|willem@nutanix.com +10.38.210.37|techX2020!|willem@nutanix.com +10.38.211.37|techX2020!|willem@nutanix.com +10.38.212.37|techX2020!|willem@nutanix.com +10.38.213.37|techX2020!|willem@nutanix.com +10.38.217.37|techX2020!|willem@nutanix.com +10.38.218.37|techX2020!|willem@nutanix.com 
+10.38.216.37|techX2020!|willem@nutanix.com +10.38.215.37|techX2020!|willem@nutanix.com +10.38.214.37|techX2020!|willem@nutanix.com \ No newline at end of file diff --git a/cluster_usa.txt b/cluster_usa.txt new file mode 100644 index 0000000..2b0335c --- /dev/null +++ b/cluster_usa.txt @@ -0,0 +1,122 @@ +10.42.3.37 +10.42.4.37 +10.42.5.37 +10.42.6.37 +10.42.10.37 +10.42.11.37 +10.42.12.37 +10.42.13.37 +10.42.14.37 +10.42.15.37 +10.42.16.37 +10.42.17.37 +10.42.18.37 +10.42.19.37 +10.42.20.37 +10.42.22.37 +10.42.23.37 +10.42.25.37 +10.42.27.37 +10.42.28.37 +10.42.29.37 +10.42.30.37 +10.42.31.37 +10.42.32.37 +10.42.34.37 +10.42.35.37 +10.42.42.37 +10.42.50.37 +10.42.56.37 +10.42.58.37 +10.42.61.37 +10.42.65.37 +10.42.66.37 +10.42.67.37 +10.42.69.37 +10.42.70.37 +10.42.71.37 +10.42.74.37 +10.42.75.37 +10.42.79.37 +10.42.81.37 +10.42.84.37 +10.42.86.37 +10.42.94.37 +10.42.95.37 +10.42.96.37 +10.42.99.37 +10.42.104.37 +10.42.105.37 +10.42.108.37 +10.42.110.37 +10.42.111.37 +10.42.112.37 +10.42.113.37 +10.42.114.37 +10.42.161.37 +10.42.162.37 +10.42.168.37 +10.38.170.37 +10.38.171.37 +10.38.172.37 +10.38.193.37 +10.38.195.37 +10.38.196.37 +10.38.198.37 +10.38.203.37 +10.38.204.37 +10.38.205.37 +10.38.206.37 +10.38.207.37 +10.38.208.37 +10.38.209.37 +10.38.210.37 +10.38.211.37 +10.38.212.37 +10.38.213.37 +10.38.217.37 +10.38.218.37 +10.55.4.37 +10.55.7.37 +10.55.9.37 +10.55.10.37 +10.55.11.37 +10.55.14.37 +10.55.16.37 +10.55.17.37 +10.55.20.37 +10.55.21.37 +10.55.22.37 +10.55.23.37 +10.55.24.37 +10.55.28.37 +10.55.30.37 +10.55.31.37 +10.55.185.37 +10.55.35.37 +10.55.39.37 +10.55.41.37 +10.55.43.37 +10.55.46.37 +10.55.47.37 +10.55.48.37 +10.55.49.37 +10.55.50.37 +10.55.52.37 +10.55.54.37 +10.55.55.37 +10.55.56.37 +10.55.57.37 +10.55.59.37 +10.55.60.37 +10.55.67.37 +10.55.76.37 +10.38.216.37 +10.38.215.37 +10.38.214.37 +10.42.73.37 +10.42.80.37 +10.42.82.37 +10.42.85.37 +10.42.100.37 +10.42.37.37 \ No newline at end of file diff --git a/scripts/global.vars.sh 
b/scripts/global.vars.sh index 092ada6..3c88c6e 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -286,7 +286,7 @@ case "${OCTET[0]}.${OCTET[1]}" in # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary if [[ ${OCTET[2]} -gt 169 ]]; then - NW2_VLAN=$((OCTET[2]*10+1)) + NW2_VLAN=$((OCTET[2]*10+3)) fi ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR From 427ac122f8e80b53f12e6d7900389e026b162fae Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 25 Feb 2020 14:26:11 -0800 Subject: [PATCH 439/691] Changed timeout on AD to 45 from 25 Changed the time out to 45 minutes from 25 minutes. Timing seems to be off... --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index cc8407f..aa04430 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -79,7 +79,7 @@ function authentication_source() { log "Power on ${AUTH_SERVER} VM..." acli "vm.on ${AUTH_SERVER}" - _attempts=25 + _attempts=45 _loop=0 _sleep=60 From 1ba29c0bfacf6db58e2ee135d9000d6c79efa03c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 26 Feb 2020 08:35:20 -0800 Subject: [PATCH 440/691] Update global.vars.sh Getting the TE webservers to a higher priority than the DFS. 
Still a backup on the DFS and Amazon --- scripts/global.vars.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 3c88c6e..96603d5 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -191,8 +191,8 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.55.251.38/workshop_staging/' \ 'http://10.55.76.10/' \ + 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ @@ -229,9 +229,9 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.194.11/workshop_staging/' \ - 'http://10.42.38.10/images/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ + 'http://10.42.38.10/images' \ + 'http://10.42.194.11/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ @@ -267,10 +267,10 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.194.11/workshop_staging/' \ 'http://10.42.38.10/images' \ + 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ - ) + ) AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ From 7e9bff8a9dbe4353bc5b44cc0950595e41d7e4b4 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 26 Feb 2020 09:01:30 -0800 Subject: [PATCH 441/691] Update lib.pe.sh Small update on the text for the File server Analyzer installation --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index aa04430..cd8d72a 100755 --- a/scripts/lib.pe.sh +++ 
b/scripts/lib.pe.sh @@ -467,7 +467,7 @@ function create_file_analytics_server() { log "Get ${STORAGE_DEFAULT} Container UUID" _storage_default_uuid=$(ncli container ls name=${STORAGE_DEFAULT} | grep Uuid | grep -v Pool | cut -f 2 -d ':' | xargs) - echo "${_nw_name} network UUID: ${_nw_uuid}" + echo "Secondary network UUID: ${_nw_uuid}" echo "${STORAGE_DEFAULT} storage container UUID: ${_storage_default_uuid}" From 8f924325742d1b6a868b1b17455cc119b5fc2b8f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Mar 2020 12:13:46 -0800 Subject: [PATCH 442/691] Bootcamp Staging Updates --- scripts/calm_bootcamp.sh | 3 +- scripts/citrix_bootcamp.sh | 2 + scripts/era_bootcamp.sh | 10 -- scripts/files_bootcamp.sh | 165 ++++++++++++++++++++++++ scripts/{frame.sh => frame_bootcamp.sh} | 19 ++- stage_workshop.sh | 8 +- 6 files changed, 186 insertions(+), 21 deletions(-) rename scripts/{frame.sh => frame_bootcamp.sh} (90%) diff --git a/scripts/calm_bootcamp.sh b/scripts/calm_bootcamp.sh index 5913de2..58c3916 100755 --- a/scripts/calm_bootcamp.sh +++ b/scripts/calm_bootcamp.sh @@ -70,7 +70,7 @@ case ${1} in export QCOW2_IMAGES=(\ Windows2016.qcow2 \ CentOS7.qcow2 \ - ToolsVM.qcow2 \ + WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ) export ISO_IMAGES=(\ @@ -129,7 +129,6 @@ case ${1} in && pc_project \ && object_store \ && karbon_image_download \ - && priority_images \ && flow_enable \ && pc_cluster_img_import \ && upload_karbon_calm_blueprint \ diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 238bf71..eb7f5e2 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -19,6 +19,8 @@ case ${1} in . 
lib.pe.sh export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index b37592c..204f4e4 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -75,16 +75,6 @@ case ${1} in CentOS7.qcow2 \ WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - #GTSOracle/19c-april/19c-bootdisk.qcow2 \ - #GTSOracle/19c-april/19c-disk1.qcow2 \ - #GTSOracle/19c-april/19c-disk2.qcow2 \ - #GTSOracle/19c-april/19c-disk3.qcow2 \ - #GTSOracle/19c-april/19c-disk4.qcow2 \ - #GTSOracle/19c-april/19c-disk5.qcow2 \ - #GTSOracle/19c-april/19c-disk6.qcow2 \ - #GTSOracle/19c-april/19c-disk7.qcow2 \ - #GTSOracle/19c-april/19c-disk8.qcow2 \ - #GTSOracle/19c-april/19c-disk9.qcow2 \ ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index 9afbad3..b99f474 100755 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -3,6 +3,171 @@ #__main()__________ +# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' + + args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + deploy_peer_mgmt_server "${PMC}" \ + && deploy_peer_agent_server "${AGENTA}" \ + && deploy_peer_agent_server "${AGENTB}" + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + + ;; + PC | pc ) + . 
lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + Win10v1903.qcow2 \ + WinToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && images \ + && flow_enable \ + && pc_cluster_img_import \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac +#!/usr/bin/env bash +# -x + +#__main()__________ + # Source Nutanix environment (PATH + aliases), then common routines + global variables . /etc/profile.d/nutanix_env.sh . lib.common.sh diff --git a/scripts/frame.sh b/scripts/frame_bootcamp.sh similarity index 90% rename from scripts/frame.sh rename to scripts/frame_bootcamp.sh index fbcc522..2b66c3d 100755 --- a/scripts/frame.sh +++ b/scripts/frame_bootcamp.sh @@ -18,6 +18,10 @@ case ${1} in PE | pe ) . lib.pe.sh + export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -27,6 +31,15 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -55,11 +68,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 - - create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 - - file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi diff --git a/stage_workshop.sh b/stage_workshop.sh index 09a7906..9925dad 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -19,11 +19,11 @@ WORKSHOPS=(\ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +#"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ "Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -#"Files Bootcamp (AOS 5.11+/AHV PC 5.11+) = Development" \ +"Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -79,7 +79,7 @@ function stage_clusters() { _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i Files | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Files" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh' _pe_launch='files_bootcamp.sh' _pc_launch=${_pe_launch} @@ -96,7 +96,7 @@ function stage_clusters() { fi if (( $(echo ${_workshop} | grep -i "^Frame" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='frame.sh' + _pe_launch='frame_bootcamp.sh' _pc_launch=${_pe_launch} fi From 57957a578ab80f587b6634fc781c17904e5108e0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Mar 2020 12:15:49 -0800 Subject: [PATCH 443/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 9925dad..fadd3a5 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -22,7 +22,7 @@ 
WORKSHOPS=(\ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ "Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -"Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed From 80adfdb142cbfc7f43995c1f6c932f1f27e45d73 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Mar 2020 13:30:39 -0800 Subject: [PATCH 444/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index fadd3a5..4dbd4f9 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -74,7 +74,7 @@ function stage_clusters() { _pe_launch='citrix_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Era" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Databases" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh' _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} From e50570dc16e71c5f7f65cf30aba1a08033a21ff4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Mar 2020 13:38:40 -0800 Subject: [PATCH 445/691] adding in Private Cloud Staging --- scripts/privatecloud_bootcamp.sh | 183 +++++++++++++++++++++++++++++++ stage_workshop.sh | 8 +- 2 files changed, 190 insertions(+), 1 deletion(-) create mode 100755 scripts/privatecloud_bootcamp.sh diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh new file mode 100755 index 0000000..8cadd10 --- /dev/null +++ b/scripts/privatecloud_bootcamp.sh @@ -0,0 +1,183 @@ +#!/usr/bin/env bash + #-x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. 
/etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + #export PC_DEV_VERSION='5.10.2' + #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + #export FILES_VERSION='3.2.0.1' + #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + + export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' + + export _external_nw_name="${1}" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? 
== 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + veeam/VeeamAHVProxy2.0.404.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + veeam/VBR_10.0.0.4442.iso \ + ) + + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . 
global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && seedPC \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 4dbd4f9..d58c40b 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -21,8 +21,9 @@ WORKSHOPS=(\ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -64,6 +65,11 @@ function stage_clusters() { _pe_launch='snc_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Private Cloud" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='privatecloud_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Calm" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm_bootcamp.sh' From 30842e74bfdf2326760d482142a4058c8618d00e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Mar 2020 18:03:54 -0800 Subject: [PATCH 446/691] updates --- scripts/files_bootcamp.sh | 2 ++ scripts/frame_bootcamp.sh | 7 ++----- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index b99f474..18288a9 100755 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -134,8 +134,10 @@ case ${1} in ssp_auth \ && calm_enable \ + && objects_enable \ && lcm \ && pc_project \ + && object_store \ && images \ && flow_enable \ && pc_cluster_img_import \ diff --git a/scripts/frame_bootcamp.sh 
b/scripts/frame_bootcamp.sh index 2b66c3d..4e70d3a 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -124,17 +124,14 @@ case ${1} in ssp_auth \ && calm_enable \ - && karbon_enable \ - && objects_enable \ && lcm \ - && object_store \ - && karbon_image_download \ + && pc_project \ && images \ && flow_enable \ && prism_check 'PC' log "Non-blocking functions (in development) follow." - pc_project + #pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh From 58d3a3df5155f0d6eae569db40b9eb859aeb9301 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 4 Mar 2020 09:28:12 +0100 Subject: [PATCH 447/691] Update global.vars.sh Updated the URLs to have an extra "/" in the server locations --- scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 96603d5..807725c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -229,7 +229,7 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.38.10/images' \ + 'http://10.42.38.10/images/' \ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) @@ -267,7 +267,7 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.38.10/images' \ + 'http://10.42.38.10/images/' \ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) From feeb313862e7e8cb946a5ea279f991480cc7755a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 4 Mar 2020 08:52:45 -0800 Subject: [PATCH 448/691] Update stage_workshop.sh --- stage_workshop.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index d58c40b..06306bd 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -81,12 +81,12 
@@ function stage_clusters() { _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i "^Databases" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh' + _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i "^Files" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh' + _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='files_bootcamp.sh' _pc_launch=${_pe_launch} fi From 20dfd5677b781546c0d4e5fd8a88a14741524336 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 4 Mar 2020 16:38:43 -0800 Subject: [PATCH 449/691] Updates to add EraManaged Network --- scripts/era_bootcamp.sh | 4 +- scripts/global.vars.sh | 9 +++++ scripts/lib.pe.sh | 85 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 97 insertions(+), 1 deletion(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 204f4e4..9a8bca9 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -19,6 +19,8 @@ case ${1} in . 
lib.pe.sh export AUTH_SERVER='AutoAD' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.249" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -26,7 +28,7 @@ case ${1} in dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ && pe_init \ - && network_configure \ + && era_network_configure\ && authentication_source \ && pe_auth diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 807725c..9aa13bf 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -147,6 +147,15 @@ case "${OCTET[3]}" in esac +# Networking needs for Era Bootcamp +NW3_NAME='EraManaged' +NW3_VLAN=$((OCTET[2]*10+1)) +NW3_SUBNET="${IPV4_PREFIX}.129/25" + +# Networking needs for Frame Bootcamp + + + # Stuff needed for object_store # OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index cd8d72a..88d18f9 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -562,6 +562,91 @@ function network_configure() { fi } +############################################################################################################################################################################### +# Routine to create the networks for Era bootcamp +############################################################################################################################################################################### + + +function era_network_configure() { + local _network_name="${NW1_NAME}" + + if [[ ! -z "${NW2_NAME}" ]]; then + #TODO: accommodate for X networks! + _network_name="${NW2_NAME}" + fi + + if [[ ! -z "${NW3_NAME}" ]]; then + #TODO: accommodate for X networks! + _network_name="${NW3_NAME}" + fi + + if [[ ! -z $(acli "net.list" | grep ${_network_name}) ]]; then + log "IDEMPOTENCY: ${_network_name} network set, skip." + else + args_required 'AUTH_DOMAIN IPV4_PREFIX AUTH_HOST' + + if [[ ! 
-z $(acli "net.list" | grep 'Rx-Automation-Network') ]]; then + log "Remove Rx-Automation-Network..." + acli "-y net.delete Rx-Automation-Network" + fi + + log "Create primary network: Name: ${NW1_NAME}, VLAN: ${NW1_VLAN}, Subnet: ${NW1_SUBNET}, Domain: ${AUTH_DOMAIN}, Pool: ${NW1_DHCP_START} to ${NW1_DHCP_END}" + acli "net.create ${NW1_NAME} vlan=${NW1_VLAN} ip_config=${NW1_SUBNET}" + acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" + + if [[ ! -z "${NW2_NAME}" ]]; then + log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" + acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" + acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" + fi + + if [[ ! -z "${NW3_NAME}" ]]; then + log "Create EraManaged network: Name: ${NW3_NAME}, VLAN: ${NW3_VLAN}, Subnet: ${NW3_SUBNET}" + acli "net.create ${NW3_NAME} vlan=${NW3_VLAN} ip_config=${NW3_SUBNET}" + acli "net.update_dhcp_dns ${NW3_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + fi + fi +} + +############################################################################################################################################################################### +# Routine to create the networks for frame bootcamp +############################################################################################################################################################################### + + +function frame_network_configure() { + local _network_name="${NW1_NAME}" + + if [[ ! -z "${NW2_NAME}" ]]; then + #TODO: accommodate for X networks! + _network_name="${NW2_NAME}" + fi + + if [[ ! 
-z $(acli "net.list" | grep ${_network_name}) ]]; then + log "IDEMPOTENCY: ${_network_name} network set, skip." + else + args_required 'AUTH_DOMAIN IPV4_PREFIX AUTH_HOST' + + if [[ ! -z $(acli "net.list" | grep 'Rx-Automation-Network') ]]; then + log "Remove Rx-Automation-Network..." + acli "-y net.delete Rx-Automation-Network" + fi + + log "Create primary network: Name: ${NW1_NAME}, VLAN: ${NW1_VLAN}, Subnet: ${NW1_SUBNET}, Domain: ${AUTH_DOMAIN}, Pool: ${NW1_DHCP_START} to ${NW1_DHCP_END}" + acli "net.create ${NW1_NAME} vlan=${NW1_VLAN} ip_config=${NW1_SUBNET}" + acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" + + if [[ ! -z "${NW2_NAME}" ]]; then + log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" + acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" + acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" + fi + fi +} + ############################################################################################################################################################################### # Routine to check if the registration of PE was successful ############################################################################################################################################################################### From d50c611da71b8dac88f2a347794e69e11b933efd Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 4 Mar 2020 17:36:47 -0800 Subject: [PATCH 450/691] Updates for Frame User Networks --- scripts/era_bootcamp.sh | 4 ++ scripts/frame_bootcamp.sh | 90 ++++++++++++++++++++++++++++++++++++++- scripts/global.vars.sh | 9 ---- scripts/lib.pe.sh | 83 +++++++++++++++++++++++++++++++++--- 4 files 
changed, 171 insertions(+), 15 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 9a8bca9..66fdb43 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -19,8 +19,12 @@ case ${1} in . lib.pe.sh export AUTH_SERVER='AutoAD' + # Networking needs for Era Bootcamp export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.249" + export NW3_NAME='EraManaged' + export NW3_VLAN=$((OCTET[2]*10+1)) + export NW3_SUBNET="${IPV4_PREFIX}.129/25" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 4e70d3a..2bb6cde 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -22,13 +22,85 @@ case ${1} in export PrismOpsServer='GTSPrismOpsLabUtilityServer' export SeedPC='GTSseedPC.zp' + # Networking needs for Frame Bootcamp + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.139" + export NW2_DHCP_START2="${IPV4_PREFIX}.250" + export NW2_DHCP_END2="${IPV4_PREFIX}.253" + + export USERNW01_NAME='User01-Network' + export USERNW01_VLAN=$((OCTET[2]*10+1)) + export USERNW01_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW01_DHCP_START="${IPV4_PREFIX}.140" + export USERNW01_DHCP_END="${IPV4_PREFIX}.149" + + export USERNW02_NAME='User02-Network' + export USERNW02_VLAN=$((OCTET[2]*10+1)) + export USERNW02="${IPV4_PREFIX}.129/25" + export USERNW02_DHCP_START="${IPV4_PREFIX}.150" + export USERNW02_DHCP_END="${IPV4_PREFIX}.159" + + export USERNW03_NAME='User03-Network' + export USERNW03_VLAN=$((OCTET[2]*10+1)) + export USERNW03_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW03_DHCP_START="${IPV4_PREFIX}.160" + export USERNW03_DHCP_END="${IPV4_PREFIX}.169" + + export USERNW04_NAME='User04-Network' + export USERNW04_VLAN=$((OCTET[2]*10+1)) + export USERNW04_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW04_DHCP_START="${IPV4_PREFIX}.170" + export USERNW04_DHCP_END="${IPV4_PREFIX}.179" + + 
export USERNW05_NAME='User05-Network' + export USERNW05_VLAN=$((OCTET[2]*10+1)) + export USERNW05_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW05_DHCP_START="${IPV4_PREFIX}.180" + export USERNW05_DHCP_END="${IPV4_PREFIX}.189" + + export USERNW06_NAME='User06-Network' + export USERNW06_VLAN=$((OCTET[2]*10+1)) + export USERNW06_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW06_DHCP_START="${IPV4_PREFIX}.190" + export USERNW06_DHCP_END="${IPV4_PREFIX}.199" + + export USERNW07_NAME='User07-Network' + export USERNW07_VLAN=$((OCTET[2]*10+1)) + export USERNW07_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW07_DHCP_START="${IPV4_PREFIX}.200" + export USERNW07_DHCP_END="${IPV4_PREFIX}.209" + + export USERNW08_NAME='User08-Network' + export USERNW08_VLAN=$((OCTET[2]*10+1)) + export USERNW08_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW08_DHCP_START="${IPV4_PREFIX}.210" + export USERNW08_DHCP_END="${IPV4_PREFIX}.219" + + export USERNW09_NAME='User09-Network' + export USERNW09_VLAN=$((OCTET[2]*10+1)) + export USERNW09_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW09_DHCP_START="${IPV4_PREFIX}.220" + export USERNW09_DHCP_END="${IPV4_PREFIX}.229" + + export USERNW10_NAME='User10-Network' + export USERNW10_VLAN=$((OCTET[2]*10+1)) + export USERNW10_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW10_DHCP_START="${IPV4_PREFIX}.230" + export USERNW10_DHCP_END="${IPV4_PREFIX}.239" + + export USERNW11_NAME='User11-Network' + export USERNW11_VLAN=$((OCTET[2]*10+1)) + export USERNW11_SUBNET="${IPV4_PREFIX}.129/25" + export USERNW11_DHCP_START="${IPV4_PREFIX}.240" + export USERNW11_DHCP_END="${IPV4_PREFIX}.249" + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ && pe_init \ - && network_configure \ + && frame_network_configure \ && authentication_source \ && pe_auth \ && prism_pro_server_deploy \ @@ -82,6 +154,22 @@ case ${1} in PC | pc ) . 
lib.pc.sh + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + Win10v1903.qcow2 \ + WinToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + FrameCCA-2.1.0.iso \ + FrameGuestAgentInstaller_1.0.2.2_7930.iso \ + Nutanix-VirtIO-1.1.5.iso \ + ) + run_once dependencies 'install' 'jq' || exit 13 diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 9aa13bf..807725c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -147,15 +147,6 @@ case "${OCTET[3]}" in esac -# Networking needs for Era Bootcamp -NW3_NAME='EraManaged' -NW3_VLAN=$((OCTET[2]*10+1)) -NW3_SUBNET="${IPV4_PREFIX}.129/25" - -# Networking needs for Frame Bootcamp - - - # Stuff needed for object_store # OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 88d18f9..d7d6e07 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -570,11 +570,6 @@ function network_configure() { function era_network_configure() { local _network_name="${NW1_NAME}" - if [[ ! -z "${NW2_NAME}" ]]; then - #TODO: accommodate for X networks! - _network_name="${NW2_NAME}" - fi - if [[ ! -z "${NW3_NAME}" ]]; then #TODO: accommodate for X networks! _network_name="${NW3_NAME}" @@ -643,6 +638,84 @@ function frame_network_configure() { acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" + acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START2} end=${NW2_DHCP_END2}" + fi + + if [[ ! 
-z "${USERNW01_NAME}" ]]; then + log "Create User network: Name: ${USERNW01_NAME}, VLAN: ${USERNW01_VLAN}, Subnet: ${USERNW01_SUBNET}, Pool: ${USERNW01_DHCP_START} to ${USERNW01_DHCP_END}" + acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN} ip_config=${USERNW01_SUBNET}" + acli "net.update_dhcp_dns ${USERNW01_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW01_NAME} start=${USERNW01_DHCP_START} end=${USERNW01_DHCP_END}" + fi + + if [[ ! -z "${USERNW02_NAME}" ]]; then + log "Create User network: Name: ${USERNW02_NAME}, VLAN: ${USERNW02_VLAN}, Subnet: ${USERNW02_SUBNET}, Pool: ${USERNW02_DHCP_START} to ${USERNW02_DHCP_END}" + acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN} ip_config=${USERNW02_SUBNET}" + acli "net.update_dhcp_dns ${USERNW02_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW02_NAME} start=${USERNW02_DHCP_START} end=${USERNW02_DHCP_END}" + fi + + if [[ ! -z "${USERNW03_NAME}" ]]; then + log "Create User network: Name: ${USERNW03_NAME}, VLAN: ${USERNW03_VLAN}, Subnet: ${USERNW03_SUBNET}, Pool: ${USERNW03_DHCP_START} to ${USERNW03_DHCP_END}" + acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN} ip_config=${USERNW03_SUBNET}" + acli "net.update_dhcp_dns ${USERNW03_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW03_NAME} start=${USERNW03_DHCP_START} end=${USERNW03_DHCP_END}" + fi + + if [[ ! -z "${USERNW04_NAME}" ]]; then + log "Create User network: Name: ${USERNW04_NAME}, VLAN: ${USERNW04_VLAN}, Subnet: ${USERNW04_SUBNET}, Pool: ${USERNW04_DHCP_START} to ${USERNW04_DHCP_END}" + acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN} ip_config=${USERNW04_SUBNET}" + acli "net.update_dhcp_dns ${USERNW04_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW04_NAME} start=${USERNW04_DHCP_START} end=${USERNW04_DHCP_END}" + fi + + if [[ ! 
-z "${USERNW05_NAME}" ]]; then + log "Create User network: Name: ${USERNW05_NAME}, VLAN: ${USERNW05_VLAN}, Subnet: ${USERNW05_SUBNET}, Pool: ${USERNW05_DHCP_START} to ${USERNW05_DHCP_END}" + acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN} ip_config=${USERNW05_SUBNET}" + acli "net.update_dhcp_dns ${USERNW05_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW05_NAME} start=${USERNW05_DHCP_START} end=${USERNW05_DHCP_END}" + fi + + if [[ ! -z "${USERNW06_NAME}" ]]; then + log "Create User network: Name: ${USERNW06_NAME}, VLAN: ${USERNW06_VLAN}, Subnet: ${USERNW06_SUBNET}, Pool: ${USERNW06_DHCP_START} to ${USERNW06_DHCP_END}" + acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN} ip_config=${USERNW06_SUBNET}" + acli "net.update_dhcp_dns ${USERNW06_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW06_NAME} start=${USERNW06_DHCP_START} end=${USERNW06_DHCP_END}" + fi + + if [[ ! -z "${USERNW07_NAME}" ]]; then + log "Create User network: Name: ${USERNW07_NAME}, VLAN: ${USERNW07_VLAN}, Subnet: ${USERNW07_SUBNET}, Pool: ${USERNW07_DHCP_START} to ${USERNW07_DHCP_END}" + acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN} ip_config=${USERNW07_SUBNET}" + acli "net.update_dhcp_dns ${USERNW07_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW07_NAME} start=${USERNW07_DHCP_START} end=${USERNW07_DHCP_END}" + fi + + if [[ ! -z "${USERNW08_NAME}" ]]; then + log "Create User network: Name: ${USERNW08_NAME}, VLAN: ${USERNW08_VLAN}, Subnet: ${USERNW08_SUBNET}, Pool: ${USERNW08_DHCP_START} to ${USERNW08_DHCP_END}" + acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN} ip_config=${USERNW08_SUBNET}" + acli "net.update_dhcp_dns ${USERNW08_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW08_NAME} start=${USERNW08_DHCP_START} end=${USERNW08_DHCP_END}" + fi + + if [[ ! 
-z "${USERNW09_NAME}" ]]; then + log "Create User network: Name: ${USERNW09_NAME}, VLAN: ${USERNW09_VLAN}, Subnet: ${USERNW09_SUBNET}, Pool: ${USERNW09_DHCP_START} to ${USERNW09_DHCP_END}" + acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN} ip_config=${USERNW09_SUBNET}" + acli "net.update_dhcp_dns ${USERNW09_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW09_NAME} start=${USERNW09_DHCP_START} end=${USERNW09_DHCP_END}" + fi + + if [[ ! -z "${USERNW10_NAME}" ]]; then + log "Create User network: Name: ${USERNW10_NAME}, VLAN: ${USERNW10_VLAN}, Subnet: ${USERNW10_SUBNET}, Pool: ${USERNW10_DHCP_START} to ${USERNW10_DHCP_END}" + acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN} ip_config=${USERNW10_SUBNET}" + acli "net.update_dhcp_dns ${USERNW10_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW10_NAME} start=${USERNW10_DHCP_START} end=${USERNW10_DHCP_END}" + fi + + if [[ ! -z "${USERNW11_NAME}" ]]; then + log "Create User network: Name: ${USERNW11_NAME}, VLAN: ${USERNW11_VLAN}, Subnet: ${USERNW11_SUBNET}, Pool: ${USERNW11_DHCP_START} to ${USERNW11_DHCP_END}" + acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN} ip_config=${USERNW11_SUBNET}" + acli "net.update_dhcp_dns ${USERNW11_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${USERNW11_NAME} start=${USERNW11_DHCP_START} end=${USERNW11_DHCP_END}" fi fi } From bf7ab3c06695e552fdd0ceb62b4ba2cfd6515374 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 4 Mar 2020 17:39:38 -0800 Subject: [PATCH 451/691] Update era_bootcamp.sh --- scripts/era_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 66fdb43..d12d287 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -21,7 +21,7 @@ case ${1} in export AUTH_SERVER='AutoAD' # Networking needs for Era Bootcamp export 
NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.249" + export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' export NW3_VLAN=$((OCTET[2]*10+1)) export NW3_SUBNET="${IPV4_PREFIX}.129/25" From c3289c810fe1959e5ca2d152ff4e61b0ed1db648 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 4 Mar 2020 19:30:22 -0800 Subject: [PATCH 452/691] removing IPAM/Domain from Era and Frame Networks --- scripts/frame_bootcamp.sh | 2 +- scripts/lib.pe.sh | 46 +++++++++++++++++++-------------------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 2bb6cde..978783e 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -36,7 +36,7 @@ case ${1} in export USERNW02_NAME='User02-Network' export USERNW02_VLAN=$((OCTET[2]*10+1)) - export USERNW02="${IPV4_PREFIX}.129/25" + export USERNW02_SUBNET="${IPV4_PREFIX}.129/25" export USERNW02_DHCP_START="${IPV4_PREFIX}.150" export USERNW02_DHCP_END="${IPV4_PREFIX}.159" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index d7d6e07..a4b39b0 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -600,7 +600,7 @@ function era_network_configure() { if [[ ! -z "${NW3_NAME}" ]]; then log "Create EraManaged network: Name: ${NW3_NAME}, VLAN: ${NW3_VLAN}, Subnet: ${NW3_SUBNET}" acli "net.create ${NW3_NAME} vlan=${NW3_VLAN} ip_config=${NW3_SUBNET}" - acli "net.update_dhcp_dns ${NW3_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli "net.update_dhcp_dns ${NW3_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" fi fi } @@ -644,78 +644,78 @@ function frame_network_configure() { if [[ ! 
-z "${USERNW01_NAME}" ]]; then log "Create User network: Name: ${USERNW01_NAME}, VLAN: ${USERNW01_VLAN}, Subnet: ${USERNW01_SUBNET}, Pool: ${USERNW01_DHCP_START} to ${USERNW01_DHCP_END}" acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN} ip_config=${USERNW01_SUBNET}" - acli "net.update_dhcp_dns ${USERNW01_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW01_NAME} start=${USERNW01_DHCP_START} end=${USERNW01_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW01_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW01_NAME} start=${USERNW01_DHCP_START} end=${USERNW01_DHCP_END}" fi if [[ ! -z "${USERNW02_NAME}" ]]; then log "Create User network: Name: ${USERNW02_NAME}, VLAN: ${USERNW02_VLAN}, Subnet: ${USERNW02_SUBNET}, Pool: ${USERNW02_DHCP_START} to ${USERNW02_DHCP_END}" acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN} ip_config=${USERNW02_SUBNET}" - acli "net.update_dhcp_dns ${USERNW02_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW02_NAME} start=${USERNW02_DHCP_START} end=${USERNW02_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW02_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW02_NAME} start=${USERNW02_DHCP_START} end=${USERNW02_DHCP_END}" fi if [[ ! 
-z "${USERNW03_NAME}" ]]; then log "Create User network: Name: ${USERNW03_NAME}, VLAN: ${USERNW03_VLAN}, Subnet: ${USERNW03_SUBNET}, Pool: ${USERNW03_DHCP_START} to ${USERNW03_DHCP_END}" acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN} ip_config=${USERNW03_SUBNET}" - acli "net.update_dhcp_dns ${USERNW03_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW03_NAME} start=${USERNW03_DHCP_START} end=${USERNW03_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW03_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW03_NAME} start=${USERNW03_DHCP_START} end=${USERNW03_DHCP_END}" fi if [[ ! -z "${USERNW04_NAME}" ]]; then log "Create User network: Name: ${USERNW04_NAME}, VLAN: ${USERNW04_VLAN}, Subnet: ${USERNW04_SUBNET}, Pool: ${USERNW04_DHCP_START} to ${USERNW04_DHCP_END}" acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN} ip_config=${USERNW04_SUBNET}" - acli "net.update_dhcp_dns ${USERNW04_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW04_NAME} start=${USERNW04_DHCP_START} end=${USERNW04_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW04_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW04_NAME} start=${USERNW04_DHCP_START} end=${USERNW04_DHCP_END}" fi if [[ ! 
-z "${USERNW05_NAME}" ]]; then log "Create User network: Name: ${USERNW05_NAME}, VLAN: ${USERNW05_VLAN}, Subnet: ${USERNW05_SUBNET}, Pool: ${USERNW05_DHCP_START} to ${USERNW05_DHCP_END}" acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN} ip_config=${USERNW05_SUBNET}" - acli "net.update_dhcp_dns ${USERNW05_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW05_NAME} start=${USERNW05_DHCP_START} end=${USERNW05_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW05_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW05_NAME} start=${USERNW05_DHCP_START} end=${USERNW05_DHCP_END}" fi if [[ ! -z "${USERNW06_NAME}" ]]; then log "Create User network: Name: ${USERNW06_NAME}, VLAN: ${USERNW06_VLAN}, Subnet: ${USERNW06_SUBNET}, Pool: ${USERNW06_DHCP_START} to ${USERNW06_DHCP_END}" acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN} ip_config=${USERNW06_SUBNET}" - acli "net.update_dhcp_dns ${USERNW06_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW06_NAME} start=${USERNW06_DHCP_START} end=${USERNW06_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW06_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW06_NAME} start=${USERNW06_DHCP_START} end=${USERNW06_DHCP_END}" fi if [[ ! 
-z "${USERNW07_NAME}" ]]; then log "Create User network: Name: ${USERNW07_NAME}, VLAN: ${USERNW07_VLAN}, Subnet: ${USERNW07_SUBNET}, Pool: ${USERNW07_DHCP_START} to ${USERNW07_DHCP_END}" acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN} ip_config=${USERNW07_SUBNET}" - acli "net.update_dhcp_dns ${USERNW07_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW07_NAME} start=${USERNW07_DHCP_START} end=${USERNW07_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW07_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW07_NAME} start=${USERNW07_DHCP_START} end=${USERNW07_DHCP_END}" fi if [[ ! -z "${USERNW08_NAME}" ]]; then log "Create User network: Name: ${USERNW08_NAME}, VLAN: ${USERNW08_VLAN}, Subnet: ${USERNW08_SUBNET}, Pool: ${USERNW08_DHCP_START} to ${USERNW08_DHCP_END}" acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN} ip_config=${USERNW08_SUBNET}" - acli "net.update_dhcp_dns ${USERNW08_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW08_NAME} start=${USERNW08_DHCP_START} end=${USERNW08_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW08_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW08_NAME} start=${USERNW08_DHCP_START} end=${USERNW08_DHCP_END}" fi if [[ ! 
-z "${USERNW09_NAME}" ]]; then log "Create User network: Name: ${USERNW09_NAME}, VLAN: ${USERNW09_VLAN}, Subnet: ${USERNW09_SUBNET}, Pool: ${USERNW09_DHCP_START} to ${USERNW09_DHCP_END}" acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN} ip_config=${USERNW09_SUBNET}" - acli "net.update_dhcp_dns ${USERNW09_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW09_NAME} start=${USERNW09_DHCP_START} end=${USERNW09_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW09_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW09_NAME} start=${USERNW09_DHCP_START} end=${USERNW09_DHCP_END}" fi if [[ ! -z "${USERNW10_NAME}" ]]; then log "Create User network: Name: ${USERNW10_NAME}, VLAN: ${USERNW10_VLAN}, Subnet: ${USERNW10_SUBNET}, Pool: ${USERNW10_DHCP_START} to ${USERNW10_DHCP_END}" acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN} ip_config=${USERNW10_SUBNET}" - acli "net.update_dhcp_dns ${USERNW10_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW10_NAME} start=${USERNW10_DHCP_START} end=${USERNW10_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW10_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW10_NAME} start=${USERNW10_DHCP_START} end=${USERNW10_DHCP_END}" fi if [[ ! 
-z "${USERNW11_NAME}" ]]; then log "Create User network: Name: ${USERNW11_NAME}, VLAN: ${USERNW11_VLAN}, Subnet: ${USERNW11_SUBNET}, Pool: ${USERNW11_DHCP_START} to ${USERNW11_DHCP_END}" acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN} ip_config=${USERNW11_SUBNET}" - acli "net.update_dhcp_dns ${USERNW11_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${USERNW11_NAME} start=${USERNW11_DHCP_START} end=${USERNW11_DHCP_END}" + #acli "net.update_dhcp_dns ${USERNW11_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${USERNW11_NAME} start=${USERNW11_DHCP_START} end=${USERNW11_DHCP_END}" fi fi } From cf3f4765b48eb00e6bc4d844ed5a588e17918b87 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 5 Mar 2020 18:22:16 -0800 Subject: [PATCH 453/691] Adding basci_bootcamp stagging --- scripts/basic_bootcamp.sh | 151 ++++++++++++++++++++++++++++++++++++++ scripts/global.vars.sh | 28 +++---- stage_workshop.sh | 55 ++++++++------ 3 files changed, 196 insertions(+), 38 deletions(-) create mode 100755 scripts/basic_bootcamp.sh diff --git a/scripts/basic_bootcamp.sh b/scripts/basic_bootcamp.sh new file mode 100755 index 0000000..fd2e681 --- /dev/null +++ b/scripts/basic_bootcamp.sh @@ -0,0 +1,151 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + export AUTH_SERVER='AutoAD' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + Win10v1903.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && flow_enable \ + && pc_cluster_img_import \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 807725c..70eeb60 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,8 +3,8 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='5.11.2.1' -PC_CURRENT_VERSION='5.11.2' -PC_STABLE_VERSION='5.11' +PC_CURRENT_VERSION='5.11.2.1' +PC_STABLE_VERSION='5.11.2' FILES_VERSION='3.6.1.2' FILE_ANALYTICS_VERSION='2.1.0' NTNX_INIT_PASSWORD='nutanix/4u' @@ -173,10 +173,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' + PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.1.2.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' @@ -211,10 +211,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' 
PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' @@ -249,10 +249,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central-metadata.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + 
PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index 06306bd..1f87975 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,21 +11,22 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ -"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ -"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Current" \ -"Previous Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ -"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11/AHV PC 5.11) = Stable" \ +"Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ +"SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ +"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ +"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -"Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -"Calm 
Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"Basic / API Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -47,9 +48,9 @@ function stage_clusters() { # TODO: make WORKSHOPS and map a JSON configuration file? if (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi @@ -65,11 +66,26 @@ function stage_clusters() { _pe_launch='snc_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Basic / API Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='basic_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Private Cloud" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='privatecloud_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Databases" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='era_bootcamp.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i "^Files" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='files_bootcamp.sh' + 
_pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Calm" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm_bootcamp.sh' @@ -80,14 +96,9 @@ function stage_clusters() { _pe_launch='citrix_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Databases" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='era_bootcamp.sh' - _pc_launch=${_pe_launch} - fi - if (( $(echo ${_workshop} | grep -i "^Files" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Frame" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='files_bootcamp.sh' + _pe_launch='frame_bootcamp.sh' _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then @@ -100,11 +111,7 @@ function stage_clusters() { _pe_launch='snc_ts2020.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Frame" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='frame_bootcamp.sh' - _pc_launch=${_pe_launch} - fi + dependencies 'install' 'sshpass' From 13d487fea0aea8ab536420cf00afccb2164dc59e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 5 Mar 2020 19:43:57 -0800 Subject: [PATCH 454/691] Post GTS20 Bootcamp Staging updates --- README.md | 7 ++++++- scripts/lib.pe.sh | 4 ++-- stage_workshop.sh | 3 +-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 4605b54..dc7be73 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,8 @@ After HPoC Foundation, you can have push-button Calm in about half an hour! 
# Table of Contents # -- [Available Workshops](#available-workshops) +- [How To Workshop](#howto-workshop) +- [Available Workshops](#available-workshops) - [HPoC Cluster Reservation](#hpoc-cluster-reservation) - [Staging Your HPoC](#staging-your-hpoc) - [Interactive Usage](#interactive-usage) @@ -17,6 +18,10 @@ After HPoC Foundation, you can have push-button Calm in about half an hour! --- +## How To Workshop ## + +Please review the How To Workshop for the latest instructions http://ntnx.tips/howto + ## Available Workshops ## 1. Calm Introduction Workshop (AOS/AHV 5.5+) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index a4b39b0..9230abe 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -250,7 +250,7 @@ acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}" #acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME} ip=${PrismOpsServer_HOST}" -log "Power on ${VPrismOpsServer} VM..." +log "Power on ${PrismOpsServer} VM..." acli "vm.on ${PrismOpsServer}" @@ -782,7 +782,7 @@ function pc_configure() { ## TODO: If DEBUG is set, we run the below command with bash -x _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash -x ${HOME}/${PC_LAUNCH} PC" + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} PC" log "Remote asynchroneous launch PC configuration script... ${_command}" remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" log "PC Configuration complete: try Validate Staged Clusters now." 
diff --git a/stage_workshop.sh b/stage_workshop.sh index 1f87975..19cd2b3 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -24,9 +24,9 @@ WORKSHOPS=(\ "Basic / API Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ "Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ "Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ -"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ "Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -112,7 +112,6 @@ function stage_clusters() { _pc_launch=${_pe_launch} fi - dependencies 'install' 'sshpass' if [[ -z ${PC_VERSION} ]]; then From c134dc08edadcb112ec61d7d73de89da36247eca Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Tue, 10 Mar 2020 10:11:33 +0100 Subject: [PATCH 455/691] Update ts2020.sh Typo --- scripts/ts2020.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index cbb1cbd..e89dfdd 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -33,7 +33,7 @@ case ${1} in export AUTH_SERVER='AutoAD' export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' + export SeedPC='GTSseedPC.zip' export _external_nw_name="${1}" From 8414e699db409ac3431dbddfc63af58cbbdc7eed Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 11 Mar 2020 21:50:16 -0700 Subject: [PATCH 456/691] Updates for Frame and Objects Darksite --- scripts/basic_bootcamp.sh | 1 + scripts/era_bootcamp.sh | 11 +++++----- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 22 +++++++++++++++++++ scripts/lib.pe.sh | 46 ++++++++++++++++++++------------------- 5 files changed, 54 insertions(+), 28 deletions(-) diff --git a/scripts/basic_bootcamp.sh b/scripts/basic_bootcamp.sh index fd2e681..125cb88 100755 --- 
a/scripts/basic_bootcamp.sh +++ b/scripts/basic_bootcamp.sh @@ -76,6 +76,7 @@ case ${1} in ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ + Windows2016.iso \ ) run_once diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index d12d287..9dc9f35 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -20,11 +20,12 @@ case ${1} in export AUTH_SERVER='AutoAD' # Networking needs for Era Bootcamp - export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.219" - export NW3_NAME='EraManaged' - export NW3_VLAN=$((OCTET[2]*10+1)) - export NW3_SUBNET="${IPV4_PREFIX}.129/25" + export NW2_NAME='EraManaged' + #export NW2_DHCP_START="${IPV4_PREFIX}.132" + #export NW2_DHCP_END="${IPV4_PREFIX}.219" + #export NW3_NAME='EraManaged' + #export NW3_VLAN=$((OCTET[2]*10+1)) + #export NW3_SUBNET="${IPV4_PREFIX}.129/25" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 70eeb60..67d8097 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -148,7 +148,7 @@ case "${OCTET[3]}" in esac # Stuff needed for object_store -# OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' +OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} NETWORK="${OCTET[0]}.${OCTET[1]}" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index d7de46d..bfe2fcc 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -11,6 +11,15 @@ # Added the download bits for the Centos Image for Karbon ############################################################################################################################################################################### +############################################################################################################################################################################### +# Routine to mark PC has finished staging 
+############################################################################################################################################################################### + +function finish_staging() { + log "Staging is complete. Writing to .staging_complete" + touch .staging_complete + date >> .staging_complete +} ############################################################################################################################################################################### @@ -263,6 +272,7 @@ function objects_enable() { local _json_data_check="{\"entity_type\":\"objectstore\"}" local _httpURL_check="https://localhost:9440/oss/api/nutanix/v3/groups" local _httpURL="https://localhost:9440/api/nutanix/v3/services/oss" + local _maxtries=30 # Start the enablement process _response=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d $_json_data_set_enable ${_httpURL}) @@ -302,6 +312,18 @@ function object_store() { local _url_oss_check='https://localhost:9440/oss/api/nutanix/v3/objectstores/list' + # Enable Dark Site Repo and wait 3 seconds + mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} + sleep 3 + # Confirm airgap is enabled + _response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) + + if [ $_response -eq 1 ]; then + log "Objects dark site staging successfully enabled. Response is $_response. " + else + log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." + fi + # Payload for the _json_data _json_data='{"kind":"subnet"}' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 9230abe..9ae5eb8 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -570,10 +570,6 @@ function network_configure() { function era_network_configure() { local _network_name="${NW1_NAME}" - if [[ ! -z "${NW3_NAME}" ]]; then - #TODO: accommodate for X networks! - _network_name="${NW3_NAME}" - fi if [[ ! 
-z $(acli "net.list" | grep ${_network_name}) ]]; then log "IDEMPOTENCY: ${_network_name} network set, skip." @@ -590,18 +586,13 @@ function era_network_configure() { acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" + # NW2 is EraManaged, so we do not need DHCP if [[ ! -z "${NW2_NAME}" ]]; then - log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" + log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}" acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" fi - if [[ ! -z "${NW3_NAME}" ]]; then - log "Create EraManaged network: Name: ${NW3_NAME}, VLAN: ${NW3_VLAN}, Subnet: ${NW3_SUBNET}" - acli "net.create ${NW3_NAME} vlan=${NW3_VLAN} ip_config=${NW3_SUBNET}" - #acli "net.update_dhcp_dns ${NW3_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - fi fi } @@ -643,77 +634,88 @@ function frame_network_configure() { if [[ ! -z "${USERNW01_NAME}" ]]; then log "Create User network: Name: ${USERNW01_NAME}, VLAN: ${USERNW01_VLAN}, Subnet: ${USERNW01_SUBNET}, Pool: ${USERNW01_DHCP_START} to ${USERNW01_DHCP_END}" - acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN} ip_config=${USERNW01_SUBNET}" + acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN}" + #acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN} ip_config=${USERNW01_SUBNET}" #acli "net.update_dhcp_dns ${USERNW01_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW01_NAME} start=${USERNW01_DHCP_START} end=${USERNW01_DHCP_END}" fi if [[ ! 
-z "${USERNW02_NAME}" ]]; then log "Create User network: Name: ${USERNW02_NAME}, VLAN: ${USERNW02_VLAN}, Subnet: ${USERNW02_SUBNET}, Pool: ${USERNW02_DHCP_START} to ${USERNW02_DHCP_END}" - acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN} ip_config=${USERNW02_SUBNET}" + acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN}" + #acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN} ip_config=${USERNW02_SUBNET}" #acli "net.update_dhcp_dns ${USERNW02_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW02_NAME} start=${USERNW02_DHCP_START} end=${USERNW02_DHCP_END}" fi if [[ ! -z "${USERNW03_NAME}" ]]; then log "Create User network: Name: ${USERNW03_NAME}, VLAN: ${USERNW03_VLAN}, Subnet: ${USERNW03_SUBNET}, Pool: ${USERNW03_DHCP_START} to ${USERNW03_DHCP_END}" - acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN} ip_config=${USERNW03_SUBNET}" + acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN}" + #acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN} ip_config=${USERNW03_SUBNET}" #acli "net.update_dhcp_dns ${USERNW03_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW03_NAME} start=${USERNW03_DHCP_START} end=${USERNW03_DHCP_END}" fi if [[ ! -z "${USERNW04_NAME}" ]]; then log "Create User network: Name: ${USERNW04_NAME}, VLAN: ${USERNW04_VLAN}, Subnet: ${USERNW04_SUBNET}, Pool: ${USERNW04_DHCP_START} to ${USERNW04_DHCP_END}" - acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN} ip_config=${USERNW04_SUBNET}" + acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN}" + #acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN} ip_config=${USERNW04_SUBNET}" #acli "net.update_dhcp_dns ${USERNW04_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW04_NAME} start=${USERNW04_DHCP_START} end=${USERNW04_DHCP_END}" fi if [[ ! 
-z "${USERNW05_NAME}" ]]; then log "Create User network: Name: ${USERNW05_NAME}, VLAN: ${USERNW05_VLAN}, Subnet: ${USERNW05_SUBNET}, Pool: ${USERNW05_DHCP_START} to ${USERNW05_DHCP_END}" - acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN} ip_config=${USERNW05_SUBNET}" + acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN}" + #acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN} ip_config=${USERNW05_SUBNET}" #acli "net.update_dhcp_dns ${USERNW05_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW05_NAME} start=${USERNW05_DHCP_START} end=${USERNW05_DHCP_END}" fi if [[ ! -z "${USERNW06_NAME}" ]]; then log "Create User network: Name: ${USERNW06_NAME}, VLAN: ${USERNW06_VLAN}, Subnet: ${USERNW06_SUBNET}, Pool: ${USERNW06_DHCP_START} to ${USERNW06_DHCP_END}" - acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN} ip_config=${USERNW06_SUBNET}" + acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN}" + #acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN} ip_config=${USERNW06_SUBNET}" #acli "net.update_dhcp_dns ${USERNW06_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW06_NAME} start=${USERNW06_DHCP_START} end=${USERNW06_DHCP_END}" fi if [[ ! -z "${USERNW07_NAME}" ]]; then log "Create User network: Name: ${USERNW07_NAME}, VLAN: ${USERNW07_VLAN}, Subnet: ${USERNW07_SUBNET}, Pool: ${USERNW07_DHCP_START} to ${USERNW07_DHCP_END}" - acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN} ip_config=${USERNW07_SUBNET}" + acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN}" + #acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN} ip_config=${USERNW07_SUBNET}" #acli "net.update_dhcp_dns ${USERNW07_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW07_NAME} start=${USERNW07_DHCP_START} end=${USERNW07_DHCP_END}" fi if [[ ! 
-z "${USERNW08_NAME}" ]]; then log "Create User network: Name: ${USERNW08_NAME}, VLAN: ${USERNW08_VLAN}, Subnet: ${USERNW08_SUBNET}, Pool: ${USERNW08_DHCP_START} to ${USERNW08_DHCP_END}" - acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN} ip_config=${USERNW08_SUBNET}" + acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN}" + #acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN} ip_config=${USERNW08_SUBNET}" #acli "net.update_dhcp_dns ${USERNW08_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW08_NAME} start=${USERNW08_DHCP_START} end=${USERNW08_DHCP_END}" fi if [[ ! -z "${USERNW09_NAME}" ]]; then log "Create User network: Name: ${USERNW09_NAME}, VLAN: ${USERNW09_VLAN}, Subnet: ${USERNW09_SUBNET}, Pool: ${USERNW09_DHCP_START} to ${USERNW09_DHCP_END}" - acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN} ip_config=${USERNW09_SUBNET}" + acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN}" + #acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN} ip_config=${USERNW09_SUBNET}" #acli "net.update_dhcp_dns ${USERNW09_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW09_NAME} start=${USERNW09_DHCP_START} end=${USERNW09_DHCP_END}" fi if [[ ! -z "${USERNW10_NAME}" ]]; then log "Create User network: Name: ${USERNW10_NAME}, VLAN: ${USERNW10_VLAN}, Subnet: ${USERNW10_SUBNET}, Pool: ${USERNW10_DHCP_START} to ${USERNW10_DHCP_END}" - acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN} ip_config=${USERNW10_SUBNET}" + acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN}" + #acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN} ip_config=${USERNW10_SUBNET}" #acli "net.update_dhcp_dns ${USERNW10_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW10_NAME} start=${USERNW10_DHCP_START} end=${USERNW10_DHCP_END}" fi if [[ ! 
-z "${USERNW11_NAME}" ]]; then log "Create User network: Name: ${USERNW11_NAME}, VLAN: ${USERNW11_VLAN}, Subnet: ${USERNW11_SUBNET}, Pool: ${USERNW11_DHCP_START} to ${USERNW11_DHCP_END}" - acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN} ip_config=${USERNW11_SUBNET}" + acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN}" + #acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN} ip_config=${USERNW11_SUBNET}" #acli "net.update_dhcp_dns ${USERNW11_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" #acli " net.add_dhcp_pool ${USERNW11_NAME} start=${USERNW11_DHCP_START} end=${USERNW11_DHCP_END}" fi From db9c7492c51ec5dc00cc591fa630d89216721615 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 12 Mar 2020 15:51:08 +0100 Subject: [PATCH 457/691] Updates needed due to new version of the ERA As we have an old version of teh ERA installation qcow2 image we needed to update to the right version so the HoL works. --- scripts/era_bootcamp.sh | 2 +- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 8 ++++---- scripts/ts2020.sh | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 9dc9f35..53fbc3f 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -76,7 +76,7 @@ case ${1} in export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ - ERA-Server-build-1.2.0.1.qcow2 \ + ERA-Server-build-1.2.1.qcow2 \ MSSQL-2016-VM.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 67d8097..234f930 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -62,7 +62,7 @@ QCOW2_IMAGES=(\ Windows10-1709.qcow2 \ WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - ERA-Server-build-1.1.1.3.qcow2 \ + ERA-Server-build-1.2.1.qcow2 \ MSSQL-2016-VM.qcow2 \ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index bfe2fcc..5ffe4ae 100755 --- a/scripts/lib.pc.sh +++ 
b/scripts/lib.pc.sh @@ -1309,7 +1309,7 @@ function upload_era_calm_blueprint() { local PE_CREDS_PASSWORD="${PE_PASSWORD}" #local ERACLI_PASSWORD=$(awk '{printf "%s\\n", $0}' ${DIRECTORY}/${CALM_RSA_KEY_FILE}) local DOWNLOAD_BLUEPRINTS - local ERA_IMAGE="ERA-Server-build-1.2.0.1.qcow2" + local ERA_IMAGE="ERA-Server-build-1.2.1.qcow2" local ERA_IMAGE_UUID local CURL_HTTP_OPTS="--max-time 25 --silent -k --header Content-Type:application/json --header Accept:application/json --insecure" local _loops="0" @@ -1325,17 +1325,17 @@ function upload_era_calm_blueprint() { _loops="0" _maxtries="75" - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.1.qcow2' | wc -l) # The response should be a Task UUID while [[ $ERA_IMAGE_UUID_CHECK -ne 1 && $_loops -lt $_maxtries ]]; do log "Image not yet uploaded. $_loops/$_maxtries... sleeping 60 seconds" sleep 60 - ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.0.1.qcow2' | wc -l) + ERA_IMAGE_UUID_CHECK=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d '{}' 'https://localhost:9440/api/nutanix/v3/images/list' | grep 'ERA-Server-build-1.2.1.qcow2' | wc -l) (( _loops++ )) done if [[ $_loops -lt $_maxtries ]]; then log "Image has been uploaded." 
- ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.0.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + ERA_IMAGE_UUID=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"image","filter": "name==ERA-Server-build-1.2.1.qcow2"}' 'https://localhost:9440/api/nutanix/v3/images/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") else log "Image is not upload, please check." fi diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh index e89dfdd..f9f7022 100755 --- a/scripts/ts2020.sh +++ b/scripts/ts2020.sh @@ -97,7 +97,7 @@ case ${1} in export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ - ERA-Server-build-1.2.0.1.qcow2 \ + ERA-Server-build-1.2.1.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ Win10v1903.qcow2 \ From f504ff232f0f00bdff082ec40e6cb7796472b7fd Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 12 Mar 2020 16:17:20 +0100 Subject: [PATCH 458/691] Update lib.pc.sh Use of the new 0.4 Karbon image --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 5ffe4ae..7c77475 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1496,7 +1496,7 @@ function upload_karbon_calm_blueprint() { local DIRECTORY="/home/nutanix/karbon" local BLUEPRINT=${Karbon_Blueprint} local CALM_PROJECT="BootcampInfra" - local KARBON_IMAGE='ntnx-0.2' + local KARBON_IMAGE='ntnx-0.4' local PE_IP=${PE_HOST} local CLSTR_NAME="none" local CTR_UUID=${_storage_default_uuid} From a4cc9441aa3ee3b4783957dc35199a1e5788ea60 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 12 Mar 2020 08:19:48 -0700 Subject: [PATCH 459/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 234f930..90180fb 
100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -206,7 +206,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.55.76.10/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' - OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' + OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' From 8926f30c74a9deb5b942e0899c57828ddf894264 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 13 Mar 2020 10:07:50 +0100 Subject: [PATCH 460/691] Update lib.common.sh Changed to the right Era server image --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 3a44e1c..a808d27 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -455,7 +455,7 @@ EOF function priority_images(){ local _prio_images_arr=(\ - ERA-Server-build-1.2.0.1.qcow2 \ + ERA-Server-build-1.2.1.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ From 720f31a9350bed96b112e129c12e98f38bfdbeb1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 15 Mar 2020 23:48:56 -0700 Subject: [PATCH 461/691] Update lib.pc.sh --- scripts/lib.pc.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7c77475..cdedbe4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -313,16 +313,16 @@ function object_store() { # Enable Dark Site Repo and wait 3 seconds - mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} - sleep 3 + #mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} + #sleep 3 # Confirm airgap is enabled - _response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) + #_response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) - if [ 
$_response -eq 1 ]; then - log "Objects dark site staging successfully enabled. Response is $_response. " - else - log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." - fi + #if [ $_response -eq 1 ]; then + # log "Objects dark site staging successfully enabled. Response is $_response. " + #else + # log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." + #fi # Payload for the _json_data _json_data='{"kind":"subnet"}' @@ -1636,7 +1636,7 @@ function upload_karbon_calm_blueprint() { log "Sleep 30 seconds so the blueprint can settle in......" sleep 30 - + log "Launching the Karbon Cluster Blueprint" curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST -d @set_blueprint_response_file.json "https://localhost:9440/api/nutanix/v3/blueprints/${KARBON_BLUEPRINT_UUID}/launch" From 59cee1d6136036b7dc4775e86166d4124d7958fd Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 16 Mar 2020 00:05:17 -0700 Subject: [PATCH 462/691] Update lib.pc.sh --- scripts/lib.pc.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index cdedbe4..cec0b66 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -313,16 +313,16 @@ function object_store() { # Enable Dark Site Repo and wait 3 seconds - #mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} - #sleep 3 + mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} + sleep 3 # Confirm airgap is enabled - #_response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) + _response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) - #if [ $_response -eq 1 ]; then - # log "Objects dark site staging successfully enabled. Response is $_response. " - #else - # log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." 
- #fi + if [ $_response -eq 1 ]; then + log "Objects dark site staging successfully enabled. Response is $_response. " + else + log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." + fi # Payload for the _json_data _json_data='{"kind":"subnet"}' From 01997d8cb96509789d64f9fbe1231367e813617a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 16 Mar 2020 00:30:55 -0700 Subject: [PATCH 463/691] Update global.vars.sh Update for Objects offline Repo --- scripts/global.vars.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 90180fb..39cdb84 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -148,7 +148,8 @@ case "${OCTET[3]}" in esac # Stuff needed for object_store -OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' +OBJECTS_OFFLINE_REPO='http://10.42.38.10/images/' +#OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} NETWORK="${OCTET[0]}.${OCTET[1]}" @@ -206,7 +207,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.55.76.10/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' - OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' + #OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' + OBJECTS_OFFLINE_REPO='http://10.55.76.10/' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' From 1f70f672d9adeda6a63a099570b3241c9ab7b1d5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 16 Mar 2020 01:40:15 -0700 Subject: [PATCH 464/691] Update global.vars.sh --- scripts/global.vars.sh | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 39cdb84..4a00d1d 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ 
-148,7 +148,7 @@ case "${OCTET[3]}" in esac # Stuff needed for object_store -OBJECTS_OFFLINE_REPO='http://10.42.38.10/images/' +OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} NETWORK="${OCTET[0]}.${OCTET[1]}" @@ -208,7 +208,7 @@ case "${OCTET[0]}.${OCTET[1]}" in BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' #OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' - OBJECTS_OFFLINE_REPO='http://10.55.76.10/' + OBJECTS_OFFLINE_REPO='http://10.55.76.10' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -246,7 +246,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' - OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -284,7 +285,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" - OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary if [[ ${OCTET[2]} -gt 169 ]]; then From f2e23b4d01fd34694d59618632b5688780e1afc6 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 16 Mar 2020 13:30:43 +0100 Subject: [PATCH 465/691] Creted a OSS check.. 
--- scripts/images_only.sh | 102 ++++++++++++++++++++++++++++++ test/objects/cluster.txt | 77 +++++++++++++++++++++++ test/objects/object_airgap.sh | 114 ++++++++++++++++++++++++++++++++++ test/objects/test.json | 1 + 4 files changed, 294 insertions(+) create mode 100755 scripts/images_only.sh create mode 100644 test/objects/cluster.txt create mode 100755 test/objects/object_airgap.sh create mode 100644 test/objects/test.json diff --git a/scripts/images_only.sh b/scripts/images_only.sh new file mode 100755 index 0000000..45c3b42 --- /dev/null +++ b/scripts/images_only.sh @@ -0,0 +1,102 @@ +#!/usr/bin/env bash +##################################################### +# Images Inly distribution script # +# ------------------------------------------------- # +# Willem Essenstam - 0.1 - 15 March 2020 # +# Initial version # +##################################################### + +#__main()__________ +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. we-lib.common.sh +. global.vars.sh + +# Try to figure out what workshop we have run +# Which log files do we have? 
+log_files=$(ls *.log) + +images_arr=("CentOS7.qcow2" "Windows2012R2.qcow2" "Windows10-1709.qcow2" "WinToolsVM.qcow2" "Linux_ToolsVM.qcow2" \ + "ERA-Server-build-1.2.1.qcow2" "MSSQL-2016-VM.qcow2" "hycu-3.5.0-6253.qcow2" "VeeamAvailability_1.0.457.vmdk" "move3.2.0.qcow2" \ + "AutoXD.qcow2" "CentOS7.iso" "Windows2016.iso" "Windows2012R2.iso" "Windows10.iso" "Nutanix-VirtIO-1.1.5.iso" "SQLServer2014SP3.iso" \ + "XenApp_and_XenDesktop_7_18.iso" "VeeamBR_9.5.4.2615.Update4.iso" "Windows2016.qcow2" "ERA-Server-build-1.2.1.qcow2" "Win10v1903.qcow2" \ + "Linux_ToolsVM.qcow2" "move-3.4.1.qcow2" "GTSOracle/19c-april/19c-bootdisk.qcow2" "GTSOracle/19c-april/19c-disk1.qcow2" "GTSOracle/19c-april/19c-disk2.qcow2" \ + "GTSOracle/19c-april/19c-disk3.qcow2" "GTSOracle/19c-april/19c-disk4.qcow2" "GTSOracle/19c-april/19c-disk5.qcow2" "GTSOracle/19c-april/19c-disk6.qcow2" \ + "GTSOracle/19c-april/19c-disk7.qcow2" "GTSOracle/19c-april/19c-disk8.qcow2" "GTSOracle/19c-april/19c-disk9.qcow2" "HYCU/Mine/HYCU-4.0.3-Demo.qcow2" \ + "veeam/VeeamAHVProxy2.0.404.qcow2" "Citrix_Virtual_Apps_and_Desktops_7_1912.iso" "FrameCCA-2.1.6.iso" "FrameCCA-2.1.0.iso" "FrameGuestAgentInstaller_1.0.2.2_7930.iso" \ + "veeam/VBR_10.0.0.4442.iso") + +if [[ $log_files == *"snc_bootcamp"* ]]; then + # We have found snc_bootcamp has been run + workshop="snc_bootcamp" + send_img_array=(${images_arr[@]:0:20}) +elif [[ $log_files == *"basic_bootcamp"* ]]; then + # We have found basic_bootcamp has been run + workshop="basic_bootcamp" + send_img_array=(${images_arr[13]} ${images_arr[0]} ${images_arr[15]} ${images_arr[14]}) +elif [[ $log_files == *"privatecloud_bootcamp"* ]]; then + # We have found privatecloud_bootcamp has been run + workshop="privatecloud_bootcamp" +elif [[ $log_files == *"era_bootcamp"* ]]; then + # We have found era_bootcamp has been run + workshop="era_bootcamp" +elif [[ $log_files == *"files_bootcamp"* ]]; then + # We have found files_bootcamp has been run + workshop="files_bootcamp" +elif [[ 
$log_files == *"calm_bootcamp"* ]]; then + # We have found calm_bootcamp has been run + workshop="calm_bootcamp" +elif [[ $log_files == *"citrix_bootcam"* ]]; then + # We have found citrix_bootcamp has been run + workshop="citrix_bootcamp" +elif [[ $log_files == *"frame_bootcamp"* ]]; then + # We have found frame_bootcamp has been run + workshop="frame_bootcamp" +elif [[ $log_files == *"bootcamp"* ]]; then + # We have fond that the bootcamp has been run + workshop="bootcamp" + send_img_array=(${images_arr[@]:0:20}) +elif [[ $log_files == *"ts2020"* ]]; then + # We have fond that the ts2020 has been run + workshop="ts2020" + send_img_array=(${images_arr[0]} ${images_arr[@]:20:41}) +fi + +# Make the right images avail for the different workshops based on the one we found from the log file +case $workshop in + "snc_bootcamp") + echo "Found the SNC_Bootcamp has run." + ;; + "basic_bootcamp") + echo "basic_bootcamp found" + ;; + "privatecloud_bootcamp") + echo "privatecloud_bootcamp found" + ;; + "era_bootcamp") + echo "Era_bootcamp found" + ;; + "files_bootcamp") + echo "files_bootcamp found" + ;; + "calm_bootcamp") + echo "calm_bootcamp found" + ;; + "citrix_bootcamp") + echo "citrix_bootcamp found" + ;; + "frame_bootcamp") + echo "frame_bootcamp found" + ;; + "bootcamp") + echo "bootcamp found" + ;; + esac + + + + + + + + diff --git a/test/objects/cluster.txt b/test/objects/cluster.txt new file mode 100644 index 0000000..942386b --- /dev/null +++ b/test/objects/cluster.txt @@ -0,0 +1,77 @@ +# Using the # symbol you can have the line skipped by the logic of the checking script. 
+10.42.3.39|techX2020!|nathan@nutanix.com +10.42.5.39|techX2020!|nathan@nutanix.com +10.42.6.39|techX2020!|nathan@nutanix.com +10.42.10.39|techX2020!|nathan@nutanix.com +10.42.11.39|techX2020!|nathan@nutanix.com +10.42.12.39|techX2020!|nathan@nutanix.com +10.42.14.39|techX2020!|nathan@nutanix.com +10.42.16.39|techX2020!|nathan@nutanix.com +10.42.18.39|techX2020!|nathan@nutanix.com +10.42.19.39|techX2020!|nathan@nutanix.com +10.42.20.39|techX2020!|nathan@nutanix.com +10.42.23.39|techX2020!|nathan@nutanix.com +10.42.25.39|techX2020!|nathan@nutanix.com +10.42.27.39|techX2020!|nathan@nutanix.com +10.42.28.39|techX2020!|nathan@nutanix.com +10.42.29.39|techX2020!|nathan@nutanix.com +10.42.30.39|techX2020!|nathan@nutanix.com +10.42.31.39|techX2020!|nathan@nutanix.com +10.42.32.39|techX2020!|nathan@nutanix.com +10.42.34.39|techX2020!|nathan@nutanix.com +10.42.35.39|techX2020!|nathan@nutanix.com +10.42.42.39|techX2020!|nathan@nutanix.com +10.42.56.39|techX2020!|nathan@nutanix.com +10.42.61.39|techX2020!|nathan@nutanix.com +10.42.62.39|techX2020!|nathan@nutanix.com +10.42.66.39|techX2020!|nathan@nutanix.com +10.42.67.39|techX2020!|nathan@nutanix.com +10.42.69.39|techX2020!|nathan@nutanix.com +10.42.71.39|techX2020!|nathan@nutanix.com +10.42.74.39|techX2020!|nathan@nutanix.com +10.42.79.39|techX2020!|nathan@nutanix.com +10.42.81.39|techX2020!|nathan@nutanix.com +10.42.84.39|techX2020!|nathan@nutanix.com +10.42.86.39|techX2020!|nathan@nutanix.com +10.42.94.39|techX2020!|nathan@nutanix.com +10.42.95.39|techX2020!|nathan@nutanix.com +10.42.99.39|techX2020!|nathan@nutanix.com +10.42.104.39|techX2020!|nathan@nutanix.com +10.42.108.39|techX2020!|nathan@nutanix.com +10.42.110.39|techX2020!|nathan@nutanix.com +10.38.203.39|techX2020!|nathan@nutanix.com +10.38.204.39|techX2020!|nathan@nutanix.com +10.38.205.39|techX2020!|nathan@nutanix.com +10.38.206.39|techX2020!|nathan@nutanix.com +10.38.207.39|techX2020!|nathan@nutanix.com +10.38.208.39|techX2020!|nathan@nutanix.com 
+10.38.209.39|techX2020!|nathan@nutanix.com +10.38.210.39|techX2020!|nathan@nutanix.com +10.38.211.39|techX2020!|nathan@nutanix.com +10.38.212.39|techX2020!|nathan@nutanix.com +10.38.213.39|techX2020!|nathan@nutanix.com +10.38.214.39|techX2020!|nathan@nutanix.com +10.38.215.39|techX2020!|nathan@nutanix.com +10.38.216.39|techX2020!|nathan@nutanix.com +10.38.217.39|techX2020!|nathan@nutanix.com +10.38.218.39|techX2020!|nathan@nutanix.com +10.38.221.39|techX2020!|nathan@nutanix.com +10.42.111.39|techX2020!|nathan@nutanix.com +10.42.112.39|techX2020!|nathan@nutanix.com +10.55.1.39|techX2020!|nathan@nutanix.com +10.55.2.39|techX2020!|nathan@nutanix.com +10.55.4.39|techX2020!|nathan@nutanix.com +10.55.9.39|techX2020!|nathan@nutanix.com +10.55.10.39|techX2020!|nathan@nutanix.com +10.55.30.39|techX2020!|nathan@nutanix.com +10.55.31.39|techX2020!|nathan@nutanix.com +10.55.33.39|techX2020!|nathan@nutanix.com +10.55.35.39|techX2020!|nathan@nutanix.com +10.55.39.39|techX2020!|nathan@nutanix.com +10.55.40.39|techX2020!|nathan@nutanix.com +10.55.41.39|techX2020!|nathan@nutanix.com +10.55.48.39|techX2020!|nathan@nutanix.com +10.55.49.39|techX2020!|nathan@nutanix.com +10.55.55.39|techX2020!|nathan@nutanix.com +10.55.57.39|techX2020!|nathan@nutanix.com +10.55.60.39|techX2020!|nathan@nutanix.com \ No newline at end of file diff --git a/test/objects/object_airgap.sh b/test/objects/object_airgap.sh new file mode 100755 index 0000000..3a77ee6 --- /dev/null +++ b/test/objects/object_airgap.sh @@ -0,0 +1,114 @@ +#!/bin/sh + +# Script to set the airgap for Objects to our TS filesservers. 
+ +for _cluster in $(cat cluster.txt | grep -v ^#) + do + set -f + _fields=(${_cluster//|/ }) + PE_HOST=${_fields[0]} + PE_PASSWORD=${_fields[1]} + OCTET=(${PE_HOST//./ }) + PC_HOST=${PE_HOST:-2} + CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' + _url_network="https://${PC_HOST}:9440/api/nutanix/v3/subnets/list" + _url_oss="'https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores" + _url_oss_check="https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores/list" + + + # Getting the IP for the sources file of objects + if [[ ${OCTET[1]} == "42" || ${OCTET[1]} == "38" ]]; then + source_ip="http://10.42.38.10/images" + else + source_ip="http://10.55.76.10" + fi + + # Getting the command ready based on IP network of the cluster + cmd='/usr/local/nutanix/cluster/bin/mspctl airgap --enable --lcm-server=' + cmd+=$source_ip + cmd+=';sleep 3;/usr/local/nutanix/cluster/bin/mspctl airgap --status | grep "\"enable\":true" | wc -l' + + # Fire the command on the PC of the cluster so we have the right Dark Site image pull for Objects + sshpass -e ssh nutanix@${PE_HOST} -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null $cmd + + # See if we have an error on the cluster for the objects + url="https://${PC_HOST}:9440/oss/api/nutanix/v3/groups" + 
payload='{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"},{"attribute":"domain"},{"attribute":"num_msp_workers"},{"attribute":"usage_bytes"},{"attribute":"num_buckets"},{"attribute":"num_objects"},{"attribute":"num_alerts_internal"},{"attribute":"client_access_network_ip_used_list"},{"attribute":"total_capacity_gib"},{"attribute":"last_completed_step"},{"attribute":"state"},{"attribute":"percentage_complete"},{"attribute":"ipv4_address"},{"attribute":"num_alerts_critical"},{"attribute":"num_alerts_info"},{"attribute":"num_alerts_warning"},{"attribute":"error_message_list"},{"attribute":"cluster_name"},{"attribute":"client_access_network_name"},{"attribute":"client_access_network_ip_list"},{"attribute":"buckets_infra_network_name"},{"attribute":"buckets_infra_network_vip"},{"attribute":"buckets_infra_network_dns"},{"attribute":"total_memory_size_mib"},{"attribute":"total_vcpu_count"},{"attribute":"num_vcpu_per_msp_worker"}]}' + _respone_json=$(curl ${CURL_HTTP_OPTS} -d ${payload} ${url} --user admin:${PE_PASSWORD} | jq '.group_results[0].entity_results[0].data[] | select (.name=="state") .values[0].values[0]' | tr -d \") + + if [[ ${_respone_json} != "COMPLETE" ]]; then + if [[ ${_respone_json} == "PENDING" ]]; then + echo "Status for ${PC_HOST} is pending.... Skipping" + else + echo "Found and error at PC ${PC_HOST}.. Starting counter measurements...." 
+ # Delete the current objectstore + _respone_json=$(curl ${CURL_HTTP_OPTS} -d ${payload} ${url} --user admin:${PE_PASSWORD} | jq '.group_results[0].entity_results[0].entity_id' | tr -d \") + uuid_objects_store=${_respone_json} + url_delete="https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores/${uuid_objects_store}" + del_oss_response=$(curl ${CURL_HTTP_OPTS} -X DELETE ${url_delete} -w "%{http_code}\n" --user admin:${PE_PASSWORD}) + + # Has the deletion been accepted? + if [[ ${del_oss_response} == "202" ]]; then + echo "Objectstore is to be deleted... Checking before moving on..." + url="https://${PC_HOST}:9440/oss/api/nutanix/v3/groups" + payload='{"entity_type":"objectstore","group_member_sort_attribute":"name","group_member_sort_order":"ASCENDING","group_member_count":20,"group_member_offset":0,"group_member_attributes":[{"attribute":"name"}]}' + _response_json=$(curl ${CURL_HTTP_OPTS} -d ${payload} ${url} --user admin:${PE_PASSWORD} | jq '.filtered_entity_count' | tr -d \") + # Wait while the objectstore is still there before we move on in creating one. + while [[ ${_response_json} != 0 ]] + do + echo "Objectstore still found... Waiting 10 seconds.." + sleep 10 + _response_json=$(curl ${CURL_HTTP_OPTS} -d ${payload} ${url} --user admin:${PE_PASSWORD} | jq '.filtered_entity_count' | tr -d \") + done + + + + # Done waiting, now let's create the payload for the objectstore. + # Get the variables from the cluster + # Payload for the _json_data so we get the data needed... 
+ _json_data='{"kind":"subnet"}' + CLUSTER_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user admin:${PE_PASSWORD} $_url_network | jq '.entities[].spec | select (.name=="Primary") | .cluster_reference.uuid' | tr -d \") + PRIM_NETWORK_UUID=$(curl -X POST -d $_json_data $CURL_HTTP_OPTS --user admin:${PE_PASSWORD} $_url_network | jq '.entities[] | select (.spec.name=="Primary") | .metadata.uuid' | tr -d \") + + BUCKETS_VIP="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.17" + BUCKETS_DNS_IP="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.16" + OBJECTS_NW_START="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.18" + OBJECTS_NW_END="${OCTET[0]}.${OCTET[1]}.${OCTET[2]}.21" + + # Create the payload URL + _json_data_oss='{"api_version":"3.0","metadata":{"kind":"objectstore"},"spec":{"name":"ntnx-objects","description":"NTNXLAB","resources":{"domain":"ntnxlab.local","cluster_reference":{"kind":"cluster","uuid":"' + _json_data_oss+=${CLUSTER_UUID} + _json_data_oss+='"},"buckets_infra_network_dns":"' + _json_data_oss+=${BUCKETS_DNS_IP} + _json_data_oss+='","buckets_infra_network_vip":"' + _json_data_oss+=${BUCKETS_VIP} + _json_data_oss+='","buckets_infra_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+=${PRIM_NETWORK_UUID} + _json_data_oss+='"},"client_access_network_reference":{"kind":"subnet","uuid":"' + _json_data_oss+=${PRIM_NETWORK_UUID} + _json_data_oss+='"},"aggregate_resources":{"total_vcpu_count":10,"total_memory_size_mib":32768,"total_capacity_gib":51200},"client_access_network_ipv4_range":{"ipv4_start":"' + _json_data_oss+=${OBJECTS_NW_START} + _json_data_oss+='","ipv4_end":"' + _json_data_oss+=${OBJECTS_NW_END} + _json_data_oss+='"}}}}' + + # Now we have the correct data in the payload, let's fire ti to the cluster + oss_create="https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores" + echo "curl ${CURL_HTTP_OPTS} -X POST --user admin:${PE_PASSWORD} -d ${_json_data_oss} ${oss_create})" + _response_oss_create=$(curl ${CURL_HTTP_OPTS} -X POST --user admin:${PE_PASSWORD} -d 
${_json_data_oss} ${oss_create} | jq '.metadata.uuid' | tr -d \") + if [[ -z ${_response_oss_create} ]]; then + echo "Failed to fire the script. Please check the cluster.." + else + echo "Create Objectstore has been fired...." + fi + fi + fi + else + echo "All good at PC ${PC_HOST}..." + fi + + + done + + + diff --git a/test/objects/test.json b/test/objects/test.json new file mode 100644 index 0000000..c3925e2 --- /dev/null +++ b/test/objects/test.json @@ -0,0 +1 @@ +{"api_version":"3.0","metadata":{"creation_time":"2020-03-16T05:25:52.000-07:00","kind":"objectstore","last_update_time":"2020-03-16T05:25:52.000-07:00","uuid":"c8fbbddc-b580-4a2d-736c-2de01a3a67b5"},"spec":{"deployment_version":"2.0","description":"NTNXLAB","name":"ntnx-objects","resources":{"aggregate_resources":{"total_capacity_gib":51200,"total_memory_size_mib":32768,"total_vcpu_count":10},"buckets_infra_network_dns":"10.55.10.16","buckets_infra_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"buckets_infra_network_vip":"10.55.10.17","client_access_network_ip_list":null,"client_access_network_ipv4_range":{"ipv4_end":"10.55.10.21","ipv4_start":"10.55.10.18"},"client_access_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"cluster_reference":{"kind":"cluster","uuid":"0005a0d3-1163-df13-0000-00000001957f"},"domain":"ntnxlab.local"}},"status":{"description":"NTNXLAB","name":"ntnx-objects","resources":{"aggregate_resources":{"total_capacity_gib":51200,"total_memory_size_mib":32768,"total_vcpu_count":10},"buckets_infra_network_dns":"10.55.10.16","buckets_infra_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8ed482"},"buckets_infra_network_vip":"10.55.10.17","client_access_network_ip_list":null,"client_access_network_ip_used_list":null,"client_access_network_ipv4_range":{"ipv4_end":"10.55.10.21","ipv4_start":"10.55.10.18"},"client_access_network_reference":{"kind":"subnet","uuid":"fa5bf140-6bb9-450f-a7de-b2eb7c8e
d482"},"cluster_reference":{"kind":"cluster","uuid":"0005a0d3-1163-df13-0000-00000001957f"},"domain":"ntnxlab.local"}}} From 92cf848a5b5827f4eeab285ca0ee75175a555969 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 16 Mar 2020 15:13:14 +0100 Subject: [PATCH 466/691] Updated the global.vars.sh Updated so it will set the correct OBJECTS_OFFLINE_REPO if we are out off GTS timeframe.. --- cluster.txt | 19 +++---------------- scripts/global.vars.sh | 16 ++++++++++------ test/objects/cluster.txt | 6 +++--- test/objects/object_airgap.sh | 5 ++--- 4 files changed, 18 insertions(+), 28 deletions(-) diff --git a/cluster.txt b/cluster.txt index 4c5422a..0019193 100644 --- a/cluster.txt +++ b/cluster.txt @@ -1,16 +1,3 @@ -10.38.203.37|techX2020!|willem@nutanix.com -10.38.204.37|techX2020!|willem@nutanix.com -10.38.205.37|techX2020!|willem@nutanix.com -10.38.206.37|techX2020!|willem@nutanix.com -10.38.207.37|techX2020!|willem@nutanix.com -10.38.208.37|techX2020!|willem@nutanix.com -10.38.209.37|techX2020!|willem@nutanix.com -10.38.210.37|techX2020!|willem@nutanix.com -10.38.211.37|techX2020!|willem@nutanix.com -10.38.212.37|techX2020!|willem@nutanix.com -10.38.213.37|techX2020!|willem@nutanix.com -10.38.217.37|techX2020!|willem@nutanix.com -10.38.218.37|techX2020!|willem@nutanix.com -10.38.216.37|techX2020!|willem@nutanix.com -10.38.215.37|techX2020!|willem@nutanix.com -10.38.214.37|techX2020!|willem@nutanix.com \ No newline at end of file +10.42.95.37|techX2020!|willem@nutanix.com +10.42.111.37|techX2020!|willem@nutanix.com +10.42.112.37|techX2020!|willem@nutanix.com diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4a00d1d..935c8d7 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -148,7 +148,11 @@ case "${OCTET[3]}" in esac # Stuff needed for object_store -OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' +if curl --output /dev/null --silent --head --fail "http://10.42.38.10/images"; then + 
OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' +else + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects/' +fi #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} NETWORK="${OCTET[0]}.${OCTET[1]}" @@ -208,7 +212,11 @@ case "${OCTET[0]}.${OCTET[1]}" in BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' #OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' - OBJECTS_OFFLINE_REPO='http://10.55.76.10' + if curl --output /dev/null --silent --head --fail "http://10.55.76.10"; then + OBJECTS_OFFLINE_REPO='http://10.55.76.10' + else + OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects/' + fi ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -246,8 +254,6 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' - #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' - OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -285,8 +291,6 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.38.10/images/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" - #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' - OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary if [[ ${OCTET[2]} -gt 169 ]]; then diff --git a/test/objects/cluster.txt b/test/objects/cluster.txt index 942386b..181f98a 100644 --- a/test/objects/cluster.txt +++ b/test/objects/cluster.txt @@ -34,7 +34,7 @@ 10.42.84.39|techX2020!|nathan@nutanix.com 
10.42.86.39|techX2020!|nathan@nutanix.com 10.42.94.39|techX2020!|nathan@nutanix.com -10.42.95.39|techX2020!|nathan@nutanix.com +#10.42.95.39|techX2020!|nathan@nutanix.com 10.42.99.39|techX2020!|nathan@nutanix.com 10.42.104.39|techX2020!|nathan@nutanix.com 10.42.108.39|techX2020!|nathan@nutanix.com @@ -56,8 +56,8 @@ 10.38.217.39|techX2020!|nathan@nutanix.com 10.38.218.39|techX2020!|nathan@nutanix.com 10.38.221.39|techX2020!|nathan@nutanix.com -10.42.111.39|techX2020!|nathan@nutanix.com -10.42.112.39|techX2020!|nathan@nutanix.com +#10.42.111.39|techX2020!|nathan@nutanix.com +#10.42.112.39|techX2020!|nathan@nutanix.com 10.55.1.39|techX2020!|nathan@nutanix.com 10.55.2.39|techX2020!|nathan@nutanix.com 10.55.4.39|techX2020!|nathan@nutanix.com diff --git a/test/objects/object_airgap.sh b/test/objects/object_airgap.sh index 3a77ee6..2014e74 100755 --- a/test/objects/object_airgap.sh +++ b/test/objects/object_airgap.sh @@ -29,7 +29,7 @@ for _cluster in $(cat cluster.txt | grep -v ^#) cmd+=';sleep 3;/usr/local/nutanix/cluster/bin/mspctl airgap --status | grep "\"enable\":true" | wc -l' # Fire the command on the PC of the cluster so we have the right Dark Site image pull for Objects - sshpass -e ssh nutanix@${PE_HOST} -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null $cmd + #sshpass -e ssh nutanix@${PE_HOST} -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null $cmd # See if we have an error on the cluster for the objects url="https://${PC_HOST}:9440/oss/api/nutanix/v3/groups" @@ -92,9 +92,8 @@ for _cluster in $(cat cluster.txt | grep -v ^#) _json_data_oss+=${OBJECTS_NW_END} _json_data_oss+='"}}}}' - # Now we have the correct data in the payload, let's fire ti to the cluster + # Now we have the correct data in the payload, let's fire to to the cluster oss_create="https://${PC_HOST}:9440/oss/api/nutanix/v3/objectstores" - echo "curl ${CURL_HTTP_OPTS} -X POST --user admin:${PE_PASSWORD} -d 
${_json_data_oss} ${oss_create})" _response_oss_create=$(curl ${CURL_HTTP_OPTS} -X POST --user admin:${PE_PASSWORD} -d ${_json_data_oss} ${oss_create} | jq '.metadata.uuid' | tr -d \") if [[ -z ${_response_oss_create} ]]; then echo "Failed to fire the script. Please check the cluster.." From c59cc61e580e91e120eb06f1a1a254a27037abf3 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 18 Mar 2020 07:46:23 +0100 Subject: [PATCH 467/691] Update object_airgap.sh --- test/objects/object_airgap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/objects/object_airgap.sh b/test/objects/object_airgap.sh index 2014e74..a8b9e5e 100755 --- a/test/objects/object_airgap.sh +++ b/test/objects/object_airgap.sh @@ -29,7 +29,7 @@ for _cluster in $(cat cluster.txt | grep -v ^#) cmd+=';sleep 3;/usr/local/nutanix/cluster/bin/mspctl airgap --status | grep "\"enable\":true" | wc -l' # Fire the command on the PC of the cluster so we have the right Dark Site image pull for Objects - #sshpass -e ssh nutanix@${PE_HOST} -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null $cmd + sshpass -e ssh nutanix@${PE_HOST} -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null $cmd # See if we have an error on the cluster for the objects url="https://${PC_HOST}:9440/oss/api/nutanix/v3/groups" From 9eb419020fbeab8151d0d1bd88f06e5ba3d3f6f5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 20 Mar 2020 10:39:53 -0700 Subject: [PATCH 468/691] updates for Era --- scripts/era_bootcamp.sh | 10 +++++----- scripts/lib.pe.sh | 5 +++++ 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 53fbc3f..dcc1dbc 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -20,11 +20,11 @@ case ${1} in export AUTH_SERVER='AutoAD' # Networking needs for Era Bootcamp - export NW2_NAME='EraManaged' - #export NW2_DHCP_START="${IPV4_PREFIX}.132" 
- #export NW2_DHCP_END="${IPV4_PREFIX}.219" - #export NW3_NAME='EraManaged' - #export NW3_VLAN=$((OCTET[2]*10+1)) + #export NW2_NAME='EraManaged' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW3_NAME='EraManaged' + export NW3_VLAN=$((OCTET[2]*10+1)) #export NW3_SUBNET="${IPV4_PREFIX}.129/25" args_required 'PE_HOST PC_LAUNCH' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 9ae5eb8..4df1159 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -593,6 +593,11 @@ function era_network_configure() { acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" fi + if [[ ! -z "${NW3_NAME}" ]]; then + log "Create EraManaged network: Name: ${NW3_NAME}, VLAN: ${NW3_VLAN}" + acli "net.create ${NW2_NAME} vlan=${NW3_VLAN}" + fi + fi } From cc3c79a158829c9ce8a25859675bfb8c642be11c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 20 Mar 2020 16:21:58 -0700 Subject: [PATCH 469/691] Update frame_bootcamp.sh --- scripts/frame_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 978783e..0616d39 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -166,6 +166,7 @@ case ${1} in ) export ISO_IMAGES=(\ FrameCCA-2.1.0.iso \ + FrameCCA-2.1.6.iso \ FrameGuestAgentInstaller_1.0.2.2_7930.iso \ Nutanix-VirtIO-1.1.5.iso \ ) From 54bf950cf9cc977d0411ce10acce81f55e4f2dc8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 27 Mar 2020 12:34:57 -0700 Subject: [PATCH 470/691] Fixes for Files Bootcamp --- scripts/files_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index 18288a9..9d1adca 100755 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -86,9 +86,9 @@ case ${1} in Windows2016.qcow2 \ Win10v1903.qcow2 \ WinToolsVM.qcow2 \ + CentOS7.qcow2 \ ) export ISO_IMAGES=(\ - 
Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ ) From 7563cec32cc98d0fce0482ae33887bcf42c1de46 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 1 Apr 2020 21:54:06 -0700 Subject: [PATCH 471/691] updates to test PC 5.16.1.2 --- scripts/global.vars.sh | 18 +++++++++--------- stage_workshop.sh | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 935c8d7..778e144 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.11.2.1' +PC_DEV_VERSION='5.16.1.2' PC_CURRENT_VERSION='5.11.2.1' PC_STABLE_VERSION='5.11.2' FILES_VERSION='3.6.1.2' @@ -150,7 +150,7 @@ esac # Stuff needed for object_store if curl --output /dev/null --silent --head --fail "http://10.42.38.10/images"; then OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' -else +else OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects/' fi #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' @@ -176,8 +176,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.6.1.2.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' @@ -214,13 +214,13 @@ case "${OCTET[0]}.${OCTET[1]}" in #OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' if curl --output /dev/null --silent --head --fail "http://10.55.76.10"; then 
OBJECTS_OFFLINE_REPO='http://10.55.76.10' - else + else OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects/' fi ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' @@ -256,8 +256,8 @@ case "${OCTET[0]}.${OCTET[1]}" in DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index 19cd2b3..c9c66e0 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -13,7 +13,7 @@ begin WORKSHOPS=(\ "Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ "SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ -"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ "Previous Bootcamp Staging (AOS 5.11.x/AHV PC 
5.11.2) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ From b9151fe9e3a8eeebed99921343a9e672d3350352 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 1 Apr 2020 22:11:19 -0700 Subject: [PATCH 472/691] Update stage_workshop.sh --- stage_workshop.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index c9c66e0..295fa72 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -16,8 +16,8 @@ WORKSHOPS=(\ "Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ "Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Development" \ +"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16.1.2) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16.1.2) = Development" \ "Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ @@ -46,7 +46,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" From d003ab0d4ca42906030b43abe5cc663e833feb8b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 2 Apr 2020 01:42:45 -0700 Subject: [PATCH 473/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 778e144..ea5c7c6 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -176,7 +176,7 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.6.1.2.json' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' From 9c30ca4322a1f0356b200007897e9e7d57b0d856 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 6 Apr 2020 10:15:42 -0700 Subject: [PATCH 474/691] updates for Priority Images --- scripts/citrix_bootcamp.sh | 8 ++++++-- scripts/era_bootcamp.sh | 6 +++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index eb7f5e2..4d9f722 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -80,13 +80,16 @@ case ${1} in export OBJECTS_NW_START="${IPV4_PREFIX}.18" export OBJECTS_NW_END="${IPV4_PREFIX}.21" + export _prio_images_arr=(\ + Windows2016.qcow2 \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + ) + export QCOW2_IMAGES=(\ - 
Windows2016.qcow2 \ Win10v1903.qcow2 \ WinToolsVM.qcow2 \ ) export ISO_IMAGES=(\ - Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ Nutanix-VirtIO-1.1.5.iso \ ) @@ -134,6 +137,7 @@ case ${1} in && calm_enable \ && lcm \ && pc_project \ + && priority_images \ && images \ && flow_enable \ && pc_cluster_img_import \ diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index dcc1dbc..e442bfa 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -75,8 +75,11 @@ case ${1} in export OBJECTS_NW_START="${IPV4_PREFIX}.18" export OBJECTS_NW_END="${IPV4_PREFIX}.21" + export _prio_images_arr=(\ + ERA-Server-build-1.2.1.qcow2 \ + ) + export QCOW2_IMAGES=(\ - ERA-Server-build-1.2.1.qcow2 \ MSSQL-2016-VM.qcow2 \ Windows2016.qcow2 \ CentOS7.qcow2 \ @@ -131,6 +134,7 @@ case ${1} in && calm_enable \ && lcm \ && pc_project \ + && priority_images \ && images \ && flow_enable \ && pc_cluster_img_import \ From d8ad2993b3b29cdfcfd68f44a31749e9110340f5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 7 Apr 2020 11:44:50 -0700 Subject: [PATCH 475/691] PC 5.16.1.2 Updates --- stage_workshop.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 295fa72..65628ad 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -13,20 +13,20 @@ begin WORKSHOPS=(\ "Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ "SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ -"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ "Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ "In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16.1.2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16.1.2) = Development" \ -"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ +"Basic / API Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ 
+"Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ +"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ +"Files Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ +"Calm Workshop (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ +"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ +"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ +"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ -"Basic / API Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ -"Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ -"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ -"Files Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ -"Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ -"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { From d5f137cd0063f30df053e694298d389e9e0922b5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 7 Apr 2020 13:08:16 -0700 Subject: [PATCH 476/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4df1159..6075802 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -595,7 +595,7 @@ function era_network_configure() { if [[ ! 
-z "${NW3_NAME}" ]]; then log "Create EraManaged network: Name: ${NW3_NAME}, VLAN: ${NW3_VLAN}" - acli "net.create ${NW2_NAME} vlan=${NW3_VLAN}" + acli "net.create ${NW3_NAME} vlan=${NW3_VLAN}" fi fi From aeae76b548e45737423564ff65a8968cc6de99d8 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 8 Apr 2020 09:51:16 +0200 Subject: [PATCH 477/691] Update basic_bootcamp.sh Added the Citrix Iso --- scripts/basic_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/basic_bootcamp.sh b/scripts/basic_bootcamp.sh index 125cb88..6343890 100755 --- a/scripts/basic_bootcamp.sh +++ b/scripts/basic_bootcamp.sh @@ -77,6 +77,7 @@ case ${1} in export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ Windows2016.iso \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) run_once From 0e09f39274043e4d9fe603839b4ab0971b999a0c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 8 Apr 2020 14:42:54 -0700 Subject: [PATCH 478/691] Update frame_bootcamp.sh --- scripts/frame_bootcamp.sh | 36 ++---------------------------------- 1 file changed, 2 insertions(+), 34 deletions(-) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 0616d39..ce8b41d 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -24,75 +24,42 @@ case ${1} in # Networking needs for Frame Bootcamp export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.139" + export NW2_DHCP_END="${IPV4_PREFIX}.149" export NW2_DHCP_START2="${IPV4_PREFIX}.250" export NW2_DHCP_END2="${IPV4_PREFIX}.253" export USERNW01_NAME='User01-Network' export USERNW01_VLAN=$((OCTET[2]*10+1)) - export USERNW01_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW01_DHCP_START="${IPV4_PREFIX}.140" - export USERNW01_DHCP_END="${IPV4_PREFIX}.149" export USERNW02_NAME='User02-Network' export USERNW02_VLAN=$((OCTET[2]*10+1)) - export USERNW02_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW02_DHCP_START="${IPV4_PREFIX}.150" - export USERNW02_DHCP_END="${IPV4_PREFIX}.159" export 
USERNW03_NAME='User03-Network' export USERNW03_VLAN=$((OCTET[2]*10+1)) - export USERNW03_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW03_DHCP_START="${IPV4_PREFIX}.160" - export USERNW03_DHCP_END="${IPV4_PREFIX}.169" export USERNW04_NAME='User04-Network' export USERNW04_VLAN=$((OCTET[2]*10+1)) - export USERNW04_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW04_DHCP_START="${IPV4_PREFIX}.170" - export USERNW04_DHCP_END="${IPV4_PREFIX}.179" export USERNW05_NAME='User05-Network' export USERNW05_VLAN=$((OCTET[2]*10+1)) - export USERNW05_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW05_DHCP_START="${IPV4_PREFIX}.180" - export USERNW05_DHCP_END="${IPV4_PREFIX}.189" export USERNW06_NAME='User06-Network' export USERNW06_VLAN=$((OCTET[2]*10+1)) - export USERNW06_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW06_DHCP_START="${IPV4_PREFIX}.190" - export USERNW06_DHCP_END="${IPV4_PREFIX}.199" export USERNW07_NAME='User07-Network' export USERNW07_VLAN=$((OCTET[2]*10+1)) - export USERNW07_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW07_DHCP_START="${IPV4_PREFIX}.200" - export USERNW07_DHCP_END="${IPV4_PREFIX}.209" export USERNW08_NAME='User08-Network' export USERNW08_VLAN=$((OCTET[2]*10+1)) - export USERNW08_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW08_DHCP_START="${IPV4_PREFIX}.210" - export USERNW08_DHCP_END="${IPV4_PREFIX}.219" export USERNW09_NAME='User09-Network' export USERNW09_VLAN=$((OCTET[2]*10+1)) - export USERNW09_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW09_DHCP_START="${IPV4_PREFIX}.220" - export USERNW09_DHCP_END="${IPV4_PREFIX}.229" export USERNW10_NAME='User10-Network' export USERNW10_VLAN=$((OCTET[2]*10+1)) - export USERNW10_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW10_DHCP_START="${IPV4_PREFIX}.230" - export USERNW10_DHCP_END="${IPV4_PREFIX}.239" export USERNW11_NAME='User11-Network' export USERNW11_VLAN=$((OCTET[2]*10+1)) - export USERNW11_SUBNET="${IPV4_PREFIX}.129/25" - export USERNW11_DHCP_START="${IPV4_PREFIX}.240" - export 
USERNW11_DHCP_END="${IPV4_PREFIX}.249" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -217,6 +184,7 @@ case ${1} in && pc_project \ && images \ && flow_enable \ + && seedPC \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 5d1f4027c080a38ede0484da269a15dd79f924e4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 8 Apr 2020 20:03:24 -0700 Subject: [PATCH 479/691] Update global.vars.sh --- scripts/global.vars.sh | 37 +++++++++++++++++-------------------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ea5c7c6..f645e7a 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,8 +3,8 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='5.16.1.2' -PC_CURRENT_VERSION='5.11.2.1' -PC_STABLE_VERSION='5.11.2' +PC_CURRENT_VERSION='5.16.1.2' +PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.6.1.2' FILE_ANALYTICS_VERSION='2.1.0' NTNX_INIT_PASSWORD='nutanix/4u' @@ -178,10 +178,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.json' - PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' + 
PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.1.2.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' @@ -196,7 +196,6 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.55.76.10/' \ 'http://10.55.251.38/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) @@ -208,7 +207,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.55.251.38/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) - PC_DATA='http://10.55.76.10/seedPC.zip' + PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' #OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' @@ -221,10 +220,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' + 
PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' @@ -239,7 +238,6 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.38.10/images/' \ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) @@ -251,17 +249,17 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) - PC_DATA='http://10.42.38.10/images/seedPC.zip' + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' + 
PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' @@ -276,7 +274,6 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.38.10/images/' \ 'http://10.42.194.11/workshop_staging/' \ 'https://s3.amazonaws.com/get-ahv-images/' \ ) @@ -288,7 +285,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) - PC_DATA='http://10.42.38.10/images/seedPC.zip' + PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" From 7f6652deedbbc212195a297b0a0d2694f1b9f09f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 8 Apr 2020 20:08:40 -0700 Subject: [PATCH 480/691] PC 5.16.x Updates --- scripts/calm_bootcamp.sh | 2 +- stage_workshop.sh | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/scripts/calm_bootcamp.sh b/scripts/calm_bootcamp.sh index 58c3916..464e48f 100755 --- a/scripts/calm_bootcamp.sh +++ b/scripts/calm_bootcamp.sh @@ -18,7 +18,7 @@ case ${1} in PE | pe ) . 
lib.pe.sh - export AUTH_SERVER='AutoAD' + export AUTH_SERVER='AutoDC' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/stage_workshop.sh b/stage_workshop.sh index 65628ad..ab325be 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,20 +11,20 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ -"SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.11.2.1) = Current" \ -"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ -"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11+/AHV PC 5.16.1.2) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11+/AHV PC 5.16.1.2) = Development" \ -"Basic / API Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Private Cloud Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Files Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Calm Workshop (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Frame Bootcamp Staging (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Citrix Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Development" \ -"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Current" \ +"Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ +"Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = 
Current" \ +"Databases with Era Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +"Frame Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +#"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed From 7e5649196b54e58e9cf098ef9286d8a20a396eea Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 8 Apr 2020 20:10:33 -0700 Subject: [PATCH 481/691] Update stage_workshop.sh --- stage_workshop.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index ab325be..dfe4da7 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -15,8 +15,6 @@ WORKSHOPS=(\ "SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Databases with Era Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ @@ -24,6 +22,8 @@ WORKSHOPS=(\ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Frame Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"In Development Bootcamp Staging (AOS 
5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ #"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ @@ -48,9 +48,9 @@ function stage_clusters() { # TODO: make WORKSHOPS and map a JSON configuration file? if (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 7627dbbf081910c335f51c5dc3cf1254ea333010 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 8 Apr 2020 20:17:58 -0700 Subject: [PATCH 482/691] Update global.vars.sh --- scripts/global.vars.sh | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f645e7a..cd3fddc 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -67,7 +67,6 @@ QCOW2_IMAGES=(\ hycu-3.5.0-6253.qcow2 \ VeeamAvailability_1.0.457.vmdk \ move3.2.0.qcow2 \ - AutoXD.qcow2 \ ) ISO_IMAGES=(\ CentOS7.iso \ @@ -75,8 +74,6 @@ ISO_IMAGES=(\ Windows2012R2.iso \ Windows10.iso \ Nutanix-VirtIO-1.1.5.iso \ - SQLServer2014SP3.iso \ - XenApp_and_XenDesktop_7_18.iso \ VeeamBR_9.5.4.2615.Update4.iso \ ) @@ -148,11 +145,6 @@ case "${OCTET[3]}" in esac # Stuff needed for object_store -if curl --output /dev/null --silent --head --fail "http://10.42.38.10/images"; then - OBJECTS_OFFLINE_REPO='http://10.42.38.10/images' -else - 
OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects/' -fi #OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' VLAN=${OCTET[2]} NETWORK="${OCTET[0]}.${OCTET[1]}" @@ -210,12 +202,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' - #OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' - if curl --output /dev/null --silent --head --fail "http://10.55.76.10"; then - OBJECTS_OFFLINE_REPO='http://10.55.76.10' - else - OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects/' - fi + OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' @@ -238,8 +225,8 @@ case "${OCTET[0]}.${OCTET[1]}" in #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ - 'http://10.42.194.11/workshop_staging/' \ - 'https://s3.amazonaws.com/get-ahv-images/' \ + 'http://10.42.194.11/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ ) AUTODC_REPOS=(\ 'http://10.42.194.11/workshop_staging/AutoDC2.qcow2' \ @@ -252,6 +239,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' @@ -288,6 +276,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' # 
If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary if [[ ${OCTET[2]} -gt 169 ]]; then From 9a5c7dfbfa7ec8feaceaf90ad7e7070bd2b3f4e1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 8 Apr 2020 20:45:29 -0700 Subject: [PATCH 483/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index dfe4da7..6337ab8 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -20,7 +20,7 @@ WORKSHOPS=(\ "Databases with Era Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Frame Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ From d15703001aba7e604f69ef3567912d20a3d559f8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 10 Apr 2020 12:32:08 -0700 Subject: [PATCH 484/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 6337ab8..76cddc5 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -17,7 +17,7 @@ WORKSHOPS=(\ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Databases with Era Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Files Bootcamp 
(AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ From d48c0bfc457c06fec6705f050a71770ed8b31069 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 10 Apr 2020 12:50:44 -0700 Subject: [PATCH 485/691] Updates for Priority IMages --- scripts/era_bootcamp.sh | 4 ++-- scripts/global.vars.sh | 7 +++++++ scripts/lib.common.sh | 21 ++++++++------------- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index e442bfa..7ed1421 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -76,8 +76,8 @@ case ${1} in export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ - ERA-Server-build-1.2.1.qcow2 \ - ) + ERA-Server-build-1.2.1.qcow2 \ + ) export QCOW2_IMAGES=(\ MSSQL-2016-VM.qcow2 \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index cd3fddc..31bf671 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -55,6 +55,13 @@ VeeamServer='' # ################################## +_prio_images_arr=(\ + ERA-Server-build-1.2.1.qcow2 \ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ +) + QCOW2_IMAGES=(\ CentOS7.qcow2 \ Windows2016.qcow2 \ diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index a808d27..e84a2ed 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -454,22 +454,17 @@ EOF function priority_images(){ - local _prio_images_arr=(\ - ERA-Server-build-1.2.1.qcow2 \ - Windows2016.qcow2 \ - CentOS7.qcow2 \ - Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ - ) + ) local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # Set the correct High Perf FileServer - if [[ ${OCTET[1]} == '42' ]] || [[ ${OCTET[1]} == '38' ]]; then - SOURCE_URL="10.42.38.10/images" - else - SOURCE_URL="10.55.76.10" - fi + #if [[ 
${OCTET[1]} == '42' ]] || [[ ${OCTET[1]} == '38' ]]; then + # SOURCE_URL="10.42.38.10/images" + #else + # SOURCE_URL="10.55.76.10" + #fi - log "Grabbing the priority files from the ${SOURCE_URL} fileserver..." + log "Grabbing the priority files from the ${QCOW2_REPOS} fileserver..." for _image in "${_prio_images_arr[@]}"; do if [[ ${_image} == *"iso"* ]]; then @@ -486,7 +481,7 @@ function priority_images(){ "body":{"spec": {"name":"${_image}","description":"${_image}","resources":{ "image_type":"${DISK_TYPE}", - "source_uri":"http://${SOURCE_URL}/${_image}"}}, + "source_uri":"${QCOW2_REPOS}/${_image}"}}, "metadata":{"kind":"image"},"api_version":"3.1.0"}}],"api_version":"3.0"} EOF ) From 078bdf2419236cff572a45c5d88e231b55656a04 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 10 Apr 2020 12:58:12 -0700 Subject: [PATCH 486/691] Update lib.common.sh --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index e84a2ed..76d51cd 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -454,7 +454,7 @@ EOF function priority_images(){ - ) + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # Set the correct High Perf FileServer From e768c041925371bfa378830a73a8c25375c3600f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 10 Apr 2020 18:40:39 -0700 Subject: [PATCH 487/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 6075802..fece8e8 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -586,11 +586,12 @@ function era_network_configure() { acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" - # NW2 is EraManaged, so we do not need DHCP + # so we do not need DHCP 
if [[ ! -z "${NW2_NAME}" ]]; then log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}" acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" fi if [[ ! -z "${NW3_NAME}" ]]; then From 2b190b9479defd37c61916296bb565d80d50ed55 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 15 Apr 2020 09:45:44 +0200 Subject: [PATCH 488/691] Update frame_bootcamp.sh Change to the usernetworks as the 10.38.170 and above have a different vlan need! --- scripts/frame_bootcamp.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index ce8b41d..5c5d7f0 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -29,37 +29,37 @@ case ${1} in export NW2_DHCP_END2="${IPV4_PREFIX}.253" export USERNW01_NAME='User01-Network' - export USERNW01_VLAN=$((OCTET[2]*10+1)) + export USERNW01_VLAN=${NW2_VLAN} export USERNW02_NAME='User02-Network' - export USERNW02_VLAN=$((OCTET[2]*10+1)) + export USERNW02_VLAN=${NW2_VLAN} export USERNW03_NAME='User03-Network' - export USERNW03_VLAN=$((OCTET[2]*10+1)) + export USERNW03_VLAN=${NW2_VLAN} export USERNW04_NAME='User04-Network' - export USERNW04_VLAN=$((OCTET[2]*10+1)) + export USERNW04_VLAN=${NW2_VLAN} export USERNW05_NAME='User05-Network' - export USERNW05_VLAN=$((OCTET[2]*10+1)) + export USERNW05_VLAN=${NW2_VLAN} export USERNW06_NAME='User06-Network' - export USERNW06_VLAN=$((OCTET[2]*10+1)) + export USERNW06_VLAN=${NW2_VLAN} export USERNW07_NAME='User07-Network' - export USERNW07_VLAN=$((OCTET[2]*10+1)) + export USERNW07_VLAN=${NW2_VLAN} export USERNW08_NAME='User08-Network' - export USERNW08_VLAN=$((OCTET[2]*10+1)) + export USERNW08_VLAN=${NW2_VLAN} export USERNW09_NAME='User09-Network' - export 
USERNW09_VLAN=$((OCTET[2]*10+1)) + export USERNW09_VLAN=${NW2_VLAN} export USERNW10_NAME='User10-Network' - export USERNW10_VLAN=$((OCTET[2]*10+1)) + export USERNW10_VLAN=${NW2_VLAN} export USERNW11_NAME='User11-Network' - export USERNW11_VLAN=$((OCTET[2]*10+1)) + export USERNW11_VLAN=${NW2_VLAN} args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From eb13aaf815e188a7c9806aeb8dcbe03614318a25 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 16:03:07 -0700 Subject: [PATCH 489/691] Updates for new Era bootcamps --- scripts/era_mssql_bootcamp.sh | 165 +++++++++++++++++++++++++++ scripts/era_oracle_bootcamp.sh | 166 +++++++++++++++++++++++++++ scripts/era_postgres_bootcamp.sh | 166 +++++++++++++++++++++++++++ scripts/files_bootcamp.sh | 1 + scripts/global.vars.sh | 37 +++++- scripts/lib.common.sh | 188 ++++++++++++++++++++++++++++++- scripts/lib.pc.sh | 93 +++++++++++++++ scripts/lib.pe.sh | 11 ++ stage_workshop.sh | 18 +++ 9 files changed, 841 insertions(+), 4 deletions(-) create mode 100755 scripts/era_mssql_bootcamp.sh create mode 100755 scripts/era_oracle_bootcamp.sh create mode 100755 scripts/era_postgres_bootcamp.sh diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh new file mode 100755 index 0000000..65e951d --- /dev/null +++ b/scripts/era_mssql_bootcamp.sh @@ -0,0 +1,165 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + export AUTH_SERVER='AutoAD' + # Networking needs for Era Bootcamp + #export NW2_NAME='EraManaged' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW3_NAME='EraManaged' + export NW3_VLAN=$((OCTET[2]*10+1)) + #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && create_era_container \ + && era_network_configure\ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . 
lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export _prio_images_arr=(\ + ERA-Server-build-1.2.1.qcow2 \ + ) + + export QCOW2_IMAGES=(\ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && priority_images \ + && images \ + && flow_enable \ + && pc_cluster_img_import \ + && upload_era_calm_blueprint \ + && sleep 30 \ + && deploy_mssql \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? 
== 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh new file mode 100755 index 0000000..1379a1d --- /dev/null +++ b/scripts/era_oracle_bootcamp.sh @@ -0,0 +1,166 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoAD' + # Networking needs for Era Bootcamp + #export NW2_NAME='EraManaged' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW3_NAME='EraManaged' + export NW3_VLAN=$((OCTET[2]*10+1)) + #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && create_era_container \ + && era_network_configure\ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... 
${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export _prio_images_arr=(\ + ERA-Server-build-1.2.1.qcow2 \ + ) + + export QCOW2_IMAGES=(\ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && priority_images \ + && images \ + && flow_enable \ + && pc_cluster_img_import \ + && upload_era_calm_blueprint \ + && sleep 30 \ + && deploy_oracle \ + && upload_oracle_patch_images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh new file mode 100755 index 0000000..cc18eba --- /dev/null +++ b/scripts/era_postgres_bootcamp.sh @@ -0,0 +1,166 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + export AUTH_SERVER='AutoAD' + # Networking needs for Era Bootcamp + #export NW2_NAME='EraManaged' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW3_NAME='EraManaged' + export NW3_VLAN=$((OCTET[2]*10+1)) + #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && create_era_container \ + && era_network_configure\ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . 
lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export _prio_images_arr=(\ + ERA-Server-build-1.2.1.qcow2 \ + ) + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && priority_images \ + && images_era_bootcamp \ + && flow_enable \ + && pc_cluster_img_import \ + && upload_era_calm_blueprint \ + && sleep 30 \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? 
== 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index 9d1adca..199f6bd 100755 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -86,6 +86,7 @@ case ${1} in Windows2016.qcow2 \ Win10v1903.qcow2 \ WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ CentOS7.qcow2 \ ) export ISO_IMAGES=(\ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 31bf671..5ee1cb5 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -13,6 +13,7 @@ SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" STORAGE_POOL='SP01' STORAGE_DEFAULT='Default' STORAGE_IMAGES='Images' +STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 PrismOpsServer='PrismProLabUtilityServer' @@ -32,9 +33,41 @@ SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserK SSH_OPTS+=' -q' # -v' #################################################### -# +# Era Bootcamps VARs +################################################### + +MSSQL_SourceVM="Win2016SQLSource" +MSSQL_SourceVM_Image="Win2016SQLSource" +MSSQL_SourceVM_Image1="MSSQL_1" +MSSQL_SourceVM_Image2="MSSQL_2" + +Oracle_SourceVM="Oracle12cSource" +Oracle_SourceVM_BootImage="12c_bootdisk" +Oracle_SourceVM_Image1="12c_disk1" +Oracle_SourceVM_Image2="12c_disk2" +Oracle_SourceVM_Image3="12c_disk3" +Oracle_SourceVM_Image4="12c_disk4" +Oracle_SourceVM_Image5="12c_disk5" +Oracle_SourceVM_Image6="12c_disk6" +Oracle_SourceVM_Image7="" +Oracle_SourceVM_Image8="" +Oracle_SourceVM_Image9="" +Oracle_SourceVM_Image10="" + +Oracle_Patch_BootImage="19c_bootdisk" +Oracle_Patch_Image1="19c_disk1" +Oracle_Patch_Image2="19c_disk2" +Oracle_Patch_Image3="19c_disk3" +Oracle_Patch_Image4="19c_disk4" +Oracle_Patch_Image5="19c_disk5" +Oracle_Patch_Image6="19c_disk6" 
+Oracle_Patch_Image7="19c_disk7" +Oracle_Patch_Image8="19c_disk8" +Oracle_Patch_Image9="19c_disk9" +Oracle_Patch_Image10="" + +#################################################### # 3rd Party images used at GTS or Add-On Labs -# ################################################### #Peer Software PeerMgmtServer='Windows2016-PeerMgmt-18feb20' diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 76d51cd..ad4b536 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -269,7 +269,8 @@ function finish() { } ################################################################################## - +# Images install +################################################################################## function images() { # https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html @@ -449,8 +450,9 @@ EOF } -################################################################################## +############################################################################################### # Priority Images that need to be uploaded and controlled before we move to the mass upload +############################################################################################### function priority_images(){ @@ -492,6 +494,188 @@ EOF } +################################################################################## +# Images install for Era Bootcamp +################################################################################## + +function images_era_bootcamp() { + # https://portal.nutanix.com/#/page/docs/details?targetId=Command-Ref-AOS-v59:acl-acli-image-auto-r.html + local _cli='nuclei' + local _command + local _http_body + local _image + local _image_type + local _name + local _source='source_uri' + local _test + +####################################### +# For doing ISO IMAGES +####################################### + +for _image in "${ISO_IMAGES[@]}" ; do + + # log "DEBUG: ${_image} image.create..." 
+ if [[ ${_cli} == 'nuclei' ]]; then + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep -i complete \ + | grep "${_image}") + #else + # _test=$(source /etc/profile.d/nutanix_env.sh \ + # && ${_cli} image.list 2>&1 \ + # | grep "${_image}") + fi + + if [[ ! -z ${_test} ]]; then + log "Skip: ${_image} already complete on cluster." + else + _command='' + _name="${_image}" + + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then + log 'Bypass multiple repo source checks...' + SOURCE_URL="${_image}" + else + repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! + fi + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." + # exit ${_error} + fi + + # TODO:0 TOFIX: acs-centos ugly override for today... + if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then + _name=acs-centos + fi + + if [[ ${_cli} == 'acli' ]]; then + _image_type='kIsoImage' + _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ + container=${STORAGE_ERA} architecture=kX86_64 wait=true" + else + _command+=" name=${_name} description=\"${_image}\"" + fi + + if [[ ${_cli} == 'nuclei' ]]; then + _http_body=$(cat <&1 & + if (( $? != 0 )); then + log "Warning: Image submission: $?. Continuing..." + #exit 10 + fi + + if [[ ${_cli} == 'nuclei' ]]; then + log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" + log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" + fi + fi + fi + +done + +####################################### +# For doing Disk IMAGES +####################################### + + for _image in "${QCOW2_IMAGES[@]}" ; do + + # log "DEBUG: ${_image} image.create..." + if [[ ${_cli} == 'nuclei' ]]; then + _test=$(source /etc/profile.d/nutanix_env.sh \ + && ${_cli} image.list 2>&1 \ + | grep -i complete \ + | grep "${_image}") + + fi + + if [[ ! 
-z ${_test} ]]; then + log "Skip: ${_image} already complete on cluster." + else + _command='' + _name="${_image}" + + if (( $(echo "${_image}" | grep -i -e '^http' -e '^nfs' | wc -l) )); then + log 'Bypass multiple repo source checks...' + SOURCE_URL="${_image}" + else + repo_source QCOW2_REPOS[@] "${_image}" # IMPORTANT: don't ${dereference}[array]! + fi + + if [[ -z "${SOURCE_URL}" ]]; then + _error=30 + log "Warning ${_error}: didn't find any sources for ${_image}, continuing..." + # exit ${_error} + fi + + # TODO:0 TOFIX: acs-centos ugly override for today... + if (( $(echo "${_image}" | grep -i 'acs-centos' | wc -l ) > 0 )); then + _name=acs-centos + fi + + if [[ ${_cli} == 'acli' ]]; then + _image_type='kDiskImage' + _command+=" ${_name} annotation=${_image} image_type=${_image_type} \ + container=${STORAGE_ERA} architecture=kX86_64 wait=true" + else + _command+=" name=${_name} description=\"${_image}\"" + fi + + if [[ ${_cli} == 'nuclei' ]]; then + _http_body=$(cat <&1 & + if (( $? != 0 )); then + log "Warning: Image submission: $?. Continuing..." 
+ #exit 10 + fi + + if [[ ${_cli} == 'nuclei' ]]; then + log "NOTE: image.uuid = RUNNING, but takes a while to show up in:" + log "TODO: ${_cli} image.list, state = COMPLETE; image.list Name UUID State" + fi + fi + fi + + done + +} + ################################################################################## function log() { diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index cec0b66..46ddc0f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -881,6 +881,99 @@ EOF fi } +######################################################################################################################################### +# Routine to Create Era Bootcamp PreProvisioned MSSQL Server +######################################################################################################################################### + +function deploy_mssql() { + + num_sql_vms=3 + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${MSSQL_SourceVM_Image} | wc --lines) == 0 )); then + log "Import ${MSSQL_SourceVM_Image} image from ${QCOW2_REPOS}..." + acli image.create ${MSSQL_SourceVM_Image} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image}.qcow2" + acli image.create ${MSSQL_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image1}.qcow2" + acli image.create ${MSSQL_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image2}.qcow2" + else + log "Image found, assuming ready. Skipping ${MSSQL_SourceVM} import." 
+ fi + + echo "## SQLVM_Creation_INPROGRESS ##" + acli "vm.create ${MSSQL_SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" + acli "vm.disk_create ${MSSQL_SourceVM} clone_from_image=${MSSQL_SourceVM_Image}" + acli "vm.nic_create ${MSSQL_SourceVM} network=${NW2_NAME}" + echo "## ${MSSQL_SourceVM} - Powering On ##" + acli "vm.on ${MSSQL_SourceVM}" + echo "## SQLVM_Creation_COMPLETE ##" + + #echo "## SQLVM_Clone_Creation_INPROGRESS ##" + #acli "vm.clone $MSSQL_SourceVM_User[01..$num_sql_vms] clone_from_vm=${MSSQL_SourceVM}" + #echo "## SQLVM_Clone_Creation_COMPLETE ##" + + fi +} + +######################################################################################################################################### +# Routine to Create Era Bootcamp PreProvisioned Oracle Server +######################################################################################################################################### + +function deploy_oracle() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_SourceVM_BootImage} | wc --lines) == 0 )); then + log "Import ${Oracle_SourceVM_BootImage} image from ${QCOW2_REPOS}..." 
+ acli image.create ${Oracle_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_BootImage}.qcow2" + acli image.create ${Oracle_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image1}.qcow2" + acli image.create ${Oracle_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image2}.qcow2" + acli image.create ${Oracle_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image3}.qcow2" + acli image.create ${Oracle_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image4}.qcow2" + acli image.create ${Oracle_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image5}.qcow2" + acli image.create ${Oracle_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image6}.qcow2" + else + log "Image found, assuming ready. Skipping ${Oracle_SourceVM} import." 
+ fi + + echo "## Oracle12cVM_Creation_INPROGRESS ##" + acli "vm.create ${Oracle_SourceVM} memory=32000M num_cores_per_vcpu=2 num_vcpus=2" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_BootImage}" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image1}" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image2}" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image3}" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image4}" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image5}" + acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image6}" + acli "vm.nic_create ${Oracle_SourceVM} network=${NW2_NAME}" + echo "## ${Oracle_SourceVM} - Powering On ##" + acli "vm.on ${Oracle_SourceVM}" + echo "### Oracle12cVM_Creation_COMPLETE ##" + + fi + +} + +######################################################################################################################################### +# Routine to Upload Era Bootcamp Patch images for Oracle +######################################################################################################################################### + +function upload_oracle_patch_images() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_Patch_BootImage} | wc --lines) == 0 )); then + log "Import ${Oracle_Patch_BootImage} image from ${QCOW2_REPOS}..." 
+ acli image.create ${Oracle_Patch_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_BootImage}.qcow2" + acli image.create ${Oracle_Patch_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image1}.qcow2" + acli image.create ${Oracle_Patch_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image2}.qcow2" + acli image.create ${Oracle_Patch_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image3}.qcow2" + acli image.create ${Oracle_Patch_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image4}.qcow2" + acli image.create ${Oracle_Patch_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image5}.qcow2" + acli image.create ${Oracle_Patch_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image6}.qcow2" + acli image.create ${Oracle_Patch_Image7} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image7}.qcow2" + acli image.create ${Oracle_Patch_Image8} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image8}.qcow2" + acli image.create ${Oracle_Patch_Image9} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image9}.qcow2" + else + log "Image found, assuming ready. Skipping ${Oracle_Patch_BootImage} import." 
+ fi + +} + ############################################################################################################################################################################### # Routine to Create a Project in the Calm part ############################################################################################################################################################################### diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index fece8e8..54ca092 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1063,6 +1063,17 @@ function pc_destroy() { done } +############################################################################################################################################################################### +# Routine create the Era Storage container for the Era Bootcamps. +############################################################################################################################################################################### + +function create_era_container() { + + log "Creating Era Storage Container" + ncli container create name="${STORAGE_ERA}" rf=2 sp-name="${STORAGE_POOL}" enable-compression=true compression-delay=60 + +} + ############################################################################################################################################################################### # Routine to deploy the Peer Management Center ############################################################################################################################################################################### diff --git a/stage_workshop.sh b/stage_workshop.sh index 76cddc5..9dca8cd 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -24,6 +24,9 @@ WORKSHOPS=(\ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 
5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ +"Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ +"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ +"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ #"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ @@ -81,6 +84,21 @@ function stage_clusters() { _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Databases Era with MSSQL Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='era_mssql_bootcamp.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i "^Databases Era with Oracle Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='era_oracle_bootcamp.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i "^Databases Era with Postgres Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='era_postgres_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Files" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='files_bootcamp.sh' From 5ea701a909a75ebc5aa79be244087c4742e6dd2e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 16:46:07 -0700 Subject: [PATCH 490/691] Prism Ops Testing on 5.17 --- scripts/dev_privatecloud_bootcamp.sh | 183 +++++++++++++++++++++++++++ scripts/global.vars.sh | 14 +- stage_workshop.sh | 9 +- 3 files changed, 197 insertions(+), 9 deletions(-) create mode 100755 scripts/dev_privatecloud_bootcamp.sh diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh new file mode 100755 index 0000000..8c224a9 --- /dev/null +++ b/scripts/dev_privatecloud_bootcamp.sh @@ -0,0 +1,183 @@ +#!/usr/bin/env bash + #-x + 
+#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + #export PC_DEV_VERSION='5.10.2' + #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + #export FILES_VERSION='3.2.0.1' + #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + + export AUTH_SERVER='AutoAD' + export PrismOpsServer='PrismOpsServer517' + export SeedPC='seedPC517.zp' + + export _external_nw_name="${1}" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 + + if (( $? 
== 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + veeam/VeeamAHVProxy2.0.404.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + veeam/VBR_10.0.0.4442.iso \ + ) + + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . 
global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && seedPC \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 5ee1cb5..3ed3ab8 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.16.1.2' +PC_DEV_VERSION='5.17' PC_CURRENT_VERSION='5.16.1.2' PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.6.1.2' @@ -208,8 +208,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' @@ -245,8 +245,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' 
PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -282,8 +282,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index 9dca8cd..cfc6b90 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -22,7 +22,7 @@ WORKSHOPS=(\ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ +"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Development" \ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ @@ -49,7 +49,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.17" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" @@ -119,6 +119,11 @@ function stage_clusters() { _pe_launch='frame_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^In Development Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='dev_privatecloud_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='ts2020.sh' From b7a8024434f5038605bb51a8e7379758016a704d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 18:01:23 -0700 Subject: [PATCH 491/691] Updates for BLR --- scripts/global.vars.sh | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 3ed3ab8..c9f8336 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -323,6 +323,43 @@ case "${OCTET[0]}.${OCTET[1]}" in NW2_VLAN=$((OCTET[2]*10+3)) fi ;; + 10.136 ) # HPOC us-west = BLR + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' + PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' + PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' + PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + 
FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.6.1.2.json' + FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' + FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' + JQ_REPOS=(\ + 'http://10.136.239.13/workshop_staging/jq-linux64.dms' \ + 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ + #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ + ) + SSHPASS_REPOS=(\ + 'http://10.136.239.131/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ + ) + QCOW2_REPOS=(\ + 'http://10.136.239.13/workshop_staging/' \ + 'https://s3.amazonaws.com/get-ahv-images/' \ + ) + AUTODC_REPOS=(\ + 'http://10.136.239.13/workshop_staging/AutoDC2.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ + ) + AUTOAD_REPOS=(\ + 'http://10.136.239.13/workshop_staging/AutoAD.qcow2' \ + 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ + ) + PC_DATA='http://10.136.239.13/workshop_staging/seedPC.zip' + BLUEPRINT_URL='http:/10.136.239.13/workshop_staging/CalmBlueprints/' + DNS_SERVERS='10.136.239.10,10.136.239.11' + OBJECTS_OFFLINE_REPO='http://10.136.239.13/workshop_staging/objects' + ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR JQ_REPOS=(\ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From f412c551a98c936942e1e845bc8932957a14d1cc Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 20:30:01 -0700 Subject: [PATCH 492/691] Updates for sleep --- scripts/era_mssql_bootcamp.sh | 1 + scripts/era_oracle_bootcamp.sh | 1 + scripts/era_postgres_bootcamp.sh | 1 + 3 files changed, 3 insertions(+) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 
65e951d..9ea8734 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -131,6 +131,7 @@ case ${1} in ssp_auth \ && calm_enable \ && lcm \ + && sleep 30 \ && pc_project \ && priority_images \ && images \ diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 1379a1d..6799527 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -131,6 +131,7 @@ case ${1} in ssp_auth \ && calm_enable \ && lcm \ + && sleep 30 \ && pc_project \ && priority_images \ && images \ diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index cc18eba..669520f 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -133,6 +133,7 @@ case ${1} in ssp_auth \ && calm_enable \ && lcm \ + && sleep 30 \ && pc_project \ && priority_images \ && images_era_bootcamp \ From 38fbf059c6b71940a74b1ff74e4c4399af722175 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 20:33:19 -0700 Subject: [PATCH 493/691] Update era_postgres_bootcamp.sh --- scripts/era_postgres_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 669520f..2d96ae1 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -136,7 +136,7 @@ case ${1} in && sleep 30 \ && pc_project \ && priority_images \ - && images_era_bootcamp \ + && images \ && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ From 7f8e36f7015a1d113c97ecb8ddc5529da4fd5794 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 20:38:32 -0700 Subject: [PATCH 494/691] PC 5.17 RC4 Images Update --- scripts/global.vars.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index c9f8336..86b84c4 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -208,8 +208,8 @@ 
AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.17-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' @@ -245,8 +245,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -282,8 +282,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' - 
PC_DEV_URL='http://10.42.194.11/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -324,8 +324,8 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64-metadata.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/nutanix_installer_package_pc-release-euphrates-5.17-stable-x86_64.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/euphrates-5.17-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' From 8c58d2b2851be103fc361821ed1d5b7c70853667 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 22:54:28 -0700 Subject: [PATCH 495/691] EraManaged VLAN update --- scripts/era_bootcamp.sh | 4 +++- scripts/era_mssql_bootcamp.sh | 4 +++- scripts/era_oracle_bootcamp.sh | 4 +++- scripts/era_postgres_bootcamp.sh | 4 +++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 7ed1421..9141a03 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -24,8 +24,10 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" 
export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' - export NW3_VLAN=$((OCTET[2]*10+1)) + export NW3_VLAN=${NW2_VLAN} #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + export NW3_START="${IPV4_PREFIX}.220" + export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 9ea8734..18d03ec 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -24,8 +24,10 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' - export NW3_VLAN=$((OCTET[2]*10+1)) + export NW3_VLAN=${NW2_VLAN} #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + export NW3_START="${IPV4_PREFIX}.220" + export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 6799527..dd2d228 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -24,8 +24,10 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' - export NW3_VLAN=$((OCTET[2]*10+1)) + export NW3_VLAN=${NW2_VLAN} #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + export NW3_START="${IPV4_PREFIX}.220" + export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 2d96ae1..c27307e 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -24,8 +24,10 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' - export NW3_VLAN=$((OCTET[2]*10+1)) + export NW3_VLAN=${NW2_VLAN} #export NW3_SUBNET="${IPV4_PREFIX}.129/25" + export 
NW3_START="${IPV4_PREFIX}.220" + export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From d724d63aeab0e4c3f24694aea48d1822bdfac276 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 15 Apr 2020 23:28:34 -0700 Subject: [PATCH 496/691] Updated Era Staging Update --- scripts/era_mssql_bootcamp.sh | 1 - scripts/era_oracle_bootcamp.sh | 1 - scripts/era_postgres_bootcamp.sh | 1 - scripts/global.vars.sh | 7 ++++-- scripts/lib.pc.sh | 37 ++++++++++++++++++++++++++++++++ 5 files changed, 42 insertions(+), 5 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 18d03ec..84f2e24 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -25,7 +25,6 @@ case ${1} in export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' export NW3_VLAN=${NW2_VLAN} - #export NW3_SUBNET="${IPV4_PREFIX}.129/25" export NW3_START="${IPV4_PREFIX}.220" export NW3_END="${IPV4_PREFIX}.253" diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index dd2d228..72e64b9 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -25,7 +25,6 @@ case ${1} in export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' export NW3_VLAN=${NW2_VLAN} - #export NW3_SUBNET="${IPV4_PREFIX}.129/25" export NW3_START="${IPV4_PREFIX}.220" export NW3_END="${IPV4_PREFIX}.253" diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index c27307e..a2988b8 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -25,7 +25,6 @@ case ${1} in export NW2_DHCP_END="${IPV4_PREFIX}.219" export NW3_NAME='EraManaged' export NW3_VLAN=${NW2_VLAN} - #export NW3_SUBNET="${IPV4_PREFIX}.129/25" export NW3_START="${IPV4_PREFIX}.220" export NW3_END="${IPV4_PREFIX}.253" diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 86b84c4..6386819 100755 --- a/scripts/global.vars.sh +++ 
b/scripts/global.vars.sh @@ -33,8 +33,11 @@ SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserK SSH_OPTS+=' -q' # -v' #################################################### -# Era Bootcamps VARs +# Era VARs ################################################### +ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) +ERA_USER="admin" +ERA_PASSWORD="nutanix/4u" MSSQL_SourceVM="Win2016SQLSource" MSSQL_SourceVM_Image="Win2016SQLSource" @@ -124,7 +127,6 @@ DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22)) PrismOpsServer_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 5))" -ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' @@ -178,6 +180,7 @@ case "${OCTET[3]}" in NW2_NAME='Secondary' NW2_VLAN=$((OCTET[2]*10+1)) NW2_SUBNET="${IPV4_PREFIX}.129/25" + NW2_GATEWAY="${IPV4_PREFIX}.129" NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" ;; diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 46ddc0f..14dd72d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -974,6 +974,43 @@ function upload_oracle_patch_images() { } +######################################################################################################################################### +# Routine to Upload Era Bootcamp Patch images for Oracle +######################################################################################################################################### + +function configure_era() { + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + local baseurl="" + +## Create the EraManaged network inside Era ## +echo "Create ${NW3_NAME} Static Network" + +HTTP_JSON_BODY=$(cat < Date: Thu, 16 Apr 2020 09:33:51 -0700 Subject: 
[PATCH 497/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 14dd72d..01168a0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1007,7 +1007,7 @@ HTTP_JSON_BODY=$(cat < Date: Thu, 16 Apr 2020 10:46:19 -0700 Subject: [PATCH 498/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 01168a0..ae7ddaa 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -910,7 +910,6 @@ function deploy_mssql() { #acli "vm.clone $MSSQL_SourceVM_User[01..$num_sql_vms] clone_from_vm=${MSSQL_SourceVM}" #echo "## SQLVM_Clone_Creation_COMPLETE ##" - fi } ######################################################################################################################################### @@ -946,7 +945,6 @@ function deploy_oracle() { acli "vm.on ${Oracle_SourceVM}" echo "### Oracle12cVM_Creation_COMPLETE ##" - fi } From 73e57250a8e38ca81a0d538db50437d54d2d34ef Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 16 Apr 2020 22:46:42 -0700 Subject: [PATCH 499/691] Updates for Era Networking --- scripts/era_mssql_bootcamp.sh | 6 +- scripts/era_oracle_bootcamp.sh | 8 +-- scripts/era_postgres_bootcamp.sh | 3 +- scripts/global.vars.sh | 3 +- scripts/lib.pc.sh | 106 +++++-------------------------- scripts/lib.pe.sh | 99 +++++++++++++++++++++++++++-- stage_workshop.sh | 7 +- 7 files changed, 126 insertions(+), 106 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 84f2e24..2f2b5a9 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -35,7 +35,8 @@ case ${1} in && pe_license \ && pe_init \ && create_era_container \ - && era_network_configure\ + && era_network_configure \ + && deploy_mssql \ && authentication_source \ && pe_auth @@ -139,8 +140,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && 
upload_era_calm_blueprint \ - && sleep 30 \ - && deploy_mssql \ + && configure_era \ && prism_check 'PC' log "Non-blocking functions (in development) follow." diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 72e64b9..f652676 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -35,7 +35,9 @@ case ${1} in && pe_license \ && pe_init \ && create_era_container \ - && era_network_configure\ + && era_network_configure \ + && deploy_oracle \ + && upload_oracle_patch_images \ && authentication_source \ && pe_auth @@ -139,9 +141,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ - && sleep 30 \ - && deploy_oracle \ - && upload_oracle_patch_images \ + && configure_era \ && prism_check 'PC' log "Non-blocking functions (in development) follow." diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index a2988b8..37ba4ad 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -35,7 +35,7 @@ case ${1} in && pe_license \ && pe_init \ && create_era_container \ - && era_network_configure\ + && era_network_configure \ && authentication_source \ && pe_auth @@ -141,7 +141,6 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ - && sleep 30 \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6386819..e7fd10d 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -35,7 +35,7 @@ SSH_OPTS+=' -q' # -v' #################################################### # Era VARs ################################################### -ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) + ERA_USER="admin" ERA_PASSWORD="nutanix/4u" @@ -127,6 +127,7 @@ DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22)) PrismOpsServer_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 5))" +ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index ae7ddaa..654d6de 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -499,7 +499,7 @@ function pc_cluster_img_import() { "cluster_reference":{ "uuid":"${_uuid}", "kind":"cluster", - "name":"string"}}}], + "name":"${CLUSTER_NAME}"}}}], "api_version":"3.0"} EOF ) @@ -881,96 +881,7 @@ EOF fi } -######################################################################################################################################### -# Routine to Create Era Bootcamp PreProvisioned MSSQL Server -######################################################################################################################################### - -function deploy_mssql() { - - num_sql_vms=3 - - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${MSSQL_SourceVM_Image} | wc --lines) == 0 )); then - log "Import ${MSSQL_SourceVM_Image} image from ${QCOW2_REPOS}..." 
- acli image.create ${MSSQL_SourceVM_Image} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image}.qcow2" - acli image.create ${MSSQL_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image1}.qcow2" - acli image.create ${MSSQL_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/SQLServer/${MSSQL_SourceVM_Image2}.qcow2" - else - log "Image found, assuming ready. Skipping ${MSSQL_SourceVM} import." - fi - - echo "## SQLVM_Creation_INPROGRESS ##" - acli "vm.create ${MSSQL_SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" - acli "vm.disk_create ${MSSQL_SourceVM} clone_from_image=${MSSQL_SourceVM_Image}" - acli "vm.nic_create ${MSSQL_SourceVM} network=${NW2_NAME}" - echo "## ${MSSQL_SourceVM} - Powering On ##" - acli "vm.on ${MSSQL_SourceVM}" - echo "## SQLVM_Creation_COMPLETE ##" - - #echo "## SQLVM_Clone_Creation_INPROGRESS ##" - #acli "vm.clone $MSSQL_SourceVM_User[01..$num_sql_vms] clone_from_vm=${MSSQL_SourceVM}" - #echo "## SQLVM_Clone_Creation_COMPLETE ##" -} - -######################################################################################################################################### -# Routine to Create Era Bootcamp PreProvisioned Oracle Server -######################################################################################################################################### - -function deploy_oracle() { - - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_SourceVM_BootImage} | wc --lines) == 0 )); then - log "Import ${Oracle_SourceVM_BootImage} image from ${QCOW2_REPOS}..." 
- acli image.create ${Oracle_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_BootImage}.qcow2" - acli image.create ${Oracle_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image1}.qcow2" - acli image.create ${Oracle_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image2}.qcow2" - acli image.create ${Oracle_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image3}.qcow2" - acli image.create ${Oracle_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image4}.qcow2" - acli image.create ${Oracle_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image5}.qcow2" - acli image.create ${Oracle_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image6}.qcow2" - else - log "Image found, assuming ready. Skipping ${Oracle_SourceVM} import." 
- fi - - echo "## Oracle12cVM_Creation_INPROGRESS ##" - acli "vm.create ${Oracle_SourceVM} memory=32000M num_cores_per_vcpu=2 num_vcpus=2" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_BootImage}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image1}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image2}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image3}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image4}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image5}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image6}" - acli "vm.nic_create ${Oracle_SourceVM} network=${NW2_NAME}" - echo "## ${Oracle_SourceVM} - Powering On ##" - acli "vm.on ${Oracle_SourceVM}" - echo "### Oracle12cVM_Creation_COMPLETE ##" - - -} - -######################################################################################################################################### -# Routine to Upload Era Bootcamp Patch images for Oracle -######################################################################################################################################### - -function upload_oracle_patch_images() { - - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_Patch_BootImage} | wc --lines) == 0 )); then - log "Import ${Oracle_Patch_BootImage} image from ${QCOW2_REPOS}..." 
- acli image.create ${Oracle_Patch_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_BootImage}.qcow2" - acli image.create ${Oracle_Patch_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image1}.qcow2" - acli image.create ${Oracle_Patch_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image2}.qcow2" - acli image.create ${Oracle_Patch_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image3}.qcow2" - acli image.create ${Oracle_Patch_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image4}.qcow2" - acli image.create ${Oracle_Patch_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image5}.qcow2" - acli image.create ${Oracle_Patch_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image6}.qcow2" - acli image.create ${Oracle_Patch_Image7} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image7}.qcow2" - acli image.create ${Oracle_Patch_Image8} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image8}.qcow2" - acli image.create ${Oracle_Patch_Image9} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image9}.qcow2" - else - log "Image found, assuming ready. Skipping ${Oracle_Patch_BootImage} import." 
- fi - -} ######################################################################################################################################### # Routine to Upload Era Bootcamp Patch images for Oracle @@ -994,7 +905,7 @@ HTTP_JSON_BODY=$(cat < 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='dev_privatecloud_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='ts2020.sh' From 26ba2799ccff1d6289ecb4bad52048aea23b7e9f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 16 Apr 2020 23:01:53 -0700 Subject: [PATCH 500/691] fix staging order --- scripts/era_mssql_bootcamp.sh | 4 ++-- scripts/era_oracle_bootcamp.sh | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 2f2b5a9..57729cb 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -36,9 +36,9 @@ case ${1} in && pe_init \ && create_era_container \ && era_network_configure \ - && deploy_mssql \ && authentication_source \ - && pe_auth + && pe_auth \ + && deploy_mssql if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index f652676..87bcf95 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -36,10 +36,10 @@ case ${1} in && pe_init \ && create_era_container \ && era_network_configure \ - && deploy_oracle \ - && upload_oracle_patch_images \ && authentication_source \ - && pe_auth + && pe_auth \ + && deploy_oracle \ + && upload_oracle_patch_images if (( $? 
== 0 )) ; then pc_install "${NW1_NAME}" \ From f7273bbc0ca5de1eacfdec7644a67ae3909c23da Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 17 Apr 2020 15:41:04 +0200 Subject: [PATCH 501/691] Update lib.pe.sh for SNC Add networking for SNC --- scripts/lib.pe.sh | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 54ca092..03dc018 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -558,10 +558,57 @@ function network_configure() { acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" + else + # As we are in the SNC environment, run the secondary network via this function + secondary_network_SNC fi fi } +############################################################################################################################################################################### +# Create the Secondary network based on the 3rd OCTET of the cluster for SNCs +############################################################################################################################################################################### + +function secondary_network_SNC(){ + # Set some needed parameters + SEC_NETW=${OCTET[3]} + SEC_NETW_VLAN=${OCTET[3]} + + # Get the last OCTET from the IP address of the AutoAD server + payload='{"filter": "vm_name==AutoAD","kind": "vm"}' + url="https://localhost:9440/api/nutanix/v3/vms/list" + autoad_ip=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.entities[].status.resources.nic_list[].ip_endpoint_list[0].ip' | tr -d \"| cut -d '.' 
-f 4) + + # Get UUID of the AutoAD + autoad_uuid=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.entities[].metadata.uuid' | tr -d \") + + # Set the right information for the network + json_payload='{"name":"Secondary","vlanId":"'${SEC_NETW_VLAN}'","ipConfig":{"dhcpOptions":{"domainNameServers":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","domainSearch":"ntxlab.local","domainName":"ntnxlab.local"},"networkAddress":"10.10.71.0","prefixLength":"24","defaultGateway":"10.10.'${SEC_NETW_VLAN}'.1","pool":[ + { + "range":"10.10.'${SEC_NETW_VLAN}'.90 10.10.'${SEC_NETW_VLAN}'.200" + } + ] + } + } + }' + + # Create the network + url="https://localhost:9440/api/nutanix/v0.8/networks" + network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.networkUuid' | tr -d \") + + # Add second nic into the AutoAD VM + url="https://localhost:9440/PrismGateway/services/rest/v2.0/vms/${autoad_uuid}/nics" + json_payload='{"spec_list":[{"network_uuid":"'${network_uuid}'","requested_ip_address":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","is_connected":true,"vlan_id":"'${SEC_NETW_VLAN}'"}]}' + + task_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.task_uuid' | tr -d \") + if [[ -z ${task_uuid} ]]; then + log "The AutoAD didn't receive the second network card..." + else + log "The AutoAD has it's second network card assigned..." 
+ fi + +} + ############################################################################################################################################################################### # Routine to create the networks for Era bootcamp ############################################################################################################################################################################### From 56b82a8d613bebb626bdadd43b74c5c773c58b93 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 17 Apr 2020 16:13:35 +0200 Subject: [PATCH 502/691] Update lib.pe.sh --- scripts/lib.pe.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 2e6a07c..65b7dc2 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -90,6 +90,10 @@ function authentication_source() { if [[ "${_test}" == "${_autoad_success}" ]]; then log "${AUTH_SERVER} is ready." sleep ${_sleep} + if [[ -z ${NW2_NAME} ]]; then + # We are in a SNC environment. So we need to have a second network created + secondary_network_SNC + fi break elif (( ${_loop} > ${_attempts} )); then log "Error ${_error}: ${AUTH_SERVER} VM running: giving up after ${_loop} tries." 
@@ -558,9 +562,6 @@ function network_configure() { acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" - else - # As we are in the SNC environment, run the secondary network via this function - secondary_network_SNC fi fi } From 5f30019f0eda6ec71900eac38514dbd1ff876aee Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 17 Apr 2020 16:18:57 +0200 Subject: [PATCH 503/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 65b7dc2..4cd716b 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -91,7 +91,7 @@ function authentication_source() { log "${AUTH_SERVER} is ready." sleep ${_sleep} if [[ -z ${NW2_NAME} ]]; then - # We are in a SNC environment. So we need to have a second network created + # We are in a SNC environment. So we need to have a second network created and assign a second Nic to AutoAD so we can use that in the secondary network. secondary_network_SNC fi break From a879abe8584cc17b5479bb4eca4ddb5c30e12304 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 17 Apr 2020 16:52:34 +0200 Subject: [PATCH 504/691] Update lib.pe.sh --- scripts/lib.pe.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 4cd716b..abb7342 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -578,13 +578,13 @@ function secondary_network_SNC(){ # Get the last OCTET from the IP address of the AutoAD server payload='{"filter": "vm_name==AutoAD","kind": "vm"}' url="https://localhost:9440/api/nutanix/v3/vms/list" - autoad_ip=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.entities[].status.resources.nic_list[].ip_endpoint_list[0].ip' | tr -d \"| cut -d '.' 
-f 4) + autoad_ip=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.entities[].status.resources.nic_list[].ip_endpoint_list[0].ip' | tr -d \"| cut -d '.' -f 4) # Get UUID of the AutoAD - autoad_uuid=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.entities[].metadata.uuid' | tr -d \") + autoad_uuid=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.entities[].metadata.uuid' | tr -d \") # Set the right information for the network - json_payload='{"name":"Secondary","vlanId":"'${SEC_NETW_VLAN}'","ipConfig":{"dhcpOptions":{"domainNameServers":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","domainSearch":"ntxlab.local","domainName":"ntnxlab.local"},"networkAddress":"10.10.71.0","prefixLength":"24","defaultGateway":"10.10.'${SEC_NETW_VLAN}'.1","pool":[ + json_payload='{"name":"Secondary","vlanId":"'${SEC_NETW_VLAN}'","ipConfig":{"dhcpOptions":{"domainNameServers":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","domainSearch":"ntxlab.local","domainName":"ntnxlab.local"},"networkAddress":"10.10.'${SEC_NETW_VLAN}'.0","prefixLength":"24","defaultGateway":"10.10.'${SEC_NETW_VLAN}'.1","pool":[ { "range":"10.10.'${SEC_NETW_VLAN}'.90 10.10.'${SEC_NETW_VLAN}'.200" } @@ -595,13 +595,13 @@ function secondary_network_SNC(){ # Create the network url="https://localhost:9440/api/nutanix/v0.8/networks" - network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.networkUuid' | tr -d \") + network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.networkUuid' | tr -d \") # Add second nic into the AutoAD VM url="https://localhost:9440/PrismGateway/services/rest/v2.0/vms/${autoad_uuid}/nics" 
json_payload='{"spec_list":[{"network_uuid":"'${network_uuid}'","requested_ip_address":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","is_connected":true,"vlan_id":"'${SEC_NETW_VLAN}'"}]}' - task_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PE_USER}:${PE_PASSWD} | jq '.task_uuid' | tr -d \") + task_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.task_uuid' | tr -d \") if [[ -z ${task_uuid} ]]; then log "The AutoAD didn't receive the second network card..." else From 90b3c272192e6ddc785a160e96f60a57a1b26e4d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 17 Apr 2020 08:52:10 -0700 Subject: [PATCH 505/691] Updates for CLuster UUID --- scripts/lib.pc.sh | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 654d6de..7e29b1a 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -485,9 +485,13 @@ function pc_cluster_img_import() { local _test local _uuid - _uuid=$(source /etc/profile.d/nutanix_env.sh \ - && ncli --json=true cluster info \ - | jq -r .data.uuid) + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + + echo "CLuster UUID is ${_cluster_uuid}" + + _uuid=$(source /etc/profile.d/nutanix_env.sh \ + && ncli --json=true cluster info | jq -r .data.uuid) + _http_body=$(cat < Date: Fri, 17 Apr 2020 16:06:31 -0700 Subject: [PATCH 506/691] Updates for Current Era MSSQL VM --- scripts/global.vars.sh | 1 - scripts/lib.pc.sh | 21 ++++++--------------- scripts/lib.pe.sh | 9 +++++---- 3 files changed, 11 insertions(+), 20 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e7fd10d..bb752c8 100755 --- 
a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -40,7 +40,6 @@ ERA_USER="admin" ERA_PASSWORD="nutanix/4u" MSSQL_SourceVM="Win2016SQLSource" -MSSQL_SourceVM_Image="Win2016SQLSource" MSSQL_SourceVM_Image1="MSSQL_1" MSSQL_SourceVM_Image2="MSSQL_2" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7e29b1a..f6fb652 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -489,27 +489,18 @@ function pc_cluster_img_import() { echo "CLuster UUID is ${_cluster_uuid}" - _uuid=$(source /etc/profile.d/nutanix_env.sh \ - && ncli --json=true cluster info | jq -r .data.uuid) - - _http_body=$(cat < Date: Mon, 20 Apr 2020 11:45:01 +0200 Subject: [PATCH 507/691] Update stage_workshop.sh Due to debugging... --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 6d6fccd..ff7db62 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -233,7 +233,7 @@ EoM log "Remote execution configuration script ${_pe_launch} on PE@${PE_HOST}" ## TODO: If DEBUG is set, we run the below command with bash -x - remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" + remote_exec 'SSH' 'PE' "${PE_CONFIGURATION} nohup bash -x /home/nutanix/${_pe_launch} 'PE' >> ${_pe_launch%%.sh}.log 2>&1 &" unset PE_CONFIGURATION # shellcheck disable=SC2153 From 89d0c67e5707e65a17eff34da586d31f76554c77 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 20 Apr 2020 11:52:30 +0200 Subject: [PATCH 508/691] Update lib.pe.sh --- scripts/lib.pe.sh | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 45b8342..11a52c7 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -90,10 +90,6 @@ function authentication_source() { if [[ "${_test}" == "${_autoad_success}" ]]; then log "${AUTH_SERVER} is ready." 
sleep ${_sleep} - if [[ -z ${NW2_NAME} ]]; then - # We are in a SNC environment. So we need to have a second network created and assign a second Nic to AutoAD so we can use that in the secondary network. - secondary_network_SNC - fi break elif (( ${_loop} > ${_attempts} )); then log "Error ${_error}: ${AUTH_SERVER} VM running: giving up after ${_loop} tries." @@ -576,15 +572,15 @@ function secondary_network_SNC(){ SEC_NETW_VLAN=${OCTET[3]} # Get the last OCTET from the IP address of the AutoAD server - payload='{"filter": "vm_name==AutoAD","kind": "vm"}' - url="https://localhost:9440/api/nutanix/v3/vms/list" - autoad_ip=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.entities[].status.resources.nic_list[].ip_endpoint_list[0].ip' | tr -d \"| cut -d '.' -f 4) + #payload='{"filter": "vm_name==AutoAD","kind": "vm"}' + #url="https://localhost:9440/api/nutanix/v3/vms/list" + #autoad_ip=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.entities[].status.resources.nic_list[].ip_endpoint_list[0].ip' | tr -d \"| cut -d '.' 
-f 4) # Get UUID of the AutoAD - autoad_uuid=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.entities[].metadata.uuid' | tr -d \") + #autoad_uuid=$(curl -X POST ${url} -d "${payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.entities[].metadata.uuid' | tr -d \") # Set the right information for the network - json_payload='{"name":"Secondary","vlanId":"'${SEC_NETW_VLAN}'","ipConfig":{"dhcpOptions":{"domainNameServers":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","domainSearch":"ntxlab.local","domainName":"ntnxlab.local"},"networkAddress":"10.10.'${SEC_NETW_VLAN}'.0","prefixLength":"24","defaultGateway":"10.10.'${SEC_NETW_VLAN}'.1","pool":[ + json_payload='{"name":"Secondary","vlanId":"'${SEC_NETW_VLAN}'","ipConfig":{"dhcpOptions":{"domainNameServers":"'${AUTH_HOST}'","domainSearch":"ntxlab.local","domainName":"ntnxlab.local"},"networkAddress":"10.10.'${SEC_NETW_VLAN}'.0","prefixLength":"24","defaultGateway":"10.10.'${SEC_NETW_VLAN}'.1","pool":[ { "range":"10.10.'${SEC_NETW_VLAN}'.90 10.10.'${SEC_NETW_VLAN}'.200" } @@ -596,18 +592,23 @@ function secondary_network_SNC(){ # Create the network url="https://localhost:9440/api/nutanix/v0.8/networks" network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.networkUuid' | tr -d \") - - # Add second nic into the AutoAD VM - url="https://localhost:9440/PrismGateway/services/rest/v2.0/vms/${autoad_uuid}/nics" - json_payload='{"spec_list":[{"network_uuid":"'${network_uuid}'","requested_ip_address":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","is_connected":true,"vlan_id":"'${SEC_NETW_VLAN}'"}]}' - - task_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.task_uuid' | tr -d \") - if [[ -z ${task_uuid} ]]; then - log "The AutoAD didn't receive the second network card..." 
+ if [[ -z ${network_uuid} ]]; then + log "The secondary network has not been created..." else - log "The AutoAD has it's second network card assigned..." + log "The secondary network has been created..." fi + # Add second nic into the AutoAD VM + #url="https://localhost:9440/PrismGateway/services/rest/v2.0/vms/${autoad_uuid}/nics" + #json_payload='{"spec_list":[{"network_uuid":"'${network_uuid}'","requested_ip_address":"10.10.'${SEC_NETW_VLAN}'.'${autoad_ip}'","is_connected":true,"vlan_id":"'${SEC_NETW_VLAN}'"}]}' + + #task_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.task_uuid' | tr -d \") + #if [[ -z ${task_uuid} ]]; then + # log "The AutoAD didn't receive the second network card..." + #else + # log "The AutoAD has it's second network card assigned..." + #fi + } ############################################################################################################################################################################### From ce207640fc37bdff50f9f9daf5b9d1839266dc0d Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 20 Apr 2020 11:56:39 +0200 Subject: [PATCH 509/691] Update dev_privatecloud_bootcamp.sh --- scripts/dev_privatecloud_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh index 8c224a9..0da029a 100755 --- a/scripts/dev_privatecloud_bootcamp.sh +++ b/scripts/dev_privatecloud_bootcamp.sh @@ -42,6 +42,7 @@ case ${1} in && pe_license \ && pe_init \ && network_configure \ + && secondary_network_SNC \ && authentication_source \ && pe_auth \ && prism_pro_server_deploy \ From 839c2226c45bf3e356811a9a6467641391e95657 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 20 Apr 2020 12:00:53 +0200 Subject: [PATCH 510/691] Update lib.pe.sh --- scripts/lib.pe.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 11a52c7..3349fe5 
100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -591,7 +591,8 @@ function secondary_network_SNC(){ # Create the network url="https://localhost:9440/api/nutanix/v0.8/networks" - network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} | jq '.networkUuid' | tr -d \") + network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD}) + echo network_uuid if [[ -z ${network_uuid} ]]; then log "The secondary network has not been created..." else From c3fd89bbd70069a158b5512815151a94eb52894a Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 20 Apr 2020 12:53:06 +0200 Subject: [PATCH 511/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 3349fe5..b686f80 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -592,7 +592,7 @@ function secondary_network_SNC(){ # Create the network url="https://localhost:9440/api/nutanix/v0.8/networks" network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD}) - echo network_uuid + echo ${network_uuid} if [[ -z ${network_uuid} ]]; then log "The secondary network has not been created..." 
else From a39a4da2c81f831b72bec0eb195963e46016b0e3 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Mon, 20 Apr 2020 14:53:18 +0200 Subject: [PATCH 512/691] Update lib.pe.sh --- scripts/lib.pe.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index b686f80..887e543 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -568,8 +568,9 @@ function network_configure() { function secondary_network_SNC(){ # Set some needed parameters - SEC_NETW=${OCTET[3]} - SEC_NETW_VLAN=${OCTET[3]} + local SEC_NETW=${OCTET[3]} + local SEC_NETW_VLAN=${OCTET[3]} + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # Get the last OCTET from the IP address of the AutoAD server #payload='{"filter": "vm_name==AutoAD","kind": "vm"}' @@ -588,10 +589,11 @@ function secondary_network_SNC(){ } } }' + echo ${json_payload} # Create the network url="https://localhost:9440/api/nutanix/v0.8/networks" - network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD}) + network_uuid=$(curl -X POST ${url} -d "${json_payload}" ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD}) echo ${network_uuid} if [[ -z ${network_uuid} ]]; then log "The secondary network has not been created..." 
From 76e8400eea483d64ab7ddc170ed0a0031ada9813 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 20 Apr 2020 15:51:07 -0700 Subject: [PATCH 513/691] Updates --- scripts/era_mssql_bootcamp.sh | 1 + scripts/era_oracle_bootcamp.sh | 1 + scripts/era_postgres_bootcamp.sh | 2 ++ scripts/global.vars.sh | 20 ++++++++++---------- scripts/lib.pc.sh | 6 ++++-- stage_workshop.sh | 7 ++++--- 6 files changed, 22 insertions(+), 15 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 57729cb..2ecdb98 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -140,6 +140,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ + && sleep 120 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 87bcf95..fee84af 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -141,6 +141,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ + && sleep 120 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 37ba4ad..2c53417 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -141,6 +141,8 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ + && sleep 120 \ + && configure_era \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index bb752c8..4ebd5af 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -56,16 +56,16 @@ Oracle_SourceVM_Image8="" Oracle_SourceVM_Image9="" Oracle_SourceVM_Image10="" -Oracle_Patch_BootImage="19c_bootdisk" -Oracle_Patch_Image1="19c_disk1" -Oracle_Patch_Image2="19c_disk2" -Oracle_Patch_Image3="19c_disk3" -Oracle_Patch_Image4="19c_disk4" -Oracle_Patch_Image5="19c_disk5" -Oracle_Patch_Image6="19c_disk6" -Oracle_Patch_Image7="19c_disk7" -Oracle_Patch_Image8="19c_disk8" -Oracle_Patch_Image9="19c_disk9" +Oracle_Patch_BootImage="19c-bootdisk" +Oracle_Patch_Image1="19c-disk1" +Oracle_Patch_Image2="19c-disk2" +Oracle_Patch_Image3="19c-disk3" +Oracle_Patch_Image4="19c-disk4" +Oracle_Patch_Image5="19c-disk5" +Oracle_Patch_Image6="19c-disk6" +Oracle_Patch_Image7="19c-disk7" +Oracle_Patch_Image8="19c-disk8" +Oracle_Patch_Image9="19c-disk9" Oracle_Patch_Image10="" #################################################### diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f6fb652..29fb33a 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -485,9 +485,11 @@ function pc_cluster_img_import() { local _test local _uuid - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") - echo "CLuster UUID is ${_cluster_uuid}" + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 
'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + echo "Cluster UUID is ${_cluster_uuid}" _http_body=$(cat < Date: Mon, 20 Apr 2020 15:58:27 -0700 Subject: [PATCH 514/691] Updates for New Bootcamps --- scripts/app_modernization_bootcamp.sh | 161 ++++++++++++++++++++++ scripts/cicd_bootcamp.sh | 161 ++++++++++++++++++++++ scripts/splunk_bootcamp.sh | 183 ++++++++++++++++++++++++++ stage_workshop.sh | 18 +++ 4 files changed, 523 insertions(+) create mode 100755 scripts/app_modernization_bootcamp.sh create mode 100755 scripts/cicd_bootcamp.sh create mode 100755 scripts/splunk_bootcamp.sh diff --git a/scripts/app_modernization_bootcamp.sh b/scripts/app_modernization_bootcamp.sh new file mode 100755 index 0000000..464e48f --- /dev/null +++ b/scripts/app_modernization_bootcamp.sh @@ -0,0 +1,161 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoDC' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? 
== 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && upload_karbon_calm_blueprint \ + && sleep 30 \ + && upload_CICDInfra_calm_blueprint \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/cicd_bootcamp.sh b/scripts/cicd_bootcamp.sh new file mode 100755 index 0000000..464e48f --- /dev/null +++ b/scripts/cicd_bootcamp.sh @@ -0,0 +1,161 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + export AUTH_SERVER='AutoDC' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && upload_karbon_calm_blueprint \ + && sleep 30 \ + && upload_CICDInfra_calm_blueprint \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/splunk_bootcamp.sh b/scripts/splunk_bootcamp.sh new file mode 100755 index 0000000..8cadd10 --- /dev/null +++ b/scripts/splunk_bootcamp.sh @@ -0,0 +1,183 @@ +#!/usr/bin/env bash + #-x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + #export PC_DEV_VERSION='5.10.2' + #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' + #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' + #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' + #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' + #export FILES_VERSION='3.2.0.1' + #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' + #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' + + export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' + + export _external_nw_name="${1}" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && 
authentication_source \ + && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + veeam/VeeamAHVProxy2.0.404.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + veeam/VBR_10.0.0.4442.iso \ + ) + + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && seedPC \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 1cc05f1..3cda00d 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -25,6 +25,9 @@ WORKSHOPS=(\ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ +"Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ +"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ @@ -120,6 +123,21 @@ function stage_clusters() { _pe_launch='frame_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Private Cloud Splunk on AHV with Objects Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='splunk_bootcamp.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i "^Cloud Native Application Modernization Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='app_modernization_bootcamp.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i "^Cloud Native CI/CD with Calm and Karbon Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='cicd_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^In Development Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='dev_privatecloud_bootcamp.sh' From 1489175a593d7cac4bdf9074d553da5bd9dfc9ac Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 20 Apr 2020 18:19:55 -0700 Subject: 
[PATCH 515/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 29fb33a..3750555 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -503,7 +503,7 @@ EOF _test=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" \ https://localhost:9440/api/nutanix/v3/images/migrate) - log "batch _test=|${_test}|" + log "Image Migration = |${_test}|" } ############################################################################################################################################################################### From 93e285634919c26e125d2509815e8c5ee47aa45e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 20 Apr 2020 21:47:31 -0700 Subject: [PATCH 516/691] updates for Era --- scripts/era_mssql_bootcamp.sh | 2 +- scripts/era_oracle_bootcamp.sh | 2 +- scripts/era_postgres_bootcamp.sh | 2 +- scripts/lib.pc.sh | 16 ++++++++-------- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 2ecdb98..0324613 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -140,7 +140,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ - && sleep 120 \ + && sleep 300 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index fee84af..c650cd0 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -141,7 +141,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && upload_era_calm_blueprint \ - && sleep 120 \ + && sleep 300 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 2c53417..78000d6 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -141,7 +141,7 @@ case ${1} in && flow_enable \ 
&& pc_cluster_img_import \ && upload_era_calm_blueprint \ - && sleep 120 \ + && sleep 300 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3750555..b5bbae7 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -887,10 +887,10 @@ EOF function configure_era() { local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " - local baseurl="" + ## Create the EraManaged network inside Era ## -echo "Create ${NW3_NAME} Static Network" +log "Create ${NW3_NAME} Static Network" HTTP_JSON_BODY=$(cat < Date: Mon, 20 Apr 2020 21:51:25 -0700 Subject: [PATCH 517/691] Updates for Era Networking --- scripts/era_mssql_bootcamp.sh | 5 +---- scripts/era_oracle_bootcamp.sh | 4 ---- scripts/era_postgres_bootcamp.sh | 4 ---- scripts/global.vars.sh | 5 +++++ 4 files changed, 6 insertions(+), 12 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 0324613..158f300 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -23,10 +23,7 @@ case ${1} in #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" - export NW3_NAME='EraManaged' - export NW3_VLAN=${NW2_VLAN} - export NW3_START="${IPV4_PREFIX}.220" - export NW3_END="${IPV4_PREFIX}.253" + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index c650cd0..e5bf84a 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -23,10 +23,6 @@ case ${1} in #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" - export NW3_NAME='EraManaged' - export NW3_VLAN=${NW2_VLAN} - export NW3_START="${IPV4_PREFIX}.220" - export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel 
suitable diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 78000d6..bd53ef7 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -23,10 +23,6 @@ case ${1} in #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" - export NW3_NAME='EraManaged' - export NW3_VLAN=${NW2_VLAN} - export NW3_START="${IPV4_PREFIX}.220" - export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4ebd5af..39b0638 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -183,6 +183,11 @@ case "${OCTET[3]}" in NW2_GATEWAY="${IPV4_PREFIX}.129" NW2_DHCP_START="${IPV4_PREFIX}.132" NW2_DHCP_END="${IPV4_PREFIX}.253" + + NW3_NAME='EraManaged' + NW3_VLAN=${NW2_VLAN} + NW3_START="${IPV4_PREFIX}.220" + NW3_END="${IPV4_PREFIX}.253" ;; esac From 6534ec58a4dff2dbd172f5ad69cd62ff09cacbb1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 20 Apr 2020 21:58:51 -0700 Subject: [PATCH 518/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b5bbae7..e73bf05 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -487,9 +487,9 @@ function pc_cluster_img_import() { #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") 
+ _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - echo "Cluster UUID is ${_cluster_uuid}" + log "Cluster UUID is ${_cluster_uuid}" _http_body=$(cat < Date: Tue, 21 Apr 2020 12:39:30 -0700 Subject: [PATCH 519/691] Deploy Era updates --- scripts/global.vars.sh | 5 ++- scripts/lib.pc.sh | 1 + scripts/lib.pe.sh | 100 ++++++++++++++++++++++++++--------------- 3 files changed, 69 insertions(+), 37 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 39b0638..7cbb0ce 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -19,7 +19,7 @@ SLEEP=60 PrismOpsServer='PrismProLabUtilityServer' SeedPC='seedPC.zip' CALM_RSA_KEY_FILE='calm_rsa_key.env' -ERA_Blueprint='EraServerDeployment.json' + Citrix_Blueprint='CitrixBootcampInfra.json' Beam_Blueprint='' Karbon_Blueprint='KarbonClusterDeployment.json' @@ -36,6 +36,9 @@ SSH_OPTS+=' -q' # -v' # Era VARs ################################################### +ERA_Blueprint='EraServerDeployment.json' +ERAServerImage='ERA-Server-build-1.2.1.qcow2' +ERAServerName='Era Server' ERA_USER="admin" ERA_PASSWORD="nutanix/4u" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e73bf05..435f3e2 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -925,6 +925,7 @@ log "Adding IP Pool ${NW3_START} - ${NW3_END}" _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks/${network_id}/ip-pool" --data "{"ipPools": [{"startIP": "${NW3_START}","endIP": "${NW3_END}"}]}" ) +## Add the Secondary Network inside Era ## log "Create ${NW2_NAME} DHCP/IPAM Network" _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data {"name": "${NW2_NAME}","type": "DHCP"} ) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 
887e543..0355ae4 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -221,40 +221,6 @@ function authentication_source() { log "To be documented, see https://drt-it-github-prod-1.eng.nutanix.com/mark-lavi/openldap" ;; esac -} - -############################################################################################################################################################################### -# Routine to deploy PrismProServer -############################################################################################################################################################################### - -function prism_pro_server_deploy() { - -### Import Image ### - -if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PrismOpsServer} | wc --lines) == 0 )); then - log "Import ${PrismOpsServer} image from ${QCOW2_REPOS}..." - acli image.create ${PrismOpsServer} \ - image_type=kDiskImage wait=true \ - container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${PrismOpsServer}.qcow2" -else - log "Image found, assuming ready. Skipping ${PrismOpsServer} import." -fi - -### Deploy PrismProServer ### - -log "Create ${PrismOpsServer} VM based on ${PrismOpsServer} image" -acli "vm.create ${PrismOpsServer} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" -# vmstat --wide --unit M --active # suggests 2G sufficient, was 4G -#acli "vm.disk_create ${VMNAME} cdrom=true empty=true" -acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}" -#acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" -acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME} ip=${PrismOpsServer_HOST}" - -log "Power on ${PrismOpsServer} VM..." 
-acli "vm.on ${PrismOpsServer}" - - - } ############################################################################################################################################################################### @@ -1115,9 +1081,41 @@ function pc_destroy() { done } -############################################################################################################################################################################### +################################################################################################################################################### +# Routine to deploy PrismProServer +################################################################################################################################################### + +function prism_pro_server_deploy() { + +### Import Image ### + +if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${PrismOpsServer} | wc --lines) == 0 )); then + log "Import ${PrismOpsServer} image from ${QCOW2_REPOS}..." + acli image.create ${PrismOpsServer} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${PrismOpsServer}.qcow2" +else + log "Image found, assuming ready. Skipping ${PrismOpsServer} import." +fi + +### Deploy PrismProServer ### + +log "Create ${PrismOpsServer} VM based on ${PrismOpsServer} image" +acli "vm.create ${PrismOpsServer} num_vcpus=2 num_cores_per_vcpu=1 memory=2G" +# vmstat --wide --unit M --active # suggests 2G sufficient, was 4G +#acli "vm.disk_create ${VMNAME} cdrom=true empty=true" +acli "vm.disk_create ${PrismOpsServer} clone_from_image=${PrismOpsServer}" +#acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME}" +acli "vm.nic_create ${PrismOpsServer} network=${NW1_NAME} ip=${PrismOpsServer_HOST}" + +log "Power on ${PrismOpsServer} VM..." 
+acli "vm.on ${PrismOpsServer}" + +} + +################################################################################################################################################### # Routine create the Era Storage container for the Era Bootcamps. -############################################################################################################################################################################### +################################################################################################################################################### function create_era_container() { @@ -1126,6 +1124,36 @@ function create_era_container() { } +######################################################################################################################################### +# Routine to Create Era Server +######################################################################################################################################### + +function deploy_era() { + +### Import Image ### + +if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${ERAServerName} | wc --lines) == 0 )); then + log "Import ${ERAServerImage} image from ${QCOW2_REPOS}..." + acli image.create ${ERAServerImage} \ + image_type=kDiskImage wait=true \ + container=${STORAGE_IMAGES} source_url="${QCOW2_REPOS}${ERAServerImage}" +else + log "Image found, assuming ready. Skipping ${ERAServerImage} import." +fi + +### Deploy PrismProServer ### + +log "Create ${ERAServerName} VM based on ${ERAServerImage} image" +acli "vm.create ${ERAServerName} num_vcpus=1 num_cores_per_vcpu=4 memory=4G" +acli "vm.disk_create ${ERAServerName} clone_from_image=${ERAServerImage}" +acli "vm.nic_create ${ERAServerName} network=${NW1_NAME} ip=${ERA_HOST}" + +log "Power on ${ERAServerName} VM..." 
+acli "vm.on ${ERAServerName}" + +} + + ######################################################################################################################################### # Routine to Create Era Bootcamp PreProvisioned MSSQL Server ######################################################################################################################################### From 7f5271e1fc1d2a64613a2fba4c2096b2d8e1baa9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Apr 2020 15:19:45 -0700 Subject: [PATCH 520/691] Updates for Era Config --- scripts/era_mssql_bootcamp.sh | 9 ++++---- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 39 +++++++++++++++++++++++++++++++++++ 3 files changed, 45 insertions(+), 4 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 158f300..2df462c 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -23,7 +23,7 @@ case ${1} in #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" - + args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -33,6 +33,7 @@ case ${1} in && pe_init \ && create_era_container \ && era_network_configure \ + && deploy_era \ && authentication_source \ && pe_auth \ && deploy_mssql @@ -76,7 +77,7 @@ case ${1} in export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ - ERA-Server-build-1.2.1.qcow2 \ + #ERA-Server-build-1.2.1.qcow2 \ ) export QCOW2_IMAGES=(\ @@ -136,8 +137,8 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - && upload_era_calm_blueprint \ - && sleep 300 \ + #&& upload_era_calm_blueprint \ + #&& sleep 300 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7cbb0ce..f494a38 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -41,6 +41,7 @@ ERAServerImage='ERA-Server-build-1.2.1.qcow2' ERAServerName='Era Server' ERA_USER="admin" 
ERA_PASSWORD="nutanix/4u" +ERA_Default_PASSWORD="Nutanix/4u" MSSQL_SourceVM="Win2016SQLSource" MSSQL_SourceVM_Image1="MSSQL_1" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 435f3e2..c773ebc 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -888,6 +888,45 @@ EOF function configure_era() { local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " +## Create the EraManaged network inside Era ## +log "Reset Default Era Password" + + _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/update" --data "{ "password": "${ERA_PASSWORD}"}" ) + +## Accept EULA ## +log "Accept Era EULA" + + _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data '{ "eulaAccepted": true }' ) + +## Register Cluster ## +log "Register ${CLUSTER_NAME} with Era" + + _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/clusters" --data "{ "name": "EraCluster","description": "Era Bootcamp Cluster","ip": "${PE_HOST}","username": "${PRISM_ADMIN}","password": "${PE_PASSWORD}","status": "UP","version": "v2","cloudType": "NTNX","properties": [{ "name": "ERA_STORAGE_CONTAINER","value": "${STORAGE_ERA}"}]}" ) + +## Get Era Cluster ID ## +log "Getting Era Cluster ID" + + _era_cluster_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X GET "https://${ERA_HOST}/era/v0.8/clusters" --data '{}' | jq -r '.[].id' | tr -d \") + +log "Era Cluster ID: |${_era_cluster_id}|" + +## Upload Cluster File ## +log "Upload Era Cluster JSON" + +HTTP_JSON_BODY=$(cat < Date: Tue, 21 Apr 2020 15:22:13 -0700 Subject: [PATCH 521/691] Update era_mssql_bootcamp.sh --- scripts/era_mssql_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 2df462c..eb7030e 100755 --- 
a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -33,9 +33,9 @@ case ${1} in && pe_init \ && create_era_container \ && era_network_configure \ - && deploy_era \ && authentication_source \ && pe_auth \ + && deploy_era \ && deploy_mssql if (( $? == 0 )) ; then From 10aa86626247f3c4acbb56066b84ae7d41da4400 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Apr 2020 15:24:26 -0700 Subject: [PATCH 522/691] Update era_mssql_bootcamp.sh --- scripts/era_mssql_bootcamp.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index eb7030e..c7ab00d 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -137,8 +137,6 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - #&& upload_era_calm_blueprint \ - #&& sleep 300 \ && configure_era \ && prism_check 'PC' From ad42ddedb1d7de89f038a1e18a872bff86770521 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Apr 2020 15:27:24 -0700 Subject: [PATCH 523/691] Update era_mssql_bootcamp.sh --- scripts/era_mssql_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index c7ab00d..9f56486 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -136,8 +136,8 @@ case ${1} in && priority_images \ && images \ && flow_enable \ - && pc_cluster_img_import \ && configure_era \ + && pc_cluster_img_import \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
From 15f561213557077cf4e1975a173d49679a6e9268 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Apr 2020 15:33:04 -0700 Subject: [PATCH 524/691] Update era_mssql_bootcamp.sh --- scripts/era_mssql_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 9f56486..c7ab00d 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -136,8 +136,8 @@ case ${1} in && priority_images \ && images \ && flow_enable \ - && configure_era \ && pc_cluster_img_import \ + && configure_era \ && prism_check 'PC' log "Non-blocking functions (in development) follow." From 79669b6cd3b944f293b600a5e7fbf543491d6334 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 21 Apr 2020 16:19:41 -0700 Subject: [PATCH 525/691] Updates for Era Deploy --- scripts/global.vars.sh | 2 +- scripts/lib.pe.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f494a38..f046f45 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -38,7 +38,7 @@ SSH_OPTS+=' -q' # -v' ERA_Blueprint='EraServerDeployment.json' ERAServerImage='ERA-Server-build-1.2.1.qcow2' -ERAServerName='Era Server' +ERAServerName='EraServer' ERA_USER="admin" ERA_PASSWORD="nutanix/4u" ERA_Default_PASSWORD="Nutanix/4u" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 0355ae4..58e63e0 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1132,7 +1132,7 @@ function deploy_era() { ### Import Image ### -if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${ERAServerName} | wc --lines) == 0 )); then +if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${ERAServerImage} | wc --lines) == 0 )); then log "Import ${ERAServerImage} image from ${QCOW2_REPOS}..." 
acli image.create ${ERAServerImage} \ image_type=kDiskImage wait=true \ From 1fe4328e97d35d9675efd569a0741d3e0e759086 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 00:45:51 -0700 Subject: [PATCH 526/691] Updates for Era Configure --- scripts/era_cluster_id.json | 1 + scripts/global.vars.sh | 12 +++++-- scripts/lib.pc.sh | 71 ++++++++++++++++++++++++------------- 3 files changed, 57 insertions(+), 27 deletions(-) create mode 100644 scripts/era_cluster_id.json diff --git a/scripts/era_cluster_id.json b/scripts/era_cluster_id.json new file mode 100644 index 0000000..6e53be7 --- /dev/null +++ b/scripts/era_cluster_id.json @@ -0,0 +1 @@ +{"id":"a6b659e6-48ee-4a0b-9048-cb1a079c460c","name":"EraCluster","uniqueName":"ERACLUSTER","description":"Era Bootcamp Cluster","cloudType":"NTNX","dateCreated":null,"dateModified":null,"ownerId":"eac70dbf-22fb-462b-9498-949796ca1f73","status":"UP","version":"v2","hypervisorType":"AHV","hypervisorVersion":"5.15","properties":[{"ref_id":"a6b659e6-48ee-4a0b-9048-cb1a079c460c","name":"ERA_STORAGE_CONTAINER","value":"Era","description":null},{"ref_id":"a6b659e6-48ee-4a0b-9048-cb1a079c460c","name":"RESOURCE_CONFIG","value":"{\"storageThresholdPercentage\":95.0,\"memoryThresholdPercentage\":95.0}","description":null}],"referenceCount":0,"ip":"10.42.7.37","username":"admin","password":"techX2019!","cloudInfo":null,"resourceConfig":null} diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f046f45..f9d9b82 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -254,7 +254,9 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.55.251.38/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.55.251.38/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.55.251.10,10.55.251.11' - OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' + ERA_PRIMARY_DNS='10.55.251.10' + ERA_SECONDARY_DNS='10.55.251.11' + OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC 
us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' @@ -291,6 +293,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.42.196.10,10.42.194.10' + ERA_PRIMARY_DNS='10.42.196.10' + ERA_SECONDARY_DNS='10.42.194.10' OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters @@ -328,7 +332,9 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' BLUEPRINT_URL='http://10.42.194.11/workshop_staging/CalmBlueprints/' DNS_SERVERS="10.42.196.10,10.42.194.10" - OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' + ERA_PRIMARY_DNS='10.42.196.10' + ERA_SECONDARY_DNS='10.42.194.10' + OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' # If the third OCTET is between 170 and 199, we need to have the +3 vlan for the secondary if [[ ${OCTET[2]} -gt 169 ]]; then @@ -370,6 +376,8 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_DATA='http://10.136.239.13/workshop_staging/seedPC.zip' BLUEPRINT_URL='http:/10.136.239.13/workshop_staging/CalmBlueprints/' DNS_SERVERS='10.136.239.10,10.136.239.11' + ERA_PRIMARY_DNS='10.136.239.10' + ERA_SECONDARY_DNS='10.136.239.11' OBJECTS_OFFLINE_REPO='http://10.136.239.13/workshop_staging/objects' ;; 10.132 ) # https://sewiki.nutanix.com/index.php/SH-COLO-IP-ADDR diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c773ebc..a443cee 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -888,45 +888,63 @@ EOF function configure_era() { local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " +log "PE Cluster IP |${PE_HOST}|" +log "EraServer IP |${ERA_HOST}|" + ## Create the EraManaged network inside Era ## log "Reset Default Era Password" - _task_id=$(curl 
${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/update" --data "{ "password": "${ERA_PASSWORD}"}" ) + _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/update" --data "{ "password": "${ERA_PASSWORD}"}" | jq -r '.status' | tr -d \") + +log "Password Reset |${_reset_passwd}|" ## Accept EULA ## log "Accept Era EULA" - _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data '{ "eulaAccepted": true }' ) + _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data '{ "eulaAccepted": true }' | jq -r '.status' | tr -d \") + +log "Accept EULA |${_accept_eula}|" ## Register Cluster ## log "Register ${CLUSTER_NAME} with Era" - _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/clusters" --data "{ "name": "EraCluster","description": "Era Bootcamp Cluster","ip": "${PE_HOST}","username": "${PRISM_ADMIN}","password": "${PE_PASSWORD}","status": "UP","version": "v2","cloudType": "NTNX","properties": [{ "name": "ERA_STORAGE_CONTAINER","value": "${STORAGE_ERA}"}]}" ) - -## Get Era Cluster ID ## -log "Getting Era Cluster ID" - - _era_cluster_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X GET "https://${ERA_HOST}/era/v0.8/clusters" --data '{}' | jq -r '.[].id' | tr -d \") - -log "Era Cluster ID: |${_era_cluster_id}|" - -## Upload Cluster File ## -log "Upload Era Cluster JSON" - HTTP_JSON_BODY=$(cat < cluster.json + + _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST -H 'Content-Type: multipart/form-data' "https://${ERA_HOST}/era/v0.8/clusters/${_era_cluster_id}/json" -F file="@"cluster.json) + +## Add the Secondary Network inside Era ## +log "Create ${NW2_NAME} DHCP/IPAM Network" + + _task_id=$(curl ${CURL_HTTP_OPTS} -u 
${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data {"name": "${NW2_NAME}","type": "DHCP"} ) ## Create the EraManaged network inside Era ## log "Create ${NW3_NAME} Static Network" @@ -935,6 +953,12 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 22 Apr 2020 00:50:58 -0700 Subject: [PATCH 527/691] Update lib.pc.sh --- scripts/lib.pc.sh | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index a443cee..f7d9a8f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -476,9 +476,9 @@ EOF done } -############################################################################################################################################################################### +################################################################################################################################################### # Routine to import the images into PC -############################################################################################################################################################################### +################################################################################################################################################### function pc_cluster_img_import() { local _http_body @@ -501,8 +501,7 @@ _http_body=$(cat < Date: Wed, 22 Apr 2020 01:06:30 -0700 Subject: [PATCH 528/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f7d9a8f..f92a57d 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -485,9 +485,9 @@ function pc_cluster_img_import() { local _test local _uuid - #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"cluster","filter": "name==${CLUSTER_NAME}"}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" 
'.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") log "Cluster UUID is ${_cluster_uuid}" From e8d39422c2723385e71ff43458bbbfb3bdb0036a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 01:10:55 -0700 Subject: [PATCH 529/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f92a57d..e7df4ea 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -484,8 +484,9 @@ function pc_cluster_img_import() { local _http_body local _test local _uuid + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") #_cluster_uuid=$(curl 
${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") From 8e4f470f7e88360a95dcc4bf95466ea2b833ac70 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 17:04:12 -0700 Subject: [PATCH 530/691] Update lib.pc.sh --- scripts/lib.pc.sh | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e7df4ea..b56e08c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -882,12 +882,14 @@ EOF ######################################################################################################################################### -# Routine to Upload Era Bootcamp Patch images for Oracle +# Routine to to configure Era ######################################################################################################################################### function configure_era() { local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " +set -x + log "PE Cluster IP |${PE_HOST}|" log "EraServer IP |${ERA_HOST}|" @@ -939,12 +941,14 @@ ClusterJSON='{"ip_address": "'${PE_HOST}'","port": "9440","protocol": "https","d echo $ClusterJSON > cluster.json - _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST -H 'Content-Type: multipart/form-data' "https://${ERA_HOST}/era/v0.8/clusters/${_era_cluster_id}/json" -F file="@"cluster.json) + _task_id=$(curl -k -H 'Content-Type: multipart/form-data' -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/clusters/${_era_cluster_id}/json" -F file="@"cluster.json) ## Add the Secondary Network inside Era ## log "Create ${NW2_NAME} DHCP/IPAM Network" - _task_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data {"name": "${NW2_NAME}","type": "DHCP"} ) + 
_dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data {"name": "${NW2_NAME}","type": "DHCP"} | jq -r '.id' | tr -d \") + +log "Created ${NW2_NAME} Network with Network ID |${_dhcp_network_id}|" ## Create the EraManaged network inside Era ## log "Create ${NW3_NAME} Static Network" @@ -977,18 +981,18 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 22 Apr 2020 17:06:08 -0700 Subject: [PATCH 531/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b56e08c..6af88db 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -903,7 +903,7 @@ log "Password Reset |${_reset_passwd}|" ## Accept EULA ## log "Accept Era EULA" - _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data '{ "eulaAccepted": true }' | jq -r '.status' | tr -d \") + _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data "{ "eulaAccepted": true }" | jq -r '.status' | tr -d \") log "Accept EULA |${_accept_eula}|" @@ -946,7 +946,7 @@ echo $ClusterJSON > cluster.json ## Add the Secondary Network inside Era ## log "Create ${NW2_NAME} DHCP/IPAM Network" - _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data {"name": "${NW2_NAME}","type": "DHCP"} | jq -r '.id' | tr -d \") + _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data "{"name": "${NW2_NAME}","type": "DHCP"}" | jq -r '.id' | tr -d \") log "Created ${NW2_NAME} Network with Network ID |${_dhcp_network_id}|" From 41763e3fa547cff3d48c2301fe2cc33b085a09ef Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 17:15:37 -0700 Subject: [PATCH 532/691] Update 
era_oracle_bootcamp.sh --- scripts/era_oracle_bootcamp.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index e5bf84a..3f78a07 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -34,8 +34,8 @@ case ${1} in && era_network_configure \ && authentication_source \ && pe_auth \ - && deploy_oracle \ - && upload_oracle_patch_images + && deploy_oracle + if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -55,6 +55,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" + && upload_oracle_patch_images #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' From 9100e303063b1537a5b2983955f0071bc2d501fa Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 17:20:15 -0700 Subject: [PATCH 533/691] Upsdates for new Era Staging --- scripts/era_oracle_bootcamp.sh | 3 +-- scripts/era_postgres_bootcamp.sh | 5 ++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 3f78a07..8ff7f5b 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -34,6 +34,7 @@ case ${1} in && era_network_configure \ && authentication_source \ && pe_auth \ + && deploy_era \ && deploy_oracle @@ -137,8 +138,6 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - && upload_era_calm_blueprint \ - && sleep 300 \ && configure_era \ && prism_check 'PC' diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index bd53ef7..2b268a4 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -33,7 +33,8 @@ case ${1} in && create_era_container \ && era_network_configure \ && authentication_source \ - && pe_auth + && pe_auth \ + && deploy_era if (( $? 
== 0 )) ; then pc_install "${NW1_NAME}" \ @@ -136,8 +137,6 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - && upload_era_calm_blueprint \ - && sleep 300 \ && configure_era \ && prism_check 'PC' From 85e44df8b50165864374d28ae7dca011f65b72a7 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 21:23:13 -0700 Subject: [PATCH 534/691] Update era_oracle_bootcamp.sh --- scripts/era_oracle_bootcamp.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 8ff7f5b..93704c0 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -35,7 +35,7 @@ case ${1} in && authentication_source \ && pe_auth \ && deploy_era \ - && deploy_oracle + if (( $? == 0 )) ; then @@ -56,8 +56,8 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - && upload_oracle_patch_images - + && upload_oracle_patch_images \ + && deploy_oracle #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish From 7f0a0f772209e42db08254ff071aa989f6797da0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 21:26:51 -0700 Subject: [PATCH 535/691] Update era_oracle_bootcamp.sh --- scripts/era_oracle_bootcamp.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 93704c0..c976122 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -35,6 +35,8 @@ case ${1} in && authentication_source \ && pe_auth \ && deploy_era \ + && deploy_oracle \ + && upload_oracle_patch_images @@ -56,8 +58,7 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - && upload_oracle_patch_images \ - && deploy_oracle + #&& dependencies 'remove' 'jq' & # parallel, optional. 
Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish From 717701772346820d0731a7064c98e77ba6fec47c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 22 Apr 2020 23:46:53 -0700 Subject: [PATCH 536/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6af88db..e6da85c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -896,14 +896,14 @@ log "EraServer IP |${ERA_HOST}|" ## Create the EraManaged network inside Era ## log "Reset Default Era Password" - _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/update" --data "{ "password": "${ERA_PASSWORD}"}" | jq -r '.status' | tr -d \") + _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/update" --data '{ "password": "'${ERA_PASSWORD}'"}' | jq -r '.status' | tr -d \") log "Password Reset |${_reset_passwd}|" ## Accept EULA ## log "Accept Era EULA" - _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data "{ "eulaAccepted": true }" | jq -r '.status' | tr -d \") + _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data '{ "eulaAccepted": true }' | jq -r '.status' | tr -d \") log "Accept EULA |${_accept_eula}|" @@ -946,7 +946,7 @@ echo $ClusterJSON > cluster.json ## Add the Secondary Network inside Era ## log "Create ${NW2_NAME} DHCP/IPAM Network" - _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data "{"name": "${NW2_NAME}","type": "DHCP"}" | jq -r '.id' | tr -d \") + _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data '{"name": "'${NW2_NAME}'","type": 
"DHCP"}' | jq -r '.id' | tr -d \") log "Created ${NW2_NAME} Network with Network ID |${_dhcp_network_id}|" From 8cc1627066142e4eadc22a7ab0415c69122feb31 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 23 Apr 2020 20:39:50 -0700 Subject: [PATCH 537/691] Update lib.pc.sh --- scripts/lib.pc.sh | 71 ++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 67 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e6da85c..f857139 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -985,12 +985,75 @@ EOF log "Created ${NW3_NAME} Network with Network ID |${_static_network_id}|" - #network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X GET "https://${ERA_HOST}/era/v0.8/resources/networks" | jq -r '.id' | tr -d \") +## Create the CUSTOM_EXTRA_SMALL Compute Profile inside Era ## +log "Create the CUSTOM_EXTRA_SMALL Compute Profile" -#log "Get ${NW3_NAME} Network ID is ${network_id}" -#log "Adding IP Pool ${NW3_START} - ${NW3_END}" +HTTP_JSON_BODY=$(cat < Date: Thu, 23 Apr 2020 20:52:43 -0700 Subject: [PATCH 538/691] Update lib.pc.sh --- scripts/lib.pc.sh | 52 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index f857139..d42e05b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1020,7 +1020,7 @@ EOF log "Created CUSTOM_EXTRA_SMALL Compute Profile with ID |${_xs_compute_profile_id}|" -## Create the CUSTOM_EXTRA_SMALL Compute Profile inside Era ## +## Create the ORACLE_SMALL Compute Profile inside Era ## log "Create the ORACLE_SMALL Compute Profile" HTTP_JSON_BODY=$(cat < Date: Thu, 23 Apr 2020 21:35:27 -0700 Subject: [PATCH 539/691] Updates for Oracle Source Images --- scripts/era_oracle_bootcamp.sh | 4 +- scripts/global.vars.sh | 44 ++++++++-------- scripts/lib.pe.sh | 91 ++++++++++++++++++++-------------- 3 files changed, 76 insertions(+), 63 deletions(-) diff --git a/scripts/era_oracle_bootcamp.sh 
b/scripts/era_oracle_bootcamp.sh index c976122..836a88b 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -35,8 +35,8 @@ case ${1} in && authentication_source \ && pe_auth \ && deploy_era \ - && deploy_oracle \ - && upload_oracle_patch_images + && deploy_oracle_12c \ + && deploy_oracle_19c diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f9d9b82..b09fc95 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -47,30 +47,26 @@ MSSQL_SourceVM="Win2016SQLSource" MSSQL_SourceVM_Image1="MSSQL_1" MSSQL_SourceVM_Image2="MSSQL_2" -Oracle_SourceVM="Oracle12cSource" -Oracle_SourceVM_BootImage="12c_bootdisk" -Oracle_SourceVM_Image1="12c_disk1" -Oracle_SourceVM_Image2="12c_disk2" -Oracle_SourceVM_Image3="12c_disk3" -Oracle_SourceVM_Image4="12c_disk4" -Oracle_SourceVM_Image5="12c_disk5" -Oracle_SourceVM_Image6="12c_disk6" -Oracle_SourceVM_Image7="" -Oracle_SourceVM_Image8="" -Oracle_SourceVM_Image9="" -Oracle_SourceVM_Image10="" - -Oracle_Patch_BootImage="19c-bootdisk" -Oracle_Patch_Image1="19c-disk1" -Oracle_Patch_Image2="19c-disk2" -Oracle_Patch_Image3="19c-disk3" -Oracle_Patch_Image4="19c-disk4" -Oracle_Patch_Image5="19c-disk5" -Oracle_Patch_Image6="19c-disk6" -Oracle_Patch_Image7="19c-disk7" -Oracle_Patch_Image8="19c-disk8" -Oracle_Patch_Image9="19c-disk9" -Oracle_Patch_Image10="" +Oracle_12c_SourceVM="Oracle12cSource" +Oracle_12c_SourceVM_BootImage="12c_bootdisk" +Oracle_12c_SourceVM_Image1="12c_disk1" +Oracle_12c_SourceVM_Image2="12c_disk2" +Oracle_12c_SourceVM_Image3="12c_disk3" +Oracle_12c_SourceVM_Image4="12c_disk4" +Oracle_12c_SourceVM_Image5="12c_disk5" +Oracle_12c_SourceVM_Image6="12c_disk6" + +Oracle_19c_SourceVM="Oracle19cSource" +Oracle_19c_SourceVM_BootImage="19c-bootdisk" +Oracle_19c_SourceVM_Image1="19c-disk1" +Oracle_19c_SourceVM_Image2="19c-disk2" +Oracle_19c_SourceVM_Image3="19c-disk3" +Oracle_19c_SourceVM_Image4="19c-disk4" +Oracle_19c_SourceVM_Image5="19c-disk5" 
+Oracle_19c_SourceVM_Image6="19c-disk6" +Oracle_19c_SourceVM_Image7="19c-disk7" +Oracle_19c_SourceVM_Image8="19c-disk8" +Oracle_19c_SourceVM_Image9="19c-disk9" #################################################### # 3rd Party images used at GTS or Add-On Labs diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 58e63e0..f79dad9 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1190,33 +1190,33 @@ function deploy_mssql() { # Routine to Create Era Bootcamp PreProvisioned Oracle Server ######################################################################################################################################### -function deploy_oracle() { - - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_SourceVM_BootImage} | wc --lines) == 0 )); then - log "Import ${Oracle_SourceVM_BootImage} image from ${QCOW2_REPOS}..." - acli image.create ${Oracle_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_BootImage}.qcow2" - acli image.create ${Oracle_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image1}.qcow2" - acli image.create ${Oracle_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image2}.qcow2" - acli image.create ${Oracle_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image3}.qcow2" - acli image.create ${Oracle_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image4}.qcow2" - acli image.create ${Oracle_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image5}.qcow2" - acli image.create 
${Oracle_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_SourceVM_Image6}.qcow2" +function deploy_oracle_12c() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_12c_SourceVM_BootImage} | wc --lines) == 0 )); then + log "Import ${Oracle_12c_SourceVM_BootImage} image from ${QCOW2_REPOS}..." + acli image.create ${Oracle_12c_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_BootImage}.qcow2" + acli image.create ${Oracle_12c_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image1}.qcow2" + acli image.create ${Oracle_12c_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image2}.qcow2" + acli image.create ${Oracle_12c_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image3}.qcow2" + acli image.create ${Oracle_12c_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image4}.qcow2" + acli image.create ${Oracle_12c_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image5}.qcow2" + acli image.create ${Oracle_12c_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle12cSIHA/${Oracle_12c_SourceVM_Image6}.qcow2" else log "Image found, assuming ready. Skipping ${Oracle_SourceVM} import." 
fi echo "## Oracle12cVM_Creation_INPROGRESS ##" - acli "vm.create ${Oracle_SourceVM} memory=32000M num_cores_per_vcpu=2 num_vcpus=2" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_BootImage}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image1}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image2}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image3}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image4}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image5}" - acli "vm.disk_create ${Oracle_SourceVM} clone_from_image=${Oracle_SourceVM_Image6}" - acli "vm.nic_create ${Oracle_SourceVM} network=${NW2_NAME}" - echo "## ${Oracle_SourceVM} - Powering On ##" - acli "vm.on ${Oracle_SourceVM}" + acli "vm.create ${Oracle_12c_SourceVM} memory=4G num_cores_per_vcpu=2 num_vcpus=2" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_BootImage}" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image1}" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image2}" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image3}" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image4}" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image5}" + acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image6}" + acli "vm.nic_create ${Oracle_12c_SourceVM} network=${NW2_NAME}" + echo "## ${Oracle_12c_SourceVM} - Powering On ##" + acli "vm.on ${Oracle_12c_SourceVM}" echo "### Oracle12cVM_Creation_COMPLETE ##" @@ -1226,24 +1226,41 @@ function deploy_oracle() { # Routine to Upload Era Bootcamp Patch images for Oracle 
######################################################################################################################################### -function upload_oracle_patch_images() { - - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_Patch_BootImage} | wc --lines) == 0 )); then - log "Import ${Oracle_Patch_BootImage} image from ${QCOW2_REPOS}..." - acli image.create ${Oracle_Patch_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_BootImage}.qcow2" - acli image.create ${Oracle_Patch_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image1}.qcow2" - acli image.create ${Oracle_Patch_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image2}.qcow2" - acli image.create ${Oracle_Patch_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image3}.qcow2" - acli image.create ${Oracle_Patch_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image4}.qcow2" - acli image.create ${Oracle_Patch_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image5}.qcow2" - acli image.create ${Oracle_Patch_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image6}.qcow2" - acli image.create ${Oracle_Patch_Image7} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image7}.qcow2" - acli image.create ${Oracle_Patch_Image8} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image8}.qcow2" - acli image.create ${Oracle_Patch_Image9} 
image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_Patch_Image9}.qcow2" +function deploy_oracle_19c() { + + if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${Oracle_19c_SourceVM_BootImage} | wc --lines) == 0 )); then + log "Import ${Oracle_19c_SourceVM_BootImage} image from ${QCOW2_REPOS}..." + acli image.create ${Oracle_19c_SourceVM_BootImage} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_BootImage}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image1} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image1}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image2} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image2}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image3} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image3}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image4} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image4}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image5} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image5}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image6} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image6}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image7} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image7}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image8} image_type=kDiskImage wait=true container=${STORAGE_ERA} 
source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image8}.qcow2" + acli image.create ${Oracle_19c_SourceVM_Image9} image_type=kDiskImage wait=true container=${STORAGE_ERA} source_url="${QCOW2_REPOS}era/oracle19cSIHA/${Oracle_19c_SourceVM_Image9}.qcow2" else - log "Image found, assuming ready. Skipping ${Oracle_Patch_BootImage} import." + log "Image found, assuming ready. Skipping ${Oracle_19c_SourceVM_BootImage} import." fi + echo "## Oracle19cVM_Creation_INPROGRESS ##" + acli "vm.create ${Oracle_19c_SourceVM} memory=8G num_cores_per_vcpu=1 num_vcpus=2" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_BootImage}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image1}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image2}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image3}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image4}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image5}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image6}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image7}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image8}" + acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image9}" + acli "vm.nic_create ${Oracle_19c_SourceVM} network=${NW2_NAME}" + echo "## ${Oracle_19c_SourceVM} - Powering On ##" + acli "vm.on ${Oracle_19c_SourceVM}" + echo "### Oracle19cVM_Creation_COMPLETE ##" + } ################################################################################################################################################### From 37ecb3da7b9382fe6396809f11ca25121f6015c8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 23 Apr 2020 22:56:52 -0700 
Subject: [PATCH 540/691] Update for NW3 Netmask --- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index b09fc95..24ffebd 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -186,6 +186,7 @@ case "${OCTET[3]}" in NW3_NAME='EraManaged' NW3_VLAN=${NW2_VLAN} + NW3_NETMASK='255.255.255.128' NW3_START="${IPV4_PREFIX}.220" NW3_END="${IPV4_PREFIX}.253" ;; diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index d42e05b..c2c5c88 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -974,7 +974,7 @@ HTTP_JSON_BODY=$(cat < Date: Thu, 23 Apr 2020 23:07:11 -0700 Subject: [PATCH 541/691] Update lib.pc.sh --- scripts/lib.pc.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c2c5c88..6203fa8 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -975,7 +975,11 @@ HTTP_JSON_BODY=$(cat < Date: Thu, 23 Apr 2020 23:19:46 -0700 Subject: [PATCH 542/691] Update lib.pc.sh --- scripts/lib.pc.sh | 52 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6203fa8..adbc087 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -989,6 +989,58 @@ EOF log "Created ${NW3_NAME} Network with Network ID |${_static_network_id}|" +## Create the Primary-MSSQL-NETWORK Network Profile inside Era ## +log "Create the Primary-MSSQL-NETWORK Network Profile" + +HTTP_JSON_BODY=$(cat < Date: Thu, 23 Apr 2020 23:35:46 -0700 Subject: [PATCH 543/691] Update splunk_bootcamp.sh --- scripts/splunk_bootcamp.sh | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/scripts/splunk_bootcamp.sh b/scripts/splunk_bootcamp.sh index 8cadd10..cd8fc7d 100755 --- a/scripts/splunk_bootcamp.sh +++ b/scripts/splunk_bootcamp.sh @@ -18,20 +18,7 @@ case ${1} in PE | pe ) . 
lib.pe.sh - #export PC_DEV_VERSION='5.10.2' - #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' - #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' - #export FILES_VERSION='3.2.0.1' - #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' export _external_nw_name="${1}" @@ -44,7 +31,6 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - && prism_pro_server_deploy \ && files_install \ && sleep 30 \ && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ @@ -96,12 +82,9 @@ case ${1} in CentOS7.qcow2 \ WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ - veeam/VeeamAHVProxy2.0.404.qcow2 \ ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ - veeam/VBR_10.0.0.4442.iso \ ) @@ -153,10 +136,9 @@ case ${1} in && pc_project \ && object_store \ && karbon_image_download \ + && images \ && flow_enable \ && pc_cluster_img_import \ - && seedPC \ - && images \ && prism_check 'PC' log "Non-blocking functions (in development) follow." 
From 40e77e085f2e060135f683b5ff281e70d3eb9ea7 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 24 Apr 2020 16:12:25 -0700 Subject: [PATCH 544/691] Oracle Network Profile Update --- scripts/lib.pc.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index adbc087..ef36b96 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1015,6 +1015,32 @@ EOF log "Created Primary-MSSQL-NETWORK Network Profile with ID |${_primary_network_profile_id}|" +## Create the Primary_ORACLE_NETWORKNetwork Profile inside Era ## +log "Create the Primary_ORACLE_NETWORK Network Profile" + +HTTP_JSON_BODY=$(cat < Date: Sun, 26 Apr 2020 20:24:21 -0700 Subject: [PATCH 545/691] Update era_oracle_bootcamp.sh --- scripts/era_oracle_bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 836a88b..5da1fc8 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -35,7 +35,6 @@ case ${1} in && authentication_source \ && pe_auth \ && deploy_era \ - && deploy_oracle_12c \ && deploy_oracle_19c From 78f834486b0d217dd77b86308c9e3eee114f338d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 27 Apr 2020 13:03:08 -0700 Subject: [PATCH 546/691] Update for Era param --- scripts/lib.pc.sh | 71 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index ef36b96..b8d89f7 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1187,6 +1187,77 @@ EOF log "Created NTNXLAB Domain Profile with ID |${_ntnxlab_domain_profile_id}|" +## Create the ORACLE_SMALL_PARAMS Parameters Profile inside Era ## +log "Create the ORACLE_SMALL_PARAMS Parameters Profile" + +HTTP_JSON_BODY=$(cat < Date: Tue, 28 Apr 2020 15:39:23 -0700 Subject: [PATCH 547/691] Updates for Private Cloud to PC 5.17 --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/stage_workshop.sh b/stage_workshop.sh index 3cda00d..93ed3d6 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -16,7 +16,7 @@ WORKSHOPS=(\ "Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ From 5f809e89d8eef6bfc7fa4b2e0ee6ce592b94b6cb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 28 Apr 2020 21:21:50 -0700 Subject: [PATCH 548/691] Databases all staging Updates --- scripts/era_bootcamp.sh | 18 +++++++----------- stage_workshop.sh | 4 ++-- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 9141a03..ab35c9a 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -23,11 +23,6 @@ case ${1} in #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" - export NW3_NAME='EraManaged' - export NW3_VLAN=${NW2_VLAN} - #export NW3_SUBNET="${IPV4_PREFIX}.129/25" - export NW3_START="${IPV4_PREFIX}.220" - export NW3_END="${IPV4_PREFIX}.253" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -35,9 +30,14 @@ case ${1} in dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ && pe_init \ + && create_era_container \ && era_network_configure\ && authentication_source \ - && pe_auth + && pe_auth \ + && deploy_era \ + && deploy_mssql \ + && deploy_oracle_19c + if (( $? 
== 0 )) ; then pc_install "${NW1_NAME}" \ @@ -82,9 +82,6 @@ case ${1} in ) export QCOW2_IMAGES=(\ - MSSQL-2016-VM.qcow2 \ - Windows2016.qcow2 \ - CentOS7.qcow2 \ WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ ) @@ -140,8 +137,7 @@ case ${1} in && images \ && flow_enable \ && pc_cluster_img_import \ - && upload_era_calm_blueprint \ - && sleep 30 \ + && configure_era \ && prism_check 'PC' log "Non-blocking functions (in development) follow." diff --git a/stage_workshop.sh b/stage_workshop.sh index 93ed3d6..de33c91 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -20,7 +20,7 @@ WORKSHOPS=(\ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"Databases with Era Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ +"Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ @@ -83,7 +83,7 @@ function stage_clusters() { _pe_launch='privatecloud_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Databases" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Databases Era -Stage All- Bootcamp" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='era_bootcamp.sh' _pc_launch=${_pe_launch} From 7c9f938dd25aecf9adb7bb17482b2804a3bee4b5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Apr 2020 12:13:40 -0700 Subject: [PATCH 549/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index de33c91..66066fd 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -22,7 +22,7 @@ WORKSHOPS=(\ "Databases Era with 
Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Calm Workshop (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +"Calm Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ From 54a6ad8f877c2ad5f65ec54b3f9df27817d017f4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Apr 2020 19:17:32 -0700 Subject: [PATCH 550/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b8d89f7..3554cf1 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1154,7 +1154,7 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 29 Apr 2020 20:56:06 -0700 Subject: [PATCH 551/691] Update frame_bootcamp.sh --- scripts/frame_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 5c5d7f0..7d3bc9b 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -134,7 +134,7 @@ case ${1} in export ISO_IMAGES=(\ FrameCCA-2.1.0.iso \ FrameCCA-2.1.6.iso \ - FrameGuestAgentInstaller_1.0.2.2_7930.iso \ + FrameGuestAgentInstaller_1.0.2.7.iso \ Nutanix-VirtIO-1.1.5.iso \ ) From ee44d19a1758d383733faa02d089fed1ec2ad82c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 30 Apr 2020 13:41:14 -0700 Subject: [PATCH 552/691] Removed Objects Darksite Deploy --- scripts/lib.pc.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3554cf1..c67ce00 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -313,16 +313,16 @@ function object_store() { # Enable Dark Site Repo 
and wait 3 seconds - mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} - sleep 3 + #mspctl airgap --enable --lcm-server=${OBJECTS_OFFLINE_REPO} + #sleep 3 # Confirm airgap is enabled - _response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) + #_response=$(mspctl airgap --status | grep "\"enable\":true" | wc -l) - if [ $_response -eq 1 ]; then - log "Objects dark site staging successfully enabled. Response is $_response. " - else - log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." - fi + #if [ $_response -eq 1 ]; then + # log "Objects dark site staging successfully enabled. Response is $_response. " + #else + # log "Objects failed to enable dark site staging. Will use standard WAN download (this will take longer). Response is $_response." + #fi # Payload for the _json_data _json_data='{"kind":"subnet"}' From 9b30d4b0a405bd9715bf57f77778798a8a3e8e1a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 30 Apr 2020 16:12:07 -0700 Subject: [PATCH 553/691] Update lib.pc.sh --- scripts/lib.pc.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index c67ce00..e627134 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1015,6 +1015,32 @@ EOF log "Created Primary-MSSQL-NETWORK Network Profile with ID |${_primary_network_profile_id}|" +## Create the Primary_ORACLE_NETWORKNetwork Profile inside Era ## +log "Create the Primary_PGSQL_NETWORK Network Profile" + +HTTP_JSON_BODY=$(cat < Date: Thu, 30 Apr 2020 17:09:09 -0700 Subject: [PATCH 554/691] Updates for release --- scripts/all_bootcamp.sh | 185 +++++++++++++++++++++++++++++++ scripts/privatecloud_bootcamp.sh | 11 -- stage_workshop.sh | 17 ++- 3 files changed, 197 insertions(+), 16 deletions(-) create mode 100755 scripts/all_bootcamp.sh diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh new file mode 100755 index 0000000..a1ae0ab --- 
/dev/null +++ b/scripts/all_bootcamp.sh @@ -0,0 +1,185 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoAD' + export PrismOpsServer='GTSPrismOpsLabUtilityServer' + export SeedPC='GTSseedPC.zp' + export NW2_DHCP_START="${IPV4_PREFIX}.132" + export NW2_DHCP_END="${IPV4_PREFIX}.219" + export _external_nw_name="${1}" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 + && deploy_era \ + && deploy_mssql \ + && deploy_oracle_19c + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" 
+ log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + deploy_peer_mgmt_server "${PMC}" \ + && deploy_peer_agent_server "${AGENTA}" \ + && deploy_peer_agent_server "${AGENTB}" + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + export BUCKETS_VIP="${IPV4_PREFIX}.17" + export OBJECTS_NW_START="${IPV4_PREFIX}.18" + export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export _prio_images_arr=(\ + Windows2016.qcow2 \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + ) + + export QCOW2_IMAGES=(\ + CentOS7.qcow2 \ + Win10v1903.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ + veeam/VeeamAHVProxy2.0.404.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + veeam/VBR_10.0.0.4442.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && karbon_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && configure_era \ + && upload_citrix_calm_blueprint \ + && sleep 30 \ + && images \ + && seedPC \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh index 8cadd10..88af6b8 100755 --- a/scripts/privatecloud_bootcamp.sh +++ b/scripts/privatecloud_bootcamp.sh @@ -18,17 +18,6 @@ case ${1} in PE | pe ) . 
lib.pe.sh - #export PC_DEV_VERSION='5.10.2' - #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' - #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' - #export FILES_VERSION='3.2.0.1' - #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - export AUTH_SERVER='AutoAD' export PrismOpsServer='GTSPrismOpsLabUtilityServer' export SeedPC='GTSseedPC.zp' diff --git a/stage_workshop.sh b/stage_workshop.sh index 66066fd..65c13bf 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,10 +11,6 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ @@ -26,11 +22,17 @@ WORKSHOPS=(\ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ 
+"Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ + "Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ +#"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +#"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ +#"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ #"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ @@ -63,7 +65,12 @@ function stage_clusters() { # Map workshop to staging script(s) and libraries, # _pe_launch will be executed on PE - if (( $(echo ${_workshop} | grep -i "^Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Stage-All Bootcamps" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='all_bootcamp.sh' + _pc_launch=${_pe_launch} + fi + if (( $(echo ${_workshop} | grep -i "^Legacy Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='bootcamp.sh' _pc_launch=${_pe_launch} From b43c2a36afa69905141f658edab4d9e6eeb1bc41 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 30 Apr 2020 19:33:35 -0700 Subject: [PATCH 555/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index a1ae0ab..f90160d 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -42,7 +42,7 @@ case ${1} in && file_analytics_install \ && sleep 30 \ && 
create_file_analytics_server \ - && sleep 30 + && sleep 30 \ && deploy_era \ && deploy_mssql \ && deploy_oracle_19c From f723c39facec9da84e7a5715a897081ee19a0775 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 30 Apr 2020 20:40:01 -0700 Subject: [PATCH 556/691] Updates --- scripts/all_bootcamp.sh | 3 ++- scripts/era_bootcamp.sh | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index f90160d..cb81ecc 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -31,7 +31,8 @@ case ${1} in dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ && pe_init \ - && network_configure \ + && create_era_container \ + && era_network_configure \ && authentication_source \ && pe_auth \ && prism_pro_server_deploy \ diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index ab35c9a..7615065 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -31,7 +31,7 @@ case ${1} in && pe_license \ && pe_init \ && create_era_container \ - && era_network_configure\ + && era_network_configure \ && authentication_source \ && pe_auth \ && deploy_era \ From 4751ccebaf142971cf11a6e26d20ae8148cd6b1f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 12 May 2020 15:53:31 -0700 Subject: [PATCH 557/691] updates for Eramanaged on 10.138 --- scripts/global.vars.sh | 1 - scripts/lib.pe.sh | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 24ffebd..36065d2 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -185,7 +185,6 @@ case "${OCTET[3]}" in NW2_DHCP_END="${IPV4_PREFIX}.253" NW3_NAME='EraManaged' - NW3_VLAN=${NW2_VLAN} NW3_NETMASK='255.255.255.128' NW3_START="${IPV4_PREFIX}.220" NW3_END="${IPV4_PREFIX}.253" diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index f79dad9..2c69b59 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -614,7 +614,7 @@ function 
era_network_configure() { if [[ ! -z "${NW3_NAME}" ]]; then log "Create EraManaged network: Name: ${NW3_NAME}, VLAN: ${NW3_VLAN}" - acli "net.create ${NW3_NAME} vlan=${NW3_VLAN}" + acli "net.create ${NW3_NAME} vlan=${NW2_VLAN}" fi fi From 6e645e593d9d7f4bccd0679892f634a06386e517 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 15 May 2020 15:01:37 -0700 Subject: [PATCH 558/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 2c69b59..53ce0fd 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1160,8 +1160,6 @@ acli "vm.on ${ERAServerName}" function deploy_mssql() { - num_sql_vms=3 - if (( $(source /etc/profile.d/nutanix_env.sh && acli image.list | grep ${MSSQL_SourceVM_Image} | wc --lines) == 0 )); then log "Import ${MSSQL_SourceVM_Image} image from ${QCOW2_REPOS}..." From 91ab4a08fd6f2427446fc21527f228f38b70a356 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 18 May 2020 13:54:22 -0700 Subject: [PATCH 559/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index cb81ecc..fa4b7b6 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -153,6 +153,7 @@ case ${1} in && pc_project \ && object_store \ && karbon_image_download \ + && priority_images \ && flow_enable \ && pc_cluster_img_import \ && configure_era \ From 0a7ed07174dea7ef8ca4b1beda8d8c9450e02945 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 21 May 2020 21:45:42 -0700 Subject: [PATCH 560/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 36065d2..37c60dc 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -354,7 +354,7 @@ case "${OCTET[0]}.${OCTET[1]}" in #'https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64' \ ) SSHPASS_REPOS=(\ - 
'http://10.136.239.131/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ + 'http://10.136.239.13/workshop_staging/sshpass-1.06-2.el7.x86_64.rpm' \ #'http://mirror.centos.org/centos/7/extras/x86_64/Packages/sshpass-1.06-2.el7.x86_64.rpm' \ ) QCOW2_REPOS=(\ From 84fcdfa2de4109cad154ecdb2bfbe5a68f2786d4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 12:26:45 -0700 Subject: [PATCH 561/691] Updates for SNC --- bootstrap.sh | 0 documentation/autodc/add_group_and_users.sh | 0 hol_stageworkshop.sh | 0 hooks/autohook.sh | 0 hooks/pre-commit/01-release | 6 +- hooks/scripts/semver_release.sh | 0 scripts/all_bootcamp.sh | 2 +- scripts/app_modernization_bootcamp.sh | 0 scripts/basic_bootcamp.sh | 0 scripts/bootcamp.sh | 0 scripts/calm_bootcamp.sh | 0 scripts/cicd_bootcamp.sh | 0 scripts/citrix_bootcamp.sh | 2 +- scripts/dev_privatecloud_bootcamp.sh | 2 +- scripts/era_bootcamp.sh | 0 scripts/era_mssql_bootcamp.sh | 0 scripts/era_oracle_bootcamp.sh | 0 scripts/era_postgres_bootcamp.sh | 0 scripts/files_bootcamp.sh | 2 +- scripts/frame_bootcamp.sh | 2 +- scripts/global.vars.sh | 74 +++++++++++++-------- scripts/images_only.sh | 0 scripts/lib.common.sh | 0 scripts/lib.pc.org.sh | 0 scripts/lib.pc.sh | 0 scripts/lib.pe.sh | 3 +- scripts/lib.shell-convenience.sh | 0 scripts/localhost.sh | 0 scripts/privatecloud_bootcamp.sh | 2 +- scripts/snc_bootcamp.sh | 0 scripts/snc_ts2020.sh | 0 scripts/splunk_bootcamp.sh | 2 +- scripts/ts2019.sh | 0 scripts/ts2020.sh | 0 scripts/vmdisk2image-pc.sh | 0 scripts/we-lib.common.sh | 0 scripts/we-ts2019.sh | 0 stage_workshop.sh | 6 +- sync_upstream.sh | 0 test/objects/object_airgap.sh | 0 test/repo_source.sh | 0 41 files changed, 64 insertions(+), 39 deletions(-) mode change 100755 => 100644 bootstrap.sh mode change 100755 => 100644 documentation/autodc/add_group_and_users.sh mode change 100755 => 100644 hol_stageworkshop.sh mode change 100755 => 100644 hooks/autohook.sh mode change 120000 => 100644 hooks/pre-commit/01-release mode 
change 100755 => 100644 hooks/scripts/semver_release.sh mode change 100755 => 100644 scripts/all_bootcamp.sh mode change 100755 => 100644 scripts/app_modernization_bootcamp.sh mode change 100755 => 100644 scripts/basic_bootcamp.sh mode change 100755 => 100644 scripts/bootcamp.sh mode change 100755 => 100644 scripts/calm_bootcamp.sh mode change 100755 => 100644 scripts/cicd_bootcamp.sh mode change 100755 => 100644 scripts/citrix_bootcamp.sh mode change 100755 => 100644 scripts/dev_privatecloud_bootcamp.sh mode change 100755 => 100644 scripts/era_bootcamp.sh mode change 100755 => 100644 scripts/era_mssql_bootcamp.sh mode change 100755 => 100644 scripts/era_oracle_bootcamp.sh mode change 100755 => 100644 scripts/era_postgres_bootcamp.sh mode change 100755 => 100644 scripts/files_bootcamp.sh mode change 100755 => 100644 scripts/frame_bootcamp.sh mode change 100755 => 100644 scripts/global.vars.sh mode change 100755 => 100644 scripts/images_only.sh mode change 100755 => 100644 scripts/lib.common.sh mode change 100755 => 100644 scripts/lib.pc.org.sh mode change 100755 => 100644 scripts/lib.pc.sh mode change 100755 => 100644 scripts/lib.pe.sh mode change 100755 => 100644 scripts/lib.shell-convenience.sh mode change 100755 => 100644 scripts/localhost.sh mode change 100755 => 100644 scripts/privatecloud_bootcamp.sh mode change 100755 => 100644 scripts/snc_bootcamp.sh mode change 100755 => 100644 scripts/snc_ts2020.sh mode change 100755 => 100644 scripts/splunk_bootcamp.sh mode change 100755 => 100644 scripts/ts2019.sh mode change 100755 => 100644 scripts/ts2020.sh mode change 100755 => 100644 scripts/vmdisk2image-pc.sh mode change 100755 => 100644 scripts/we-lib.common.sh mode change 100755 => 100644 scripts/we-ts2019.sh mode change 100755 => 100644 stage_workshop.sh mode change 100755 => 100644 sync_upstream.sh mode change 100755 => 100644 test/objects/object_airgap.sh mode change 100755 => 100644 test/repo_source.sh diff --git a/bootstrap.sh b/bootstrap.sh old mode 100755 
new mode 100644 diff --git a/documentation/autodc/add_group_and_users.sh b/documentation/autodc/add_group_and_users.sh old mode 100755 new mode 100644 diff --git a/hol_stageworkshop.sh b/hol_stageworkshop.sh old mode 100755 new mode 100644 diff --git a/hooks/autohook.sh b/hooks/autohook.sh old mode 100755 new mode 100644 diff --git a/hooks/pre-commit/01-release b/hooks/pre-commit/01-release deleted file mode 120000 index d5316c0..0000000 --- a/hooks/pre-commit/01-release +++ /dev/null @@ -1 +0,0 @@ -../scripts/semver_release.sh \ No newline at end of file diff --git a/hooks/pre-commit/01-release b/hooks/pre-commit/01-release new file mode 100644 index 0000000..05eb028 --- /dev/null +++ b/hooks/pre-commit/01-release @@ -0,0 +1,5 @@ +XSym +0028 +5863f36caffa453eb1b2b296b337da2a +../scripts/semver_release.sh + \ No newline at end of file diff --git a/hooks/scripts/semver_release.sh b/hooks/scripts/semver_release.sh old mode 100755 new mode 100644 diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh old mode 100755 new mode 100644 index fa4b7b6..fda24e5 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -38,7 +38,7 @@ case ${1} in && prism_pro_server_deploy \ && files_install \ && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && file_analytics_install \ && sleep 30 \ diff --git a/scripts/app_modernization_bootcamp.sh b/scripts/app_modernization_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/basic_bootcamp.sh b/scripts/basic_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/calm_bootcamp.sh b/scripts/calm_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/cicd_bootcamp.sh b/scripts/cicd_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh old mode 100755 new mode 100644 
index 4d9f722..03e44a0 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -34,7 +34,7 @@ case ${1} in && prism_pro_server_deploy \ && files_install \ && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && file_analytics_install \ && sleep 30 \ diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh old mode 100755 new mode 100644 index 0da029a..a421ade --- a/scripts/dev_privatecloud_bootcamp.sh +++ b/scripts/dev_privatecloud_bootcamp.sh @@ -48,7 +48,7 @@ case ${1} in && prism_pro_server_deploy \ && files_install \ && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && file_analytics_install \ && sleep 30 \ diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh old mode 100755 new mode 100644 index 199f6bd..af22b73 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -34,7 +34,7 @@ case ${1} in && prism_pro_server_deploy \ && files_install \ && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && file_analytics_install \ && sleep 30 \ diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh old mode 100755 new mode 100644 index 7d3bc9b..3d7953b --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -73,7 +73,7 @@ case ${1} in && prism_pro_server_deploy \ && files_install \ && sleep 30 \ - && 
create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && file_analytics_install \ && sleep 30 \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh old mode 100755 new mode 100644 index 37c60dc..7780630 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,11 +2,11 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.17' +PC_DEV_VERSION='5.17.0.2' PC_CURRENT_VERSION='5.16.1.2' PC_STABLE_VERSION='5.11.2.1' -FILES_VERSION='3.6.1.2' -FILE_ANALYTICS_VERSION='2.1.0' +FILES_VERSION='3.6.3' +FILE_ANALYTICS_VERSION='2.1.1.1' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -147,6 +147,11 @@ case "${OCTET[3]}" in NW1_GATEWAY="${IPV4_PREFIX}.1" NW1_DHCP_START="${IPV4_PREFIX}.38" NW1_DHCP_END="${IPV4_PREFIX}.58" + NW2_NAME='' + NW2_VLAN='' + NW2_SUBNET='' + NW2_DHCP_START='' + NW2_DHCP_END='' ;; 71 ) # We are in Partition 2 @@ -154,6 +159,11 @@ case "${OCTET[3]}" in NW1_GATEWAY="${IPV4_PREFIX}.65" NW1_DHCP_START="${IPV4_PREFIX}.102" NW1_DHCP_END="${IPV4_PREFIX}.122" + NW2_NAME='' + NW2_VLAN='' + NW2_SUBNET='' + NW2_DHCP_START='' + NW2_DHCP_END='' ;; 135 ) # We are in Partition 3 @@ -161,6 +171,11 @@ case "${OCTET[3]}" in NW1_GATEWAY="${IPV4_PREFIX}.129" NW1_DHCP_START="${IPV4_PREFIX}.166" NW1_DHCP_END="${IPV4_PREFIX}.186" + NW2_NAME='' + NW2_VLAN='' + NW2_SUBNET='' + NW2_DHCP_START='' + NW2_DHCP_END='' ;; 199 ) # We are in Partition 4 @@ -168,6 +183,11 @@ case "${OCTET[3]}" in NW1_GATEWAY="${IPV4_PREFIX}.193" NW1_DHCP_START="${IPV4_PREFIX}.230" NW1_DHCP_END="${IPV4_PREFIX}.250" + NW2_NAME='' + NW2_VLAN='' + NW2_SUBNET='' + NW2_DHCP_START='' + NW2_DHCP_END='' ;; @@ -216,16 +236,16 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' - 
PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.17-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.2.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.1.2.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.3.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' + FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -255,16 +275,16 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.2.json' + 
PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.3.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -294,16 +314,16 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.2.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' 
PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.1.2.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.3.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -338,16 +358,16 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/euphrates-5.17-stable-prism_central-metadata.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/euphrates-5.17-stable-prism_central.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.2.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' 
PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.6.1.2.json' - FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.1.2-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0-metadata.json' - FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.6-release-2.1.0.qcow2' + FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.6.3.json' + FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' + FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ 'http://10.136.239.13/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ diff --git a/scripts/images_only.sh b/scripts/images_only.sh old mode 100755 new mode 100644 diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh old mode 100755 new mode 100644 diff --git a/scripts/lib.pc.org.sh b/scripts/lib.pc.org.sh old mode 100755 new mode 100644 diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh old mode 100755 new mode 100644 diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh old mode 100755 new mode 100644 index 53ce0fd..ba84b0a --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -427,7 +427,8 @@ function create_file_analytics_server() { #_nw_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Secondary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") - _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user 
${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Secondary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + #_nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Secondary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _nw_uuid=$(curl ${CURL_HTTP_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data '{"kind":"subnet","filter": "name==Primary"}' 'https://localhost:9440/api/nutanix/v3/subnets/list' | jq -r '.entities[] | .metadata.uuid' | tr -d \") # Get the Container UUIDs log "Get ${STORAGE_DEFAULT} Container UUID" diff --git a/scripts/lib.shell-convenience.sh b/scripts/lib.shell-convenience.sh old mode 100755 new mode 100644 diff --git a/scripts/localhost.sh b/scripts/localhost.sh old mode 100755 new mode 100644 diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh old mode 100755 new mode 100644 index 88af6b8..e51075c --- a/scripts/privatecloud_bootcamp.sh +++ b/scripts/privatecloud_bootcamp.sh @@ -36,7 +36,7 @@ case ${1} in && prism_pro_server_deploy \ && files_install \ && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && file_analytics_install \ && sleep 30 \ diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh old mode 100755 new mode 100644 diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh old mode 100755 new mode 100644 diff --git a/scripts/splunk_bootcamp.sh b/scripts/splunk_bootcamp.sh old mode 100755 new mode 100644 index cd8fc7d..3f93f62 --- a/scripts/splunk_bootcamp.sh +++ b/scripts/splunk_bootcamp.sh @@ -33,7 +33,7 @@ case ${1} in && pe_auth \ && files_install \ && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW2_NAME}" \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ && sleep 30 \ && 
file_analytics_install \ && sleep 30 \ diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh old mode 100755 new mode 100644 diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh old mode 100755 new mode 100644 diff --git a/scripts/vmdisk2image-pc.sh b/scripts/vmdisk2image-pc.sh old mode 100755 new mode 100644 diff --git a/scripts/we-lib.common.sh b/scripts/we-lib.common.sh old mode 100755 new mode 100644 diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh old mode 100755 new mode 100644 diff --git a/stage_workshop.sh b/stage_workshop.sh old mode 100755 new mode 100644 index 65c13bf..fa4af4b --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -12,7 +12,7 @@ begin # - PC #.# WORKSHOPS=(\ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ +"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ @@ -27,7 +27,7 @@ WORKSHOPS=(\ "Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ +"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ #"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ @@ -55,7 +55,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 5.17" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.17.0.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" diff --git a/sync_upstream.sh b/sync_upstream.sh old mode 100755 new mode 100644 diff --git a/test/objects/object_airgap.sh b/test/objects/object_airgap.sh old mode 100755 new mode 100644 diff --git a/test/repo_source.sh b/test/repo_source.sh old mode 100755 new mode 100644 From c36b122509f0be0aebeb904a52ba33039d433637 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 12:46:02 -0700 Subject: [PATCH 562/691] PrismOps and SeedPC --- scripts/privatecloud_bootcamp.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh index e51075c..6891a82 100644 --- a/scripts/privatecloud_bootcamp.sh +++ b/scripts/privatecloud_bootcamp.sh @@ -19,8 +19,8 @@ case ${1} in . 
lib.pe.sh export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' + export PrismOpsServer='PrismOpsServer517latest' + export SeedPC='seedPC517latest.zip' export _external_nw_name="${1}" From b5334fb090ee8f02c4c2a6b6ff25f7d2e4687742 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 12:49:17 -0700 Subject: [PATCH 563/691] Updates for SeedPC --- scripts/citrix_bootcamp.sh | 2 +- scripts/dev_privatecloud_bootcamp.sh | 2 +- scripts/files_bootcamp.sh | 2 +- scripts/frame_bootcamp.sh | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 03e44a0..3799878 100644 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -20,7 +20,7 @@ case ${1} in export AUTH_SERVER='AutoAD' export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' + export SeedPC='GTSseedPC.zip' args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh index a421ade..e990978 100644 --- a/scripts/dev_privatecloud_bootcamp.sh +++ b/scripts/dev_privatecloud_bootcamp.sh @@ -31,7 +31,7 @@ case ${1} in export AUTH_SERVER='AutoAD' export PrismOpsServer='PrismOpsServer517' - export SeedPC='seedPC517.zp' + export SeedPC='seedPC517.zip' export _external_nw_name="${1}" diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index af22b73..aa6e96e 100644 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -20,7 +20,7 @@ case ${1} in export AUTH_SERVER='AutoAD' export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' + export SeedPC='GTSseedPC.zip' args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 3d7953b..a64d332 100644 --- 
a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -20,7 +20,7 @@ case ${1} in export AUTH_SERVER='AutoAD' export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' + export SeedPC='GTSseedPC.zip' # Networking needs for Frame Bootcamp export NW2_DHCP_START="${IPV4_PREFIX}.132" From 79fbfdb5630ba7a85953ace6fa04cc38ca262c58 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 12:51:44 -0700 Subject: [PATCH 564/691] exacutable --- bootstrap.sh | 0 hol_stageworkshop.sh | 0 scripts/all_bootcamp.sh | 0 scripts/app_modernization_bootcamp.sh | 0 scripts/basic_bootcamp.sh | 0 scripts/bootcamp.sh | 0 scripts/calm_bootcamp.sh | 0 scripts/cicd_bootcamp.sh | 0 scripts/citrix_bootcamp.sh | 0 scripts/dev_privatecloud_bootcamp.sh | 0 scripts/era_bootcamp.sh | 0 scripts/era_mssql_bootcamp.sh | 0 scripts/era_oracle_bootcamp.sh | 0 scripts/era_postgres_bootcamp.sh | 0 scripts/files_bootcamp.sh | 0 scripts/frame_bootcamp.sh | 0 scripts/global.vars.sh | 0 scripts/images_only.sh | 0 scripts/lib.common.sh | 0 scripts/lib.pc.org.sh | 0 scripts/lib.pc.sh | 0 scripts/lib.pe.sh | 0 scripts/lib.shell-convenience.sh | 0 scripts/localhost.sh | 0 scripts/privatecloud_bootcamp.sh | 0 scripts/snc_bootcamp.sh | 0 scripts/snc_ts2020.sh | 0 scripts/splunk_bootcamp.sh | 0 scripts/ts2019.sh | 0 scripts/ts2020.sh | 0 scripts/vmdisk2image-pc.sh | 0 scripts/we-lib.common.sh | 0 scripts/we-ts2019.sh | 0 stage_workshop.sh | 0 sync_upstream.sh | 0 we_push_centos_cl_disk.sh | 0 we_stage_workshop.sh | 0 37 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 bootstrap.sh mode change 100644 => 100755 hol_stageworkshop.sh mode change 100644 => 100755 scripts/all_bootcamp.sh mode change 100644 => 100755 scripts/app_modernization_bootcamp.sh mode change 100644 => 100755 scripts/basic_bootcamp.sh mode change 100644 => 100755 scripts/bootcamp.sh mode change 100644 => 100755 scripts/calm_bootcamp.sh mode change 100644 => 100755 
scripts/cicd_bootcamp.sh mode change 100644 => 100755 scripts/citrix_bootcamp.sh mode change 100644 => 100755 scripts/dev_privatecloud_bootcamp.sh mode change 100644 => 100755 scripts/era_bootcamp.sh mode change 100644 => 100755 scripts/era_mssql_bootcamp.sh mode change 100644 => 100755 scripts/era_oracle_bootcamp.sh mode change 100644 => 100755 scripts/era_postgres_bootcamp.sh mode change 100644 => 100755 scripts/files_bootcamp.sh mode change 100644 => 100755 scripts/frame_bootcamp.sh mode change 100644 => 100755 scripts/global.vars.sh mode change 100644 => 100755 scripts/images_only.sh mode change 100644 => 100755 scripts/lib.common.sh mode change 100644 => 100755 scripts/lib.pc.org.sh mode change 100644 => 100755 scripts/lib.pc.sh mode change 100644 => 100755 scripts/lib.pe.sh mode change 100644 => 100755 scripts/lib.shell-convenience.sh mode change 100644 => 100755 scripts/localhost.sh mode change 100644 => 100755 scripts/privatecloud_bootcamp.sh mode change 100644 => 100755 scripts/snc_bootcamp.sh mode change 100644 => 100755 scripts/snc_ts2020.sh mode change 100644 => 100755 scripts/splunk_bootcamp.sh mode change 100644 => 100755 scripts/ts2019.sh mode change 100644 => 100755 scripts/ts2020.sh mode change 100644 => 100755 scripts/vmdisk2image-pc.sh mode change 100644 => 100755 scripts/we-lib.common.sh mode change 100644 => 100755 scripts/we-ts2019.sh mode change 100644 => 100755 stage_workshop.sh mode change 100644 => 100755 sync_upstream.sh mode change 100644 => 100755 we_push_centos_cl_disk.sh mode change 100644 => 100755 we_stage_workshop.sh diff --git a/bootstrap.sh b/bootstrap.sh old mode 100644 new mode 100755 diff --git a/hol_stageworkshop.sh b/hol_stageworkshop.sh old mode 100644 new mode 100755 diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/app_modernization_bootcamp.sh b/scripts/app_modernization_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/basic_bootcamp.sh 
b/scripts/basic_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/bootcamp.sh b/scripts/bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/calm_bootcamp.sh b/scripts/calm_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/cicd_bootcamp.sh b/scripts/cicd_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh old mode 100644 new mode 100755 diff --git a/scripts/images_only.sh b/scripts/images_only.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.pc.org.sh b/scripts/lib.pc.org.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh old mode 100644 new mode 100755 diff --git a/scripts/lib.shell-convenience.sh b/scripts/lib.shell-convenience.sh old mode 100644 new mode 100755 diff --git a/scripts/localhost.sh b/scripts/localhost.sh old mode 100644 new mode 100755 diff --git a/scripts/privatecloud_bootcamp.sh 
b/scripts/privatecloud_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/snc_bootcamp.sh b/scripts/snc_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/snc_ts2020.sh b/scripts/snc_ts2020.sh old mode 100644 new mode 100755 diff --git a/scripts/splunk_bootcamp.sh b/scripts/splunk_bootcamp.sh old mode 100644 new mode 100755 diff --git a/scripts/ts2019.sh b/scripts/ts2019.sh old mode 100644 new mode 100755 diff --git a/scripts/ts2020.sh b/scripts/ts2020.sh old mode 100644 new mode 100755 diff --git a/scripts/vmdisk2image-pc.sh b/scripts/vmdisk2image-pc.sh old mode 100644 new mode 100755 diff --git a/scripts/we-lib.common.sh b/scripts/we-lib.common.sh old mode 100644 new mode 100755 diff --git a/scripts/we-ts2019.sh b/scripts/we-ts2019.sh old mode 100644 new mode 100755 diff --git a/stage_workshop.sh b/stage_workshop.sh old mode 100644 new mode 100755 diff --git a/sync_upstream.sh b/sync_upstream.sh old mode 100644 new mode 100755 diff --git a/we_push_centos_cl_disk.sh b/we_push_centos_cl_disk.sh old mode 100644 new mode 100755 diff --git a/we_stage_workshop.sh b/we_stage_workshop.sh old mode 100644 new mode 100755 From a86b2114cd291e45254ec20397c99d0fde145f10 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 15:07:59 -0700 Subject: [PATCH 565/691] adding Leap Add-On Bootcamp Staging --- scripts/leap_addon_bootcamp.sh | 145 +++++++++++++++++++++++++++++++++ stage_workshop.sh | 6 ++ 2 files changed, 151 insertions(+) create mode 100755 scripts/leap_addon_bootcamp.sh diff --git a/scripts/leap_addon_bootcamp.sh b/scripts/leap_addon_bootcamp.sh new file mode 100755 index 0000000..ae50ab7 --- /dev/null +++ b/scripts/leap_addon_bootcamp.sh @@ -0,0 +1,145 @@ +#!/usr/bin/env bash + #-x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. 
global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export _external_nw_name="${1}" + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && flow_enable \ + && pc_cluster_img_import \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index fa4af4b..9912df9 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -13,6 +13,7 @@ begin WORKSHOPS=(\ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ "Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ +"Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ @@ -90,6 +91,11 @@ function stage_clusters() { _pe_launch='privatecloud_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Leap Add-On" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='leap_addon_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Databases Era -Stage All- Bootcamp" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='era_bootcamp.sh' From 41582042c88db046a33daa3b3e356ff63cccb1a1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 16:22:09 -0700 Subject: [PATCH 566/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 7780630..29bf9c5 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -126,7 +126,7 @@ DATA_SERVICE_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 1)) PC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 2)) FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22)) PrismOpsServer_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 5))" -ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 15)) +ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 7)) CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) DNS_SERVERS='8.8.8.8' 
NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' From 4fcc777b1bc2661d208a5deddffb23b79c96c18a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 17:26:29 -0700 Subject: [PATCH 567/691] Updates to allow for SNC --- scripts/lib.pe.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index ba84b0a..387782c 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1174,7 +1174,7 @@ function deploy_mssql() { acli "vm.create ${MSSQL_SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" acli "vm.disk_create ${MSSQL_SourceVM} clone_from_image=${MSSQL_SourceVM_Image1}" acli "vm.disk_create ${MSSQL_SourceVM} clone_from_image=${MSSQL_SourceVM_Image2}" - acli "vm.nic_create ${MSSQL_SourceVM} network=${NW2_NAME}" + acli "vm.nic_create ${MSSQL_SourceVM} network=${NW1_NAME}" echo "## ${MSSQL_SourceVM} - Powering On ##" acli "vm.on ${MSSQL_SourceVM}" echo "## SQLVM_Creation_COMPLETE ##" @@ -1213,7 +1213,7 @@ function deploy_oracle_12c() { acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image4}" acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image5}" acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image6}" - acli "vm.nic_create ${Oracle_12c_SourceVM} network=${NW2_NAME}" + acli "vm.nic_create ${Oracle_12c_SourceVM} network=${NW1_NAME}" echo "## ${Oracle_12c_SourceVM} - Powering On ##" acli "vm.on ${Oracle_12c_SourceVM}" echo "### Oracle12cVM_Creation_COMPLETE ##" @@ -1255,7 +1255,7 @@ function deploy_oracle_19c() { acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image7}" acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image8}" acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image9}" - acli "vm.nic_create ${Oracle_19c_SourceVM} network=${NW2_NAME}" + acli 
"vm.nic_create ${Oracle_19c_SourceVM} network=${NW1_NAME}" echo "## ${Oracle_19c_SourceVM} - Powering On ##" acli "vm.on ${Oracle_19c_SourceVM}" echo "### Oracle19cVM_Creation_COMPLETE ##" @@ -1291,7 +1291,7 @@ function deploy_peer_mgmt_server() { acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerMgmtServer}" # MTM TODO replace net1 with appropriate variable - acli "vm.nic_create ${VMNAME} network=${NW2_NAME}" + acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." acli "vm.on ${VMNAME}" @@ -1325,7 +1325,7 @@ function deploy_peer_agent_server() { acli "uhura.vm.create_with_customize ${VMNAME} num_vcpus=2 num_cores_per_vcpu=2 memory=4G sysprep_config_path=file:///home/nutanix/peer_staging/unattend_${VMNAME}.xml" acli "vm.disk_create ${VMNAME} clone_from_image=${PeerAgentServer}" # MTM TODO replace net1 with appropriate variable - acli "vm.nic_create ${VMNAME} network=${NW2_NAME}" + acli "vm.nic_create ${VMNAME} network=${NW1_NAME}" #log "Power on ${VMNAME} VM..." echo "${VMNAME} - Powering on..." 
acli "vm.on ${VMNAME}" From 3b4a6aeb05be0e677074dd1547c7db29b18dfce0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 21:04:04 -0700 Subject: [PATCH 568/691] Updates for SNC --- scripts/global.vars.sh | 39 ++++++++++++++++++++++++++++++--------- scripts/lib.pe.sh | 2 +- stage_workshop.sh | 28 ++++++++++++++-------------- 3 files changed, 45 insertions(+), 24 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 29bf9c5..9d8b287 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.17.0.2' +PC_DEV_VERSION='5.17.0.3' PC_CURRENT_VERSION='5.16.1.2' PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.6.3' @@ -42,6 +42,7 @@ ERAServerName='EraServer' ERA_USER="admin" ERA_PASSWORD="nutanix/4u" ERA_Default_PASSWORD="Nutanix/4u" +ERA_Container_RF="2" MSSQL_SourceVM="Win2016SQLSource" MSSQL_SourceVM_Image1="MSSQL_1" @@ -152,6 +153,11 @@ case "${OCTET[3]}" in NW2_SUBNET='' NW2_DHCP_START='' NW2_DHCP_END='' + NW3_NAME='' + NW3_NETMASK='' + NW3_START="" + NW3_END="" + ERA_Container_RF="1" ;; 71 ) # We are in Partition 2 @@ -164,6 +170,11 @@ case "${OCTET[3]}" in NW2_SUBNET='' NW2_DHCP_START='' NW2_DHCP_END='' + NW3_NAME='' + NW3_NETMASK='' + NW3_START="" + NW3_END="" + ERA_Container_RF="1" ;; 135 ) # We are in Partition 3 @@ -176,6 +187,11 @@ case "${OCTET[3]}" in NW2_SUBNET='' NW2_DHCP_START='' NW2_DHCP_END='' + NW3_NAME='' + NW3_NETMASK='' + NW3_START="" + NW3_END="" + ERA_Container_RF="1" ;; 199 ) # We are in Partition 4 @@ -188,6 +204,11 @@ case "${OCTET[3]}" in NW2_SUBNET='' NW2_DHCP_START='' NW2_DHCP_END='' + NW3_NAME='' + NW3_NETMASK='' + NW3_START="" + NW3_END="" + ERA_Container_RF="1" ;; @@ -236,8 +257,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.2.json' - 
PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' @@ -275,8 +296,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.2.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -314,8 +335,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.2.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' 
PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -358,8 +379,8 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.2.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.2-stable-prism_central.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 387782c..34a215e 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1121,7 +1121,7 @@ acli "vm.on ${PrismOpsServer}" function create_era_container() { log "Creating Era Storage Container" - ncli container create name="${STORAGE_ERA}" rf=2 sp-name="${STORAGE_POOL}" enable-compression=true compression-delay=60 + ncli container create name="${STORAGE_ERA}" rf="${ERA_Container_RF}" sp-name="${STORAGE_POOL}" enable-compression=true compression-delay=60 } diff --git a/stage_workshop.sh b/stage_workshop.sh index 9912df9..d80150f 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,19 +11,19 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ -"Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ -"Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = 
Current" \ -"Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Calm Bootcamp (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ -"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.16.1.2) = Current" \ +"Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ +"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV 5.17.0.3) = Development" \ +"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV 5.17.0.3) = Development" \ +"Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ +"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV 5.17.0.3) = Development" \ +"Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV 5.17.0.3) = Development" \ +"Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ +"Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ "Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ @@ -56,7 +56,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 5.17.0.2" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" From 415928e94ffaa6e517e58ce146b985b8bcdf5be5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 21:40:15 -0700 Subject: [PATCH 569/691] Update stage_workshop.sh --- stage_workshop.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index d80150f..fc86bb8 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -15,11 +15,11 @@ WORKSHOPS=(\ "Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV 5.17.0.3) = Development" \ -"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV 5.17.0.3) = Development" \ +"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ +"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ "Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV 5.17.0.3) = Development" \ -"Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV 5.17.0.3) = Development" \ +"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ From 5a52932437db37ec937978176fe1a0e2e1cc2372 
Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 21:46:03 -0700 Subject: [PATCH 570/691] Update stage_workshop.sh --- stage_workshop.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index fc86bb8..39e7eb2 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -56,11 +56,11 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? - if (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "^PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "^PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 48174b205c15c1bb6dbb6dbbfb09fcef552185b1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 21:46:49 -0700 Subject: [PATCH 571/691] Update stage_workshop.sh --- stage_workshop.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 39e7eb2..fc86bb8 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -56,11 +56,11 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "^PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "^PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "^PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From 40f04c275f425553cbcc07491764a1695cc497f6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 3 Jun 2020 21:47:29 -0700 Subject: [PATCH 572/691] Update stage_workshop.sh --- stage_workshop.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index fc86bb8..9395023 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -28,8 +28,8 @@ WORKSHOPS=(\ "Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.2) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17) = Development" \ +"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ #"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ From 7dfc8e44135437a6064582e4381123427cb63f44 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 4 Jun 2020 20:36:32 -0700 Subject: [PATCH 573/691] ERA SNC Updates --- scripts/global.vars.sh | 5 +++++ scripts/lib.pc.sh | 6 +++--- 2 files 
changed, 8 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 9d8b287..b07d66d 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -42,6 +42,7 @@ ERAServerName='EraServer' ERA_USER="admin" ERA_PASSWORD="nutanix/4u" ERA_Default_PASSWORD="Nutanix/4u" +ERA_NETWORK="Secondary" ERA_Container_RF="2" MSSQL_SourceVM="Win2016SQLSource" @@ -157,6 +158,7 @@ case "${OCTET[3]}" in NW3_NETMASK='' NW3_START="" NW3_END="" + ERA_NETWORK="Primary" ERA_Container_RF="1" ;; @@ -174,6 +176,7 @@ case "${OCTET[3]}" in NW3_NETMASK='' NW3_START="" NW3_END="" + ERA_NETWORK="Primary" ERA_Container_RF="1" ;; @@ -191,6 +194,7 @@ case "${OCTET[3]}" in NW3_NETMASK='' NW3_START="" NW3_END="" + ERA_NETWORK="Primary" ERA_Container_RF="1" ;; @@ -208,6 +212,7 @@ case "${OCTET[3]}" in NW3_NETMASK='' NW3_START="" NW3_END="" + ERA_NETWORK="Primary" ERA_Container_RF="1" ;; diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e627134..0e9961a 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1002,7 +1002,7 @@ HTTP_JSON_BODY=$(cat < Date: Thu, 4 Jun 2020 21:50:38 -0700 Subject: [PATCH 574/691] Update for Objects SNC --- scripts/global.vars.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index b07d66d..33e31f1 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -32,6 +32,12 @@ CURL_HTTP_OPTS="${CURL_POST_OPTS} --write-out %{http_code}" SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null' SSH_OPTS+=' -q' # -v' +#################################################### +# OBJECTS VARs +################################################### + + + #################################################### # Era VARs ################################################### @@ -130,6 +136,10 @@ FILE_ANALYTICS_HOST=${IPV4_PREFIX}.$((${OCTET[3]} - 22)) PrismOpsServer_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 5))" 
ERA_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 7)) CITRIX_DDC_HOST=${IPV4_PREFIX}.$((${OCTET[3]} + 8)) +BUCKETS_DNS_IP="${IPV4_PREFIX}.16" +BUCKETS_VIP="${IPV4_PREFIX}.17" +OBJECTS_NW_START="${IPV4_PREFIX}.18" +OBJECTS_NW_END="${IPV4_PREFIX}.21" DNS_SERVERS='8.8.8.8' NTP_SERVERS='0.us.pool.ntp.org,1.us.pool.ntp.org,2.us.pool.ntp.org,3.us.pool.ntp.org' SUBNET_MASK="255.255.255.128" @@ -160,6 +170,10 @@ case "${OCTET[3]}" in NW3_END="" ERA_NETWORK="Primary" ERA_Container_RF="1" + BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25)) + BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26)) + OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27)) + OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30)) ;; 71 ) # We are in Partition 2 @@ -178,6 +192,10 @@ case "${OCTET[3]}" in NW3_END="" ERA_NETWORK="Primary" ERA_Container_RF="1" + BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25)) + BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26)) + OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27)) + OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30)) ;; 135 ) # We are in Partition 3 @@ -196,6 +214,10 @@ case "${OCTET[3]}" in NW3_END="" ERA_NETWORK="Primary" ERA_Container_RF="1" + BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25)) + BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26)) + OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27)) + OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30)) ;; 199 ) # We are in Partition 4 @@ -214,6 +236,10 @@ case "${OCTET[3]}" in NW3_END="" ERA_NETWORK="Primary" ERA_Container_RF="1" + BUCKETS_DNS_IP=${IPV4_PREFIX}.$((${OCTET[3]} + 25)) + BUCKETS_VIP=${IPV4_PREFIX}.$((${OCTET[3]} + 26)) + OBJECTS_NW_START=${IPV4_PREFIX}.$((${OCTET[3]} + 27)) + OBJECTS_NW_END=${IPV4_PREFIX}.$((${OCTET[3]} + 30)) ;; From f9d1b75f60f067b9a8ecc37e67e2160906210d5f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 4 Jun 2020 21:54:44 -0700 Subject: [PATCH 575/691] Updates for Objects SNC --- scripts/all_bootcamp.sh | 8 ++++---- scripts/basic_bootcamp.sh | 8 ++++---- 
scripts/calm_bootcamp.sh | 8 ++++---- scripts/cicd_bootcamp.sh | 8 ++++---- scripts/citrix_bootcamp.sh | 8 ++++---- scripts/dev_privatecloud_bootcamp.sh | 8 ++++---- scripts/era_bootcamp.sh | 8 ++++---- scripts/era_mssql_bootcamp.sh | 8 ++++---- scripts/era_oracle_bootcamp.sh | 8 ++++---- scripts/era_postgres_bootcamp.sh | 8 ++++---- scripts/files_bootcamp.sh | 8 ++++---- scripts/frame_bootcamp.sh | 8 ++++---- scripts/privatecloud_bootcamp.sh | 8 ++++---- scripts/splunk_bootcamp.sh | 8 ++++---- 14 files changed, 56 insertions(+), 56 deletions(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index fda24e5..3514d7a 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -82,10 +82,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ Windows2016.qcow2 \ diff --git a/scripts/basic_bootcamp.sh b/scripts/basic_bootcamp.sh index 6343890..da45b26 100755 --- a/scripts/basic_bootcamp.sh +++ b/scripts/basic_bootcamp.sh @@ -62,10 +62,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/calm_bootcamp.sh b/scripts/calm_bootcamp.sh index 464e48f..dd39b23 100755 --- a/scripts/calm_bootcamp.sh +++ b/scripts/calm_bootcamp.sh @@ -62,10 +62,10 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/cicd_bootcamp.sh b/scripts/cicd_bootcamp.sh index 464e48f..dd39b23 100755 --- a/scripts/cicd_bootcamp.sh +++ b/scripts/cicd_bootcamp.sh @@ -62,10 +62,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 3799878..f133051 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -75,10 +75,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ Windows2016.qcow2 \ diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh index e990978..96bddd0 100755 --- a/scripts/dev_privatecloud_bootcamp.sh +++ b/scripts/dev_privatecloud_bootcamp.sh @@ -87,10 +87,10 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 7615065..b00c52f 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -72,10 +72,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ ERA-Server-build-1.2.1.qcow2 \ diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index c7ab00d..5ff4efa 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -71,10 +71,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ #ERA-Server-build-1.2.1.qcow2 \ diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index 5da1fc8..b57c9fd 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -72,10 +72,10 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ ERA-Server-build-1.2.1.qcow2 \ diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 2b268a4..99c1078 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -69,10 +69,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ ERA-Server-build-1.2.1.qcow2 \ diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index aa6e96e..3d513ed 100755 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -77,10 +77,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index a64d332..9493638 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -121,10 +121,10 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh index 6891a82..233e684 100755 --- a/scripts/privatecloud_bootcamp.sh +++ b/scripts/privatecloud_bootcamp.sh @@ -75,10 +75,10 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ diff --git a/scripts/splunk_bootcamp.sh b/scripts/splunk_bootcamp.sh index 3f93f62..08bb292 100755 --- a/scripts/splunk_bootcamp.sh +++ b/scripts/splunk_bootcamp.sh @@ -72,10 +72,10 @@ case ${1} in PC | pc ) . 
lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ From 9c8e05e5c6dd23ae283768c82291799b7ba6f49d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 7 Jun 2020 18:30:22 -0700 Subject: [PATCH 576/691] updates for Prod release of PC 5.17.0.3 --- release.json | 6 +++--- scripts/global.vars.sh | 2 +- stage_workshop.sh | 32 ++++++++++++++++---------------- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/release.json b/release.json index 81e8eb4..0ef8c0b 100644 --- a/release.json +++ b/release.json @@ -27,7 +27,7 @@ "CommitsSinceVersionSource": 14, "CommitsSinceVersionSourcePadded": "0014", "CommitDate": "2019-03-20", - "PrismCentralStable": "5.8.2", - "PrismCentralCurrent": "5.10.2", - "PrismCentralDev": "5.10.2" + "PrismCentralStable": "5.11.2.1", + "PrismCentralCurrent": "5.17.0.3", + "PrismCentralDev": "5.17.0.3" } diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 33e31f1..a2585d9 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,7 +3,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='5.17.0.3' -PC_CURRENT_VERSION='5.16.1.2' +PC_CURRENT_VERSION='5.17.0.3' PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.6.3' FILE_ANALYTICS_VERSION='2.1.1.1' diff --git a/stage_workshop.sh b/stage_workshop.sh index 9395023..40c9bd4 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,23 +11,23 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Leap Add-On Bootcamp 
(AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Development" \ -"Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ +"Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Cloud Native Application 
Modernization Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ -"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.16.1.2) = Current" \ +"Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ @@ -58,7 +58,7 @@ function stage_clusters() { # TODO: make WORKSHOPS and map a JSON configuration file? if (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.16.1.2" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" From 3fecc188f1994c26acd906a6799f468bcbad60c8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Sun, 7 Jun 2020 18:50:38 -0700 Subject: [PATCH 577/691] Updates for Prism Ops seedPC and Util Server --- scripts/all_bootcamp.sh | 2 -- scripts/citrix_bootcamp.sh | 2 -- scripts/dev_privatecloud_bootcamp.sh | 2 -- scripts/files_bootcamp.sh | 2 -- scripts/frame_bootcamp.sh | 2 -- scripts/global.vars.sh | 2 +- scripts/privatecloud_bootcamp.sh | 2 -- 7 files changed, 1 insertion(+), 13 deletions(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 3514d7a..7363f20 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -19,8 +19,6 @@ case ${1} in . 
lib.pe.sh export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" export _external_nw_name="${1}" diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index f133051..903393d 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -19,8 +19,6 @@ case ${1} in . lib.pe.sh export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zip' args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh index 96bddd0..e47f34c 100755 --- a/scripts/dev_privatecloud_bootcamp.sh +++ b/scripts/dev_privatecloud_bootcamp.sh @@ -30,8 +30,6 @@ case ${1} in #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' export AUTH_SERVER='AutoAD' - export PrismOpsServer='PrismOpsServer517' - export SeedPC='seedPC517.zip' export _external_nw_name="${1}" diff --git a/scripts/files_bootcamp.sh b/scripts/files_bootcamp.sh index 3d513ed..b771590 100755 --- a/scripts/files_bootcamp.sh +++ b/scripts/files_bootcamp.sh @@ -19,8 +19,6 @@ case ${1} in . lib.pe.sh export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zip' args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index 9493638..cb37cc8 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -19,8 +19,6 @@ case ${1} in . 
lib.pe.sh export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zip' # Networking needs for Frame Bootcamp export NW2_DHCP_START="${IPV4_PREFIX}.132" diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index a2585d9..02e239e 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -16,7 +16,7 @@ STORAGE_IMAGES='Images' STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 -PrismOpsServer='PrismProLabUtilityServer' +PrismOpsServer='PrismOpsServer' SeedPC='seedPC.zip' CALM_RSA_KEY_FILE='calm_rsa_key.env' diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh index 233e684..0005ae2 100755 --- a/scripts/privatecloud_bootcamp.sh +++ b/scripts/privatecloud_bootcamp.sh @@ -19,8 +19,6 @@ case ${1} in . lib.pe.sh export AUTH_SERVER='AutoAD' - export PrismOpsServer='PrismOpsServer517latest' - export SeedPC='seedPC517latest.zip' export _external_nw_name="${1}" From a876e083e69b20ed12c1d1792383f836c2396b5f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 9 Jun 2020 11:42:10 -0700 Subject: [PATCH 578/691] Update for Prism Ops Lab Utility server --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 02e239e..4d2e46c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -16,7 +16,7 @@ STORAGE_IMAGES='Images' STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 -PrismOpsServer='PrismOpsServer' +PrismOpsServer='PrismOpsLabUtilityServer' SeedPC='seedPC.zip' CALM_RSA_KEY_FILE='calm_rsa_key.env' From 427aed5c30e05c6c5bc426951898f21049942d86 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 10 Jun 2020 15:57:47 -0700 Subject: [PATCH 579/691] Updates for Auth --- scripts/lib.pc.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0e9961a..85fa9b1 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -458,8 +458,9 @@ EOF log "directories: 
_test=|${_test}|_http_body=|${_http_body}|" log "Add Role Mappings to Groups for PC logins (not projects, which are separate)..." - #TODO:20 hardcoded role mappings - for _group in 'SSP Admins' 'SSP Power Users' 'SSP Developers' 'SSP Basic Users'; do + #TODO:20 hardcoded role mapping + groups=('SSP Admins' 'SSP Developers' 'SSP Consumers' 'SSP Operators' 'SSP Custom' 'Bootcamp Users') + for _group in "${groups[@]}"; do _http_body=$(cat < Date: Wed, 10 Jun 2020 16:23:18 -0700 Subject: [PATCH 580/691] Update Bootcamp Infra Project - Project Admin --- scripts/lib.pc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 85fa9b1..95c61dc 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1316,7 +1316,7 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 10 Jun 2020 20:45:18 -0700 Subject: [PATCH 581/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 95c61dc..cb19f9e 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -459,7 +459,8 @@ EOF log "Add Role Mappings to Groups for PC logins (not projects, which are separate)..." 
#TODO:20 hardcoded role mapping - groups=('SSP Admins' 'SSP Developers' 'SSP Consumers' 'SSP Operators' 'SSP Custom' 'Bootcamp Users') + #groups=('SSP Admins' 'SSP Developers' 'SSP Consumers' 'SSP Operators' 'SSP Custom' 'Bootcamp Users') + groups=('SSP Admins') for _group in "${groups[@]}"; do _http_body=$(cat < Date: Tue, 16 Jun 2020 13:20:27 -0700 Subject: [PATCH 582/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 7363f20..698637b 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -100,6 +100,7 @@ case ${1} in ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ + Windows2016.iso \ veeam/VBR_10.0.0.4442.iso \ ) From ef26a108882c164216435ccdcc827ac61efd312b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 16 Jun 2020 13:23:25 -0700 Subject: [PATCH 583/691] Update lib.pc.sh --- scripts/lib.pc.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 0e9961a..c4fd161 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -424,6 +424,7 @@ function pc_auth() { local _http_body local _pc_version local _test + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " # TODO:50 FUTURE: pass AUTH_SERVER argument @@ -459,6 +460,10 @@ EOF log "Add Role Mappings to Groups for PC logins (not projects, which are separate)..." 
#TODO:20 hardcoded role mappings + + # Get Role UUID # + _role_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/roles/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"role","filter": "name==Prism Admin"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + for _group in 'SSP Admins' 'SSP Power Users' 'SSP Developers' 'SSP Basic Users'; do _http_body=$(cat < Date: Tue, 16 Jun 2020 21:20:10 -0700 Subject: [PATCH 584/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 698637b..e6fe11a 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -163,7 +163,6 @@ case ${1} in && prism_check 'PC' log "Non-blocking functions (in development) follow." - pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh From 156e4ff716bc21ad9f3c616ca06927c2876036f2 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 18 Jun 2020 07:27:51 -0700 Subject: [PATCH 585/691] Update stage_workshop.sh --- stage_workshop.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 40c9bd4..3e88811 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -14,20 +14,20 @@ WORKSHOPS=(\ "Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ -"Databases Era with MSSQL Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ -"Databases Era with Oracle Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ -"Databases Era with Postgres Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ -"Databases Era -Stage All- Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era with MSSQL Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era with Oracle Bootcamp (AOS 
5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era with Postgres Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Databases Era -Stage All- Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ -"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Cloud Native Application Modernization Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ -"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x/AHV PC 5.17.0.3) = Current" \ +"Cloud Native Application Modernization Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ +"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ "In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ From 987aa5e85e2d0d3a5693ea4fb7f75f9b74dbc6ce Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Wed, 24 Jun 2020 21:14:44 +0200 Subject: [PATCH 586/691] Test Karbon LCM Update issue --- scripts/calm_bootcamp_test.sh | 160 ++++++++++++++++++++++++++++++++++ stage_workshop.sh | 7 +- 2 files changed, 166 insertions(+), 1 deletion(-) create mode 100755 scripts/calm_bootcamp_test.sh diff --git a/scripts/calm_bootcamp_test.sh b/scripts/calm_bootcamp_test.sh new file mode 100755 index 0000000..b681e6e --- /dev/null +++ 
b/scripts/calm_bootcamp_test.sh @@ -0,0 +1,160 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoDC' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . 
lib.pc.sh + + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + + # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be + if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then + pc_smtp + fi + + ssp_auth \ + && calm_enable \ + && objects_enable \ + && lcm \ + && pc_project \ + && object_store \ + && karbon_image_download \ + && flow_enable \ + && pc_cluster_img_import \ + && upload_karbon_calm_blueprint \ + && sleep 30 \ + && upload_CICDInfra_calm_blueprint \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." 
+ #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 3e88811..828dbad 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -25,7 +25,7 @@ WORKSHOPS=(\ "Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ - +"Calm Bootcamp Test(AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Cloud Native Application Modernization Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Current" \ "In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ @@ -126,6 +126,11 @@ function stage_clusters() { _pe_launch='calm_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Calm Bootcamp Test" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='calm_bootcamp_test.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Citrix" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='citrix_bootcamp.sh' From b01d93b5a5c9a324e53c846b03821c50fe365cfa Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 25 Jun 2020 12:14:42 +0200 Subject: [PATCH 587/691] Update calm_bootcamp_test.sh Changed to normal Windows AD. 
--- scripts/calm_bootcamp_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/calm_bootcamp_test.sh b/scripts/calm_bootcamp_test.sh index b681e6e..cd3563b 100755 --- a/scripts/calm_bootcamp_test.sh +++ b/scripts/calm_bootcamp_test.sh @@ -18,7 +18,7 @@ case ${1} in PE | pe ) . lib.pe.sh - export AUTH_SERVER='AutoDC' + export AUTH_SERVER='AutoAD' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From 6a937844d2b178f8e2c1d515d4e8806bf1ddaf03 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 25 Jun 2020 14:20:43 +0200 Subject: [PATCH 588/691] Updated script for Calm script bootcamp Calm is using the AutoDC, but that DC hasn't got the SSP Custom group and users assigned which is needed by the project setup.... --- scripts/calm_bootcamp_test.sh | 2 +- scripts/lib.pe.sh | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/scripts/calm_bootcamp_test.sh b/scripts/calm_bootcamp_test.sh index cd3563b..b681e6e 100755 --- a/scripts/calm_bootcamp_test.sh +++ b/scripts/calm_bootcamp_test.sh @@ -18,7 +18,7 @@ case ${1} in PE | pe ) . 
lib.pe.sh - export AUTH_SERVER='AutoAD' + export AUTH_SERVER='AutoDC' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 34a215e..58d6a04 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -215,6 +215,17 @@ function authentication_source() { fi done + # Adding the needed group and users for the Calm Script part Project + group_name="SSP Custom" + remote_exec 'SSH' 'AUTH_SERVER' \ + "samba-tool group add ${group_name}"\ + 'OPTIONAL' + sleep ${_sleep} + remote_exec 'SSH' 'AUTH_SERVER' \ + "for i in `samba-tool user list | grep ^user`; do samba-tool group addmembers ${group_name} $i;done" \ + 'OPTIONAL' + sleep ${_sleep} + fi ;; 'OpenLDAP') From d84e34ffcf4ad2e94957274bdf579e56c08a9102 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 25 Jun 2020 16:46:58 +0200 Subject: [PATCH 589/691] Update calm_bootcamp_test.sh Brought Karbon back in the game --- scripts/calm_bootcamp_test.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/calm_bootcamp_test.sh b/scripts/calm_bootcamp_test.sh index b681e6e..be8a84c 100755 --- a/scripts/calm_bootcamp_test.sh +++ b/scripts/calm_bootcamp_test.sh @@ -124,6 +124,7 @@ case ${1} in ssp_auth \ && calm_enable \ && objects_enable \ + && karbon \ && lcm \ && pc_project \ && object_store \ From fcb1936744c5690169b938e2cefaafb844e33cf4 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 25 Jun 2020 16:57:17 +0200 Subject: [PATCH 590/691] Update lib.pe.sh Small changes to the command --- scripts/lib.pe.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 58d6a04..9b4b54f 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -216,13 +216,13 @@ function authentication_source() { done # Adding the needed group and users for the Calm Script part Project - group_name="SSP Custom" + remote_exec 'SSH' 'AUTH_SERVER' \ - "samba-tool group add ${group_name}"\ + 
'samba-tool group add "SSP Custom"' \ 'OPTIONAL' sleep ${_sleep} remote_exec 'SSH' 'AUTH_SERVER' \ - "for i in `samba-tool user list | grep ^user`; do samba-tool group addmembers ${group_name} $i;done" \ + 'for i in `samba-tool user list | grep ^user`; do samba-tool group addmembers "SSP Custom" $i;done' \ 'OPTIONAL' sleep ${_sleep} From d02fed05ca1745635a7b4fb76ac592b7ae111f09 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 25 Jun 2020 18:08:32 +0200 Subject: [PATCH 591/691] Update calm_bootcamp_test.sh --- scripts/calm_bootcamp_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/calm_bootcamp_test.sh b/scripts/calm_bootcamp_test.sh index be8a84c..d8e6c0b 100755 --- a/scripts/calm_bootcamp_test.sh +++ b/scripts/calm_bootcamp_test.sh @@ -124,7 +124,7 @@ case ${1} in ssp_auth \ && calm_enable \ && objects_enable \ - && karbon \ + && karbon_enable \ && lcm \ && pc_project \ && object_store \ From 0daf8d25ec4ef50e780b3a5792fee7b30f856c42 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 08:20:22 +0200 Subject: [PATCH 592/691] Update lib.pc.sh Added to have always an addition to the version_arr, even if there is nothing to be updated. We jump/skip later in the module. --- scripts/lib.pc.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b720c6a..7ed5460 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -128,8 +128,13 @@ function lcm() { # Grabbing the versions of the UUID and put them in a versions array for uuid in "${uuid_arr[@]}" do - # Get the latest version from the to be updated uuid - version_ar+=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \")) + # Get the latest version from the to be updated uuid. 
Put always a value in the array otherwise we loose/have skewed verrsions to products + version=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \")) + # If no version upgrade available add a blank item in the array + if [[ -z $version ]]; then + version=' ' + fi + version_ar+=$version done # Copy the right info into the to be used array fi From 2272d612cca4e4ab0f7cf8236faf8ca5b356ba8f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 08:59:09 +0200 Subject: [PATCH 593/691] Update lib.pe.sh Changed the remark for the AutoDC so we have the correct info in the script. --- scripts/lib.pe.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 9b4b54f..87480c3 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -215,8 +215,7 @@ function authentication_source() { fi done - # Adding the needed group and users for the Calm Script part Project - + # Adding the needed group and users to the AutoDC that may be used. Calm would otherwise have no BootInfra Project remote_exec 'SSH' 'AUTH_SERVER' \ 'samba-tool group add "SSP Custom"' \ 'OPTIONAL' From c0c0e3bf0e6447f00d944c6c9b1d01d30172709e Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 09:39:51 +0200 Subject: [PATCH 594/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7ed5460..19debf4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -149,7 +149,7 @@ function lcm() { count=0 while [ $count -lt ${#uuid_arr[@]} ] do - if [ ! -z ${version_ar[$count]} ]; then + if [[ ! 
-z ${version_ar[$count]} ]]; then _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" fi From 6239afd593819e7cc72b24793d0f3cf1f1030075 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 10:58:59 +0200 Subject: [PATCH 595/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 19debf4..fd93c1e 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -132,9 +132,9 @@ function lcm() { version=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \")) # If no version upgrade available add a blank item in the array if [[ -z $version ]]; then - version=' ' + version='' fi - version_ar+=$version + version_ar+=($version) done # Copy the right info into the to be used array fi From 6f9c6216732e99511faab749bdbc6b1efd9e51ba Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 12:08:56 +0200 Subject: [PATCH 596/691] Update lib.pe.sh PE Registration Process more controlled. 
--- scripts/lib.pe.sh | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 87480c3..67f9924 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -770,10 +770,25 @@ function cluster_check() { local _test_exit local CURL_HTTP_OPTS=' --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure ' - log "PC is version 5.8, enabling and checking" + log "PC is installed, registering the PE to PC" # Enable the PE to PC registration _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') + _pc_ip_addr=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") + while [[ -z $_pc_ip_addr ]] + do + log "Registering PE has failed, retrying" + curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data + (( _loop++ )) + _pc_ip_addr=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") + sleep 5 + log "Sleeping for 5 seconds before retrying...$_loop/$_attempts" + if [[ $_loop >= $_attempts ]] + log "We have tried 10 times and the cluster is not able to register... Exiting the script!!" + exit 1 + fi + done + log "PE has been regsitered to PC... Progressing..." 
} ############################################################################################################################################################################### From c47146fc2f1574bafff3b7b7eb4b0dbacbb78525 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 12:54:42 +0200 Subject: [PATCH 597/691] Update lib.pc.sh It seems that we can not add an empty item in the array, so now we are looking for NA in the version to skip the UUID... --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index fd93c1e..9fa9fca 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -132,7 +132,7 @@ function lcm() { version=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \")) # If no version upgrade available add a blank item in the array if [[ -z $version ]]; then - version='' + version='NA' fi version_ar+=($version) done @@ -149,7 +149,7 @@ function lcm() { count=0 while [ $count -lt ${#uuid_arr[@]} ] do - if [[ ! -z ${version_ar[$count]} ]]; then + if [[ ${version_ar[$count]} != *"NA"* ]]; then _json_data+="[\\\"${uuid_arr[$count]}\\\",\\\"${version_ar[$count]}\\\"]," log "Found UUID ${uuid_arr[$count]} and version ${version_ar[$count]}" fi From d5bf47dda823e759885150678cfc0965f4356829 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 13:12:20 +0200 Subject: [PATCH 598/691] Update lib.pe.sh Typo... 
--- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 67f9924..8b82e0e 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -783,7 +783,7 @@ function cluster_check() { _pc_ip_addr=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") sleep 5 log "Sleeping for 5 seconds before retrying...$_loop/$_attempts" - if [[ $_loop >= $_attempts ]] + if [[ $_loop -gt $_attempts ]] log "We have tried 10 times and the cluster is not able to register... Exiting the script!!" exit 1 fi From ae4676d4c615466689f38f8235da30dcc6d83f75 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 13:13:41 +0200 Subject: [PATCH 599/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 8b82e0e..8961e1b 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -783,7 +783,7 @@ function cluster_check() { _pc_ip_addr=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") sleep 5 log "Sleeping for 5 seconds before retrying...$_loop/$_attempts" - if [[ $_loop -gt $_attempts ]] + if [[ $_loop -gt $_attempts ]]; then log "We have tried 10 times and the cluster is not able to register... Exiting the script!!" exit 1 fi From 670ecf9a6d4d4115607b9afc2c39688cd266208c Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 13:56:27 +0200 Subject: [PATCH 600/691] Update lib.pe.sh Removing the check... 
--- scripts/lib.pe.sh | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 8961e1b..2806486 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -774,21 +774,6 @@ function cluster_check() { # Enable the PE to PC registration _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') - _pc_ip_addr=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") - while [[ -z $_pc_ip_addr ]] - do - log "Registering PE has failed, retrying" - curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data - (( _loop++ )) - _pc_ip_addr=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") - sleep 5 - log "Sleeping for 5 seconds before retrying...$_loop/$_attempts" - if [[ $_loop -gt $_attempts ]]; then - log "We have tried 10 times and the cluster is not able to register... Exiting the script!!" - exit 1 - fi - done - log "PE has been regsitered to PC... Progressing..." 
} ############################################################################################################################################################################### From 35d10ee5ae393d72be3a567c40ee46fbb67e3a23 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 16:13:23 +0200 Subject: [PATCH 601/691] Update lib.pe.sh Reintroducing the PE registration test --- scripts/lib.pe.sh | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 2806486..9e5695e 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -774,6 +774,25 @@ function cluster_check() { # Enable the PE to PC registration _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') + + # Let's sleep a few seconds before moving on + sleep 5 + + _pc_ip_addr=$(curl $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") + while [[ -z $_pc_ip_addr ]] + do + log "Registering PE has failed, retrying" + curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data + (( _loop++ )) + _pc_ip_addr=$(curl $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") + sleep 5 + log "Sleeping for 5 seconds before retrying...$_loop/$_attempts" + if [[ $_loop -gt $_attempts ]]; then + log "We have tried 10 times and the cluster is not able to register... Exiting the script!!" 
+ exit 1 + fi + done + log "PE has been regsitered to PC... Progressing..." } ############################################################################################################################################################################### From 41b33c787d87166a352a370bcb1eb0bc4c11d6f5 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 16:40:01 +0200 Subject: [PATCH 602/691] Update lib.pe.sh Text typo --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 9e5695e..21ccbbe 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -792,7 +792,7 @@ function cluster_check() { exit 1 fi done - log "PE has been regsitered to PC... Progressing..." + log "PE has been registered to PC... Progressing..." } ############################################################################################################################################################################### From 3126921f97126f0c0de66892f1f6efcd87d6adbc Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 26 Jun 2020 17:00:57 +0200 Subject: [PATCH 603/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 21ccbbe..80c8d2a 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -792,7 +792,7 @@ function cluster_check() { exit 1 fi done - log "PE has been registered to PC... Progressing..." + log "PE has been registered to PC... Proceeding..." 
} ############################################################################################################################################################################### From 1dd030cc8a5ec8a94651f0f44f206ed385e81fc6 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 1 Jul 2020 20:11:59 -0700 Subject: [PATCH 604/691] Update frame_bootcamp.sh --- scripts/frame_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index cb37cc8..ed841fa 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -65,7 +65,7 @@ case ${1} in dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ && pe_init \ - && frame_network_configure \ + && network_configure \ && authentication_source \ && pe_auth \ && prism_pro_server_deploy \ From 9b2eb77e03c769ca019c0e1920bff12539a4c75f Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Thu, 2 Jul 2020 19:28:53 +0200 Subject: [PATCH 605/691] Update privatecloud_bootcamp.sh --- scripts/privatecloud_bootcamp.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/privatecloud_bootcamp.sh b/scripts/privatecloud_bootcamp.sh index 0005ae2..cbc69e6 100755 --- a/scripts/privatecloud_bootcamp.sh +++ b/scripts/privatecloud_bootcamp.sh @@ -89,6 +89,7 @@ case ${1} in export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ veeam/VBR_10.0.0.4442.iso \ + Windows2016.iso \ ) From e6968152f206f1109b168d58ac6944105522cc36 Mon Sep 17 00:00:00 2001 From: Willem Essenstam Date: Fri, 3 Jul 2020 09:28:38 +0200 Subject: [PATCH 606/691] Delete calm_bootcamp_test.sh --- scripts/calm_bootcamp_test.sh | 161 ---------------------------------- 1 file changed, 161 deletions(-) delete mode 100755 scripts/calm_bootcamp_test.sh diff --git a/scripts/calm_bootcamp_test.sh b/scripts/calm_bootcamp_test.sh deleted file mode 100755 index d8e6c0b..0000000 --- a/scripts/calm_bootcamp_test.sh +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env bash -# -x - 
-#__main()__________ - -# Source Nutanix environment (PATH + aliases), then common routines + global variables -. /etc/profile.d/nutanix_env.sh -. lib.common.sh -. global.vars.sh -begin - -args_required 'EMAIL PE_PASSWORD PC_VERSION' - -#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization -# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! - -case ${1} in - PE | pe ) - . lib.pe.sh - - export AUTH_SERVER='AutoDC' - - args_required 'PE_HOST PC_LAUNCH' - ssh_pubkey & # non-blocking, parallel suitable - - dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ - && pe_license \ - && pe_init \ - && network_configure \ - && authentication_source \ - && pe_auth - - if (( $? == 0 )) ; then - pc_install "${NW1_NAME}" \ - && prism_check 'PC' \ - - if (( $? == 0 )) ; then - _command="EMAIL=${EMAIL} \ - PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" - - cluster_check \ - && log "Remote asynchroneous PC Image import script... ${_command}" \ - && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & - - pc_configure \ - && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${PE_HOST}:9440" - log "PC = https://${PC_HOST}:9440" - - #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & - #dependencies 'remove' 'sshpass' - finish - fi - else - finish - _error=18 - log "Error ${_error}: in main functional chain, exit!" - exit ${_error} - fi - ;; - PC | pc ) - . 
lib.pc.sh - - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" - - export QCOW2_IMAGES=(\ - Windows2016.qcow2 \ - CentOS7.qcow2 \ - WinToolsVM.qcow2 \ - Linux_ToolsVM.qcow2 \ - ) - export ISO_IMAGES=(\ - Nutanix-VirtIO-1.1.5.iso \ - ) - - run_once - - dependencies 'install' 'jq' || exit 13 - - ssh_pubkey & # non-blocking, parallel suitable - - pc_passwd - ntnx_cmd # check cli services available? - - export NUCLEI_SERVER='localhost' - export NUCLEI_USERNAME="${PRISM_ADMIN}" - export NUCLEI_PASSWORD="${PE_PASSWORD}" - # nuclei -debug -username admin -server localhost -password x vm.list - - if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX - log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" - pe_determine ${1} - . global.vars.sh # re-populate PE_HOST dependencies - else - CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ - jq -r .data[0].clusterDetails.clusterName) - if [[ ${CLUSTER_NAME} != '' ]]; then - log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." - fi - fi - - if [[ ! -z "${2}" ]]; then # hidden bonus - log "Don't forget: $0 first.last@nutanixdc.local%password" - calm_update && exit 0 - fi - - export ATTEMPTS=2 - export SLEEP=10 - - pc_init \ - && pc_dns_add \ - && pc_ui \ - && pc_auth \ - - # If we run this in a none HPOC we must skip the SMTP config as we have no idea what the SMTP server will be - if [[ ! -z ${SMTP_SERVER_ADDRESS} ]]; then - pc_smtp - fi - - ssp_auth \ - && calm_enable \ - && objects_enable \ - && karbon_enable \ - && lcm \ - && pc_project \ - && object_store \ - && karbon_image_download \ - && flow_enable \ - && pc_cluster_img_import \ - && upload_karbon_calm_blueprint \ - && sleep 30 \ - && upload_CICDInfra_calm_blueprint \ - && images \ - && prism_check 'PC' - - log "Non-blocking functions (in development) follow." 
- #pc_project - pc_admin - # ntnx_download 'AOS' # function in lib.common.sh - - unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD - - if (( $? == 0 )); then - #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ - #&& - log "PC = https://${PC_HOST}:9440" - finish - else - _error=19 - log "Error ${_error}: failed to reach PC!" - exit ${_error} - fi - ;; - FILES | files | afs ) - files_install - ;; -esac From 6c19a8ec41b9749138a0bef4b28490a04c0f5bad Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Jul 2020 11:51:53 -0700 Subject: [PATCH 607/691] Updates for New Software versions --- scripts/all_bootcamp.sh | 3 +++ scripts/frame_bootcamp.sh | 39 -------------------------------------- scripts/global.vars.sh | 40 +++++++++++++++++++-------------------- scripts/lib.pc.sh | 30 ++++++++++++++--------------- stage_workshop.sh | 38 ++++++++++++++++++------------------- 5 files changed, 57 insertions(+), 93 deletions(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index e6fe11a..788dd03 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -101,6 +101,9 @@ case ${1} in export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ Windows2016.iso \ + FrameCCA-2.1.6.iso \ + FrameGuestAgentInstaller_1.0.2.7.iso \ + Nutanix-VirtIO-1.1.5.iso \ veeam/VBR_10.0.0.4442.iso \ ) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index ed841fa..f805c94 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -20,45 +20,6 @@ case ${1} in export AUTH_SERVER='AutoAD' - # Networking needs for Frame Bootcamp - export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.149" - export NW2_DHCP_START2="${IPV4_PREFIX}.250" - export NW2_DHCP_END2="${IPV4_PREFIX}.253" - - export USERNW01_NAME='User01-Network' - export USERNW01_VLAN=${NW2_VLAN} - - export USERNW02_NAME='User02-Network' - export USERNW02_VLAN=${NW2_VLAN} - - export USERNW03_NAME='User03-Network' - export USERNW03_VLAN=${NW2_VLAN} - - 
export USERNW04_NAME='User04-Network' - export USERNW04_VLAN=${NW2_VLAN} - - export USERNW05_NAME='User05-Network' - export USERNW05_VLAN=${NW2_VLAN} - - export USERNW06_NAME='User06-Network' - export USERNW06_VLAN=${NW2_VLAN} - - export USERNW07_NAME='User07-Network' - export USERNW07_VLAN=${NW2_VLAN} - - export USERNW08_NAME='User08-Network' - export USERNW08_VLAN=${NW2_VLAN} - - export USERNW09_NAME='User09-Network' - export USERNW09_VLAN=${NW2_VLAN} - - export USERNW10_NAME='User10-Network' - export USERNW10_VLAN=${NW2_VLAN} - - export USERNW11_NAME='User11-Network' - export USERNW11_VLAN=${NW2_VLAN} - args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4d2e46c..e31291c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,10 +2,10 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='5.17.0.3' +PC_DEV_VERSION='2020.7' PC_CURRENT_VERSION='5.17.0.3' PC_STABLE_VERSION='5.11.2.1' -FILES_VERSION='3.6.3' +FILES_VERSION='3.7.0' FILE_ANALYTICS_VERSION='2.1.1.1' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' @@ -43,7 +43,7 @@ SSH_OPTS+=' -q' # -v' ################################################### ERA_Blueprint='EraServerDeployment.json' -ERAServerImage='ERA-Server-build-1.2.1.qcow2' +ERAServerImage='ERA-Server-build-1.3.1.qcow2' ERAServerName='EraServer' ERA_USER="admin" ERA_PASSWORD="nutanix/4u" @@ -288,14 +288,14 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.7.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.7.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' 
PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.6.3.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.7.0.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ @@ -327,14 +327,14 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.7.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.7.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.3.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' + 
FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ @@ -366,14 +366,14 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.7.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.7.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.6.3.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ @@ -394,7 +394,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) AUTOAD_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ + 
'http://10.42.194.11/workshop_staging/AutoAD_05272020.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' @@ -410,14 +410,14 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.7.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.7.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' - FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.6.3.json' - FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.6.3-stable.qcow2' + FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.7.0.json' + FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' JQ_REPOS=(\ diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9fa9fca..327a864 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -131,7 +131,7 @@ function lcm() { # Get the latest version from the to be updated uuid. 
Put always a value in the array otherwise we loose/have skewed verrsions to products version=($(jq --arg uuid "$uuid" '.group_results[].entity_results[] | select (.data[].values[].values[]==$uuid) .data[] | select (.name=="version") .values[].values[]' reply_json_ver.json | sort |tail -1 | tr -d \")) # If no version upgrade available add a blank item in the array - if [[ -z $version ]]; then + if [[ -z $version ]]; then version='NA' fi version_ar+=($version) @@ -902,14 +902,14 @@ log "EraServer IP |${ERA_HOST}|" ## Create the EraManaged network inside Era ## log "Reset Default Era Password" - _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/update" --data '{ "password": "'${ERA_PASSWORD}'"}' | jq -r '.status' | tr -d \") + _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/update" --data '{ "password": "'${ERA_PASSWORD}'"}' | jq -r '.status' | tr -d \") log "Password Reset |${_reset_passwd}|" ## Accept EULA ## log "Accept Era EULA" - _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/auth/validate" --data '{ "eulaAccepted": true }' | jq -r '.status' | tr -d \") + _accept_eula=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/validate" --data '{ "eulaAccepted": true }' | jq -r '.status' | tr -d \") log "Accept EULA |${_accept_eula}|" @@ -936,7 +936,7 @@ HTTP_JSON_BODY=$(cat < cluster.json - _task_id=$(curl -k -H 'Content-Type: multipart/form-data' -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/clusters/${_era_cluster_id}/json" -F file="@"cluster.json) + _task_id=$(curl -k -H 'Content-Type: multipart/form-data' -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/clusters/${_era_cluster_id}/json" -F file="@"cluster.json) ## Add the Secondary Network inside Era ## log "Create ${NW2_NAME} 
DHCP/IPAM Network" - _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.8/resources/networks" --data '{"name": "'${NW2_NAME}'","type": "DHCP"}' | jq -r '.id' | tr -d \") + _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/resources/networks" --data '{"name": "'${NW2_NAME}'","type": "DHCP"}' | jq -r '.id' | tr -d \") log "Created ${NW2_NAME} Network with Network ID |${_dhcp_network_id}|" @@ -991,7 +991,7 @@ HTTP_JSON_BODY=$(cat < 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 2020.7" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" From 46a7e64752f415e0546dc56060711392f2585b39 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Jul 2020 11:57:18 -0700 Subject: [PATCH 608/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 788dd03..80c93e4 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -86,15 +86,17 @@ case ${1} in #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ - Windows2016.qcow2 \ + Windows2016_05272020.qcow2 \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ - Win10v1903.qcow2 \ - WinToolsVM.qcow2 \ - Linux_ToolsVM.qcow2 \ + CentOS7_05272020.qcow2 \ + Win10v1903_05272020.qcow2 \ + Win10v1909_05272020.qcow2 \ + WinTools_05272020.qcow2 \ + Linux_ToolsVM_05272020.qcow2 + LinuxMint_ToolsVM.qcow2 \ HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ veeam/VeeamAHVProxy2.0.404.qcow2 \ ) From 4660fbb45532e5505182e999bbe2a71f0eae5bb4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Jul 2020 12:47:10 -0700 Subject: [PATCH 609/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e31291c..e422935 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -394,7 +394,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) AUTOAD_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoAD_05272020.qcow2' \ + 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' From 921aeb1e53ded2eee76fc4ffb05bf732f4ca5e2d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Jul 2020 12:51:29 -0700 Subject: [PATCH 610/691] Update era_bootcamp.sh --- scripts/era_bootcamp.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index b00c52f..c75718b 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -84,6 +84,12 @@ case ${1} in export QCOW2_IMAGES=(\ WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ + CentOS7_05272020.qcow2 \ + Win10v1903_05272020.qcow2 \ + Win10v1909_05272020.qcow2 \ + WinTools_05272020.qcow2 \ + Linux_ToolsVM_05272020.qcow2 + LinuxMint_ToolsVM.qcow2 \ ) export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ From 9cb9e086db058cb05c6871920cb9d5d9d3471420 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Jul 2020 14:48:24 -0700 Subject: [PATCH 611/691] Updates for Cloning Source VMs in Era --- scripts/era_mssql_bootcamp.sh | 1 + scripts/global.vars.sh | 11 +++- scripts/lib.pc.sh | 98 +++++++++++++++++++++++++++++++++++ 3 files changed, 108 insertions(+), 2 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 5ff4efa..65fbc0c 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -138,6 +138,7 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && configure_era \ + && clone_mssql_source_vms \ && prism_check 'PC' log "Non-blocking functions 
(in development) follow." diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index e422935..ea6f269 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -33,10 +33,17 @@ SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserK SSH_OPTS+=' -q' # -v' #################################################### -# OBJECTS VARs +# Users for Tools VMs and Source VM Clones ################################################### - +USERS=(\ + User01 \ + User02 \ + User03 \ + User04 \ + User05 \ + User06 \ +) #################################################### # Era VARs diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 327a864..bda03cd 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1294,6 +1294,104 @@ set +x } +######################################################################################################################################### +# Routine to Clone MSSQL Source VMs +######################################################################################################################################### + +function clone_mssql_source_vms() { + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + +log "PE Cluster IP |${PE_HOST}|" +log "PC IP |${PC_HOST}|" + +set -x + +## Get Source VM UUID ## +log "Get ${MSSQL_SourceVM} ID" + + _mssql_sourcevm_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/vms/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"vm","filter": "vm_name==Win2016SQLSource"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + +log "${MSSQL_SourceVM} ID: |${_mssql_sourcevm_id}|" + +## Deploy UserXX Clones ## +log "Cloning ${MSSQL_SourceVM}" + +for _user in "${USERS[@]}" ; do + + ClonedVM="${_user}_LinuxMint" + + log "Cloning ${MSSQL_SourceVM} for $_user started.." 
+ log "Cloned VMs Name will be ${ClonedVM}" + +HTTP_JSON_BODY=$(cat < Date: Wed, 29 Jul 2020 15:40:02 -0700 Subject: [PATCH 612/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ea6f269..6e4072b 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='2020.7' +PC_DEV_VERSION='pc.2020.7' PC_CURRENT_VERSION='5.17.0.3' PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.7.0' From 499c1bac0c9cb60adfecc03640ab78d9ec7913d4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 29 Jul 2020 20:21:41 -0700 Subject: [PATCH 613/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index bda03cd..b9b465b 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1318,7 +1318,7 @@ log "Cloning ${MSSQL_SourceVM}" for _user in "${USERS[@]}" ; do - ClonedVM="${_user}_LinuxMint" + ClonedVM="${_user}_${MSSQL_SourceVM}" log "Cloning ${MSSQL_SourceVM} for $_user started.." 
log "Cloned VMs Name will be ${ClonedVM}" @@ -1352,7 +1352,7 @@ EOF ## Get Newly Cloned VM"s UUID ## -log "Get ${_user}_LinuxMint ID" +log "Get ${ClonedVM} ID" HTTP_JSON_BODY=$(cat < Date: Thu, 30 Jul 2020 11:42:29 -0700 Subject: [PATCH 614/691] Udates for Era Passwd & Source VM PowerOn --- scripts/global.vars.sh | 3 ++- scripts/lib.pc.sh | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6e4072b..ca798e4 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -53,7 +53,8 @@ ERA_Blueprint='EraServerDeployment.json' ERAServerImage='ERA-Server-build-1.3.1.qcow2' ERAServerName='EraServer' ERA_USER="admin" -ERA_PASSWORD="nutanix/4u" +#ERA_PASSWORD="nutanix/4u" +ERA_PASSWORD="${PE_PASSWORD}" ERA_Default_PASSWORD="Nutanix/4u" ERA_NETWORK="Secondary" ERA_Container_RF="2" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b9b465b..b77a76f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -902,7 +902,7 @@ log "EraServer IP |${ERA_HOST}|" ## Create the EraManaged network inside Era ## log "Reset Default Era Password" - _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/update" --data '{ "password": "'${ERA_PASSWORD}'"}' | jq -r '.status' | tr -d \") + _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/update" --data '{ "password": "'${PE_PASSWORD}'"}' | jq -r '.status' | tr -d \") log "Password Reset |${_reset_passwd}|" @@ -1340,15 +1340,15 @@ EOF _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.status.execution_context.task_uuid' | tr -d \") log "Task uuid for Cloning ${MSSQL_SourceVM} is $_task_id ....." 
- - if [ -z "$_task_id" ]; then - log "Cloning ${MSSQL_SourceVM} has encountered an error..." - else - log "Cloning ${MSSQL_SourceVM} started.." - set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run - # Run the progess checker - loop - fi + sleep 120 + #if [ -z "$_task_id" ]; then + # log "Cloning ${MSSQL_SourceVM} has encountered an error..." + #else + # log "Cloning ${MSSQL_SourceVM} started.." + # set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + # # Run the progess checker + # loop + #fi ## Get Newly Cloned VM"s UUID ## From f766991362455439b54119a582962e5f7e55d9b1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 30 Jul 2020 11:47:21 -0700 Subject: [PATCH 615/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b77a76f..3d88d09 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -902,7 +902,7 @@ log "EraServer IP |${ERA_HOST}|" ## Create the EraManaged network inside Era ## log "Reset Default Era Password" - _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/update" --data '{ "password": "'${PE_PASSWORD}'"}' | jq -r '.status' | tr -d \") + _reset_passwd=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_Default_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/auth/update" --data '{ "password": "'${ERA_PASSWORD}'"}' | jq -r '.status' | tr -d \") log "Password Reset |${_reset_passwd}|" From 48ac41e5e8edf145340b5ffdb0db5cc43abdf98f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 30 Jul 2020 11:50:24 -0700 Subject: [PATCH 616/691] Update lib.pe.sh --- scripts/lib.pe.sh | 132 ++-------------------------------------------- 1 file changed, 3 insertions(+), 129 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 80c8d2a..66f2b3b 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -215,7 +215,7 @@ 
function authentication_source() { fi done - # Adding the needed group and users to the AutoDC that may be used. Calm would otherwise have no BootInfra Project + # Adding the needed group and users to the AutoDC that may be used. Calm would otherwise have no BootInfra Project remote_exec 'SSH' 'AUTH_SERVER' \ 'samba-tool group add "SSP Custom"' \ 'OPTIONAL' @@ -631,132 +631,6 @@ function era_network_configure() { fi } -############################################################################################################################################################################### -# Routine to create the networks for frame bootcamp -############################################################################################################################################################################### - - -function frame_network_configure() { - local _network_name="${NW1_NAME}" - - if [[ ! -z "${NW2_NAME}" ]]; then - #TODO: accommodate for X networks! - _network_name="${NW2_NAME}" - fi - - if [[ ! -z $(acli "net.list" | grep ${_network_name}) ]]; then - log "IDEMPOTENCY: ${_network_name} network set, skip." - else - args_required 'AUTH_DOMAIN IPV4_PREFIX AUTH_HOST' - - if [[ ! -z $(acli "net.list" | grep 'Rx-Automation-Network') ]]; then - log "Remove Rx-Automation-Network..." - acli "-y net.delete Rx-Automation-Network" - fi - - log "Create primary network: Name: ${NW1_NAME}, VLAN: ${NW1_VLAN}, Subnet: ${NW1_SUBNET}, Domain: ${AUTH_DOMAIN}, Pool: ${NW1_DHCP_START} to ${NW1_DHCP_END}" - acli "net.create ${NW1_NAME} vlan=${NW1_VLAN} ip_config=${NW1_SUBNET}" - acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" - - if [[ ! 
-z "${NW2_NAME}" ]]; then - log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" - acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" - acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" - acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START2} end=${NW2_DHCP_END2}" - fi - - if [[ ! -z "${USERNW01_NAME}" ]]; then - log "Create User network: Name: ${USERNW01_NAME}, VLAN: ${USERNW01_VLAN}, Subnet: ${USERNW01_SUBNET}, Pool: ${USERNW01_DHCP_START} to ${USERNW01_DHCP_END}" - acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN}" - #acli "net.create ${USERNW01_NAME} vlan=${USERNW01_VLAN} ip_config=${USERNW01_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW01_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW01_NAME} start=${USERNW01_DHCP_START} end=${USERNW01_DHCP_END}" - fi - - if [[ ! -z "${USERNW02_NAME}" ]]; then - log "Create User network: Name: ${USERNW02_NAME}, VLAN: ${USERNW02_VLAN}, Subnet: ${USERNW02_SUBNET}, Pool: ${USERNW02_DHCP_START} to ${USERNW02_DHCP_END}" - acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN}" - #acli "net.create ${USERNW02_NAME} vlan=${USERNW02_VLAN} ip_config=${USERNW02_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW02_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW02_NAME} start=${USERNW02_DHCP_START} end=${USERNW02_DHCP_END}" - fi - - if [[ ! 
-z "${USERNW03_NAME}" ]]; then - log "Create User network: Name: ${USERNW03_NAME}, VLAN: ${USERNW03_VLAN}, Subnet: ${USERNW03_SUBNET}, Pool: ${USERNW03_DHCP_START} to ${USERNW03_DHCP_END}" - acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN}" - #acli "net.create ${USERNW03_NAME} vlan=${USERNW03_VLAN} ip_config=${USERNW03_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW03_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW03_NAME} start=${USERNW03_DHCP_START} end=${USERNW03_DHCP_END}" - fi - - if [[ ! -z "${USERNW04_NAME}" ]]; then - log "Create User network: Name: ${USERNW04_NAME}, VLAN: ${USERNW04_VLAN}, Subnet: ${USERNW04_SUBNET}, Pool: ${USERNW04_DHCP_START} to ${USERNW04_DHCP_END}" - acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN}" - #acli "net.create ${USERNW04_NAME} vlan=${USERNW04_VLAN} ip_config=${USERNW04_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW04_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW04_NAME} start=${USERNW04_DHCP_START} end=${USERNW04_DHCP_END}" - fi - - if [[ ! -z "${USERNW05_NAME}" ]]; then - log "Create User network: Name: ${USERNW05_NAME}, VLAN: ${USERNW05_VLAN}, Subnet: ${USERNW05_SUBNET}, Pool: ${USERNW05_DHCP_START} to ${USERNW05_DHCP_END}" - acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN}" - #acli "net.create ${USERNW05_NAME} vlan=${USERNW05_VLAN} ip_config=${USERNW05_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW05_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW05_NAME} start=${USERNW05_DHCP_START} end=${USERNW05_DHCP_END}" - fi - - if [[ ! 
-z "${USERNW06_NAME}" ]]; then - log "Create User network: Name: ${USERNW06_NAME}, VLAN: ${USERNW06_VLAN}, Subnet: ${USERNW06_SUBNET}, Pool: ${USERNW06_DHCP_START} to ${USERNW06_DHCP_END}" - acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN}" - #acli "net.create ${USERNW06_NAME} vlan=${USERNW06_VLAN} ip_config=${USERNW06_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW06_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW06_NAME} start=${USERNW06_DHCP_START} end=${USERNW06_DHCP_END}" - fi - - if [[ ! -z "${USERNW07_NAME}" ]]; then - log "Create User network: Name: ${USERNW07_NAME}, VLAN: ${USERNW07_VLAN}, Subnet: ${USERNW07_SUBNET}, Pool: ${USERNW07_DHCP_START} to ${USERNW07_DHCP_END}" - acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN}" - #acli "net.create ${USERNW07_NAME} vlan=${USERNW07_VLAN} ip_config=${USERNW07_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW07_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW07_NAME} start=${USERNW07_DHCP_START} end=${USERNW07_DHCP_END}" - fi - - if [[ ! -z "${USERNW08_NAME}" ]]; then - log "Create User network: Name: ${USERNW08_NAME}, VLAN: ${USERNW08_VLAN}, Subnet: ${USERNW08_SUBNET}, Pool: ${USERNW08_DHCP_START} to ${USERNW08_DHCP_END}" - acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN}" - #acli "net.create ${USERNW08_NAME} vlan=${USERNW08_VLAN} ip_config=${USERNW08_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW08_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW08_NAME} start=${USERNW08_DHCP_START} end=${USERNW08_DHCP_END}" - fi - - if [[ ! 
-z "${USERNW09_NAME}" ]]; then - log "Create User network: Name: ${USERNW09_NAME}, VLAN: ${USERNW09_VLAN}, Subnet: ${USERNW09_SUBNET}, Pool: ${USERNW09_DHCP_START} to ${USERNW09_DHCP_END}" - acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN}" - #acli "net.create ${USERNW09_NAME} vlan=${USERNW09_VLAN} ip_config=${USERNW09_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW09_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW09_NAME} start=${USERNW09_DHCP_START} end=${USERNW09_DHCP_END}" - fi - - if [[ ! -z "${USERNW10_NAME}" ]]; then - log "Create User network: Name: ${USERNW10_NAME}, VLAN: ${USERNW10_VLAN}, Subnet: ${USERNW10_SUBNET}, Pool: ${USERNW10_DHCP_START} to ${USERNW10_DHCP_END}" - acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN}" - #acli "net.create ${USERNW10_NAME} vlan=${USERNW10_VLAN} ip_config=${USERNW10_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW10_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW10_NAME} start=${USERNW10_DHCP_START} end=${USERNW10_DHCP_END}" - fi - - if [[ ! 
-z "${USERNW11_NAME}" ]]; then - log "Create User network: Name: ${USERNW11_NAME}, VLAN: ${USERNW11_VLAN}, Subnet: ${USERNW11_SUBNET}, Pool: ${USERNW11_DHCP_START} to ${USERNW11_DHCP_END}" - acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN}" - #acli "net.create ${USERNW11_NAME} vlan=${USERNW11_VLAN} ip_config=${USERNW11_SUBNET}" - #acli "net.update_dhcp_dns ${USERNW11_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" - #acli " net.add_dhcp_pool ${USERNW11_NAME} start=${USERNW11_DHCP_START} end=${USERNW11_DHCP_END}" - fi - fi -} - ############################################################################################################################################################################### # Routine to check if the registration of PE was successful ############################################################################################################################################################################### @@ -774,10 +648,10 @@ function cluster_check() { # Enable the PE to PC registration _json_data="{\"ipAddresses\":[\"${PC_HOST}\"],\"username\":\"${PRISM_ADMIN}\",\"password\":\"${PE_PASSWORD}\",\"port\":null}" _response=$(curl -X POST $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/add_to_multicluster -d $_json_data | jq '.value') - + # Let's sleep a few seconds before moving on sleep 5 - + _pc_ip_addr=$(curl $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") while [[ -z $_pc_ip_addr ]] do From 49dd9502c67976d9856091a8d04271509a715df8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 31 Jul 2020 11:23:23 -0700 Subject: [PATCH 617/691] Updates for Era Users --- scripts/lib.pc.sh | 89 +++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 87 insertions(+), 2 deletions(-) diff --git 
a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3d88d09..549a05f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -896,6 +896,8 @@ function configure_era() { set -x +log "Starting Era Config" + log "PE Cluster IP |${PE_HOST}|" log "EraServer IP |${ERA_HOST}|" @@ -1174,6 +1176,7 @@ log "Create the NTNXLAB Domain Profile" HTTP_JSON_BODY=$(cat < Date: Fri, 31 Jul 2020 11:31:26 -0700 Subject: [PATCH 618/691] Updates for Cloning --- scripts/lib.pc.sh | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 549a05f..77c6857 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1420,18 +1420,18 @@ EOF log "Cloning VM Now" log $HTTP_JSON_BODY - _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.status.execution_context.task_uuid' | tr -d \") + _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") log "Task uuid for Cloning ${MSSQL_SourceVM} is $_task_id ....." - sleep 120 - #if [ -z "$_task_id" ]; then - # log "Cloning ${MSSQL_SourceVM} has encountered an error..." - #else - # log "Cloning ${MSSQL_SourceVM} started.." - # set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run - # # Run the progess checker - # loop - #fi + #sleep 240 + if [ -z "$_task_id" ]; then + log "Cloning ${MSSQL_SourceVM} has encountered an error..." + else + log "Cloning ${MSSQL_SourceVM} started.." 
+ set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + # Run the progess checker + loop + fi ## Get Newly Cloned VM"s UUID ## From 2887eae0ee2690f104e2491f7acd4f308f9f619c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 4 Aug 2020 11:48:11 -0700 Subject: [PATCH 619/691] Updates for Era Clones and Calm Project --- scripts/lib.pc.sh | 142 ++++++++++++---------------------------------- 1 file changed, 37 insertions(+), 105 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 77c6857..a0607e6 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -894,7 +894,7 @@ EOF function configure_era() { local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " -set -x +#set -x log "Starting Era Config" @@ -1371,7 +1371,7 @@ done log "Era Config Complete" -set +x +#set +x } @@ -1420,7 +1420,7 @@ EOF log "Cloning VM Now" log $HTTP_JSON_BODY - _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") + _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.' | tr -d \") log "Task uuid for Cloning ${MSSQL_SourceVM} is $_task_id ....." 
#sleep 240 @@ -1456,7 +1456,7 @@ log "${ClonedVM} ID: |${_cloned_vm_id}|" log "Powering on VM Now" -_task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_cloned_vm_id}/set_power_state" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"transition": "ON"}' | jq -r '.status.execution_context.task_uuid' | tr -d \") +_task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_cloned_vm_id}/set_power_state" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"transition": "ON"}' | jq -r '.' | tr -d \") log "Task uuid for Powering on VM is $_task_id ....." @@ -1492,46 +1492,6 @@ function pc_project() { local _nw_uuid local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " -# Creating User Group -log "Creating User Group" - -HTTP_JSON_BODY=$(cat < Date: Tue, 4 Aug 2020 21:06:44 -0700 Subject: [PATCH 620/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index a0607e6..1b39093 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1420,7 +1420,7 @@ EOF log "Cloning VM Now" log $HTTP_JSON_BODY - _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.' | tr -d \") + _task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_mssql_sourcevm_id}/clone" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data "${HTTP_JSON_BODY}" | jq -r '.task_uuid' | tr -d \") log "Task uuid for Cloning ${MSSQL_SourceVM} is $_task_id ....." 
#sleep 240 @@ -1456,7 +1456,7 @@ log "${ClonedVM} ID: |${_cloned_vm_id}|" log "Powering on VM Now" -_task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_cloned_vm_id}/set_power_state" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"transition": "ON"}' | jq -r '.' | tr -d \") +_task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_cloned_vm_id}/set_power_state" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"transition": "ON"}' | jq -r '.task_uuid' | tr -d \") log "Task uuid for Powering on VM is $_task_id ....." From b6443d29e9454cbbf11d845f520e665dccde79d5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 4 Aug 2020 21:35:46 -0700 Subject: [PATCH 621/691] Update lib.pc.sh --- scripts/lib.pc.sh | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1b39093..b84d0f5 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1456,7 +1456,25 @@ log "${ClonedVM} ID: |${_cloned_vm_id}|" log "Powering on VM Now" -_task_id=$(curl ${CURL_HTTP_OPTS} --request POST "https://${PE_HOST}:9440/PrismGateway/services/rest/v2.0/vms/${_cloned_vm_id}/set_power_state" --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"transition": "ON"}' | jq -r '.task_uuid' | tr -d \") +HTTP_JSON_BODY=$(cat < Date: Tue, 4 Aug 2020 23:14:59 -0700 Subject: [PATCH 622/691] Update lib.pc.sh --- scripts/lib.pc.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b84d0f5..4d1a663 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1527,7 +1527,6 @@ log "Get PC Account UUID" _pc_account_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/accounts/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"account","filter":"type==nutanix_pc"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") log "Create BootcampInfra Project ..." 
-log "User Group UUID = ${_user_group_uuid}" log "NW UUID = ${_nw_uuid}" log "Role UUID = ${_role_uuid}" log "PC Account UUID = ${_pc_account_uuid}" From 9609d9c5892712f6729b628d50535ddab1aa3ba9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 5 Aug 2020 10:23:25 -0700 Subject: [PATCH 623/691] Update lib.pc.sh --- scripts/lib.pc.sh | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 4d1a663..ca271e0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1390,6 +1390,7 @@ log "PC IP |${PC_HOST}|" set -x ## Get Source VM UUID ## +log "-------------------------------------" log "Get ${MSSQL_SourceVM} ID" _mssql_sourcevm_id=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/vms/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"vm","filter": "vm_name==Win2016SQLSource"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") @@ -1397,6 +1398,7 @@ log "Get ${MSSQL_SourceVM} ID" log "${MSSQL_SourceVM} ID: |${_mssql_sourcevm_id}|" ## Deploy UserXX Clones ## +log "-------------------------------------" log "Cloning ${MSSQL_SourceVM}" for _user in "${USERS[@]}" ; do @@ -1417,8 +1419,9 @@ HTTP_JSON_BODY=$(cat < Date: Thu, 17 Sep 2020 16:37:33 -0700 Subject: [PATCH 624/691] Updates for Calm IaaS --- scripts/calm_iaas_bootcamp.sh | 145 ++++++++++++++++++++++++++++++++++ scripts/global.vars.sh | 18 ++--- stage_workshop.sh | 44 ++++++----- 3 files changed, 179 insertions(+), 28 deletions(-) create mode 100644 scripts/calm_iaas_bootcamp.sh diff --git a/scripts/calm_iaas_bootcamp.sh b/scripts/calm_iaas_bootcamp.sh new file mode 100644 index 0000000..f9fab52 --- /dev/null +++ b/scripts/calm_iaas_bootcamp.sh @@ -0,0 +1,145 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. 
global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoDC' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? 
+ + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && flow_enable \ + && pc_cluster_img_import \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ca798e4..4b00721 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='pc.2020.7' +PC_DEV_VERSION='pc.2020.8.0.1' PC_CURRENT_VERSION='5.17.0.3' PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.7.0' @@ -296,8 +296,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.7.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.7.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.8.0.1.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.8.0.1.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' @@ -335,8 +335,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.7.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.7.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.8.0.1.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.8.0.1.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -374,8 +374,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - 
PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.7.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.7.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.8.0.1.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.8.0.1.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -418,8 +418,8 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.7.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.7.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.8.0.1.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.8.0.1.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index b9c2375..695c77e 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,25 +11,26 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Basic / API Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Private Cloud Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Leap Add-On Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Databases Era with MSSQL Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Databases Era with Oracle Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Databases Era with Postgres Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Databases Era -Stage All- 
Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Files Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Calm Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Frame Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Citrix Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Stage-All Bootcamps (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ +"Basic / API Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Private Cloud Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Leap Add-On Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Databases Era with MSSQL Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Calm IaaS Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ #"Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Calm Bootcamp Test(AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Current" \ 
-"Cloud Native Application Modernization Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ -"In Development Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 5.17.0.3) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.11.x|5.15.x|5.16.x/AHV PC 2020.7) = Development" \ +"Calm Bootcamp Test(AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Current" \ +"Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8 ) = Development" \ #"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ @@ -56,7 +57,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 2020.7" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 2020.8.0.1" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" @@ -126,6 +127,11 @@ function stage_clusters() { _pe_launch='calm_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Calm IaaS Bootcamp" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='calm_iaas_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Calm Bootcamp Test" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm_bootcamp_test.sh' From 2c9c3ef6b315eeab340afe34f2123f5a546d3779 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 17 Sep 2020 16:40:08 -0700 Subject: [PATCH 625/691] Update calm_iaas_bootcamp.sh --- scripts/calm_iaas_bootcamp.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 scripts/calm_iaas_bootcamp.sh diff --git a/scripts/calm_iaas_bootcamp.sh b/scripts/calm_iaas_bootcamp.sh old mode 100644 new mode 100755 From af2ee8fcb55b6fda371259de70752a72856f52b0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 17 Sep 2020 17:14:18 -0700 Subject: [PATCH 626/691] updates for PC 2020.8.0.1 --- stage_workshop.sh | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 695c77e..af306d4 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,26 +11,26 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Basic / API Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Private Cloud Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Leap Add-On Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ 
-"Databases Era with MSSQL Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Calm IaaS Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ +"Basic / API Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Private Cloud Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Leap Add-On Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Databases Era with MSSQL Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Calm IaaS Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ 
+"Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ #"Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Calm Bootcamp Test(AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Current" \ -"Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8 ) = Development" \ +"Calm Bootcamp Test(AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Current" \ +"Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"In Development SNC (1-Node) Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1 ) = Development" \ #"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ #"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ From 4c760e1b99ef07a2466e5da85437f423619b104d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 17 Sep 2020 17:22:54 -0700 Subject: [PATCH 627/691] AutoAD --- scripts/calm_iaas_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/calm_iaas_bootcamp.sh b/scripts/calm_iaas_bootcamp.sh index 
f9fab52..a9610fd 100755 --- a/scripts/calm_iaas_bootcamp.sh +++ b/scripts/calm_iaas_bootcamp.sh @@ -18,7 +18,7 @@ case ${1} in PE | pe ) . lib.pe.sh - export AUTH_SERVER='AutoDC' + export AUTH_SERVER='AutoAD' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable From a1fb227a24b8e7ec4694b9d1bb5faca60691b6b3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 17 Sep 2020 17:39:13 -0700 Subject: [PATCH 628/691] List of Bootcamps Cleanup --- ...mp.sh => Consolidated_Storage_bootcamp.sh} | 110 +++++++------ scripts/karbon_bootcamp.sh | 145 ++++++++++++++++++ .../{snc_ts2020.sh => objects_bootcamp.sh} | 123 +++++++-------- stage_workshop.sh | 47 ++---- 4 files changed, 275 insertions(+), 150 deletions(-) rename scripts/{snc_bootcamp.sh => Consolidated_Storage_bootcamp.sh} (65%) create mode 100644 scripts/karbon_bootcamp.sh rename scripts/{snc_ts2020.sh => objects_bootcamp.sh} (55%) diff --git a/scripts/snc_bootcamp.sh b/scripts/Consolidated_Storage_bootcamp.sh similarity index 65% rename from scripts/snc_bootcamp.sh rename to scripts/Consolidated_Storage_bootcamp.sh index 89471b6..b771590 100755 --- a/scripts/snc_bootcamp.sh +++ b/scripts/Consolidated_Storage_bootcamp.sh @@ -3,7 +3,7 @@ #__main()__________ -# Source Nutanix environment (PATH + aliases), then common routines + global variables +# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables . /etc/profile.d/nutanix_env.sh . lib.common.sh . global.vars.sh @@ -18,24 +18,9 @@ case ${1} in PE | pe ) . 
lib.pe.sh - ## Export Overrides needed for Single Node Clusters - export NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - export NW1_GATEWAY="${IPV4_PREFIX}.$((${OCTET[3]} - 5))" - export NW1_DHCP_START="${IPV4_PREFIX}.$((${OCTET[3]} + 33))" - export NW1_DHCP_END="${IPV4_PREFIX}.$((${OCTET[3]} + 53))" - export SUBNET_MASK="255.255.255.192" - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" - #export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" - #export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" - #export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" - - export NW2_NAME='' - export NW2_VLAN='' - export NW2_SUBNET='' - export NW2_DHCP_START='' - export NW2_DHCP_END='' - - args_required 'PE_HOST PC_LAUNCH' + export AUTH_SERVER='AutoAD' + + args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ @@ -44,22 +29,21 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - && prism_pro_server_deploy + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ && prism_check 'PC' \ if (( $? == 0 )) ; then - ## TODO: If Debug is set we should run with bash -x. Maybe this???? Or are we going to use a fourth parameter - # if [ ! 
-z DEBUG ]; then - # bash_cmd='bash' - # else - # bash_cmd='bash -x' - # fi - # _command="EMAIL=${EMAIL} \ - # PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - # PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup ${bash_cmd} ${HOME}/${PC_LAUNCH} IMAGES" _command="EMAIL=${EMAIL} \ PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" @@ -73,11 +57,10 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 - - create_file_server "${NW1_NAME}" "${NW1_NAME}" && sleep 30 - - file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + deploy_peer_mgmt_server "${PMC}" \ + && deploy_peer_agent_server "${AGENTA}" \ + && deploy_peer_agent_server "${AGENTB}" + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi @@ -87,10 +70,27 @@ case ${1} in log "Error ${_error}: in main functional chain, exit!" exit ${_error} fi + ;; PC | pc ) . 
lib.pc.sh + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + Win10v1903.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + CentOS7.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + run_once dependencies 'install' 'jq' || exit 13 @@ -103,10 +103,6 @@ case ${1} in export NUCLEI_SERVER='localhost' export NUCLEI_USERNAME="${PRISM_ADMIN}" export NUCLEI_PASSWORD="${PE_PASSWORD}" - export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" - export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" - export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" - export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" # nuclei -debug -username admin -server localhost -password x vm.list if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX @@ -137,20 +133,17 @@ case ${1} in ssp_auth \ && calm_enable \ - && karbon_enable \ - && lcm \ && objects_enable \ + && lcm \ + && pc_project \ && object_store \ - && karbon_image_download \ && images \ && flow_enable \ && pc_cluster_img_import \ - && seedPC \ - && prism_check 'PC' \ - && finish_staging + && prism_check 'PC' log "Non-blocking functions (in development) follow." - pc_project + #pc_project pc_admin # ntnx_download 'AOS' # function in lib.common.sh @@ -171,3 +164,30 @@ case ${1} in files_install ;; esac +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'PE_PASSWORD' + +case ${1} in + PE | pe ) + . 
lib.pe.sh + + args_required 'PE_HOST' + + dependencies 'install' 'jq' \ + && files_install + + log "PE = https://${PE_HOST}:9440" + ;; +esac + +finish diff --git a/scripts/karbon_bootcamp.sh b/scripts/karbon_bootcamp.sh new file mode 100644 index 0000000..a9610fd --- /dev/null +++ b/scripts/karbon_bootcamp.sh @@ -0,0 +1,145 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoAD' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? == 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" 
+ exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + export QCOW2_IMAGES=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + WinToolsVM.qcow2 \ + Linux_ToolsVM.qcow2 \ + ) + export ISO_IMAGES=(\ + Nutanix-VirtIO-1.1.5.iso \ + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! -z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && pc_project \ + && flow_enable \ + && pc_cluster_img_import \ + && images \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + #pc_project + pc_admin + # ntnx_download 'AOS' # function in lib.common.sh + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" 
+ exit ${_error} + fi + ;; + FILES | files | afs ) + files_install + ;; +esac diff --git a/scripts/snc_ts2020.sh b/scripts/objects_bootcamp.sh similarity index 55% rename from scripts/snc_ts2020.sh rename to scripts/objects_bootcamp.sh index 3226e10..b771590 100755 --- a/scripts/snc_ts2020.sh +++ b/scripts/objects_bootcamp.sh @@ -3,7 +3,7 @@ #__main()__________ -# Source Nutanix environment (PATH + aliases), then common routines + global variables +# Source Nutanix environment (PATH + aliases), then Workshop common routines + global variables . /etc/profile.d/nutanix_env.sh . lib.common.sh . global.vars.sh @@ -18,45 +18,9 @@ case ${1} in PE | pe ) . lib.pe.sh - #export PC_DEV_VERSION='5.10.2' - #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' - #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' - #export FILES_VERSION='3.2.0.1' - #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - - # Single Node Cluster Options - - export NW1_SUBNET="${IPV4_PREFIX}.$((${OCTET[3]} - 6))/26" - export NW1_GATEWAY="${IPV4_PREFIX}.$((${OCTET[3]} - 5))" - export NW1_DHCP_START="${IPV4_PREFIX}.$((${OCTET[3]} + 33))" - export NW1_DHCP_END="${IPV4_PREFIX}.$((${OCTET[3]} + 53))" - export SUBNET_MASK="255.255.255.192" - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" - #export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" - #export 
OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" - #export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" - - export NW2_NAME='' - export NW2_VLAN='' - export NW2_SUBNET='' - export NW2_DHCP_START='' - export NW2_DHCP_END='' - - #export NW2_DHCP_START="${IPV4_PREFIX}.132" - #export NW2_DHCP_END="${IPV4_PREFIX}.229" - export AUTH_SERVER='AutoAD' - export PrismOpsServer='GTSPrismOpsLabUtilityServer' - export SeedPC='GTSseedPC.zp' - - export _external_nw_name="${1}" - args_required 'PE_HOST PC_LAUNCH' + args_required 'EMAIL PE_HOST PE_PASSWORD PC_VERSION' ssh_pubkey & # non-blocking, parallel suitable dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ @@ -65,7 +29,15 @@ case ${1} in && network_configure \ && authentication_source \ && pe_auth \ - && prism_pro_server_deploy + && prism_pro_server_deploy \ + && files_install \ + && sleep 30 \ + && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ + && sleep 30 \ + && file_analytics_install \ + && sleep 30 \ + && create_file_analytics_server \ + && sleep 30 if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ @@ -85,11 +57,10 @@ case ${1} in log "PE = https://${PE_HOST}:9440" log "PC = https://${PC_HOST}:9440" - files_install && sleep 30 - - create_file_server "${NW1_NAME}" "${NW2_NAME}" && sleep 30 - - file_analytics_install && sleep 30 && dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + deploy_peer_mgmt_server "${PMC}" \ + && deploy_peer_agent_server "${AGENTA}" \ + && deploy_peer_agent_server "${AGENTB}" + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & #dependencies 'remove' 'sshpass' finish fi @@ -99,36 +70,27 @@ case ${1} in log "Error ${_error}: in main functional chain, exit!" exit ${_error} fi + ;; PC | pc ) . 
lib.pc.sh + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" + export QCOW2_IMAGES=(\ - CentOS7.qcow2 \ Windows2016.qcow2 \ - Windows2012R2.qcow2 \ - Windows10-1709.qcow2 \ - ToolsVM.qcow2 \ + Win10v1903.qcow2 \ + WinToolsVM.qcow2 \ Linux_ToolsVM.qcow2 \ - ERA-Server-build-1.2.0.1.qcow2 \ - MSSQL-2016-VM.qcow2 \ - HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ - VeeamAvailability_1.0.457.vmdk \ - move-3.4.1.qcow2 \ - AutoXD.qcow2 \ + CentOS7.qcow2 \ ) export ISO_IMAGES=(\ - CentOS7.iso \ - Windows2016.iso \ - Windows2012R2.iso \ - Windows10.iso \ Nutanix-VirtIO-1.1.5.iso \ - SQLServer2014SP3.iso \ - Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ - VeeamBR_9.5.4.2615.Update4.iso \ ) - run_once dependencies 'install' 'jq' || exit 13 @@ -141,10 +103,6 @@ case ${1} in export NUCLEI_SERVER='localhost' export NUCLEI_USERNAME="${PRISM_ADMIN}" export NUCLEI_PASSWORD="${PE_PASSWORD}" - export BUCKETS_DNS_IP="${IPV4_PREFIX}.$((${OCTET[3]} + 25))" - export BUCKETS_VIP="${IPV4_PREFIX}.$((${OCTET[3]} + 26))" - export OBJECTS_NW_START="${IPV4_PREFIX}.$((${OCTET[3]} + 27))" - export OBJECTS_NW_END="${IPV4_PREFIX}.$((${OCTET[3]} + 30))" # nuclei -debug -username admin -server localhost -password x vm.list if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX @@ -175,17 +133,13 @@ case ${1} in ssp_auth \ && calm_enable \ - && karbon_enable \ && objects_enable \ && lcm \ + && pc_project \ && object_store \ - && karbon_image_download \ && images \ && flow_enable \ && pc_cluster_img_import \ - && seedPC \ - && pc_project \ - && upload_era_calm_blueprint \ && prism_check 'PC' log "Non-blocking functions (in development) follow." @@ -210,3 +164,30 @@ case ${1} in files_install ;; esac +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. 
lib.common.sh +. global.vars.sh +begin + +args_required 'PE_PASSWORD' + +case ${1} in + PE | pe ) + . lib.pe.sh + + args_required 'PE_HOST' + + dependencies 'install' 'jq' \ + && files_install + + log "PE = https://${PE_HOST}:9440" + ;; +esac + +finish diff --git a/stage_workshop.sh b/stage_workshop.sh index af306d4..77545dd 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -18,26 +18,20 @@ WORKSHOPS=(\ "Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Consolidated Storage Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Calm IaaS Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -#"Legacy Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -"Calm Bootcamp Test(AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Current" \ "Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 
5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ "In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"In Development SNC (1-Node) Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1 ) = Development" \ - -#"SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -#"Previous Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -#"Previous SNC (1-Node) Bootcamp Staging (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -#"Tech Summit 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Stable" \ -#"SNC_GTS 2020 (AOS 5.11.x/AHV PC 5.11.2.1) = Development" \ -#"Tech Summit 2019 (AOS 5.10+/AHV PC 5.10+) = Stable" \ + ) # Adjust function stage_clusters, below, for file/script mappings as needed function stage_clusters() { @@ -72,14 +66,9 @@ function stage_clusters() { _pe_launch='all_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Legacy Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Consolidated Storage Bootcamp" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='bootcamp.sh' - _pc_launch=${_pe_launch} - fi - if (( $(echo ${_workshop} | grep -i "^SNC" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='snc_bootcamp.sh' + _pe_launch='Consolidated_Storage_bootcamp.sh' _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i "^Basic / API Bootcamp" | wc ${WC_ARG}) > 0 )); then @@ -122,6 +111,11 @@ function stage_clusters() { _pe_launch='files_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^Objects" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='objects_bootcamp.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^Calm" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm_bootcamp.sh' @@ -132,9 +126,9 @@ function stage_clusters() { 
_pe_launch='calm_iaas_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Calm Bootcamp Test" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Karbon Bootcamp" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='calm_bootcamp_test.sh' + _pe_launch='karbon_bootcamp.sh' _pc_launch=${_pe_launch} fi if (( $(echo ${_workshop} | grep -i "^Citrix" | wc ${WC_ARG}) > 0 )); then @@ -167,21 +161,6 @@ function stage_clusters() { _pe_launch='dev_privatecloud_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^In Development SNC (1-Node) Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='dev_privatecloud_bootcamp.sh' - _pc_launch=${_pe_launch} - fi - if (( $(echo ${_workshop} | grep -i Summit | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='ts2020.sh' - _pc_launch=${_pe_launch} - fi - if (( $(echo ${_workshop} | grep -i "^SNC_GTS" | wc ${WC_ARG}) > 0 )); then - _libraries+='lib.pe.sh lib.pc.sh' - _pe_launch='snc_ts2020.sh' - _pc_launch=${_pe_launch} - fi dependencies 'install' 'sshpass' From 78ae7cf7eda98861b73935eb531084f05c27e81f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 17 Sep 2020 17:46:28 -0700 Subject: [PATCH 629/691] Update karbon_bootcamp.sh --- scripts/karbon_bootcamp.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 scripts/karbon_bootcamp.sh diff --git a/scripts/karbon_bootcamp.sh b/scripts/karbon_bootcamp.sh old mode 100644 new mode 100755 From 7fa8549a58a85449101e8adefbcbefccc664d5b0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 17 Sep 2020 18:08:34 -0700 Subject: [PATCH 630/691] Updates for AutoAD --- scripts/app_modernization_bootcamp.sh | 7 +------ scripts/cicd_bootcamp.sh | 6 +----- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/scripts/app_modernization_bootcamp.sh b/scripts/app_modernization_bootcamp.sh index 
464e48f..7aa5df2 100755 --- a/scripts/app_modernization_bootcamp.sh +++ b/scripts/app_modernization_bootcamp.sh @@ -18,7 +18,7 @@ case ${1} in PE | pe ) . lib.pe.sh - export AUTH_SERVER='AutoDC' + export AUTH_SERVER='AutoAD' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -62,11 +62,6 @@ case ${1} in PC | pc ) . lib.pc.sh - export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - export BUCKETS_VIP="${IPV4_PREFIX}.17" - export OBJECTS_NW_START="${IPV4_PREFIX}.18" - export OBJECTS_NW_END="${IPV4_PREFIX}.21" - export QCOW2_IMAGES=(\ Windows2016.qcow2 \ CentOS7.qcow2 \ diff --git a/scripts/cicd_bootcamp.sh b/scripts/cicd_bootcamp.sh index dd39b23..e891314 100755 --- a/scripts/cicd_bootcamp.sh +++ b/scripts/cicd_bootcamp.sh @@ -18,7 +18,7 @@ case ${1} in PE | pe ) . lib.pe.sh - export AUTH_SERVER='AutoDC' + export AUTH_SERVER='AutoAD' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -62,10 +62,6 @@ case ${1} in PC | pc ) . lib.pc.sh - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export QCOW2_IMAGES=(\ Windows2016.qcow2 \ From 837f9e9e914a60638ba75a42d8114091e41372e0 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 21 Sep 2020 13:48:40 -0700 Subject: [PATCH 631/691] updates --- scripts/dev_privatecloud_bootcamp.sh | 182 --------------------------- scripts/global.vars.sh | 16 +-- 2 files changed, 8 insertions(+), 190 deletions(-) delete mode 100755 scripts/dev_privatecloud_bootcamp.sh diff --git a/scripts/dev_privatecloud_bootcamp.sh b/scripts/dev_privatecloud_bootcamp.sh deleted file mode 100755 index e47f34c..0000000 --- a/scripts/dev_privatecloud_bootcamp.sh +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env bash - #-x - -#__main()__________ - -# Source Nutanix environment (PATH + aliases), then common routines + global variables -. /etc/profile.d/nutanix_env.sh -. 
lib.common.sh -. global.vars.sh -begin - -args_required 'EMAIL PE_PASSWORD PC_VERSION' - -#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization -# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! - -case ${1} in - PE | pe ) - . lib.pe.sh - - #export PC_DEV_VERSION='5.10.2' - #export PC_DEV_METAURL='http://10.42.8.50/images/pcdeploy-5.10.2.json' - #export PC_URL='http://10.42.8.50/images/euphrates-5.10.2-stable-prism_central.tar' - #export PC_DEV_METAURL='https://s3.amazonaws.com/get-ahv-images/pcdeploy-5.10.1.1.json' - #export PC_URL='https://s3.amazonaws.com/get-ahv-images/euphrates-5.10.1.1-stable-prism_central.tar' - #export FILES_VERSION='3.2.0.1' - #export FILES_METAURL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='http://10.42.8.50/images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - #export FILES_METAURL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable-metadata.json' - #export FILES_URL='https://s3.amazonaws.com/get-ahv-images/nutanix-afs-el7.3-release-afs-3.2.0.1-stable.qcow2' - - export AUTH_SERVER='AutoAD' - - export _external_nw_name="${1}" - - args_required 'PE_HOST PC_LAUNCH' - ssh_pubkey & # non-blocking, parallel suitable - - dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ - && pe_license \ - && pe_init \ - && network_configure \ - && secondary_network_SNC \ - && authentication_source \ - && pe_auth \ - && prism_pro_server_deploy \ - && files_install \ - && sleep 30 \ - && create_file_server "${NW1_NAME}" "${NW1_NAME}" \ - && sleep 30 \ - && file_analytics_install \ - && sleep 30 \ - && create_file_analytics_server \ - && sleep 30 - - if (( $? == 0 )) ; then - pc_install "${NW1_NAME}" \ - && prism_check 'PC' \ - - if (( $? 
== 0 )) ; then - _command="EMAIL=${EMAIL} \ - PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ - PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" - - cluster_check \ - && log "Remote asynchroneous PC Image import script... ${_command}" \ - && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & - - pc_configure \ - && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" - log "PE = https://${PE_HOST}:9440" - log "PC = https://${PC_HOST}:9440" - - #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & - #dependencies 'remove' 'sshpass' - finish - fi - else - finish - _error=18 - log "Error ${_error}: in main functional chain, exit!" - exit ${_error} - fi - ;; - PC | pc ) - . lib.pc.sh - - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" - - export QCOW2_IMAGES=(\ - Windows2016.qcow2 \ - CentOS7.qcow2 \ - WinToolsVM.qcow2 \ - Linux_ToolsVM.qcow2 \ - HYCU/Mine/HYCU-4.0.3-Demo.qcow2 \ - veeam/VeeamAHVProxy2.0.404.qcow2 \ - ) - export ISO_IMAGES=(\ - Nutanix-VirtIO-1.1.5.iso \ - veeam/VBR_10.0.0.4442.iso \ - ) - - - run_once - - dependencies 'install' 'jq' || exit 13 - - ssh_pubkey & # non-blocking, parallel suitable - - pc_passwd - ntnx_cmd # check cli services available? - - export NUCLEI_SERVER='localhost' - export NUCLEI_USERNAME="${PRISM_ADMIN}" - export NUCLEI_PASSWORD="${PE_PASSWORD}" - # nuclei -debug -username admin -server localhost -password x vm.list - - if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX - log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" - pe_determine ${1} - . 
global.vars.sh # re-populate PE_HOST dependencies - else - CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ - jq -r .data[0].clusterDetails.clusterName) - if [[ ${CLUSTER_NAME} != '' ]]; then - log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." - fi - fi - - if [[ ! -z "${2}" ]]; then # hidden bonus - log "Don't forget: $0 first.last@nutanixdc.local%password" - calm_update && exit 0 - fi - - export ATTEMPTS=2 - export SLEEP=10 - - pc_init \ - && pc_dns_add \ - && pc_ui \ - && pc_auth \ - && pc_smtp - - ssp_auth \ - && calm_enable \ - && karbon_enable \ - && objects_enable \ - && lcm \ - && pc_project \ - && object_store \ - && karbon_image_download \ - && flow_enable \ - && pc_cluster_img_import \ - && seedPC \ - && images \ - && prism_check 'PC' - - log "Non-blocking functions (in development) follow." - #pc_project - pc_admin - # ntnx_download 'AOS' # function in lib.common.sh - - unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD - - if (( $? == 0 )); then - #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ - #&& - log "PC = https://${PC_HOST}:9440" - finish - else - _error=19 - log "Error ${_error}: failed to reach PC!" 
- exit ${_error} - fi - ;; - FILES | files | afs ) - files_install - ;; -esac diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4b00721..0b70e5c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -298,8 +298,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.8.0.1.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.8.0.1.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.16.1.2.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.7.0.json' @@ -337,8 +337,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.8.0.1.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.8.0.1.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' @@ -376,8 +376,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.38 ) # HPOC us-west = PHX 
1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.8.0.1.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.8.0.1.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.16.1.2.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' @@ -420,8 +420,8 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.136 ) # HPOC us-west = BLR PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.8.0.1.json' PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.8.0.1.tar' - PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.16.1.2.json' - PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.16.1.2-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' + PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.7.0.json' From cb17fe97977f326c582dc8ad4bac66cf434b3d65 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 1 Oct 2020 20:12:37 -0700 Subject: [PATCH 632/691] Updatesd for pc.2020.9 --- scripts/all_bootcamp.sh | 2 +- scripts/frame_bootcamp.sh | 3 +-- stage_workshop.sh | 42 +++++++++++++++++++-------------------- 3 files changed, 23 insertions(+), 24 deletions(-) diff --git 
a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 80c93e4..5fec5d9 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -103,7 +103,7 @@ case ${1} in export ISO_IMAGES=(\ Nutanix-VirtIO-1.1.5.iso \ Windows2016.iso \ - FrameCCA-2.1.6.iso \ + FrameCCA-3.0.0.iso \ FrameGuestAgentInstaller_1.0.2.7.iso \ Nutanix-VirtIO-1.1.5.iso \ veeam/VBR_10.0.0.4442.iso \ diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index f805c94..d0c3016 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -91,8 +91,7 @@ case ${1} in WinToolsVM.qcow2 \ ) export ISO_IMAGES=(\ - FrameCCA-2.1.0.iso \ - FrameCCA-2.1.6.iso \ + FrameCCA-3.0.0.iso \ FrameGuestAgentInstaller_1.0.2.7.iso \ Nutanix-VirtIO-1.1.5.iso \ ) diff --git a/stage_workshop.sh b/stage_workshop.sh index 77545dd..5566cea 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -11,26 +11,26 @@ begin # - Calm || Bootcamp || Citrix || Summit # - PC #.# WORKSHOPS=(\ -"Basic / API Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Private Cloud Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Leap Add-On Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Databases Era with MSSQL Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Consolidated Storage Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Calm IaaS Bootcamp (AOS 
5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ -"In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.8.0.1) = Development" \ +"Basic / API Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Private Cloud Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Leap Add-On Bootcamp (AOS AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Databases Era with MSSQL Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Databases Era with Oracle Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Databases Era with Postgres Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Databases Era -Stage All- Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Consolidated Storage Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Files Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Calm IaaS Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ 
+"Calm Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Frame Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Citrix Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Private Cloud Splunk on AHV with Objects Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -51,7 +51,7 @@ function stage_clusters() { # Map to latest and greatest of each point release # Metadata URLs MUST be specified in lib.common.sh function: ntnx_download # TODO: make WORKSHOPS and map a JSON configuration file? 
- if (( $(echo ${_workshop} | grep -i "PC 2020.8.0.1" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "PC 2020.9" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" From 997e4c3c195def7caa6dfa4d57c14b485afa70da Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 1 Oct 2020 20:14:16 -0700 Subject: [PATCH 633/691] Update global.vars.sh --- scripts/global.vars.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 0b70e5c..b0bf365 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,7 +2,7 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='pc.2020.8.0.1' +PC_DEV_VERSION='pc.2020.9' PC_CURRENT_VERSION='5.17.0.3' PC_STABLE_VERSION='5.11.2.1' FILES_VERSION='3.7.0' @@ -296,8 +296,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.8.0.1.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.8.0.1.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.9.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' @@ -335,8 +335,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.8.0.1.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.8.0.1.tar' + 
PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -374,8 +374,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.8.0.1.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.8.0.1.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' @@ -418,8 +418,8 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.8.0.1.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.8.0.1.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.9.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' From 14c766c14c830a2f434fdfa012ea658a4384d6fd Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 2 Oct 2020 10:55:56 -0700 Subject: [PATCH 634/691] Auth Updates --- scripts/lib.pc.sh | 48 
+++++++++++++++++++++++++++-------------------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index ca271e0..af26ca6 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -435,33 +435,41 @@ function pc_auth() { log "Add Directory ${AUTH_DOMAIN}" _http_body=$(cat <= 5.9" - if (( ${_pc_version[0]} >= 5 && ${_pc_version[1]} >= 9 )); then - _http_body+=$(cat < Date: Fri, 2 Oct 2020 14:47:02 -0700 Subject: [PATCH 635/691] Update lib.pc.sh --- scripts/lib.pc.sh | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index af26ca6..3829795 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -472,22 +472,19 @@ EOF log "directories: _task_id=|${_task_id}|_http_body=|${_http_body}|" log "Add Role Mappings to Groups for PC logins (not projects, which are separate)..." - #TODO:20 hardcoded role mapping - #groups=('SSP Admins' 'SSP Developers' 'SSP Consumers' 'SSP Operators' 'SSP Custom' 'Bootcamp Users') - groups=('SSP Admins') - for _group in "${groups[@]}"; do + _http_body=$(cat < Date: Fri, 2 Oct 2020 19:29:14 -0700 Subject: [PATCH 636/691] Update lib.pc.sh --- scripts/lib.pc.sh | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 3829795..b4241d0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -475,16 +475,31 @@ EOF _http_body=$(cat < Date: Mon, 5 Oct 2020 20:40:36 -0700 Subject: [PATCH 637/691] Role Mapping --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b4241d0..93bb745 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -474,7 +474,7 @@ EOF log "Add Role Mappings to Groups for PC logins (not projects, which are separate)..." 
_http_body=$(cat < Date: Mon, 5 Oct 2020 22:46:11 -0700 Subject: [PATCH 638/691] Update lib.pc.sh --- scripts/lib.pc.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 93bb745..9c0466e 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -433,6 +433,8 @@ function pc_auth() { # TODO:50 FUTURE: pass AUTH_SERVER argument +set -x + log "Add Directory ${AUTH_DOMAIN}" _http_body=$(cat < Date: Tue, 6 Oct 2020 14:27:14 -0700 Subject: [PATCH 639/691] Update lib.pc.sh --- scripts/lib.pc.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9c0466e..9403593 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -462,14 +462,14 @@ EOF log "Task uuid for the Auth Source Create is " $_task_id " ....." - if [ -z "$_task_id" ]; then - log "Auth Source Create has encountered an error..." - else - log "Auth Source Create started.." - set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run - # Run the progess checker - loop - fi + #if [ -z "$_task_id" ]; then + # log "Auth Source Create has encountered an error..." + #else + # log "Auth Source Create started.." 
+ # set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + # # Run the progess checker + # loop + #fi log "directories: _task_id=|${_task_id}|_http_body=|${_http_body}|" From 51f7d8136a76f8e851187fd5fc6e47ac3bc5d950 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 6 Oct 2020 21:16:47 -0700 Subject: [PATCH 640/691] Update lib.pc.sh --- scripts/lib.pc.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9403593..598904f 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -458,18 +458,18 @@ set -x EOF ) - _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services) + _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services | jq -r 'status.execution_context.task_uuid' | tr -d \") log "Task uuid for the Auth Source Create is " $_task_id " ....." - #if [ -z "$_task_id" ]; then - # log "Auth Source Create has encountered an error..." - #else - # log "Auth Source Create started.." - # set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run - # # Run the progess checker - # loop - #fi + if [ -z "$_task_id" ]; then + log "Auth Source Create has encountered an error..." + else + log "Auth Source Create started.." 
+ set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + # Run the progess checker + loop + fi log "directories: _task_id=|${_task_id}|_http_body=|${_http_body}|" From c2b156bcbbd631a9a68391cc01f9b2cc727dd10d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 7 Oct 2020 16:11:50 -0700 Subject: [PATCH 641/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 598904f..ec938eb 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -458,7 +458,7 @@ set -x EOF ) - _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services | jq -r 'status.execution_context.task_uuid' | tr -d \") + _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services | jq -r '.status.execution_context.task_uuid' | tr -d \") log "Task uuid for the Auth Source Create is " $_task_id " ....." 
From 634ead3668eaeb4fdc0fcb1bc185aa65ef21fe7c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 7 Oct 2020 20:19:19 -0700 Subject: [PATCH 642/691] updates --- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 20 +++++++++++--------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index b0bf365..bc29730 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -281,7 +281,7 @@ SMTP_SERVER_ADDRESS='mxb-002c1b01.gslb.pphosted.com' SMTP_SERVER_FROM='NutanixHostedPOC@nutanix.com' SMTP_SERVER_PORT=25 -AUTH_SERVER='AutoDC' # default; TODO:180 refactor AUTH_SERVER choice to input file +AUTH_SERVER='AutoAD' # default; TODO:180 refactor AUTH_SERVER choice to input file AUTH_HOST="${IPV4_PREFIX}.$((${OCTET[3]} + 4))" LDAP_PORT=389 AUTH_FQDN='ntnxlab.local' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index ec938eb..66341af 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -460,18 +460,20 @@ EOF _task_id=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X POST --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services | jq -r '.status.execution_context.task_uuid' | tr -d \") - log "Task uuid for the Auth Source Create is " $_task_id " ....." + #log "Task uuid for the Auth Source Create is " $_task_id " ....." - if [ -z "$_task_id" ]; then - log "Auth Source Create has encountered an error..." - else - log "Auth Source Create started.." - set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run + #if [ -z "$_task_id" ]; then + # log "Auth Source Create has encountered an error..." + #else + # log "Auth Source Create started.." 
+ # set _loops=0 # Reset the loop counter so we restart the amount of loops we need to run # Run the progess checker - loop - fi + # loop + #fi + + #log "directories: _task_id=|${_task_id}|_http_body=|${_http_body}|" - log "directories: _task_id=|${_task_id}|_http_body=|${_http_body}|" + sleep 60 log "Add Role Mappings to Groups for PC logins (not projects, which are separate)..." From 486a3e8112d0a1b55fec3308dbda146b6c17bd5f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 9 Oct 2020 12:39:08 -0700 Subject: [PATCH 643/691] updatres for POC Workshop base Staging --- scripts/poc_workshop_base_staging.sh | 139 +++++++++++++++++++++++++++ stage_workshop.sh | 6 ++ 2 files changed, 145 insertions(+) create mode 100755 scripts/poc_workshop_base_staging.sh diff --git a/scripts/poc_workshop_base_staging.sh b/scripts/poc_workshop_base_staging.sh new file mode 100755 index 0000000..7a9c4c7 --- /dev/null +++ b/scripts/poc_workshop_base_staging.sh @@ -0,0 +1,139 @@ +#!/usr/bin/env bash +# -x + +#__main()__________ + +# Source Nutanix environment (PATH + aliases), then common routines + global variables +. /etc/profile.d/nutanix_env.sh +. lib.common.sh +. global.vars.sh +begin + +args_required 'EMAIL PE_PASSWORD PC_VERSION' + +#dependencies 'install' 'jq' && ntnx_download 'PC' & #attempt at parallelization +# Some parallelization possible to critical path; not much: would require pre-requestite checks to work! + +case ${1} in + PE | pe ) + . lib.pe.sh + + export AUTH_SERVER='AutoAD' + + args_required 'PE_HOST PC_LAUNCH' + ssh_pubkey & # non-blocking, parallel suitable + + dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ + && pe_license \ + && pe_init \ + && network_configure \ + && authentication_source \ + && pe_auth \ + + if (( $? == 0 )) ; then + pc_install "${NW1_NAME}" \ + && prism_check 'PC' \ + + if (( $? 
== 0 )) ; then + _command="EMAIL=${EMAIL} \ + PC_HOST=${PC_HOST} PE_HOST=${PE_HOST} PE_PASSWORD=${PE_PASSWORD} \ + PC_LAUNCH=${PC_LAUNCH} PC_VERSION=${PC_VERSION} nohup bash ${HOME}/${PC_LAUNCH} IMAGES" + + cluster_check \ + && log "Remote asynchroneous PC Image import script... ${_command}" \ + && remote_exec 'ssh' 'PC' "${_command} >> ${HOME}/${PC_LAUNCH%%.sh}.log 2>&1 &" & + + pc_configure \ + && log "PC Configuration complete: Waiting for PC deployment to complete, API is up!" + log "PE = https://${PE_HOST}:9440" + log "PC = https://${PC_HOST}:9440" + + #&& dependencies 'remove' 'jq' & # parallel, optional. Versus: $0 'files' & + #dependencies 'remove' 'sshpass' + finish + fi + else + finish + _error=18 + log "Error ${_error}: in main functional chain, exit!" + exit ${_error} + fi + ;; + PC | pc ) + . lib.pc.sh + + #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" + #export BUCKETS_VIP="${IPV4_PREFIX}.17" + #export OBJECTS_NW_START="${IPV4_PREFIX}.18" + #export OBJECTS_NW_END="${IPV4_PREFIX}.21" + + export QCOW2_IMAGES=(\ + + ) + export ISO_IMAGES=(\ + + ) + + run_once + + dependencies 'install' 'jq' || exit 13 + + ssh_pubkey & # non-blocking, parallel suitable + + pc_passwd + ntnx_cmd # check cli services available? + + export NUCLEI_SERVER='localhost' + export NUCLEI_USERNAME="${PRISM_ADMIN}" + export NUCLEI_PASSWORD="${PE_PASSWORD}" + # nuclei -debug -username admin -server localhost -password x vm.list + + if [[ -z "${PE_HOST}" ]]; then # -z ${CLUSTER_NAME} || #TOFIX + log "CLUSTER_NAME=|${CLUSTER_NAME}|, PE_HOST=|${PE_HOST}|" + pe_determine ${1} + . global.vars.sh # re-populate PE_HOST dependencies + else + CLUSTER_NAME=$(ncli --json=true multicluster get-cluster-state | \ + jq -r .data[0].clusterDetails.clusterName) + if [[ ${CLUSTER_NAME} != '' ]]; then + log "INFO: ncli multicluster get-cluster-state looks good for ${CLUSTER_NAME}." + fi + fi + + if [[ ! 
-z "${2}" ]]; then # hidden bonus + log "Don't forget: $0 first.last@nutanixdc.local%password" + calm_update && exit 0 + fi + + export ATTEMPTS=2 + export SLEEP=10 + + pc_init \ + && pc_dns_add \ + && pc_ui \ + && pc_auth \ + && pc_smtp + + ssp_auth \ + && calm_enable \ + && lcm \ + && prism_check 'PC' + + log "Non-blocking functions (in development) follow." + + + unset NUCLEI_SERVER NUCLEI_USERNAME NUCLEI_PASSWORD + + if (( $? == 0 )); then + #dependencies 'remove' 'sshpass' && dependencies 'remove' 'jq' \ + #&& + log "PC = https://${PC_HOST}:9440" + finish + else + _error=19 + log "Error ${_error}: failed to reach PC!" + exit ${_error} + fi + ;; + +esac diff --git a/stage_workshop.sh b/stage_workshop.sh index 5566cea..6285157 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -30,6 +30,7 @@ WORKSHOPS=(\ "Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ "Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"POC Workshop Base Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ "In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -156,6 +157,11 @@ function stage_clusters() { _pe_launch='cicd_bootcamp.sh' _pc_launch=${_pe_launch} fi + if (( $(echo ${_workshop} | grep -i "^POC Workshop Base Staging" | wc ${WC_ARG}) > 0 )); then + _libraries+='lib.pe.sh lib.pc.sh' + _pe_launch='poc_workshop_base_staging.sh' + _pc_launch=${_pe_launch} + fi if (( $(echo ${_workshop} | grep -i "^In Development Bootcamp Staging" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='dev_privatecloud_bootcamp.sh' From 44d36f28068cd156e65588271eacf91d0830a4a4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 
16 Oct 2020 10:23:14 -0700 Subject: [PATCH 644/691] updates for deploying DB Source VMs --- scripts/era_mssql_bootcamp.sh | 11 ---- scripts/era_oracle_bootcamp.sh | 11 ---- scripts/lib.pe.sh | 91 +++++++++++++++++++--------------- 3 files changed, 52 insertions(+), 61 deletions(-) diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 65fbc0c..21ef015 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -24,7 +24,6 @@ case ${1} in export NW2_DHCP_START="${IPV4_PREFIX}.132" export NW2_DHCP_END="${IPV4_PREFIX}.219" - args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -71,13 +70,7 @@ case ${1} in PC | pc ) . lib.pc.sh - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" - export _prio_images_arr=(\ - #ERA-Server-build-1.2.1.qcow2 \ ) export QCOW2_IMAGES=(\ @@ -138,7 +131,6 @@ case ${1} in && flow_enable \ && pc_cluster_img_import \ && configure_era \ - && clone_mssql_source_vms \ && prism_check 'PC' log "Non-blocking functions (in development) follow." @@ -159,7 +151,4 @@ case ${1} in exit ${_error} fi ;; - FILES | files | afs ) - files_install - ;; esac diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index b57c9fd..a7f8230 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -37,8 +37,6 @@ case ${1} in && deploy_era \ && deploy_oracle_19c - - if (( $? == 0 )) ; then pc_install "${NW1_NAME}" \ && prism_check 'PC' \ @@ -72,13 +70,7 @@ case ${1} in PC | pc ) . 
lib.pc.sh - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" - export _prio_images_arr=(\ - ERA-Server-build-1.2.1.qcow2 \ ) export QCOW2_IMAGES=(\ @@ -159,7 +151,4 @@ case ${1} in exit ${_error} fi ;; - FILES | files | afs ) - files_install - ;; esac diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 66f2b3b..0f7fb49 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1073,18 +1073,20 @@ function deploy_mssql() { log "Image found, assuming ready. Skipping ${MSSQL_SourceVM} import." fi - echo "## SQLVM_Creation_INPROGRESS ##" - acli "vm.create ${MSSQL_SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" - acli "vm.disk_create ${MSSQL_SourceVM} clone_from_image=${MSSQL_SourceVM_Image1}" - acli "vm.disk_create ${MSSQL_SourceVM} clone_from_image=${MSSQL_SourceVM_Image2}" - acli "vm.nic_create ${MSSQL_SourceVM} network=${NW1_NAME}" - echo "## ${MSSQL_SourceVM} - Powering On ##" - acli "vm.on ${MSSQL_SourceVM}" - echo "## SQLVM_Creation_COMPLETE ##" - - #echo "## SQLVM_Clone_Creation_INPROGRESS ##" - #acli "vm.clone $MSSQL_SourceVM_User[01..$num_sql_vms] clone_from_vm=${MSSQL_SourceVM}" - #echo "## SQLVM_Clone_Creation_COMPLETE ##" + for _user in "${USERS[@]}" ; do + + SourceVM="${_user}_${MSSQL_SourceVM}" + + echo "## ${SourceVM} Creation_INPROGRESS ##" + acli "vm.create ${SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" + acli "vm.disk_create ${ourceVM} clone_from_image=${MSSQL_SourceVM_Image1}" + acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image2}" + acli "vm.nic_create ${SourceVM} network=${NW1_NAME}" + echo "## ${SourceVM} - Powering On ##" + acli "vm.on ${SourceVM}" + echo "## ${SourceVM} Creation_COMPLETE ##" + + done } @@ -1107,20 +1109,25 @@ function deploy_oracle_12c() { log "Image found, assuming ready. Skipping ${Oracle_SourceVM} import." 
fi - echo "## Oracle12cVM_Creation_INPROGRESS ##" - acli "vm.create ${Oracle_12c_SourceVM} memory=4G num_cores_per_vcpu=2 num_vcpus=2" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_BootImage}" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image1}" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image2}" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image3}" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image4}" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image5}" - acli "vm.disk_create ${Oracle_12c_SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image6}" - acli "vm.nic_create ${Oracle_12c_SourceVM} network=${NW1_NAME}" - echo "## ${Oracle_12c_SourceVM} - Powering On ##" - acli "vm.on ${Oracle_12c_SourceVM}" - echo "### Oracle12cVM_Creation_COMPLETE ##" + for _user in "${USERS[@]}" ; do + SourceVM="${_user}_${Oracle_12c_SourceVM}" + + echo "## Oracle12cVM_Creation_INPROGRESS ##" + acli "vm.create ${SourceVM} memory=4G num_cores_per_vcpu=2 num_vcpus=2" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_BootImage}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image1}" + acli "vm.disk_create ${SourceVM}clone_from_image=${Oracle_12c_SourceVM_Image2}" + acli "vm.disk_create ${SourceVM}clone_from_image=${Oracle_12c_SourceVM_Image3}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image4}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image5}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image6}" + acli "vm.nic_create ${SourceVM} network=${NW1_NAME}" + echo "## ${SourceVM} - Powering On ##" + acli "vm.on ${SourceVM}" + echo "### ${SourceVM} Creation_COMPLETE ##" + +done } @@ -1146,22 +1153,28 @@ function 
deploy_oracle_19c() { log "Image found, assuming ready. Skipping ${Oracle_19c_SourceVM_BootImage} import." fi + for _user in "${USERS[@]}" ; do + + SourceVM="${_user}_${Oracle_19c_SourceVM}" + echo "## Oracle19cVM_Creation_INPROGRESS ##" acli "vm.create ${Oracle_19c_SourceVM} memory=8G num_cores_per_vcpu=1 num_vcpus=2" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_BootImage}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image1}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image2}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image3}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image4}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image5}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image6}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image7}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image8}" - acli "vm.disk_create ${Oracle_19c_SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image9}" - acli "vm.nic_create ${Oracle_19c_SourceVM} network=${NW1_NAME}" - echo "## ${Oracle_19c_SourceVM} - Powering On ##" - acli "vm.on ${Oracle_19c_SourceVM}" - echo "### Oracle19cVM_Creation_COMPLETE ##" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_BootImage}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image1}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image2}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image3}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image4}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image5}" + acli "vm.disk_create 
${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image6}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image7}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image8}" + acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image9}" + acli "vm.nic_create ${SourceVM} network=${NW1_NAME}" + echo "## ${SourceVM} - Powering On ##" + acli "vm.on ${SourceVM}" + echo "### ${SourceVM} Creation_COMPLETE ##" + +done } From a689b7bde746b1fa576e25d7d06f35576fd7f960 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 16 Oct 2020 14:10:22 -0700 Subject: [PATCH 645/691] updates for 2020.9 --- scripts/global.vars.sh | 38 +++++++++++++++++++------------------- stage_workshop.sh | 4 ++-- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index bc29730..ac75cb2 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -3,8 +3,8 @@ # shellcheck disable=SC2034 RELEASE='release.json' PC_DEV_VERSION='pc.2020.9' -PC_CURRENT_VERSION='5.17.0.3' -PC_STABLE_VERSION='5.11.2.1' +PC_CURRENT_VERSION='pc.2020.9' +PC_STABLE_VERSION='5.17.0.3' FILES_VERSION='3.7.0' FILE_ANALYTICS_VERSION='2.1.1.1' NTNX_INIT_PASSWORD='nutanix/4u' @@ -50,7 +50,7 @@ USERS=(\ ################################################### ERA_Blueprint='EraServerDeployment.json' -ERAServerImage='ERA-Server-build-1.3.1.qcow2' +ERAServerImage='ERA-Server-build-2.0.0.qcow2' ERAServerName='EraServer' ERA_USER="admin" #ERA_PASSWORD="nutanix/4u" @@ -298,10 +298,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.9.json' PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' - PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' - PC_CURRENT_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - 
PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.11.2.1.json' - PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.9.json' + PC_CURRENT_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' + PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' + PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.7.0.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' @@ -337,10 +337,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.42 ) # HPOC us-west = PHX PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' 
FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' @@ -376,10 +376,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.38 ) # HPOC us-west = PHX 1-Node Clusters PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' - PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' - PC_CURRENT_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.11.2.1.json' - PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' + PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' + PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' + PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' @@ -420,10 +420,10 @@ case "${OCTET[0]}.${OCTET[1]}" in 10.136 ) # HPOC us-west = BLR PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.9.json' PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' - PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' - PC_CURRENT_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.11.2.1.json' - PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.11.2.1-stable-prism_central.tar' + 
PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.9.json' + PC_CURRENT_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' + PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' + PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.7.0.json' FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' diff --git a/stage_workshop.sh b/stage_workshop.sh index 6285157..4e78275 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -54,9 +54,9 @@ function stage_clusters() { # TODO: make WORKSHOPS and map a JSON configuration file? if (( $(echo ${_workshop} | grep -i "PC 2020.9" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_DEV_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 2020.9" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_CURRENT_VERSION}" - elif (( $(echo ${_workshop} | grep -i "PC 5.11.2.1" | wc ${WC_ARG}) > 0 )); then + elif (( $(echo ${_workshop} | grep -i "PC 5.17.0.3" | wc ${WC_ARG}) > 0 )); then export PC_VERSION="${PC_STABLE_VERSION}" fi From bd47a280aeae94abb35a6c1d35fb2d7c2573c981 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 16 Oct 2020 14:43:01 -0700 Subject: [PATCH 646/691] Updates Era Staging --- scripts/era_bootcamp.sh | 1 - scripts/era_mssql_bootcamp.sh | 1 - scripts/era_oracle_bootcamp.sh | 1 - scripts/era_postgres_bootcamp.sh | 1 - 4 files changed, 4 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index c75718b..49ed244 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -138,7 +138,6 @@ case ${1} in ssp_auth \ && calm_enable \ && lcm \ - && pc_project \ && priority_images 
\ && images \ && flow_enable \ diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 21ef015..6d1e16a 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -125,7 +125,6 @@ case ${1} in && calm_enable \ && lcm \ && sleep 30 \ - && pc_project \ && priority_images \ && images \ && flow_enable \ diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index a7f8230..da2b762 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -125,7 +125,6 @@ case ${1} in && calm_enable \ && lcm \ && sleep 30 \ - && pc_project \ && priority_images \ && images \ && flow_enable \ diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index 99c1078..ac0dadd 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -132,7 +132,6 @@ case ${1} in && calm_enable \ && lcm \ && sleep 30 \ - && pc_project \ && priority_images \ && images \ && flow_enable \ From 422c41bb1ec30beb729dd311ed97bbb01fb0fb2d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 16 Oct 2020 14:50:31 -0700 Subject: [PATCH 647/691] Updates for Era --- scripts/lib.pe.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 0f7fb49..7ab1eb5 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1113,7 +1113,7 @@ function deploy_oracle_12c() { SourceVM="${_user}_${Oracle_12c_SourceVM}" - echo "## Oracle12cVM_Creation_INPROGRESS ##" + echo "## ${SourceVM} Creation_INPROGRESS ##" acli "vm.create ${SourceVM} memory=4G num_cores_per_vcpu=2 num_vcpus=2" acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_BootImage}" acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_12c_SourceVM_Image1}" @@ -1127,7 +1127,7 @@ function deploy_oracle_12c() { acli "vm.on ${SourceVM}" echo "### ${SourceVM} Creation_COMPLETE ##" -done + done } @@ -1157,8 +1157,8 @@ function deploy_oracle_19c() { 
SourceVM="${_user}_${Oracle_19c_SourceVM}" - echo "## Oracle19cVM_Creation_INPROGRESS ##" - acli "vm.create ${Oracle_19c_SourceVM} memory=8G num_cores_per_vcpu=1 num_vcpus=2" + echo "## ${SourceVM} Creation_INPROGRESS ##" + acli "vm.create ${SourceVM} memory=8G num_cores_per_vcpu=1 num_vcpus=2" acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_BootImage}" acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image1}" acli "vm.disk_create ${SourceVM} clone_from_image=${Oracle_19c_SourceVM_Image2}" @@ -1174,7 +1174,7 @@ function deploy_oracle_19c() { acli "vm.on ${SourceVM}" echo "### ${SourceVM} Creation_COMPLETE ##" -done + done } From b56329fd70b70d5ffb84e117319578df79ec6e15 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 16 Oct 2020 16:30:27 -0700 Subject: [PATCH 648/691] Update era_bootcamp.sh --- scripts/era_bootcamp.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 49ed244..8cc948d 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -78,7 +78,6 @@ case ${1} in #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ - ERA-Server-build-1.2.1.qcow2 \ ) export QCOW2_IMAGES=(\ From f9404e47adfe52aebc632ac16ff5bb321a729b6d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 19 Oct 2020 09:56:03 -0700 Subject: [PATCH 649/691] Update stage_workshop.sh --- stage_workshop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stage_workshop.sh b/stage_workshop.sh index 4e78275..c35213d 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -117,7 +117,7 @@ function stage_clusters() { _pe_launch='objects_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Calm" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Calm Bootcamp" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm_bootcamp.sh' _pc_launch=${_pe_launch} From 
908d5e956ad6a84a023a006a54536cd9bddec19b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 20 Oct 2020 12:56:29 -0700 Subject: [PATCH 650/691] AutoAD Test --- scripts/frame_bootcamp.sh | 5 ----- scripts/global.vars.sh | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/scripts/frame_bootcamp.sh b/scripts/frame_bootcamp.sh index d0c3016..a6ff49f 100755 --- a/scripts/frame_bootcamp.sh +++ b/scripts/frame_bootcamp.sh @@ -80,11 +80,6 @@ case ${1} in PC | pc ) . lib.pc.sh - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" - export QCOW2_IMAGES=(\ Windows2016.qcow2 \ Win10v1903.qcow2 \ diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index ac75cb2..29c532c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -363,7 +363,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) AUTOAD_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ + 'http://10.42.194.11/workshop_staging/AutoAD_10192020.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' From 0b3a5a535990930bf42a8f5dac93037a95f858eb Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 20 Oct 2020 13:14:17 -0700 Subject: [PATCH 651/691] Update global.vars.sh --- scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 29c532c..6333c65 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -363,7 +363,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) AUTOAD_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoAD_10192020.qcow2' \ + 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) 
PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' @@ -402,7 +402,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) AUTOAD_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ + 'http://10.42.194.11/workshop_staging/AutoAD_10192020.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' From f6c5dad77c6c3185208f9a267ad5f56707912c6a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 21 Oct 2020 09:38:46 -0700 Subject: [PATCH 652/691] updates to test new SeedPC --- scripts/global.vars.sh | 2 +- scripts/lib.pc.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6333c65..0aae9ed 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -17,7 +17,7 @@ STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 PrismOpsServer='PrismOpsLabUtilityServer' -SeedPC='seedPC.zip' +SeedPC='seedPC_10202020.zip' CALM_RSA_KEY_FILE='calm_rsa_key.env' Citrix_Blueprint='CitrixBootcampInfra.json' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 66341af..9aa76c0 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -651,8 +651,8 @@ function seedPC() { unzip /home/nutanix/${SeedPC} pushd /home/nutanix/lab/ - #_setup=$(/home/nutanix/lab/setupEnv.sh ${PC_HOST} > /dev/null 2>&1) - _setup=$(/home/nutanix/lab/initialize_lab.sh ${PC_HOST} > /dev/null 2>&1) + #_setup=$(/home/nutanix/lab/initialize_lab.sh ${PC_HOST} > /dev/null 2>&1) + _setup=$(/home/nutanix/lab/initialize_lab.sh ${PC_HOST} admin ${PE_PASSWORD} ${PE_HOST} nutanix ${PE_PASSWORD} > /dev/null 2>&1) log "Running Setup Script|$_setup" popd From c05ff22306fcd62d75765cef6a61317a8b15a75e Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 21 Oct 2020 09:44:52 -0700 Subject: [PATCH 653/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/scripts/global.vars.sh b/scripts/global.vars.sh index 0aae9ed..c7da79c 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -402,7 +402,7 @@ case "${OCTET[0]}.${OCTET[1]}" in 'https://s3.amazonaws.com/get-ahv-images/AutoDC2.qcow2' \ ) AUTOAD_REPOS=(\ - 'http://10.42.194.11/workshop_staging/AutoAD_10192020.qcow2' \ + 'http://10.42.194.11/workshop_staging/AutoAD.qcow2' \ 'https://s3.amazonaws.com/get-ahv-images/AutoAD.qcow2' \ ) PC_DATA='http://10.42.194.11/workshop_staging/seedPC.zip' From 974223840686dcf703c1b6dd758d0dae6978ea02 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 21 Oct 2020 11:19:24 -0700 Subject: [PATCH 654/691] Update global.vars.sh --- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index c7da79c..28e3420 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -16,7 +16,7 @@ STORAGE_IMAGES='Images' STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 -PrismOpsServer='PrismOpsLabUtilityServer' +PrismOpsServer='PrismOpsLabUtilityServer_20208' SeedPC='seedPC_10202020.zip' CALM_RSA_KEY_FILE='calm_rsa_key.env' @@ -88,8 +88,8 @@ Oracle_19c_SourceVM_Image9="19c-disk9" # 3rd Party images used at GTS or Add-On Labs ################################################### #Peer Software -PeerMgmtServer='Windows2016-PeerMgmt-18feb20' -PeerAgentServer='Windows2016-PeerAgent-18feb20' +PeerMgmtServer='Windows2016-PeerMgmt-30sep20' +PeerAgentServer='Windows2016-PeerAgent-30sep20' PMC="PeerMgmt" AGENTA="PeerAgent-Files" AGENTB="PeerAgent-Win" From 704476535cddbe599aaf1db6f922687f35ecb7c5 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 21 Oct 2020 11:28:27 -0700 Subject: [PATCH 655/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 5fec5d9..0490e53 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh 
@@ -80,20 +80,15 @@ case ${1} in PC | pc ) . lib.pc.sh - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" - export _prio_images_arr=(\ - Windows2016_05272020.qcow2 \ + Windows2016_10202020.qcow2 \ Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ ) export QCOW2_IMAGES=(\ CentOS7_05272020.qcow2 \ - Win10v1903_05272020.qcow2 \ - Win10v1909_05272020.qcow2 \ + Windows2019.qcow2 \ + Win10v1909_10192020.qcow2 \ WinTools_05272020.qcow2 \ Linux_ToolsVM_05272020.qcow2 LinuxMint_ToolsVM.qcow2 \ From 14bee336ed67334d329f433671132d414597e62c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 21 Oct 2020 18:50:35 -0700 Subject: [PATCH 656/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 7ab1eb5..02a92fe 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1079,7 +1079,7 @@ function deploy_mssql() { echo "## ${SourceVM} Creation_INPROGRESS ##" acli "vm.create ${SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" - acli "vm.disk_create ${ourceVM} clone_from_image=${MSSQL_SourceVM_Image1}" + acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image1}" acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image2}" acli "vm.nic_create ${SourceVM} network=${NW1_NAME}" echo "## ${SourceVM} - Powering On ##" From e9f1f171fa0ac84f64a2ab5694964c9d7ad31501 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 22 Oct 2020 12:31:01 -0700 Subject: [PATCH 657/691] Update global.vars.sh --- scripts/global.vars.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 28e3420..6cfbc17 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -16,8 +16,8 @@ STORAGE_IMAGES='Images' STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 
-PrismOpsServer='PrismOpsLabUtilityServer_20208' -SeedPC='seedPC_10202020.zip' +PrismOpsServer='PrismOpsLabUtilityServer' +SeedPC='seedPC.zip' CALM_RSA_KEY_FILE='calm_rsa_key.env' Citrix_Blueprint='CitrixBootcampInfra.json' @@ -59,7 +59,7 @@ ERA_Default_PASSWORD="Nutanix/4u" ERA_NETWORK="Secondary" ERA_Container_RF="2" -MSSQL_SourceVM="Win2016SQLSource" +MSSQL_SourceVM="MSSQLSourceVM" MSSQL_SourceVM_Image1="MSSQL_1" MSSQL_SourceVM_Image2="MSSQL_2" From f48531e04b1865137c1cb23a8180241ae555ec2c Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 22 Oct 2020 13:16:54 -0700 Subject: [PATCH 658/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6cfbc17..f569612 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -33,7 +33,7 @@ SSH_OPTS='-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserK SSH_OPTS+=' -q' # -v' #################################################### -# Users for Tools VMs and Source VM Clones +# Users for Tools VMs and Source VMs ################################################### USERS=(\ From 0e79d3c8ff6977fa372ab1a30a03f59db500d430 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 22 Oct 2020 13:38:34 -0700 Subject: [PATCH 659/691] updates for POC Workshop staging --- scripts/citrix_bootcamp.sh | 6 ++--- scripts/lib.pe.sh | 37 ++++++++++++++++++++++++++++ scripts/poc_workshop_base_staging.sh | 19 +++++++------- stage_workshop.sh | 4 +-- 4 files changed, 52 insertions(+), 14 deletions(-) diff --git a/scripts/citrix_bootcamp.sh b/scripts/citrix_bootcamp.sh index 903393d..09a7dd0 100755 --- a/scripts/citrix_bootcamp.sh +++ b/scripts/citrix_bootcamp.sh @@ -79,9 +79,9 @@ case ${1} in #export OBJECTS_NW_END="${IPV4_PREFIX}.21" export _prio_images_arr=(\ - Windows2016.qcow2 \ - Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ - ) + Windows2016.qcow2 \ + Citrix_Virtual_Apps_and_Desktops_7_1912.iso \ + ) 
export QCOW2_IMAGES=(\ Win10v1903.qcow2 \ diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 02a92fe..6eea869 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -539,6 +539,43 @@ function network_configure() { fi } +############################################################################################################################################################################### +# Routine to create the networks +############################################################################################################################################################################### + + +function pocguide_network_configure() { + local _network_name="${NW1_NAME}" + + if [[ ! -z "${NW2_NAME}" ]]; then + #TODO: accommodate for X networks! + _network_name="${NW2_NAME}" + fi + + if [[ ! -z $(acli "net.list" | grep ${_network_name}) ]]; then + log "IDEMPOTENCY: ${_network_name} network set, skip." + else + args_required 'AUTH_DOMAIN IPV4_PREFIX AUTH_HOST' + + if [[ ! -z $(acli "net.list" | grep 'Rx-Automation-Network') ]]; then + log "Remove Rx-Automation-Network..." + acli "-y net.delete Rx-Automation-Network" + fi + + log "Create primary network: Name: ${NW1_NAME}, VLAN: ${NW1_VLAN}, Subnet: ${NW1_SUBNET}, Domain: ${AUTH_DOMAIN}, Pool: ${NW1_DHCP_START} to ${NW1_DHCP_END}" + acli "net.create ${NW1_NAME} vlan=${NW1_VLAN} ip_config=${NW1_SUBNET}" + acli "net.update_dhcp_dns ${NW1_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + acli " net.add_dhcp_pool ${NW1_NAME} start=${NW1_DHCP_START} end=${NW1_DHCP_END}" + + if [[ ! 
-z "${NW2_NAME}" ]]; then + log "Create secondary network: Name: ${NW2_NAME}, VLAN: ${NW2_VLAN}, Subnet: ${NW2_SUBNET}, Pool: ${NW2_DHCP_START} to ${NW2_DHCP_END}" + acli "net.create ${NW2_NAME} vlan=${NW2_VLAN} ip_config=${NW2_SUBNET}" + #acli "net.update_dhcp_dns ${NW2_NAME} servers=${AUTH_HOST},${DNS_SERVERS} domains=${AUTH_FQDN}" + #acli " net.add_dhcp_pool ${NW2_NAME} start=${NW2_DHCP_START} end=${NW2_DHCP_END}" + fi + fi +} + ############################################################################################################################################################################### # Create the Secondary network based on the 3rd OCTET of the cluster for SNCs ############################################################################################################################################################################### diff --git a/scripts/poc_workshop_base_staging.sh b/scripts/poc_workshop_base_staging.sh index 7a9c4c7..e8d8512 100755 --- a/scripts/poc_workshop_base_staging.sh +++ b/scripts/poc_workshop_base_staging.sh @@ -19,6 +19,7 @@ case ${1} in . lib.pe.sh export AUTH_SERVER='AutoAD' + export NW2_NAME='Xray' args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable @@ -26,7 +27,7 @@ case ${1} in dependencies 'install' 'sshpass' && dependencies 'install' 'jq' \ && pe_license \ && pe_init \ - && network_configure \ + && pocguide_network_configure \ && authentication_source \ && pe_auth \ @@ -62,16 +63,16 @@ case ${1} in PC | pc ) . 
lib.pc.sh - #export BUCKETS_DNS_IP="${IPV4_PREFIX}.16" - #export BUCKETS_VIP="${IPV4_PREFIX}.17" - #export OBJECTS_NW_START="${IPV4_PREFIX}.18" - #export OBJECTS_NW_END="${IPV4_PREFIX}.21" + export _prio_images_arr=(\ + Windows2016.qcow2 \ + CentOS7.qcow2 \ + ) export QCOW2_IMAGES=(\ - + Windows2019.qcow2 \ ) export ISO_IMAGES=(\ - + Nutanix-VirtIO-1.1.5.iso \ ) run_once @@ -115,8 +116,8 @@ case ${1} in && pc_smtp ssp_auth \ - && calm_enable \ - && lcm \ + && priority_images \ + && images \ && prism_check 'PC' log "Non-blocking functions (in development) follow." diff --git a/stage_workshop.sh b/stage_workshop.sh index c35213d..94260f5 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -30,7 +30,7 @@ WORKSHOPS=(\ "Stage-All Bootcamps (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ "Cloud Native Application Modernization Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ "Cloud Native CI/CD with Calm and Karbon Bootcamp (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ -"POC Workshop Base Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ +"SE POC Guide (AHV) Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ "In Development Bootcamp Staging (AOS 5.15.x|5.16.x|5.17.x|5.18.x/AHV PC 2020.9) = Development" \ ) # Adjust function stage_clusters, below, for file/script mappings as needed @@ -157,7 +157,7 @@ function stage_clusters() { _pe_launch='cicd_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^POC Workshop Base Staging" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^SE POC Guide (AHV) Staging" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='poc_workshop_base_staging.sh' _pc_launch=${_pe_launch} From db5f355ef15afc4df26cd542c427cdcbb7ad147b Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 22 Oct 2020 15:39:43 -0700 Subject: [PATCH 660/691] updates for POC Workshop --- 
scripts/lib.pc.sh | 185 +++++++++++++++------------------------------- scripts/lib.pe.sh | 2 +- 2 files changed, 62 insertions(+), 125 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9aa76c0..5654e24 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -911,10 +911,70 @@ EOF fi } +######################################################################################################################################### +# Routine to Deploy VMs for POC Workshop +######################################################################################################################################### + +function deploy_pocworkshop_vms() { + local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " + + #set -x + + log "Starting SE POC Guide Base VM Deployments" + log "PE Cluster IP |${PE_HOST}|" + + ## Get Cluster UUID ## + log "Get Cluster UUID" + + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + + log "Cluster UUID |${_cluster_uuid}|" + + ## VM Name Vars ## + VMS=(\ + 1 \ + 2 \ + 3 \ + 4 \ + 5 \ + ) + + ## Creating the VMs ## + Log "Creating the Windows and Linux VMs for use in the SE POC Guide" + + for _vm in "${VMS[@]}" ; do + VMName="WinServer-${_vm}" + + log "Creating ${VMName} Now" + +HTTP_JSON_BODY=$(cat < Date: Thu, 22 Oct 2020 15:49:43 -0700 Subject: [PATCH 661/691] update for creating POC Workshop VMs --- scripts/lib.pc.sh | 120 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 118 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 5654e24..cfff916 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -942,6 +942,53 @@ function deploy_pocworkshop_vms() { ## Creating the VMs ## Log "Creating 
the Windows and Linux VMs for use in the SE POC Guide" + ## Creating the First WinServer VM ## + + VMName="WinServer" + + Log "Creating ${VMName}" + +HTTP_JSON_BODY=$(cat < Date: Thu, 22 Oct 2020 22:39:30 -0700 Subject: [PATCH 662/691] updates for deploying POC Workshop Base images --- scripts/global.vars.sh | 1 + scripts/lib.pc.sh | 170 ++++++++++++++++++++++++--- scripts/poc_workshop_base_staging.sh | 1 + 3 files changed, 157 insertions(+), 15 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index f569612..4feeed9 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -18,6 +18,7 @@ ATTEMPTS=40 SLEEP=60 PrismOpsServer='PrismOpsLabUtilityServer' SeedPC='seedPC.zip' +BasicUnattend='basic_unattend.xml' CALM_RSA_KEY_FILE='calm_rsa_key.env' Citrix_Blueprint='CitrixBootcampInfra.json' diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index cfff916..9572a63 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -924,12 +924,37 @@ function deploy_pocworkshop_vms() { log "PE Cluster IP |${PE_HOST}|" ## Get Cluster UUID ## + log "-------------------------------------" log "Get Cluster UUID" _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" + ## Get Primary Network UUID ## + log "-------------------------------------" + log "Get cluster network UUID" + + _nw_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/subnets/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"subnet","filter": "name==Primary"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + log "NW UUID = ${_nw_uuid}" + + ## Get Windows Image UUID ## + log "-------------------------------------" + log "Get Windows Image UUID" + + _windows2016_uuid=$(curl ${CURL_HTTP_OPTS} -X 
POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==Windows2016.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + log "Windows Image UUID |${_windows2016_uuid}|" + + ## Get CentOS7 Image UUID ## + log "-------------------------------------" + log "Get CentOS7 Image UUID" + + _centos7_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==CentOS7.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + + log "CentOS7 Image UUID |${_centos7_uuid}|" + ## VM Name Vars ## VMS=(\ 1 \ @@ -940,6 +965,7 @@ function deploy_pocworkshop_vms() { ) ## Creating the VMs ## + log "-------------------------------------" Log "Creating the Windows and Linux VMs for use in the SE POC Guide" ## Creating the First WinServer VM ## @@ -950,9 +976,9 @@ function deploy_pocworkshop_vms() { HTTP_JSON_BODY=$(cat < Date: Thu, 22 Oct 2020 22:50:57 -0700 Subject: [PATCH 663/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 9572a63..6bb95da 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -995,7 +995,7 @@ HTTP_JSON_BODY=$(cat < Date: Fri, 23 Oct 2020 10:41:10 -0700 Subject: [PATCH 664/691] Update lib.pc.sh --- scripts/lib.pc.sh | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6bb95da..01c9518 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -671,10 +671,7 @@ function ssp_auth() { local _ssp_connect log "Find ${AUTH_SERVER} uuid" - _ldap_uuid=$(PATH=${PATH}:${HOME}; curl ${CURL_POST_OPTS} \ - --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{ "kind": "directory_service" }' \ - https://localhost:9440/api/nutanix/v3/directory_services/list \ - | jq -r .entities[0].metadata.uuid) + 
_ldap_uuid=$(PATH=${PATH}:${HOME}; curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{ "kind": "directory_service" }' https://localhost:9440/api/nutanix/v3/directory_services/list | jq -r .entities[0].metadata.uuid) log "_ldap_uuid=|${_ldap_uuid}|" # TODO:110 get directory service name _ldap_name @@ -793,9 +790,7 @@ EOF } EOF ) - _ssp_connect=$(curl ${CURL_POST_OPTS} \ - --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data "${_http_body}" \ - https://localhost:9440/api/nutanix/v3/directory_services/${_ldap_uuid}) + _ssp_connect=$(curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} -X PUT --data "${_http_body}" https://localhost:9440/api/nutanix/v3/directory_services/${_ldap_uuid}) log "_ssp_connect=|${_ssp_connect}|" } @@ -1225,7 +1220,7 @@ EOF - +#set +x } From 6145f79a5f23514b74a3d49290db78abe7727bcd Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 26 Oct 2020 16:00:31 -0700 Subject: [PATCH 665/691] upodates for POC Staging --- scripts/lib.pc.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 01c9518..b009a34 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -917,6 +917,8 @@ function deploy_pocworkshop_vms() { log "Starting SE POC Guide Base VM Deployments" log "PE Cluster IP |${PE_HOST}|" + log "PE Cluster IP |${PC_HOST}|" + log "Cluster Name |${CLUSTER_NAME}|" ## Get Cluster UUID ## log "-------------------------------------" @@ -938,7 +940,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get Windows Image UUID" - _windows2016_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==Windows2016.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _windows2016_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/images/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data 
'{"kind":"image","filter":"name==Windows2016.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") log "Windows Image UUID |${_windows2016_uuid}|" @@ -946,7 +948,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get CentOS7 Image UUID" - _centos7_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==CentOS7.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _centos7_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/images/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==CentOS7.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") log "CentOS7 Image UUID |${_centos7_uuid}|" From 215842d237175e04334b336f18a7bab1104c9aed Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 26 Oct 2020 16:35:13 -0700 Subject: [PATCH 666/691] Update all_bootcamp.sh --- scripts/all_bootcamp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/all_bootcamp.sh b/scripts/all_bootcamp.sh index 0490e53..7808960 100755 --- a/scripts/all_bootcamp.sh +++ b/scripts/all_bootcamp.sh @@ -99,7 +99,7 @@ case ${1} in Nutanix-VirtIO-1.1.5.iso \ Windows2016.iso \ FrameCCA-3.0.0.iso \ - FrameGuestAgentInstaller_1.0.2.7.iso \ + FrameGuestAgentInstaller_1.0.2.8.iso \ Nutanix-VirtIO-1.1.5.iso \ veeam/VBR_10.0.0.4442.iso \ ) From 0ac483808577f3127316b4ec81552f60cfca5360 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 26 Oct 2020 19:04:33 -0700 Subject: [PATCH 667/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b009a34..6339b33 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -924,7 +924,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get Cluster UUID" - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} 
-X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" From 8caad636c21a7ee37c1e6188330cd7693b55db82 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 27 Oct 2020 13:33:40 -0700 Subject: [PATCH 668/691] Update lib.pc.sh --- scripts/lib.pc.sh | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 6339b33..e91a335 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -518,11 +518,15 @@ function pc_cluster_img_import() { local _uuid local CURL_HTTP_OPTS=" --max-time 25 --silent --header Content-Type:application/json --header Accept:application/json --insecure " - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER_NAME "$CLUSTER_NAME" '.entities[]|select (.status.name==$CLUSTER_NAME)| .metadata.uuid' | tr -d \") + log "Cluster Name |${CLUSTER_NAME}|" - #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} --request POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + ## Get Cluster UUID ## + log "-------------------------------------" + log "Get Cluster UUID" - log "Cluster UUID is ${_cluster_uuid}" + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select 
(.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") + + log "Cluster UUID |${_cluster_uuid}|" _http_body=$(cat < Date: Tue, 27 Oct 2020 21:44:30 -0700 Subject: [PATCH 669/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e91a335..bd136e7 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -996,7 +996,7 @@ HTTP_JSON_BODY=$(cat < Date: Wed, 28 Oct 2020 09:24:33 -0700 Subject: [PATCH 670/691] fixes --- scripts/lib.pc.sh | 6 +++--- stage_workshop.sh | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 01c9518..435fb3c 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -671,7 +671,7 @@ function ssp_auth() { local _ssp_connect log "Find ${AUTH_SERVER} uuid" - _ldap_uuid=$(PATH=${PATH}:${HOME}; curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{ "kind": "directory_service" }' https://localhost:9440/api/nutanix/v3/directory_services/list | jq -r .entities[0].metadata.uuid) + _ldap_uuid=$(PATH=${PATH}:${HOME}; curl ${CURL_POST_OPTS} --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{ "kind": "directory_service" }' 'https://localhost:9440/api/nutanix/v3/directory_services/list' | jq -r .entities[0].metadata.uuid) log "_ldap_uuid=|${_ldap_uuid}|" # TODO:110 get directory service name _ldap_name @@ -938,7 +938,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get Windows Image UUID" - _windows2016_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==Windows2016.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _windows2016_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/images/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==Windows2016.qcow2"}' | jq -r 
'.entities[] | .metadata.uuid' | tr -d \") log "Windows Image UUID |${_windows2016_uuid}|" @@ -946,7 +946,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get CentOS7 Image UUID" - _centos7_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==CentOS7.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") + _centos7_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/images/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{"kind":"image","filter":"name==CentOS7.qcow2"}' | jq -r '.entities[] | .metadata.uuid' | tr -d \") log "CentOS7 Image UUID |${_centos7_uuid}|" diff --git a/stage_workshop.sh b/stage_workshop.sh index 94260f5..af564bd 100755 --- a/stage_workshop.sh +++ b/stage_workshop.sh @@ -122,7 +122,7 @@ function stage_clusters() { _pe_launch='calm_bootcamp.sh' _pc_launch=${_pe_launch} fi - if (( $(echo ${_workshop} | grep -i "^Calm IaaS Bootcamp" | wc ${WC_ARG}) > 0 )); then + if (( $(echo ${_workshop} | grep -i "^Calm IaaS" | wc ${WC_ARG}) > 0 )); then _libraries+='lib.pe.sh lib.pc.sh' _pe_launch='calm_iaas_bootcamp.sh' _pc_launch=${_pe_launch} From 926b324f2bf874f0064288917a2d329efd1e7db3 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 28 Oct 2020 12:40:41 -0700 Subject: [PATCH 671/691] updates for clustername fetch --- scripts/lib.common.sh | 1 + scripts/lib.pc.sh | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index ad4b536..6d0d890 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -908,6 +908,7 @@ function pe_determine() { PE | pe ) CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name) PE_HOST=$(echo ${_hold} | jq -r .data.clusterExternalIPAddress) + PE_CLUSTER_NAME=$(echo ${_hold} | jq -r .data.name) ;; PC | Pc | pc ) CLUSTER_NAME=$(echo ${_hold} | jq -r .name) diff --git 
a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 19685c7..b1de407 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -922,13 +922,13 @@ function deploy_pocworkshop_vms() { log "Starting SE POC Guide Base VM Deployments" log "PE Cluster IP |${PE_HOST}|" log "PE Cluster IP |${PC_HOST}|" - log "Cluster Name |${CLUSTER_NAME}|" + log "Cluster Name |${PE_CLUSTER_NAME}|" ## Get Cluster UUID ## log "-------------------------------------" log "Get Cluster UUID" - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${PE_CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" From 82c438c1fd502d9951201025a5142f53f0bdf5ea Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 28 Oct 2020 13:11:13 -0700 Subject: [PATCH 672/691] updated for new MSSQL Source IMages --- scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 4feeed9..a0fa4ba 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -61,8 +61,8 @@ ERA_NETWORK="Secondary" ERA_Container_RF="2" MSSQL_SourceVM="MSSQLSourceVM" -MSSQL_SourceVM_Image1="MSSQL_1" -MSSQL_SourceVM_Image2="MSSQL_2" +MSSQL_SourceVM_Image1="MSSQL_monitoring_1_Oct19_2020" +MSSQL_SourceVM_Image2="MSSQL_monitoring_2_Oct19_2020" Oracle_12c_SourceVM="Oracle12cSource" Oracle_12c_SourceVM_BootImage="12c_bootdisk" From 60680f562e3180e76435150bcc0b7c1515580e4a Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 28 Oct 2020 14:49:40 -0700 Subject: [PATCH 673/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 
1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index b1de407..19685c7 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -922,13 +922,13 @@ function deploy_pocworkshop_vms() { log "Starting SE POC Guide Base VM Deployments" log "PE Cluster IP |${PE_HOST}|" log "PE Cluster IP |${PC_HOST}|" - log "Cluster Name |${PE_CLUSTER_NAME}|" + log "Cluster Name |${CLUSTER_NAME}|" ## Get Cluster UUID ## log "-------------------------------------" log "Get Cluster UUID" - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${PE_CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" From 9c324d4e9e413cfc6a1f0844cf19dee5a31e9ad4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 29 Oct 2020 08:30:36 -0700 Subject: [PATCH 674/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 19685c7..1ed23ae 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -928,7 +928,8 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get Cluster UUID" - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") + #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' 
| jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[]|select (.status.name!=Unnamed)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" From 3a1b23c641b6ea41ba33bf538591b29cb531ccb4 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 29 Oct 2020 10:26:52 -0700 Subject: [PATCH 675/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 1ed23ae..50cccd4 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -928,8 +928,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get Cluster UUID" - #_cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "${CLUSTER_NAME}" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq -r '.entities[]|select (.status.name!=Unnamed)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "Unnamed" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" From 4b0820ad671b23fa58a064c426f0268911072e65 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 29 Oct 2020 12:28:55 -0700 Subject: [PATCH 676/691] Update lib.pc.sh --- scripts/lib.pc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 50cccd4..fbf183a 100755 --- 
a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -928,7 +928,7 @@ function deploy_pocworkshop_vms() { log "-------------------------------------" log "Get Cluster UUID" - _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "Unnamed" '.entities[]|select (.status.name==$CLUSTER)| .metadata.uuid' | tr -d \") + _cluster_uuid=$(curl ${CURL_HTTP_OPTS} -X POST 'https://localhost:9440/api/nutanix/v3/clusters/list' --user ${PRISM_ADMIN}:${PE_PASSWORD} --data '{}' | jq --arg CLUSTER "Unnamed" '.entities[]|select (.status.name!=$CLUSTER)| .metadata.uuid' | tr -d \") log "Cluster UUID |${_cluster_uuid}|" From f9709e9cbb455174dc7371cfe2db02dcaeddac8f Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 30 Oct 2020 08:46:35 -0700 Subject: [PATCH 677/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index fbf183a..39b7156 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -996,7 +996,7 @@ HTTP_JSON_BODY=$(cat < Date: Fri, 30 Oct 2020 15:07:08 -0700 Subject: [PATCH 678/691] Update lib.pc.sh --- scripts/lib.pc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 39b7156..7a3a0d6 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -996,7 +996,7 @@ HTTP_JSON_BODY=$(cat < Date: Mon, 2 Nov 2020 13:02:15 -0800 Subject: [PATCH 679/691] Update lib.common.sh --- scripts/lib.common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index 6d0d890..f67b47c 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -855,7 +855,7 @@ function ntnx_download() { if (( $? == 0 )) ; then log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..." 
- rm -f ${_source_url##*/} ${_meta_url##*/} + rm -fr ${_source_url##*/} ${_meta_url##*/} else _error=3 log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}." From 1b69d72baba605b8ac082d608884199443da3e48 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 2 Nov 2020 14:25:12 -0800 Subject: [PATCH 680/691] Update lib.common.sh --- scripts/lib.common.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/lib.common.sh b/scripts/lib.common.sh index f67b47c..6fe35d7 100755 --- a/scripts/lib.common.sh +++ b/scripts/lib.common.sh @@ -853,14 +853,14 @@ function ntnx_download() { file-path="$(pwd)/${_source_url##*/}" \ meta-file-path="$(pwd)/${_meta_url##*/}" - if (( $? == 0 )) ; then - log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..." - rm -fr ${_source_url##*/} ${_meta_url##*/} - else - _error=3 - log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}." - exit ${_error} - fi + #if (( $? == 0 )) ; then + # log "Success! Delete ${_ncli_softwaretype} sources to free CVM space..." + # rm -fr ${_source_url##*/} ${_meta_url##*/} + #else + # _error=3 + # log "Error ${_error}: failed ncli upload of ${_ncli_softwaretype}." 
+ # exit ${_error} + #fi } ################################################################################## From 202264bb7718d519e52cc619fb3129ce628fef94 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Nov 2020 08:45:56 -0800 Subject: [PATCH 681/691] Updates for Files & File Analytic versions --- scripts/global.vars.sh | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index a0fa4ba..872e726 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -5,8 +5,8 @@ RELEASE='release.json' PC_DEV_VERSION='pc.2020.9' PC_CURRENT_VERSION='pc.2020.9' PC_STABLE_VERSION='5.17.0.3' -FILES_VERSION='3.7.0' -FILE_ANALYTICS_VERSION='2.1.1.1' +FILES_VERSION='3.7.1' +FILE_ANALYTICS_VERSION='2.2.0' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub" @@ -303,10 +303,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.7.0.json' - FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' - FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' + FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.7.1.json' + FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' + 
FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' JQ_REPOS=(\ 'http://10.55.251.38/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -342,10 +342,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.1.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -381,10 +381,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.0.json' - FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' - 
FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' - FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' + FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.1.json' + FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' + FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' JQ_REPOS=(\ 'http://10.42.194.11/workshop_staging/jq-linux64.dms' \ 'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ @@ -425,10 +425,10 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.7.0.json' - FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.0-stable.qcow2' - FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1-metadata.json' - FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.1.1.1.qcow2' + FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.7.1.json' + FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' + FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' + FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' JQ_REPOS=(\ 'http://10.136.239.13/workshop_staging/jq-linux64.dms' \ 
'https://s3.amazonaws.com/get-ahv-images/jq-linux64.dms' \ From bff7c2aa11057e1208d711544497974785626504 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Nov 2020 10:03:30 -0800 Subject: [PATCH 682/691] Update global.vars.sh --- scripts/global.vars.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 872e726..1614c73 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -303,7 +303,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.55.251.38/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.55.251.38/workshop_staging/afs-3.7.1.json' + FILES_METAURL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json' FILES_URL='http://10.55.251.38/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' FILE_ANALYTICS_URL='http://10.55.251.38/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' @@ -342,7 +342,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.1.json' + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' 
FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' @@ -381,7 +381,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.42.194.11/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.42.194.11/workshop_staging/afs-3.7.1.json' + FILES_METAURL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json' FILES_URL='http://10.42.194.11/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' FILE_ANALYTICS_URL='http://10.42.194.11/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' @@ -425,7 +425,7 @@ case "${OCTET[0]}.${OCTET[1]}" in PC_CURRENT_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' PC_STABLE_URL='http://10.136.239.13/workshop_staging/euphrates-5.17.0.3-stable-prism_central.tar' - FILES_METAURL='http://10.136.239.13/workshop_staging/afs-3.7.1.json' + FILES_METAURL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable-metadata.json' FILES_URL='http://10.136.239.13/workshop_staging/nutanix-afs-el7.3-release-afs-3.7.1-stable.qcow2' FILE_ANALYTICS_METAURL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0-metadata.json' FILE_ANALYTICS_URL='http://10.136.239.13/workshop_staging/nutanix-file_analytics-el7.7-release-2.2.0.qcow2' From 88e5012a4645fe8ce20cb7c108f3b3e0ec80e57d Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Tue, 3 Nov 2020 16:35:46 -0800 Subject: [PATCH 683/691] Update lib.pe.sh --- scripts/lib.pe.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/lib.pe.sh 
b/scripts/lib.pe.sh index fcb5954..717d5d2 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -689,6 +689,7 @@ function cluster_check() { # Let's sleep a few seconds before moving on sleep 5 + # Checking if the registration has worked _pc_ip_addr=$(curl $CURL_HTTP_OPTS --user ${PRISM_ADMIN}:${PE_PASSWORD} https://localhost:9440/PrismGateway/services/rest/v1/multicluster/cluster_external_state | jq '.[].clusterDetails.ipAddresses[0]' | tr -d \") while [[ -z $_pc_ip_addr ]] do From 24a4c99d917359d36c1bba10b9c4819269fc14a1 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Mon, 7 Dec 2020 13:04:01 -0800 Subject: [PATCH 684/691] Updated MSSQL Source QCOWS --- scripts/global.vars.sh | 4 ++-- scripts/lib.pc.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1614c73..a95ae99 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -61,8 +61,8 @@ ERA_NETWORK="Secondary" ERA_Container_RF="2" MSSQL_SourceVM="MSSQLSourceVM" -MSSQL_SourceVM_Image1="MSSQL_monitoring_1_Oct19_2020" -MSSQL_SourceVM_Image2="MSSQL_monitoring_2_Oct19_2020" +MSSQL_SourceVM_Image1="MSSQL_1_11162020" +MSSQL_SourceVM_Image2="MSSQL_2_11162020" Oracle_12c_SourceVM="Oracle12cSource" Oracle_12c_SourceVM_BootImage="12c_bootdisk" diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index 7a3a0d6..e42a212 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1366,7 +1366,7 @@ EOF log "Created Primary-MSSQL-NETWORK Network Profile with ID |${_primary_network_profile_id}|" -## Create the Primary_ORACLE_NETWORKNetwork Profile inside Era ## +## Create the Primary_ORACLE_NETWORK Network Profile inside Era ## log "Create the Primary_PGSQL_NETWORK Network Profile" HTTP_JSON_BODY=$(cat < Date: Wed, 6 Jan 2021 12:47:25 -0800 Subject: [PATCH 685/691] updates for Era --- scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 
a95ae99..05948ab 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -61,8 +61,8 @@ ERA_NETWORK="Secondary" ERA_Container_RF="2" MSSQL_SourceVM="MSSQLSourceVM" -MSSQL_SourceVM_Image1="MSSQL_1_11162020" -MSSQL_SourceVM_Image2="MSSQL_2_11162020" +MSSQL_SourceVM_Image1="MSSQL_1_12102020" +MSSQL_SourceVM_Image2="MSSQL_2_12102020" Oracle_12c_SourceVM="Oracle12cSource" Oracle_12c_SourceVM_BootImage="12c_bootdisk" From ba23596652135c4644ba918c801fd62dbcb80ad8 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Wed, 6 Jan 2021 15:31:03 -0800 Subject: [PATCH 686/691] updates for current software versions --- scripts/global.vars.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 05948ab..1874b9f 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -2,10 +2,10 @@ # shellcheck disable=SC2034 RELEASE='release.json' -PC_DEV_VERSION='pc.2020.9' +PC_DEV_VERSION='pc.2020.11' PC_CURRENT_VERSION='pc.2020.9' PC_STABLE_VERSION='5.17.0.3' -FILES_VERSION='3.7.1' +FILES_VERSION='3.7.2.1' FILE_ANALYTICS_VERSION='2.2.0' NTNX_INIT_PASSWORD='nutanix/4u' PRISM_ADMIN='admin' @@ -17,7 +17,7 @@ STORAGE_ERA='Era' ATTEMPTS=40 SLEEP=60 PrismOpsServer='PrismOpsLabUtilityServer' -SeedPC='seedPC.zip' +SeedPC='seedPC202011.zip' BasicUnattend='basic_unattend.xml' CALM_RSA_KEY_FILE='calm_rsa_key.env' @@ -297,8 +297,8 @@ AUTH_ADMIN_GROUP='SSP Admins' case "${OCTET[0]}.${OCTET[1]}" in 10.55 ) # HPOC us-east = DUR - PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.9.json' - PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' + PC_DEV_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.11.json' + PC_DEV_URL='http://10.55.251.38/workshop_staging/pc.2020.11.tar' PC_CURRENT_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-pc.2020.9.json' PC_CURRENT_URL='http://10.55.251.38/workshop_staging/pc.2020.9.tar' 
PC_STABLE_METAURL='http://10.55.251.38/workshop_staging/pcdeploy-5.17.0.3.json' @@ -336,8 +336,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.55.251.38/workshop_staging/objects' ;; 10.42 ) # HPOC us-west = PHX - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.11.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.11.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' @@ -375,8 +375,8 @@ case "${OCTET[0]}.${OCTET[1]}" in OBJECTS_OFFLINE_REPO='http://10.42.194.11/workshop_staging/objects' ;; 10.38 ) # HPOC us-west = PHX 1-Node Clusters - PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' - PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' + PC_DEV_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.11.json' + PC_DEV_URL='http://10.42.194.11/workshop_staging/pc.2020.11.tar' PC_CURRENT_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-pc.2020.9.json' PC_CURRENT_URL='http://10.42.194.11/workshop_staging/pc.2020.9.tar' PC_STABLE_METAURL='http://10.42.194.11/workshop_staging/pcdeploy-5.17.0.3.json' @@ -419,8 +419,8 @@ case "${OCTET[0]}.${OCTET[1]}" in fi ;; 10.136 ) # HPOC us-west = BLR - PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.9.json' - PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' + PC_DEV_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.11.json' + PC_DEV_URL='http://10.136.239.13/workshop_staging/pc.2020.11.tar' PC_CURRENT_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-pc.2020.9.json' PC_CURRENT_URL='http://10.136.239.13/workshop_staging/pc.2020.9.tar' 
PC_STABLE_METAURL='http://10.136.239.13/workshop_staging/pcdeploy-5.17.0.3.json' From acf491e391f3a2639dc80a0b6efcdbe7adb7ad41 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Fri, 8 Jan 2021 13:25:28 -0800 Subject: [PATCH 687/691] Update lib.pc.sh --- scripts/lib.pc.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/lib.pc.sh b/scripts/lib.pc.sh index e42a212..31b02a3 100755 --- a/scripts/lib.pc.sh +++ b/scripts/lib.pc.sh @@ -1297,7 +1297,8 @@ echo $ClusterJSON > cluster.json ## Add the Secondary Network inside Era ## log "Create ${NW2_NAME} DHCP/IPAM Network" - _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/resources/networks" --data '{"name": "'${NW2_NAME}'","type": "DHCP"}' | jq -r '.id' | tr -d \") + _dhcp_network_id=$(curl ${CURL_HTTP_OPTS} -u ${ERA_USER}:${ERA_PASSWORD} -X POST "https://${ERA_HOST}/era/v0.9/resources/networks" --data '{"name": "'${NW2_NAME}'","type": "DHCP", + "clusterId":"'${_era_cluster_id}'"}' | jq -r '.id' | tr -d \") log "Created ${NW2_NAME} Network with Network ID |${_dhcp_network_id}|" From 8b9975b04d4db157b1ad52079dc0696f1f104003 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 14 Jan 2021 13:09:39 -0800 Subject: [PATCH 688/691] Update global.vars.sh --- scripts/global.vars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 1874b9f..6789d32 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -51,7 +51,7 @@ USERS=(\ ################################################### ERA_Blueprint='EraServerDeployment.json' -ERAServerImage='ERA-Server-build-2.0.0.qcow2' +ERAServerImage='ERA-Server-build-2.1.0.qcow2' ERAServerName='EraServer' ERA_USER="admin" #ERA_PASSWORD="nutanix/4u" From 99c9902c71df73dc18642e66120a1ce604289169 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 14 Jan 2021 13:10:34 -0800 Subject: [PATCH 689/691] Update global.vars.sh --- 
scripts/global.vars.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 6789d32..14891fc 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -61,8 +61,8 @@ ERA_NETWORK="Secondary" ERA_Container_RF="2" MSSQL_SourceVM="MSSQLSourceVM" -MSSQL_SourceVM_Image1="MSSQL_1_12102020" -MSSQL_SourceVM_Image2="MSSQL_2_12102020" +MSSQL_SourceVM_Image1="MSSQL_1_01132021" +MSSQL_SourceVM_Image2="MSSQL_2_01132021" Oracle_12c_SourceVM="Oracle12cSource" Oracle_12c_SourceVM_BootImage="12c_bootdisk" From 73000195532807f9a84f8834e56d0a4bfdd8c3f9 Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 14 Jan 2021 14:25:33 -0800 Subject: [PATCH 690/691] Update lib.pe.sh --- scripts/lib.pe.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/lib.pe.sh b/scripts/lib.pe.sh index 717d5d2..755bb16 100755 --- a/scripts/lib.pe.sh +++ b/scripts/lib.pe.sh @@ -1116,7 +1116,7 @@ function deploy_mssql() { SourceVM="${_user}_${MSSQL_SourceVM}" echo "## ${SourceVM} Creation_INPROGRESS ##" - acli "vm.create ${SourceVM} memory=2046M num_cores_per_vcpu=1 num_vcpus=2" + acli "vm.create ${SourceVM} memory=2048M num_cores_per_vcpu=1 num_vcpus=2" acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image1}" acli "vm.disk_create ${SourceVM} clone_from_image=${MSSQL_SourceVM_Image2}" acli "vm.nic_create ${SourceVM} network=${NW1_NAME}" From e8b5aa4602961b316eb4d994dda60695bf0468ed Mon Sep 17 00:00:00 2001 From: Nathan Cox Date: Thu, 21 Jan 2021 08:28:24 -0800 Subject: [PATCH 691/691] updates for GTS21 --- scripts/era_bootcamp.sh | 2 +- scripts/era_mssql_bootcamp.sh | 2 +- scripts/era_oracle_bootcamp.sh | 2 +- scripts/era_postgres_bootcamp.sh | 2 +- scripts/global.vars.sh | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/era_bootcamp.sh b/scripts/era_bootcamp.sh index 8cc948d..51a7819 100755 --- a/scripts/era_bootcamp.sh +++ b/scripts/era_bootcamp.sh @@ -22,7 
+22,7 @@ case ${1} in # Networking needs for Era Bootcamp #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW2_DHCP_END="${IPV4_PREFIX}.209" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_mssql_bootcamp.sh b/scripts/era_mssql_bootcamp.sh index 6d1e16a..c68b117 100755 --- a/scripts/era_mssql_bootcamp.sh +++ b/scripts/era_mssql_bootcamp.sh @@ -22,7 +22,7 @@ case ${1} in # Networking needs for Era Bootcamp #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW2_DHCP_END="${IPV4_PREFIX}.209" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_oracle_bootcamp.sh b/scripts/era_oracle_bootcamp.sh index da2b762..68db5eb 100755 --- a/scripts/era_oracle_bootcamp.sh +++ b/scripts/era_oracle_bootcamp.sh @@ -22,7 +22,7 @@ case ${1} in # Networking needs for Era Bootcamp #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW2_DHCP_END="${IPV4_PREFIX}.209" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/era_postgres_bootcamp.sh b/scripts/era_postgres_bootcamp.sh index ac0dadd..5b28cdf 100755 --- a/scripts/era_postgres_bootcamp.sh +++ b/scripts/era_postgres_bootcamp.sh @@ -22,7 +22,7 @@ case ${1} in # Networking needs for Era Bootcamp #export NW2_NAME='EraManaged' export NW2_DHCP_START="${IPV4_PREFIX}.132" - export NW2_DHCP_END="${IPV4_PREFIX}.219" + export NW2_DHCP_END="${IPV4_PREFIX}.209" args_required 'PE_HOST PC_LAUNCH' ssh_pubkey & # non-blocking, parallel suitable diff --git a/scripts/global.vars.sh b/scripts/global.vars.sh index 14891fc..bd84ff1 100755 --- a/scripts/global.vars.sh +++ b/scripts/global.vars.sh @@ -267,7 +267,7 @@ case "${OCTET[3]}" in NW3_NAME='EraManaged' 
NW3_NETMASK='255.255.255.128' - NW3_START="${IPV4_PREFIX}.220" + NW3_START="${IPV4_PREFIX}.210" NW3_END="${IPV4_PREFIX}.253" ;;