Rework deployment script to unpack and support discord webhook
- need to fetch the script
- Make script more tolerant of errors.
- Fix extraction of build_type for GitHub layout.
- Need to ignore pre-existing folders: we merge multiple artifacts into a common tree, and although the folders clash, the files will not.
- Revert the artifact collection to per-platform.
- Simplify the 64/32 test to remove the ReleaseType dependency.
parent 650404903d
commit 3a79977bcf
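The build_type fix below hinges on the artifact naming scheme used by the upload steps (FS_RELEASE_TYPE-os-addrsize-grid-artifacts.zip). A minimal sketch of the extraction, with a hypothetical artifact name for illustration:

import os

def classify_artifact(path):
    # The artifact name encodes build type and platform as its first two
    # '-'-separated fields, e.g. "Beta-windows-2022-64-SL-artifacts.zip"
    # (hypothetical example name following the workflow's pattern).
    filename = os.path.basename(path)
    build_type = filename.split("-")[0]        # "Beta", "Release" or "Nightly"
    platform = filename.split("-")[1].lower()  # "windows", "ubuntu" or "macos"
    return build_type, platform

print(classify_artifact("/tmp/Beta-windows-2022-64-SL-artifacts.zip"))
# ('Beta', 'windows')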
@@ -8,7 +8,7 @@ on:
     - cron: '00 03 * * *' # Run every day at 3am UTC
 env:
   AUTOBUILD_VARIABLES_FILE: ${{github.workspace}}/build-variables/variables
-  EXTRA_ARGS: -DFMODSTUDIO=ON -DUSE_KDU=ON --crashreporting
+  EXTRA_ARGS: -DUSE_FMODSTUDIO=ON -DUSE_KDU=ON --crashreporting
   build_secrets_checkout: ${{github.workspace}}/signing
@@ -84,6 +84,8 @@ jobs:
           fi
+          if [[ "${{ matrix.addrsize }}" == "64" ]]; then
+            FS_RELEASE_CHAN="${FS_RELEASE_TYPE}x64"
+          else
+            FS_RELEASE_CHAN=${FS_RELEASE_TYPE}
+          fi
           echo "FS_RELEASE_TYPE=${FS_RELEASE_TYPE}" >> $GITHUB_ENV
           echo "FS_RELEASE_CHAN=${FS_RELEASE_CHAN}" >> $GITHUB_ENV
@@ -198,27 +200,65 @@ jobs:
         run: rm *${{ env.fallback_platform }}*bz2
         shell: bash
       - name: Configure
-        run: autobuild configure --debug -c ReleaseFS -A${{matrix.addrsize}} -- --package --chan ${{env.FS_RELEASE_CHAN}} ${{env.EXTRA_ARGS}} ${{env.FS_GRID}}
+        run: autobuild configure -c ReleaseFS -A${{matrix.addrsize}} -- --package --chan ${{env.FS_RELEASE_CHAN}} ${{env.EXTRA_ARGS}} ${{env.FS_GRID}}
         shell: bash

       - name: build
-        run: autobuild build --debug -c ReleaseFS -A${{matrix.addrsize}} --no-configure
+        run: autobuild build -c ReleaseFS -A${{matrix.addrsize}} --no-configure
         shell: bash

       - name: Publish artifacts
+        if: runner.os == 'Windows'
         uses: actions/upload-artifact@v3
         with:
           name: ${{ env.FS_RELEASE_TYPE }}-${{ matrix.os }}-${{ matrix.addrsize }}-${{ matrix.grid }}-artifacts.zip
           path: |
-            build-*/newview/Release/*Setup.exe
-            build-*/newview/Release/*.xz
-            build-linux-*/newview/*.bz2
-            build-darwin-*/newview/*.dmg
+            build-*/newview/Release/*Setup.exe
+            build-*/newview/Release/*.xz

+      - name: publish Linux artifacts
+        if: runner.os == 'Linux'
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ env.FS_RELEASE_TYPE }}-${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
+          path: |
+            build-linux-*/newview/*.xz
+            build-linux-*/newview/*.bz2

+      - name: publish MacOS artifacts
+        if: runner.os == 'macOS'
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ env.FS_RELEASE_TYPE }}-${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
+          path: |
+            build-darwin-*/newview/*.dmg
+            build-darwin-*/newview/*.bz2

   deploy:
     runs-on: ubuntu-latest
     needs: build_matrix
     if: always()
     steps:
       - name: Checkout files
         uses: Bhacaz/checkout-files@v2
         with:
           files: fsutils/download_list.py
           branch: ${{ github.head_ref || github.ref_name || 'master' }}
+      - name: Install discord-webhook library
+        run: pip install discord-webhook

+      - name: find channel from Branch name
+        run: |
+          if [[ "${{ github.ref_name }}" == *Release* ]]; then
+            FS_RELEASE_FOLDER=release
+          else
+            if [[ "${{github.event_name}}" == 'schedule' ]]; then
+              FS_RELEASE_FOLDER=nightly
+            else
+              FS_RELEASE_FOLDER=preview
+            fi
+          fi
+          echo "FS_RELEASE_FOLDER=${FS_RELEASE_FOLDER}" >> $GITHUB_ENV

       - name: Download artifacts
         uses: actions/download-artifact@v3
         id: download
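The "find channel from Branch name" step above maps branch and trigger to a deploy folder. A hypothetical Python restatement (the branch names in the asserts are illustrative, not taken from the repository):

def release_folder(ref_name: str, event_name: str) -> str:
    # Release branches deploy to "release"; scheduled runs are nightlies;
    # everything else is treated as a preview build.
    if "Release" in ref_name:
        return "release"
    return "nightly" if event_name == "schedule" else "preview"

assert release_folder("Firestorm_Release", "push") == "release"
assert release_folder("master", "schedule") == "nightly"
assert release_folder("feature/webhook", "push") == "preview"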
@@ -227,11 +267,15 @@ jobs:
       - name: List artifacts download
         run: ls -R
         working-directory: ${{steps.download.outputs.download-path}}

+      - name: Reorganise artifacts ready for server upload.
+        run: python ./fsutils/download_list.py -u ${{steps.download.outputs.download-path}} -w ${{ secrets.RELEASE_WEBHOOK_URL }}

       - name: Setup rclone and download the folder
         uses: beqjanus/setup-rclone@main
         with:
           rclone_config: ${{ secrets.RCLONE_CONFIG }}

       - name: Copy files to remote host
-        run: rclone copy ${{steps.download.outputs.download-path}} fs_deploy:test/
+        run: rclone copy ${{steps.download.outputs.download-path}}/${{ env.FS_RELEASE_FOLDER }} fs_deploy:${{ env.FS_RELEASE_FOLDER }}
@@ -6,14 +6,9 @@ import time
 import zipfile
 import glob
 import shutil
+from discord_webhook import DiscordWebhook
 
-# iterate over the files in a directory and pass them to a command line subshell
-def get_files(path):
-    files = []
-    for root, dirs, files in os.walk(path):
-        # print(f"Found : {files}")
-        return files
-    return None
 
 # run a command line subshell and return the output
@@ -65,6 +60,14 @@
 # MD5: 9D5D8021F376194B42F6E7D8E537E45E
 
 # -------------------------------------------------------------------------------------------------------
+# iterate over the files in a directory and pass them to a command line subshell
+def get_files(path):
+    files = []
+    for root, dirs, filenames in os.walk(path):
+        for filename in filenames:
+            files.append(filename)
+    print(f"Found : {files} on {path}")
+    return files
 
 def run_cmd(cmd):
     # print(cmd)
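The original get_files (removed above) returned from inside the first os.walk iteration, so it only ever saw the top level of the tree; the rewrite collects every filename. Note it still returns bare names, which is safe only because the trees are flattened first. A hypothetical variant returning paths relative to the root, should the flattening ever be dropped:

import os

def get_files_relative(path):
    # Collect every file under `path`, keeping its position in the tree.
    # (An alternative sketch, not what the script does.)
    found = []
    for root, _dirs, filenames in os.walk(path):
        for filename in filenames:
            found.append(os.path.relpath(os.path.join(root, filename), path))
    return found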
@@ -73,12 +76,12 @@ def run_cmd(cmd):
 # using the md5sum command get the md5 for the file
 
 def get_md5(mdfile):
     # print(f"mdfile is {mdfile}")
     md5sum = run_cmd(f"md5sum {mdfile}")
     # split md5sum on space
     md5sum = md5sum.split()[0]
     # remove leading '\'
     md5sum = md5sum[1:]
+    print(f"generating md5sum for {mdfile} as {md5sum}")
     return md5sum
 
 def unzip_file(zip_file, unzip_dir):
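get_md5 shells out to md5sum and then strips the leading backslash md5sum prepends when a filename needs escaping. A minimal sketch of a portable alternative using hashlib (not what the script does) that avoids both the subshell and that quirk:

import hashlib

def get_md5_native(path, chunk_size=1 << 20):
    # Stream the file through hashlib.md5 in 1 MiB chunks.
    digest = hashlib.md5()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()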
@@ -86,6 +89,7 @@ def unzip_file(zip_file, unzip_dir):
         zip_ref.extractall(unzip_dir)
 
 def flatten_tree(tree_root):
+    print(f"Flattening tree {tree_root}")
     for root, flatten_dirs, files in os.walk(tree_root, topdown=False):
         for file in files:
             # Construct the full path to the file
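The hunk cuts off inside flatten_tree. Given the bottom-up walk (topdown=False) and how the script uses the function, the body plausibly continues along these lines; a sketch, not the committed code:

import os
import shutil

def flatten_tree(tree_root):
    print(f"Flattening tree {tree_root}")
    for root, flatten_dirs, files in os.walk(tree_root, topdown=False):
        for file in files:
            # Construct the full path to the file and move it up to the root
            file_path = os.path.join(root, file)
            if root != tree_root:
                shutil.move(file_path, os.path.join(tree_root, file))
        for d in flatten_dirs:
            # Bottom-up traversal means subfolders are already empty
            # by the time we reach them, so they can simply be removed.
            os.rmdir(os.path.join(root, d))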
@@ -107,6 +111,8 @@ parser = argparse.ArgumentParser(
 )
 parser.add_argument("-r", "--release", required=False, default=False, action="store_true", help="use the release folder in the target URL")
 parser.add_argument("-u", "--unzip", required=False, default=False, action="store_true", help="unzip the github artifact first")
+parser.add_argument("-w", "--webhook", help="post details to the webhook")
 
 # add path_to_directory required parameter to parser
 parser.add_argument("path_to_directory", help="path to the directory in which we'll look for the files")
@@ -114,102 +120,162 @@ args = parser.parse_args()
 path_to_directory = args.path_to_directory
 release = args.release
 
+# Create a webhook object with the webhook URL
+if args.webhook:
+    webhook = DiscordWebhook(url=args.webhook)
+
 dirs = ["windows", "mac", "linux"]
 
-if args.unzip:
-    # unzip the github artifact for this OS (`dir`) into the folder `dir`
-    # get the .zip files in args.path_to_directory using glob
-    zips = glob.glob(f"{args.path_to_directory}/*.zip")
-    for file in zips:
-        # print(f"unzipping {file}")
-        if "ubuntu" in file.lower():
-            unzip_file(file, os.path.join(args.path_to_directory, "linux"))
-        if "windows" in file.lower():
-            unzip_file(file, os.path.join(args.path_to_directory, "windows"))
-        if "macos" in file.lower():
-            unzip_file(file, os.path.join(args.path_to_directory, "mac"))
+# build_types maps the build types Beta, Release and Nightly to the folder names preview, release and nightly
+build_types = {
+    "Beta": "preview",
+    "Release": "release",
+    "Nightly": "nightly"
+}
+
+target_folder = {
+    "ubuntu": "linux",
+    "windows": "windows",
+    "macos": "mac"
+}
+
+# unzip the github artifact for this OS (`dir`) into the folder `dir`
+# get the .zip files in args.path_to_directory using glob
+print(f"Processing artifacts in {args.path_to_directory}")
+build_types_created = set()
+zips = glob.glob(f"{args.path_to_directory}/*.zip")
+for file in zips:
+    # print(f"unzipping {file}")
+    # extract the first word (delimited by '-') from the file name;
+    # file is a full path, so reduce it to the bare file name with basename
+    filename = os.path.basename(file)
+    build_type = filename.split("-")[0]
+    platform = filename.split("-")[1].lower()
+
+    # print(f"build_type is {build_type}")
+    if build_type not in build_types:
+        print(f"Invalid build_type {build_type} using file {file}")
+        continue
+    else:
+        build_folder = build_types[build_type]
+
+    build_types_created.add(build_type)
+
+    build_type_dir = os.path.join(args.path_to_directory, build_folder)
+
+    if platform not in target_folder:
+        print(f"Invalid platform {platform} using file {file}")
+        continue
+
+    unpack_folder = os.path.join(build_type_dir, target_folder[platform])
+    print(f"unpacking {filename} to {unpack_folder}")
+
+    if os.path.isfile(file):
+        # this is an actual zip file
+        unzip_file(file, unpack_folder)
+    else:
+        # Copy the contents of the source folder to the destination folder recursively
+        shutil.copytree(file, unpack_folder, dirs_exist_ok=True)
+
+output = ""
+for build_type in build_types_created:
+    build_type_dir = os.path.join(args.path_to_directory, build_types[build_type])
+    if not os.path.exists(build_type_dir):
+        print(f"Unexpected error: {build_type_dir} does not exist, even though it was in the set.")
+        continue
+    # loop over the folders in the build_type_dir
     for dir in dirs:
-        flatten_tree(os.path.join(args.path_to_directory, dir))
         print(f"Cleaning up {dir}")
         # Traverse the directory tree and move all of the files to the root directory
+        flatten_tree(os.path.join(build_type_dir, dir))
-    # Now move the symbols files to the symbols folder
-    symbols_folder = os.path.join(args.path_to_directory, "symbols")
+    # prep the symbols folder
+    symbols_folder = os.path.join(build_type_dir, "symbols")
     os.mkdir(symbols_folder)
-    symbol_archives = glob.glob(f"{args.path_to_directory}/**/*_hvk*", recursive=True)
+    symbol_archives = glob.glob(f"{build_type_dir}/**/*_hvk*", recursive=True)
     for sym_file in symbol_archives:
         print(f"Moving {sym_file} to {symbols_folder}")
         shutil.move(sym_file, symbols_folder)
-    symbol_archives = glob.glob(f"{args.path_to_directory}/**/*_oss*", recursive=True)
+    symbol_archives = glob.glob(f"{build_type_dir}/**/*_oss*", recursive=True)
     for sym_file in symbol_archives:
         print(f"Moving {sym_file} to {symbols_folder}")
         shutil.move(sym_file, symbols_folder)
 
-# While we're at it, let's print the md5 listing
-file_dict = {}
-md5_dict = {}
+    # While we're at it, let's print the md5 listing
+    file_dict = {}
+    md5_dict = {}
+    platforms_printable = {"windows": "MS Windows", "mac": "MacOS", "linux": "Linux"}
+    grids_printable = {"SL": "Second Life", "OS": "OpenSim"}
-for dir in dirs:
-    dir = dir.lower()
-    files = get_files(os.path.join(args.path_to_directory, dir))
-    for file in files:
-        full_file = os.path.join(args.path_to_directory, dir, file)
-        md5 = get_md5(full_file)
-        base_name = os.path.basename(file)
-        if "-Release-" in base_name or "-Beta-" in base_name:
-            wordsize = "32"
-        else:
-            wordsize = "64"
-
-        if "FirestormOS-" in base_name:
-            grid = "OS"
-        else:
-            grid = "SL"
-
-        if dir in dirs:
-            file_dict[f"{grid}{dir}{wordsize}"] = full_file
-            md5_dict[f"{grid}{dir}{wordsize}"] = md5
-
-download_root_preview = "https://downloads.firestormviewer.org/preview"
-download_root_release = "https://downloads.firestormviewer.org/release"
-
-if args.release:
-    download_root = download_root_release
-else:
-    download_root = download_root_preview
-
-print('''
-DOWNLOADS''')
-
-platforms_printable = {"windows": "MS Windows", "mac": "MacOS", "linux": "Linux"}
-grids_printable = {"SL": "Second Life", "OS": "OpenSim"}
-
-for dir in dirs:
-    print(f'''-------------------------------------------------------------------------------------------------------
-{platforms_printable[dir]}
-''')
-    dir = dir.lower()
-    wordsize = "64"
-    platform = f"{platforms_printable[dir]}"
-    for grid in ["SL", "OS"]:
-        grid_printable = f"{grids_printable[grid]}"
-        try:
-            print(f"{platform} for {grid_printable} ({wordsize}-bit)")
-            print("{}/{}/{}".format(download_root, dir, os.path.basename(file_dict[f"{grid}{dir}{wordsize}"])))
-            print()
-            print("MD5: {}".format(md5_dict[f"{grid}{dir}{wordsize}"]))
-            print()
-            if (dir == "windows"):
-                # Need to do 32 bit as well
-                wordsize = "32"
-                print(f"{platform} for {grid_printable} ({wordsize}-bit)")
-                print("{}/{}/{}".format(download_root, dir, os.path.basename(file_dict[f"{grid}{dir}{wordsize}"])))
-                print()
-                print("MD5: {}".format(md5_dict[f"{grid}{dir}{wordsize}"]))
-                print()
-                wordsize = "64"
-        except KeyError:
-            print(f"{platform} for {grid_printable} ({wordsize}-bit) - NOT AVAILABLE")
-            print()
-    print('''
--------------------------------------------------------------------------------------------------------''')
+    download_root = f"https://downloads.firestormviewer.org/{build_types[build_type]}"
+    for dir in dirs:
+        print(f"Getting files for {dir} in {build_type_dir}")
+        files = get_files(os.path.join(build_type_dir, dir))
+        try:
+            for file in files:
+                full_file = os.path.join(build_type_dir, dir, file)
+                md5 = get_md5(full_file)
+                base_name = os.path.basename(file)
+                if "x64" in base_name:
+                    wordsize = "64"
+                else:
+                    wordsize = "32"
+
+                if "FirestormOS-" in base_name:
+                    grid = "OS"
+                else:
+                    grid = "SL"
+
+                if dir in dirs:
+                    file_dict[f"{grid}{dir}{wordsize}"] = full_file
+                    md5_dict[f"{grid}{dir}{wordsize}"] = md5
+        except TypeError:
+            print(f"No files found for {dir} in {build_type_dir}")
+
+    output += f'''
+DOWNLOADS - {build_type}
+'''
+    for dir in dirs:
+        output += f'''-------------------------------------------------------------------------------------------------------
+{platforms_printable[dir]}
+'''
+        dir = dir.lower()
+        wordsize = "64"
+        platform = f"{platforms_printable[dir]}"
+        for grid in ["SL", "OS"]:
+            grid_printable = f"{grids_printable[grid]}"
+            try:
+                output += f"{platform} for {grid_printable} ({wordsize}-bit)\n"
+                output += f"{download_root}/{dir}/{os.path.basename(file_dict[f'{grid}{dir}{wordsize}'])}\n"
+                output += "\n"
+                output += f"MD5: {md5_dict[f'{grid}{dir}{wordsize}']}\n"
+                output += "\n"
+                if dir == "windows":
+                    # Need to do 32 bit as well
+                    wordsize = "32"
+                    output += f"{platform} for {grid_printable} ({wordsize}-bit)\n"
+                    output += f"{download_root}/{dir}/{os.path.basename(file_dict[f'{grid}{dir}{wordsize}'])}\n"
+                    output += "\n"
+                    output += f"MD5: {md5_dict[f'{grid}{dir}{wordsize}']}\n"
+                    output += "\n"
+                    wordsize = "64"
+            except KeyError:
+                output += f"{platform} for {grid_printable} ({wordsize}-bit) - NOT AVAILABLE\n"
+                output += "\n"
+        output += '''
+-------------------------------------------------------------------------------------------------------
+'''
+
+if args.webhook:
+    # Add the message to the webhook
+    webhook.set_content(content=output)
+    # Send the webhook
+    response = webhook.execute()
+    # Print the response
+    print(f"Webhook response: {response}")
+print(output)
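One caveat with the webhook step: Discord rejects message content over 2,000 characters, and the accumulated listing can exceed that once several build types are present. A hedged hardening sketch (not part of this commit) that splits the output across multiple webhook posts:

from discord_webhook import DiscordWebhook

def post_in_chunks(url, text, limit=1990):
    # Send `text` as a series of webhook messages, each kept under
    # Discord's 2,000-character content cap.
    for start in range(0, len(text), limit):
        webhook = DiscordWebhook(url=url, content=text[start:start + limit])
        webhook.execute()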