diff --git a/.github/workflows/build_viewer.yml b/.github/workflows/build_viewer.yml index 63368a939e..ff1cc099bd 100644 --- a/.github/workflows/build_viewer.yml +++ b/.github/workflows/build_viewer.yml @@ -323,7 +323,7 @@ jobs: # npm install -g node-dump-syms - name: Post Bugsplat Symbols - uses: beqjanus/symbol-upload@main + uses: BugSplat-Git/symbol-upload@main with: clientId: ${{ steps.version.outputs.viewer_release_type == 'Release' && secrets.BUGSPLAT_RELEASE_ID || @@ -391,27 +391,30 @@ jobs: with: sparse-checkout: | fsutils/download_list.py + fsutils/build_config.json + fsutils/build_config.py sparse-checkout-cone-mode: false ref: ${{ github.head_ref || github.ref_name || 'master' }} fetch-depth: 1 - name: Install discord-webhook library run: pip install discord-webhook - - name: find channel and webhook from Branch name + - name: find channel and webhook from build_matrix outputs run: | - if [[ "${{ github.ref_name }}" == Firestorm* ]]; then + viewer_release_type=${{ needs.build_matrix.outputs.viewer_release_type }} + if [[ "$viewer_release_type" == "Release" ]]; then FS_RELEASE_FOLDER=release FS_BUILD_WEBHOOK_URL=${{ secrets.RELEASE_WEBHOOK_URL }} - elif [[ "${{ github.ref_name }}" == *review* ]]; then + elif [[ "$viewer_release_type" == "Beta" ]]; then FS_RELEASE_FOLDER=preview FS_BUILD_WEBHOOK_URL=${{ secrets.BETA_WEBHOOK_URL }} - elif [[ "${{ github.ref_name }}" == *alpha* ]]; then + elif [[ "$viewer_release_type" == "Alpha" ]]; then FS_RELEASE_FOLDER=test FS_BUILD_WEBHOOK_URL=${{ secrets.BETA_WEBHOOK_URL }} - elif [[ "${{ github.ref_name }}" == *nightly* ]] || [[ "${{ github.event_name }}" == 'schedule' ]]; then + elif [[ "$viewer_release_type" == "Nightly" ]] || [[ "${{ github.event_name }}" == 'schedule' ]]; then FS_RELEASE_FOLDER=nightly FS_BUILD_WEBHOOK_URL=${{ secrets.NIGHTLY_WEBHOOK_URL }} - elif [[ "${{github.event_name }}" == "workflow_dispatch" ]]; then + elif [[ "$viewer_release_type" == "Manual" ]]; then FS_RELEASE_FOLDER=test FS_BUILD_WEBHOOK_URL=${{ secrets.MANUAL_WEBHOOK_URL }} else @@ -430,7 +433,13 @@ jobs: working-directory: ${{steps.download.outputs.download-path}} - name: Reorganise artifacts ready for server upload. 
- run: python ./fsutils/download_list.py -u ${{steps.download.outputs.download-path}} -w ${{ env.FS_BUILD_WEBHOOK_URL }} + env: + FS_VIEWER_CHANNEL: ${{ needs.build_matrix.outputs.viewer_channel }} + FS_VIEWER_VERSION: ${{ needs.build_matrix.outputs.viewer_version }} + FS_VIEWER_BUILD: ${{ needs.build_matrix.outputs.viewer_build }} + FS_VIEWER_RELEASE_TYPE: ${{ needs.build_matrix.outputs.viewer_release_type }} + FS_VERSION_MGR_KEY: ${{ secrets.FS_VERSION_MGR_KEY }} + run: python ./fsutils/download_list.py ${{steps.download.outputs.download-path}} -w ${{ env.FS_BUILD_WEBHOOK_URL }} - name: Setup rclone and download the folder uses: beqjanus/setup-rclone@main diff --git a/.github/workflows/deploy_only.yml b/.github/workflows/deploy_only.yml new file mode 100644 index 0000000000..433da18767 --- /dev/null +++ b/.github/workflows/deploy_only.yml @@ -0,0 +1,102 @@ +name: Deploy Viewer + +on: + workflow_dispatch: + inputs: + build_run_id: + description: 'Workflow Run ID of the build to deploy' + required: true + default: '' + viewer_channel: + description: 'viewer_channel' + required: true + default: 'Releasex64' + viewer_version: + description: 'viewer version not including build' + required: true + default: '7.1.10' + viewer_build: + description: 'build id' + required: true + default: '799999' + viewer_release_type: + description: 'release type' + required: true + default: 'Release' + branch: + description: 'Branch to deploy from' + required: false + default: 'master' + +jobs: + deploy: + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + sparse-checkout: | + fsutils/download_list.py + fsutils/build_config.json + fsutils/build_config.py + sparse-checkout-cone-mode: false + ref: ${{ github.event.inputs.branch || 'master' }} + fetch-depth: 1 + - name: Download Build Artifacts + uses: dawidd6/action-download-artifact@v6 + id: download + with: + workflow: build_viewer.yml + run_id: ${{ github.event.inputs.build_run_id }} + path: to_deploy + - name: Install discord-webhook library + run: pip install discord-webhook pytz + + - name: find channel and webhook from workflow inputs + run: | + viewer_release_type=${{ github.event.inputs.viewer_release_type }} + if [[ "$viewer_release_type" == "Release" ]]; then + FS_RELEASE_FOLDER=release + FS_BUILD_WEBHOOK_URL=${{ secrets.RELEASE_WEBHOOK_URL }} + elif [[ "$viewer_release_type" == "Beta" ]]; then + FS_RELEASE_FOLDER=preview + FS_BUILD_WEBHOOK_URL=${{ secrets.BETA_WEBHOOK_URL }} + elif [[ "$viewer_release_type" == "Alpha" ]]; then + FS_RELEASE_FOLDER=test + FS_BUILD_WEBHOOK_URL=${{ secrets.BETA_WEBHOOK_URL }} + elif [[ "$viewer_release_type" == "Nightly" ]] || [[ "${{ github.event_name }}" == 'schedule' ]]; then + FS_RELEASE_FOLDER=nightly + FS_BUILD_WEBHOOK_URL=${{ secrets.NIGHTLY_WEBHOOK_URL }} + elif [[ "$viewer_release_type" == "Manual" ]]; then + FS_RELEASE_FOLDER=test + FS_BUILD_WEBHOOK_URL=${{ secrets.MANUAL_WEBHOOK_URL }} + else + FS_RELEASE_TYPE=Unknown + fi + echo "FS_RELEASE_FOLDER=${FS_RELEASE_FOLDER}" >> $GITHUB_ENV + echo "FS_BUILD_WEBHOOK_URL=${FS_BUILD_WEBHOOK_URL}" >> $GITHUB_ENV + + - name: List artifacts download + run: ls -R + working-directory: ${{steps.download.outputs.download-path}} + + - name: Reorganise artifacts ready for server upload. 
+ env: + FS_VIEWER_CHANNEL: ${{ github.event.inputs.viewer_channel }} + FS_VIEWER_VERSION: ${{ github.event.inputs.viewer_version }} + FS_VIEWER_BUILD: ${{ github.event.inputs.viewer_build }} + FS_VIEWER_RELEASE_TYPE: ${{ github.event.inputs.viewer_release_type }} + FS_VERSION_MGR_KEY: ${{ secrets.FS_VERSION_MGR_KEY }} + run: python ./fsutils/download_list.py ./to_deploy -w ${{ env.FS_BUILD_WEBHOOK_URL }} + + - name: Setup rclone and download the folder + uses: beqjanus/setup-rclone@main + with: + rclone_config: ${{ secrets.RCLONE_CONFIG }} + + - name: Copy files to remote host + run: rclone copy ./to_deploy/${{ env.FS_RELEASE_FOLDER }} fs_r2_deploy:viewerdownloads/${{ env.FS_RELEASE_FOLDER }} + diff --git a/fsutils/build_config.json b/fsutils/build_config.json new file mode 100644 index 0000000000..9ca79a8ffe --- /dev/null +++ b/fsutils/build_config.json @@ -0,0 +1,44 @@ +{ + "os_download_dirs": [ + "windows", "mac", "linux" + ], + "fs_version_mgr_platform": { + "windows": "win", + "mac": "mac", + "linux": "lin" + }, + "build_type_hosted_folder": { + "Release": "release", + "Beta": "preview", + "Alpha": "test", + "Nightly": "nightly", + "Unknown": "test" + }, + "os_hosted_folder": { + "windows": "windows", + "macos": "mac", + "ubuntu": "linux" + }, + "platforms_printable": { + "windows": "MS Windows", + "mac": "MacOS", + "linux": "Linux" + }, + "grids_printable": { + "SL": "Second Life", + "OS": "OpenSim" + }, + "download_root": "https://downloads.firestormviewer.org", + "viewer_channel_mapping": { + "Release": "release", + "Beta": "beta", + "Alpha": "alpha", + "Nightly": "nightly" + }, + "build_type_mapping": { + "regular": "regular", + "avx": "avx", + "tracy": "tracy", + "arm": "arm" + } +} diff --git a/fsutils/build_config.py b/fsutils/build_config.py new file mode 100644 index 0000000000..c35f89099d --- /dev/null +++ b/fsutils/build_config.py @@ -0,0 +1,19 @@ +# build_config.py + +import json + +class BuildConfig: + def __init__(self, config_file='./fsutils/build_config.json'): + with open(config_file, 'r') as f: + config_data = json.load(f) + + self.supported_os_dirs = config_data.get('os_download_dirs', []) + # build_type_hosted_folder maps build types (Release, Beta, Alpha, Nightly, Unknown) to hosted folder names (release, preview, test, nightly) + self.build_type_hosted_folder = config_data.get('build_type_hosted_folder', {}) + self.fs_version_mgr_platform = config_data.get('fs_version_mgr_platform', {}) + self.os_hosted_folder = config_data.get('os_hosted_folder', {}) + self.platforms_printable = config_data.get('platforms_printable', {}) + self.grids_printable = config_data.get('grids_printable', {}) + self.download_root = config_data.get('download_root', '') + self.viewer_channel_mapping = config_data.get('viewer_channel_mapping', {}) + self.build_type_mapping = config_data.get('build_type_mapping', {}) diff --git a/fsutils/download_list.py b/fsutils/download_list.py index 1d5756b619..95f65f9ef1 100644 --- a/fsutils/download_list.py +++ b/fsutils/download_list.py @@ -6,10 +6,26 @@ import time import zipfile import glob import shutil +import hashlib +import pytz +from datetime import datetime +import requests from discord_webhook import DiscordWebhook +from build_config import BuildConfig +def get_current_date_str(): + now = datetime.now(pytz.timezone('UTC')) + day = now.day + month = now.month + year = now.year + return f"{day}{month}{year}" +def generate_secret(secret_key): + current_date = get_current_date_str() + data = secret_key + current_date + secret_for_api = hashlib.sha1(data.encode()).hexdigest() + 
return secret_for_api # run a command line subshell and return the output @@ -103,171 +119,340 @@ def flatten_tree(tree_root): # Delete the subdirectory and its contents shutil.rmtree(subdir_path) +def get_build_variables(): + """ + Extracts initial build variables from environment variables. + In practice these are set from the outputs of the earlier build_matrix job. + Returns: + dict: A dictionary containing 'version', 'build_number', 'version_full' and 'release_type'. + """ -# parse args first arg optional -r (release) second arg mandatory string path_to_directory + version = os.environ.get('FS_VIEWER_VERSION') + build_number = os.environ.get('FS_VIEWER_BUILD') + release_type = os.environ.get('FS_VIEWER_RELEASE_TYPE') -parser = argparse.ArgumentParser( - prog="print_download_list", - description="Prints the list of files for download and their md5 checksums" - ) -parser.add_argument("-r", "--release", required=False, default=False, action="store_true", help="use the release folder in the target URL") -parser.add_argument("-u", "--unzip", required=False, default=False, action="store_true", help="unzip the github artifact first") -parser.add_argument("-w", "--webhook", help="post details to the webhook") + if not version or not build_number or not release_type: + raise ValueError("Environment variables 'FS_VIEWER_VERSION', 'FS_VIEWER_BUILD' and 'FS_VIEWER_RELEASE_TYPE' must be set.") -# add path_to_directory required parameter to parser -parser.add_argument("path_to_directory", help="path to the directory in which we'll look for the files") + return { + 'version': version, + 'build_number': build_number, + 'version_full': f"{version}.{build_number}", + 'release_type': release_type, + } -args = parser.parse_args() -path_to_directory = args.path_to_directory -release = args.release +def get_hosted_folder_for_build_type(build_type, config): + return config.build_type_hosted_folder.get( + build_type, + config.build_type_hosted_folder.get("Unknown") + ) -# Create a webhook object with the webhook URL -if args.webhook: - webhook = DiscordWebhook(url=args.webhook) +def is_supported_build_type(build_type, config): + return build_type in config.build_type_hosted_folder +def get_hosted_folder_for_os_type(os_type, config): + return config.os_hosted_folder.get( + os_type + ) -dirs = ["windows", "mac", "linux"] +def get_supported_os(os_name, config): + # returns None for unexpected os_name + return config.os_hosted_folder.get(os_name) -# build_types is a map from Beta, Release and Nightly to folder names preview release and nightly -build_types = { - "Alpha": "test", - "Beta": "preview", - "Release": "release", - "Nightly": "nightly", - "Unknown": "test" -} - -target_folder = { - "ubuntu":"linux", - "windows":"windows", - "macos":"mac" -} - -# unzip the github artifact for this OS (`dir`) into the folder `dir` -# get the .zip files in args.path_to_directory using glob -print(f"Processing artifacts in {args.path_to_directory}") -build_types_created = set() -zips = glob.glob(f"{args.path_to_directory}/*.zip") -for file in zips: +def extract_vars_from_zipfile_name(file): + # File is an artifact file something like Nightly-windows-2022-64-sl-artifacts.zip # print(f"unzipping {file}") #extract first word (delimited by '-' from the file name) # build_type is a fullpath but we only want the last folder, remove the leading part of the path leaving just the foldername using basename filename = os.path.basename(file) build_type = filename.split("-")[0] platform = filename.split("-")[1].lower() + return filename, build_type,
platform - # print(f"build_type is {build_type}") - if build_type not in build_types: - print(f"Invalid build_type {build_type} from file {file} using 'Unknown'") - build_type = "Unknown" - build_folder = build_types[build_type] - - build_types_created.add(build_type) +def unpack_artifacts(path_to_artifacts_directory, config): + build_types_found = {} + zips = glob.glob(f"{path_to_artifacts_directory}/*.zip") + for file in zips: + print(f"Processing zip file {file}") + filename, build_type, platform = extract_vars_from_zipfile_name(file) + print(f"Identified filename {filename}, build_type {build_type} and platform {platform} from file {file}") + if not is_supported_build_type(build_type, config): + print(f"Invalid build_type {build_type} from file {file}, using 'Unknown' instead") + build_type = "Unknown" + else: + print(f"Using build_type {build_type} from file {file}") - build_type_dir = os.path.join(args.path_to_directory, build_folder) + build_folder = get_hosted_folder_for_build_type(build_type, config) + print(f"build_folder {build_folder}") + try: + build_type_dir = os.path.join(path_to_artifacts_directory, build_folder) + except Exception as e: + print(f"An error occurred while constructing the build_type_dir path from {path_to_artifacts_directory} and {build_folder}: {e}") + continue + print(f"build_type_dir {build_type_dir}") + os_folder = get_hosted_folder_for_os_type(platform, config) + print(f"os_folder {os_folder}") + try: + unpack_folder = os.path.join(build_type_dir, os_folder) + except Exception as e: + print(f"An error occurred while constructing the unpack_folder path from {build_type_dir} and {os_folder}: {e}") + continue + print(f"unpacking {filename} to {unpack_folder}") + if os.path.isfile(file): + # this is an actual zip file + try: + unzip_file(file, unpack_folder) + except zipfile.BadZipFile: + print(f"Skipping {file} as it is not a valid zip file") + continue + except Exception as e: + print(f"An error occurred while unpacking {file}: {e}, skipping file {filename}") + continue + else: + # Create the destination folder if it doesn't exist + # if not os.path.exists(unpack_folder): + # os.makedirs(unpack_folder) + # Copy the contents of the source folder to the destination folder recursively + shutil.copytree(file, unpack_folder, dirs_exist_ok=True) + print(f"Finished unpacking {filename} to {unpack_folder}") + if build_type not in build_types_found: + print(f"Creating build_type {build_type} entry in build_types_found") + build_types_found[build_type] = { + "build_type": build_type, + "build_type_folder": build_folder, + "build_type_fullpath": build_type_dir, + "os_folders": [], + } + if os_folder not in build_types_found[build_type]["os_folders"]: + build_types_found[build_type]["os_folders"].append(os_folder) + print(f"Appended {os_folder} to build_type {build_type}") + print(f"Finished processing artifacts for build_type {build_type}") + return build_types_found - if platform not in target_folder: - print(f"Invalid platform {platform} using file {file}") - continue - - unpack_folder = os.path.join(build_type_dir, target_folder[platform]) - print(f"unpacking {filename} to {unpack_folder}") - - if os.path.isfile(file): - # this is an actual zip file - unzip_file(file, unpack_folder) - else: - # Create the destination folder if it doesn't exist - # if not os.path.exists(unpack_folder): - # os.makedirs(unpack_folder) - # Copy the contents of the source folder to the destination folder recursively - shutil.copytree(file, unpack_folder, dirs_exist_ok=True) - -output = ""
-for build_type in build_types_created: - build_type_dir = os.path.join(args.path_to_directory, build_types[build_type]) +def restructure_folders(build_type, config): + print(f"Restructuring folders for build_type {build_type['build_type']}") + build_type_dir = build_type["build_type_fullpath"] if not os.path.exists(build_type_dir): - print(f"Unexpected error: {build_type_dir} does not exist, even though it was in the set.") - continue + print(f"Unexpected error: path {build_type_dir} does not exist, even though it was recorded during unpacking.") + raise FileNotFoundError(build_type_dir) # loop over the folder in the build_type_dir - for dir in dirs: - print(f"Cleaning up {dir}") + for platform_folder in build_type["os_folders"]: + print(f"Cleaning up {platform_folder}") # Traverse the directory tree and move all of the files to the root directory - flatten_tree(os.path.join(build_type_dir, dir)) + flatten_tree(os.path.join(build_type_dir, platform_folder)) # Now move the symbols files to the symbols folder - # prep the symbols folder + # Define the folder for symbols symbols_folder = os.path.join(build_type_dir, "symbols") os.mkdir(symbols_folder) - symbol_archives = glob.glob(f"{build_type_dir}/**/*_hvk*", recursive=True) - for sym_file in symbol_archives: - print(f"Moving {sym_file} to {symbols_folder}") - shutil.move(sym_file, symbols_folder) - symbol_archives = glob.glob(f"{build_type_dir}/**/*_oss*", recursive=True) - for sym_file in symbol_archives: - print(f"Moving {sym_file} to {symbols_folder}") - shutil.move(sym_file, symbols_folder) + # prep the symbols folder + symbol_patterns = ["*_hvk*", "*_oss*"] + # Loop through each pattern, find matching files, and move them + for pattern in symbol_patterns: + symbol_archives = glob.glob(f"{build_type_dir}/**/{pattern}", recursive=True) + for sym_file in symbol_archives: + print(f"Moving {sym_file} to {symbols_folder}") + shutil.move(sym_file, symbols_folder) + +def gather_build_info(build_type, config): + print(f"Gathering build info for build_type {build_type['build_type']}") # While we're at it, let's print the md5 listing - file_dict = {} - md5_dict = {} - platforms_printable = {"windows":"MS Windows", "mac":"MacOS", "linux":"Linux"} - grids_printable = {"SL":"Second Life", "OS":"OpenSim"} - - download_root = f"https://downloads.firestormviewer.org/{build_types[build_type]}" - output += f''' -DOWNLOADS - {build_type} -------------------------------------------------------------------------------------------------------- -''' - for dir in dirs: - print(f"Getting files for {dir} in {build_type_dir}") - files = get_files(os.path.join(build_type_dir, dir)) + download_root = f"{config.download_root}/{build_type['build_type_folder']}" + # for each os that we have built for + build_type_dir = build_type["build_type_fullpath"] + for platform_folder in build_type["os_folders"]: + print(f"Getting files for {platform_folder} in {build_type_dir}") + build_type_platform_folder = os.path.join(build_type_dir, platform_folder) + files = get_files(build_type_platform_folder) try: for file in files: - full_file = os.path.join(build_type_dir, dir, file) - md5 = get_md5(full_file) + full_file = os.path.join(build_type_platform_folder, file) base_name = os.path.basename(file) - wordsize = "64" + file_URI = f"{download_root}/{platform_folder}/{base_name}" + md5 = get_md5(full_file) if "FirestormOS-" in base_name: grid = "OS" else: grid = "SL" - if dir in dirs: - file_dict[f"{grid}{dir}{wordsize}"] = full_file - md5_dict[f"{grid}{dir}{wordsize}"] = md5 + file_key = f"{grid}-{platform_folder}" + + # if
platform_folder in config.os_download_dirs: + if "downloadable_artifacts" not in build_type: + build_type["downloadable_artifacts"] = {} + + build_type["downloadable_artifacts"][f"{file_key}"] = { + "file_path": full_file, + "file_download_URI": file_URI, + "grid": grid, + "fs_ver_mgr_platform": config.fs_version_mgr_platform.get(platform_folder), + "md5": md5, + } + except TypeError: - print(f"No files found for {dir} in {build_type_dir}") + print(f"Error processing files for {platform_folder} in {build_type_dir}") + continue + except Exception as e: + print(f"An error occurred while processing files for {platform_folder} in {build_type_dir}: {e}") + continue + print(f"Created build info: {build_type}") + return build_type - - - output += f''' -{platforms_printable[dir]} +def create_discord_message(build_info, config): +# Start with a header line + text_summary = f''' +DOWNLOADS - {build_info["build_type"]} +------------------------------------------------------------------------------------------------------- ''' - dir = dir.lower() - wordsize = "64" - platform = f"{platforms_printable[dir]}" +# for each platform we potentially build for +# Append platform label in printable form + for platform_folder in config.supported_os_dirs: + platform_printable = config.platforms_printable[platform_folder] + text_summary += f''' +{platform_printable} +''' + platform_folder = platform_folder.lower() for grid in ["SL", "OS"]: - grid_printable = f"{grids_printable[grid]}" + grid_printable = f"{config.grids_printable[grid]}" try: - output += f"{platform} for {grid_printable} ({wordsize}-bit)\n" - output += f"{download_root}/{dir}/{os.path.basename(file_dict[f'{grid}{dir}{wordsize}'])}\n" - output += "\n" - output += f"MD5: {md5_dict[f'{grid}{dir}{wordsize}']}\n" - output += "\n" + file_key = f"{grid}-{platform_folder}" + text_summary += f"{platform_printable} for {grid_printable}\n" + text_summary += f"{build_info['downloadable_artifacts'][file_key]['file_download_URI']}\n" + text_summary += "\n" + text_summary += f"MD5: {build_info['downloadable_artifacts'][file_key]['md5']}\n" + text_summary += "\n" except KeyError: - output += f"{platform} for {grid_printable} ({wordsize}-bit) - NOT AVAILABLE\n" - output += "\n" - output += '''------------------------------------------------------------------------------------------------------- + text_summary += f"{platform_printable} for {grid_printable} - NOT AVAILABLE\n" + text_summary += "\n" + text_summary += ''' +------------------------------------------------------------------------------------------------------- ''' + return text_summary - if args.webhook: - # Add the message to the webhook - webhook.set_content(content=output) - # Send the webhook - response = webhook.execute() - # Print the response - if not response.ok: - print(f"Webhook Error {response.status_code}: {response.text}") - print(output) +def update_fs_version_mgr(build_info, config): + print(f"Updating Firestorm Version Manager for build_type {build_info['build_type']}") + # Read the secret key from environment variables + secret_key = os.environ.get('FS_VERSION_MGR_KEY') + if not secret_key: + print("Error: FS_VERSION_MGR_KEY not set") + sys.exit(1) + secret_for_api = generate_secret(secret_key) + build_type = build_info["build_type"] + version = os.environ.get('FS_VIEWER_VERSION') + channel = os.environ.get('FS_VIEWER_CHANNEL') + build_number = os.environ.get('FS_VIEWER_BUILD') + + build_variant = "regular" + for file_key in build_info["downloadable_artifacts"]: + try: + download_link = 
build_info["downloadable_artifacts"][file_key]["file_download_URI"] + md5_checksum = build_info["downloadable_artifacts"][file_key]["md5"] + grid = build_info["downloadable_artifacts"][file_key]["grid"].lower() + os_name = build_info["downloadable_artifacts"][file_key]["fs_ver_mgr_platform"] + except KeyError: + print(f"Error: Could not find downloadable artifacts for {file_key}") + continue + + payload = { + "viewer_channel": channel, + "grid_type": grid, + "operating_system": os_name, + "build_type": build_type.lower(), + "viewer_version": version, + "build_number": int(build_number), + "download_link": download_link, + "md5_checksum": md5_checksum + } + print(f"Payload (without secret): {payload}") + payload["secret"] = secret_for_api + + # Make the API call + url = "https://www.firestormviewer.org/set-fs-vrsns-jsn/" + headers = {"Content-Type": "application/json"} + + response = None # Initialize response to None + + try: + response = requests.post(url, json=payload, headers=headers) + + # Manually check for status code instead of raising an exception + if response.status_code == 200: + response_data = response.json() + result = response_data.get('result') + message = response_data.get('message') + + if result == 'success': + print(f"Version manager updated successfully for {os_name} {build_variant}") + else: + print(f"Error updating version manager: {message}") + else: + print(f"Unexpected status code received: {response.status_code}") + print(f"Response body: {response.text}") + + except requests.exceptions.RequestException as e: + print(f"API request failed: {e}") + + # Additional error handling + if response and response.status_code == 403: + print("Status Code:", response.status_code) + print("Response Headers:", response.headers) + print("Response Body:", response.text) + + except ValueError: + print("API response is not valid JSON") + +# parse args first arg optional -r (release) second arg mandatory string path_to_directory +def main(): + try: + # Initialise the build configuration + config = BuildConfig() + + parser = argparse.ArgumentParser( + prog="print_download_list", + description="Prints the list of files for download and their md5 checksums" + ) + parser.add_argument("-w", "--webhook", help="post details to the webhook") + + # add path_to_directory required parameter to parser + parser.add_argument("path_to_directory", help="path to the directory in which we'll look for the files") + + args = parser.parse_args() + + # Create a webhook object with the webhook URL + if args.webhook: + webhook = DiscordWebhook(url=args.webhook) + + # unzip the github artifact for this OS (`dir`) into the folder `dir` + # get the .zip files in args.path_to_directory using glob + print(f"Processing artifacts in {args.path_to_directory}") + build_types_created = unpack_artifacts(args.path_to_directory, config) + print(f"buuild types created: {build_types_created}") + for build_type_key, build_type in build_types_created.items(): + print(f"Processing {build_type_key}") + restructure_folders(build_type, config) + build_info = gather_build_info(build_type, config) + update_fs_version_mgr(build_info, config) + + discord_text = create_discord_message(build_info, config) + if args.webhook: + # Add the message to the webhook + webhook.set_content(content=discord_text) + # Send the webhook + response = webhook.execute() + # Print the response + if not response.ok: + print(f"Webhook Error {response.status_code}: {response.text}") + print(discord_text) + except Exception as e: + print(f"An error 
occurred: {e}") + sys.exit(1) + +if __name__ == '__main__': + import sys + main() \ No newline at end of file diff --git a/indra/llplugin/llpluginprocessparent.cpp b/indra/llplugin/llpluginprocessparent.cpp index a7cfcd9f8e..8b359d055f 100644 --- a/indra/llplugin/llpluginprocessparent.cpp +++ b/indra/llplugin/llpluginprocessparent.cpp @@ -157,8 +157,18 @@ LLPluginProcessParent::ptr_t LLPluginProcessParent::create(LLPluginProcessParent /*static*/ void LLPluginProcessParent::shutdown() { - LLCoros::LockType lock(*sInstancesMutex); - + // FIRE-34497 - lock maybe be null during shutdown due to fiber shutdown race condition + // LLCoros::LockType lock(*sInstancesMutex); + std::unique_ptr lock; + if (sInstancesMutex) + { + lock = std::make_unique(*sInstancesMutex); + } + else + { + LL_WARNS("Plugin") << "shutdown called but no instances mutex available" << LL_ENDL; + } + // mapInstances_t::iterator it; for (it = sInstances.begin(); it != sInstances.end(); ++it) { diff --git a/indra/newview/lldrawpoolavatar.cpp b/indra/newview/lldrawpoolavatar.cpp index dc783d27b7..2584a97c54 100644 --- a/indra/newview/lldrawpoolavatar.cpp +++ b/indra/newview/lldrawpoolavatar.cpp @@ -109,7 +109,6 @@ S32 cube_channel = -1; LLDrawPoolAvatar::LLDrawPoolAvatar(U32 type) : LLFacePool(type) - , mAvatar(nullptr) // Add avatar hitbox debug - remember avatar pointer in case avatar draw face breaks { } @@ -689,9 +688,37 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) return; } + + if (mDrawFace.empty() && !single_avatar) + { + return; + } + + LLVOAvatar *avatarp { nullptr }; + + if (single_avatar) + { + avatarp = single_avatar; + } + else + { + LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("Find avatarp"); // Tracy markup + const LLFace *facep = mDrawFace[0]; + if (!facep || !facep->getDrawable()) // trap possible null dereference + { + return; + } + avatarp = (LLVOAvatar *)facep->getDrawable()->getVObj().get(); + } + + if (!avatarp || avatarp->isDead() || avatarp->mDrawable.isNull()) // trap possible null dereference + { + return; + } + // Add avatar hitbox debug static LLCachedControl render_hitbox(gSavedSettings, "DebugRenderHitboxes", false); - if (render_hitbox && pass == 2 && mAvatar && !mAvatar->isControlAvatar()) + if (render_hitbox && pass == 2 && !avatarp->isControlAvatar()) { LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("render_hitbox"); @@ -703,13 +730,13 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) LLGLEnable blend(GL_BLEND); gGL.getTexUnit(0)->unbind(LLTexUnit::TT_TEXTURE); - LLColor4 avatar_color = LLNetMap::getAvatarColor(mAvatar->getID()); + LLColor4 avatar_color = LLNetMap::getAvatarColor(avatarp->getID()); gGL.diffuseColor4f(avatar_color.mV[VRED], avatar_color.mV[VGREEN], avatar_color.mV[VBLUE], avatar_color.mV[VALPHA]); gGL.setLineWidth(2.0f); - const LLQuaternion& rot = mAvatar->getRotationRegion(); - const LLVector3& pos = mAvatar->getPositionAgent(); - const LLVector3& size = mAvatar->getScale(); + const LLQuaternion& rot = avatarp->getRotationRegion(); + const LLVector3& pos = avatarp->getPositionAgent(); + const LLVector3& size = avatarp->getScale(); // drawBoxOutline partly copied from llspatialpartition.cpp below @@ -766,34 +793,6 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) } } // - - if (mDrawFace.empty() && !single_avatar) - { - return; - } - - LLVOAvatar *avatarp { nullptr }; - - if (single_avatar) - { - avatarp = single_avatar; - } - else - { - LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("Find avatarp"); // Tracy markup 
- const LLFace *facep = mDrawFace[0]; - if (!facep->getDrawable()) - { - return; - } - avatarp = (LLVOAvatar *)facep->getDrawable()->getVObj().get(); - } - - if (avatarp->isDead() || avatarp->mDrawable.isNull()) - { - return; - } - // rendertime Tracy annotations { LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("check fully_loaded"); diff --git a/indra/newview/lldrawpoolavatar.h b/indra/newview/lldrawpoolavatar.h index 8db0bf1831..704979def3 100644 --- a/indra/newview/lldrawpoolavatar.h +++ b/indra/newview/lldrawpoolavatar.h @@ -121,8 +121,6 @@ typedef enum void renderAvatars(LLVOAvatar *single_avatar, S32 pass = -1); // renders only one avatar if single_avatar is not null. - LLVOAvatar* mAvatar; // Add avatar hitbox debug - remember avatar pointer in case avatar draw face breaks - static bool sSkipOpaque; static bool sSkipTransparent; static S32 sShadowPass; diff --git a/indra/newview/llviewermenu.cpp b/indra/newview/llviewermenu.cpp index 31244b1e69..2c9ea7022c 100644 --- a/indra/newview/llviewermenu.cpp +++ b/indra/newview/llviewermenu.cpp @@ -6580,7 +6580,17 @@ void handle_take(bool take_separate) // MAINT-290 // Reason: Showing the confirmation dialog resets object selection, thus there is nothing to derez. // Fix: pass selection to the confirm_take, so that selection doesn't "die" after confirmation dialog is opened - params.functor.function(boost::bind(confirm_take, _1, _2, LLSelectMgr::instance().getSelection())); + params.functor.function([take_separate](const LLSD ¬ification, const LLSD &response) + { + if (take_separate) + { + confirm_take_separate(notification, response, LLSelectMgr::instance().getSelection()); + } + else + { + confirm_take(notification, response, LLSelectMgr::instance().getSelection()); + } + }); if(locked_but_takeable_object || !you_own_everything) diff --git a/indra/newview/llvoavatar.cpp b/indra/newview/llvoavatar.cpp index 79400cd52f..0323fb3841 100644 --- a/indra/newview/llvoavatar.cpp +++ b/indra/newview/llvoavatar.cpp @@ -8028,7 +8028,6 @@ LLDrawable *LLVOAvatar::createDrawable(LLPipeline *pipeline) mNumInitFaces = mDrawable->getNumFaces() ; dirtyMesh(2); - poolp->mAvatar = this; // Add avatar hitbox debug - remember avatar pointer in case avatar draw face breaks return mDrawable; } diff --git a/indra/newview/llvoicevivox.cpp b/indra/newview/llvoicevivox.cpp index e133b394bb..517d1ea03f 100644 --- a/indra/newview/llvoicevivox.cpp +++ b/indra/newview/llvoicevivox.cpp @@ -5152,8 +5152,7 @@ bool LLVivoxVoiceClient::isVoiceWorking() const //Added stateSessionTerminated state to avoid problems with call in parcels with disabled voice (EXT-4758) // Condition with joining spatial num was added to take into account possible problems with connection to voice // server(EXT-4313). See bug descriptions and comments for MAX_NORMAL_JOINING_SPATIAL_NUM for more info. - return (mSpatialJoiningNum < MAX_NORMAL_JOINING_SPATIAL_NUM) && mIsProcessingChannels; -// return (mSpatialJoiningNum < MAX_NORMAL_JOINING_SPATIAL_NUM) && (stateLoggedIn <= mState) && (mState <= stateSessionTerminated); + return (mSpatialJoiningNum < MAX_NORMAL_JOINING_SPATIAL_NUM) && mIsLoggedIn; } // Returns true if the indicated participant in the current audio session is really an SL avatar. 
diff --git a/indra/newview/llvoicewebrtc.cpp b/indra/newview/llvoicewebrtc.cpp index f3150bfdc9..5c74e2d8c7 100644 --- a/indra/newview/llvoicewebrtc.cpp +++ b/indra/newview/llvoicewebrtc.cpp @@ -420,7 +420,7 @@ void LLWebRTCVoiceClient::notifyStatusObservers(LLVoiceClientStatusObserver::ESt status != LLVoiceClientStatusObserver::STATUS_LEFT_CHANNEL && status != LLVoiceClientStatusObserver::STATUS_VOICE_DISABLED) { - bool voice_status = LLVoiceClient::getInstance()->voiceEnabled() && LLVoiceClient::getInstance()->isVoiceWorking(); + bool voice_status = LLVoiceClient::getInstance()->voiceEnabled() && mIsProcessingChannels; gAgent.setVoiceConnected(voice_status); @@ -1335,7 +1335,10 @@ bool LLWebRTCVoiceClient::startAdHocSession(const LLSD& channelInfo, bool notify bool LLWebRTCVoiceClient::isVoiceWorking() const { - return mIsProcessingChannels; + // WebRTC voice is working if the connection coroutine is active; + // unlike Vivox, WebRTC doesn't need to connect to a secondary process + // or a login server to become active. + return mIsCoroutineActive; } // Returns true if calling back the session URI after the session has closed is possible. diff --git a/indra/newview/rlvhelper.cpp b/indra/newview/rlvhelper.cpp index 0768c09813..2599dfbf06 100644 --- a/indra/newview/rlvhelper.cpp +++ b/indra/newview/rlvhelper.cpp @@ -207,7 +207,7 @@ RlvBehaviourDictionary::RlvBehaviourDictionary() addEntry(new RlvBehaviourGenericToggleProcessor("setcam_eyeoffset")); addModifier(RLV_BHVR_SETCAM_EYEOFFSET, RLV_MODIFIER_SETCAM_EYEOFFSET, new RlvBehaviourModifierHandler("Camera - Eye Offset", LLVector3::zero, true, nullptr)); addEntry(new RlvBehaviourGenericToggleProcessor("setcam_eyeoffsetscale")); - addModifier(RLV_BHVR_SETCAM_EYEOFFSETSCALE, RLV_MODIFIER_SETCAM_EYEOFFSETSCALE, new RlvBehaviourModifierHandler("Camera - Eye Offset Scale", 0, true, nullptr)); + addModifier(RLV_BHVR_SETCAM_EYEOFFSETSCALE, RLV_MODIFIER_SETCAM_EYEOFFSETSCALE, new RlvBehaviourModifierHandler("Camera - Eye Offset Scale", 0.0f, true, nullptr)); addEntry(new RlvBehaviourGenericToggleProcessor("setcam_focusoffset")); addModifier(RLV_BHVR_SETCAM_FOCUSOFFSET, RLV_MODIFIER_SETCAM_FOCUSOFFSET, new RlvBehaviourModifierHandler("Camera - Focus Offset", LLVector3d::zero, true, nullptr)); addEntry(new RlvBehaviourProcessor("setcam_fovmin")); diff --git a/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml index fbabba237d..4131ddb6d3 100644 --- a/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml @@ -130,9 +130,6 @@ Teksturların çəkilişi: - Tekstur keyfiyyət səviyyəsi: diff --git a/indra/newview/skins/default/xui/de/floater_window_size.xml b/indra/newview/skins/default/xui/de/floater_window_size.xml index b1e94127a8..dfc7ba9aa1 100644 --- a/indra/newview/skins/default/xui/de/floater_window_size.xml +++ b/indra/newview/skins/default/xui/de/floater_window_size.xml @@ -20,6 +20,7 @@ +
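Editor's note on the fsutils/download_list.py rework above: unpack_artifacts() keys everything off the artifact naming convention (`<BuildType>-<platform>-...-artifacts.zip`), and any unrecognised build type falls back to the "Unknown" to "test" mapping in build_config.json. A minimal sketch of that classification logic, assuming the same naming scheme (the dictionary below is a subset of build_config.json and the zip name is only an example, not a real artifact):

```python
# Sketch of the filename convention parsed by extract_vars_from_zipfile_name()
# and the fallback applied in unpack_artifacts().
import os

BUILD_TYPE_HOSTED_FOLDER = {  # subset of fsutils/build_config.json
    "Release": "release", "Beta": "preview", "Alpha": "test",
    "Nightly": "nightly", "Unknown": "test",
}

def classify(zip_path):
    filename = os.path.basename(zip_path)
    parts = filename.split("-")  # assumes at least two '-'-separated fields
    build_type, platform = parts[0], parts[1].lower()
    if build_type not in BUILD_TYPE_HOSTED_FOLDER:
        build_type = "Unknown"  # same fallback as unpack_artifacts()
    return build_type, platform, BUILD_TYPE_HOSTED_FOLDER[build_type]

print(classify("Nightly-windows-2022-64-sl-artifacts.zip"))
# -> ('Nightly', 'windows', 'nightly')
```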
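A second note, on the version-manager handshake: update_fs_version_mgr() authenticates by appending a daily-rotating secret to the JSON payload, the SHA-1 of the shared key concatenated with a UTC date string. A small sketch of the computation, where "hypothetical-key" stands in for the real FS_VERSION_MGR_KEY secret:

```python
# Sketch of the rotating secret produced by generate_secret() above.
import hashlib
from datetime import datetime, timezone

def daily_secret(secret_key, now):
    # get_current_date_str() concatenates day, month and year with no
    # zero padding, so 5 January 2024 hashes over "512024", not "05012024".
    date_str = f"{now.day}{now.month}{now.year}"
    return hashlib.sha1((secret_key + date_str).encode()).hexdigest()

# The result is added to the payload as the "secret" field just before
# POSTing to the set-fs-vrsns-jsn endpoint.
print(daily_secret("hypothetical-key", datetime(2024, 1, 5, tzinfo=timezone.utc)))
```

Because the day and month are not zero-padded, the server has to assemble its date string the same way. Note also that distinct dates can collide under this format (1 December 2024 and 11 February 2024 both yield "1122024"), which is harmless for a rotating token but worth knowing when debugging a 403.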