From dcf115beb64312c616909f50141003a376039125 Mon Sep 17 00:00:00 2001 From: Beq Date: Thu, 12 Sep 2024 16:34:16 +0100 Subject: [PATCH 01/33] FIRE-34496 - BugSplat Crash: renderAvatars (when imposter rendering) Fix for mAvatar not initialised when single_avatar is null. Also added additional null checks in the avatar rendering process to prevent potential null dereference issues. --- indra/newview/lldrawpoolavatar.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/indra/newview/lldrawpoolavatar.cpp b/indra/newview/lldrawpoolavatar.cpp index f286a04ae1..fd34be11bc 100644 --- a/indra/newview/lldrawpoolavatar.cpp +++ b/indra/newview/lldrawpoolavatar.cpp @@ -693,7 +693,7 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) // Add avatar hitbox debug static LLCachedControl render_hitbox(gSavedSettings, "DebugRenderHitboxes", false); - if (render_hitbox && pass == 2 && mAvatar && !mAvatar->isControlAvatar()) + if (render_hitbox && pass == 2 && (single_avatar || mAvatar) && !mAvatar->isControlAvatar()) { LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("render_hitbox"); @@ -784,14 +784,14 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) { LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("Find avatarp"); // Tracy markup const LLFace *facep = mDrawFace[0]; - if (!facep->getDrawable()) + if (!facep || !facep->getDrawable()) // trap possible null dereference { return; } avatarp = (LLVOAvatar *)facep->getDrawable()->getVObj().get(); } - if (avatarp->isDead() || avatarp->mDrawable.isNull()) + if (!avatarp || avatarp->isDead() || avatarp->mDrawable.isNull()) // trap possible null dereference { return; } From 2824d2d23b1b5d951a5d400499080e7a66f92c2a Mon Sep 17 00:00:00 2001 From: PanteraPolnocy Date: Thu, 12 Sep 2024 22:48:40 +0200 Subject: [PATCH 02/33] FIRE-34486 Italian Translation Update, by Spartaco Zemenis --- indra/newview/skins/default/xui/it/notifications.xml | 3 +++ .../newview/skins/default/xui/it/panel_preferences_sound.xml | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/indra/newview/skins/default/xui/it/notifications.xml b/indra/newview/skins/default/xui/it/notifications.xml index 00393a5f19..37e51b5e76 100644 --- a/indra/newview/skins/default/xui/it/notifications.xml +++ b/indra/newview/skins/default/xui/it/notifications.xml @@ -5296,6 +5296,9 @@ Se vuoi vedere questo oggetto, toglilo e indossalo su un punto di attacco dell'a Il caricamento è attualmente in corso. Riprova più tardi. + + Le scene GLTF non sono ancora supportate dalla tua scheda grafica. + Stai per abilitare AutoFPS. Tutte le impostazioni grafiche non salvate andranno perse. diff --git a/indra/newview/skins/default/xui/it/panel_preferences_sound.xml b/indra/newview/skins/default/xui/it/panel_preferences_sound.xml index 97e670c895..0d55bd44a9 100644 --- a/indra/newview/skins/default/xui/it/panel_preferences_sound.xml +++ b/indra/newview/skins/default/xui/it/panel_preferences_sound.xml @@ -124,10 +124,10 @@ - + - + Soppressione del rumore: From a4d6fad698bc0f32c7b6779e16932c1e26b2d8dd Mon Sep 17 00:00:00 2001 From: Beq Date: Fri, 13 Sep 2024 23:03:16 +0100 Subject: [PATCH 03/33] FIRE-34497 - Bugsplat - crash during shutdown due to null mutex. This is evidently a race condition, not everyone is crashing and according to LL it does not repro in DeltaFPS (though it does in Atlasaurus for them too). This probably suggests that DeltaFPS just happened to move the race condition needle a little in the right direction. 
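For reference, a minimal sketch of the guard now applied in shutdown().
The template arguments are an assumption here (LLCoros::LockType held
through a std::unique_ptr); the authoritative change is the diff below.

    // Sketch only: lock sInstancesMutex if it still exists; during fiber
    // teardown the mutex may already be gone, so just warn and carry on.
    std::unique_ptr<LLCoros::LockType> lock;
    if (sInstancesMutex)
    {
        lock = std::make_unique<LLCoros::LockType>(*sInstancesMutex);
    }
    else
    {
        LL_WARNS("Plugin") << "shutdown called but no instances mutex available" << LL_ENDL;
    }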
--- indra/llplugin/llpluginprocessparent.cpp | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/indra/llplugin/llpluginprocessparent.cpp b/indra/llplugin/llpluginprocessparent.cpp index 4b80a4e452..dd138f1a0c 100644 --- a/indra/llplugin/llpluginprocessparent.cpp +++ b/indra/llplugin/llpluginprocessparent.cpp @@ -187,8 +187,18 @@ LLPluginProcessParent::ptr_t LLPluginProcessParent::create(LLPluginProcessParent /*static*/ void LLPluginProcessParent::shutdown() { - LLCoros::LockType lock(*sInstancesMutex); - + // FIRE-34497 - lock maybe be null during shutdown due to fiber shutdown race condition + // LLCoros::LockType lock(*sInstancesMutex); + std::unique_ptr lock; + if (sInstancesMutex) + { + lock = std::make_unique(*sInstancesMutex); + } + else + { + LL_WARNS("Plugin") << "shutdown called but no instances mutex available" << LL_ENDL; + } + // mapInstances_t::iterator it; for (it = sInstances.begin(); it != sInstances.end(); ++it) { From 2a3cf663967420c8303ae05b1c10c074f0d9b300 Mon Sep 17 00:00:00 2001 From: Roxanne Skelly Date: Wed, 11 Sep 2024 23:50:08 -0700 Subject: [PATCH 04/33] Merge pull request #2551 from secondlife/roxie/webrtc-1436 Fix issue with improper reporting as to whether voice is working. --- indra/newview/llvoicevivox.cpp | 3 +-- indra/newview/llvoicewebrtc.cpp | 7 +++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/indra/newview/llvoicevivox.cpp b/indra/newview/llvoicevivox.cpp index 2aa30ade57..247ee77f97 100644 --- a/indra/newview/llvoicevivox.cpp +++ b/indra/newview/llvoicevivox.cpp @@ -5152,8 +5152,7 @@ bool LLVivoxVoiceClient::isVoiceWorking() const //Added stateSessionTerminated state to avoid problems with call in parcels with disabled voice (EXT-4758) // Condition with joining spatial num was added to take into account possible problems with connection to voice // server(EXT-4313). See bug descriptions and comments for MAX_NORMAL_JOINING_SPATIAL_NUM for more info. - return (mSpatialJoiningNum < MAX_NORMAL_JOINING_SPATIAL_NUM) && mIsProcessingChannels; -// return (mSpatialJoiningNum < MAX_NORMAL_JOINING_SPATIAL_NUM) && (stateLoggedIn <= mState) && (mState <= stateSessionTerminated); + return (mSpatialJoiningNum < MAX_NORMAL_JOINING_SPATIAL_NUM) && mIsLoggedIn; } // Returns true if the indicated participant in the current audio session is really an SL avatar. diff --git a/indra/newview/llvoicewebrtc.cpp b/indra/newview/llvoicewebrtc.cpp index 69d087d034..7b780ac8ff 100644 --- a/indra/newview/llvoicewebrtc.cpp +++ b/indra/newview/llvoicewebrtc.cpp @@ -420,7 +420,7 @@ void LLWebRTCVoiceClient::notifyStatusObservers(LLVoiceClientStatusObserver::ESt status != LLVoiceClientStatusObserver::STATUS_LEFT_CHANNEL && status != LLVoiceClientStatusObserver::STATUS_VOICE_DISABLED) { - bool voice_status = LLVoiceClient::getInstance()->voiceEnabled() && LLVoiceClient::getInstance()->isVoiceWorking(); + bool voice_status = LLVoiceClient::getInstance()->voiceEnabled() && mIsProcessingChannels; gAgent.setVoiceConnected(voice_status); @@ -1335,7 +1335,10 @@ bool LLWebRTCVoiceClient::startAdHocSession(const LLSD& channelInfo, bool notify bool LLWebRTCVoiceClient::isVoiceWorking() const { - return mIsProcessingChannels; + // webrtc is working if the coroutine is active in the case of + // webrtc. WebRTC doesn't need to connect to a secondary process + // or a login server to become active. + return mIsCoroutineActive; } // Returns true if calling back the session URI after the session has closed is possible. 
From 064337d2e50f8f91eecfbf9021309751ee3d7c31 Mon Sep 17 00:00:00 2001 From: Ansariel Date: Fri, 9 Aug 2024 04:02:30 +0200 Subject: [PATCH 05/33] Restore max texture size option in preferences that accidentally got lost --- .../xui/de/panel_preferences_graphics1.xml | 12 ++++- .../xui/en/panel_preferences_graphics1.xml | 52 ++++++++++++++++--- 2 files changed, 56 insertions(+), 8 deletions(-) diff --git a/indra/newview/skins/default/xui/de/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/de/panel_preferences_graphics1.xml index 479cfd9939..c85c2c1aeb 100644 --- a/indra/newview/skins/default/xui/de/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/de/panel_preferences_graphics1.xml @@ -169,7 +169,17 @@ - + + + + + + + + (erfordert Neustart) + Maximales Qualitätslevel für Texturen: diff --git a/indra/newview/skins/default/xui/en/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/en/panel_preferences_graphics1.xml index 5444d8c410..e82bb3590b 100644 --- a/indra/newview/skins/default/xui/en/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/en/panel_preferences_graphics1.xml @@ -1126,25 +1126,63 @@ If you do not understand the distinction then leave this control alone." + top_pad="4" + width="250"/> + + + + + + + + + (requires restart) + Max Texture Quality Level: From 0fcf3291923d6681f3f4cb465375556cda2f5be0 Mon Sep 17 00:00:00 2001 From: PanteraPolnocy Date: Sun, 15 Sep 2024 02:25:44 +0200 Subject: [PATCH 06/33] FSRestrictMaxTextureSize change for few other languages --- .../default/xui/az/panel_preferences_graphics1.xml | 3 --- .../default/xui/it/panel_preferences_graphics1.xml | 11 +++++++++-- .../default/xui/ja/panel_preferences_graphics1.xml | 1 - .../default/xui/pl/panel_preferences_graphics1.xml | 10 +++++++++- 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml index fbabba237d..4131ddb6d3 100644 --- a/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/az/panel_preferences_graphics1.xml @@ -130,9 +130,6 @@ Teksturların çəkilişi: - Tekstur keyfiyyət səviyyəsi: diff --git a/indra/newview/skins/default/xui/it/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/it/panel_preferences_graphics1.xml index 239101b5af..b9d40ce747 100644 --- a/indra/newview/skins/default/xui/it/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/it/panel_preferences_graphics1.xml @@ -140,8 +140,15 @@ Rendering texture: - - + + + + + + + + (richiede riavvio) + Max livello qualità texture: diff --git a/indra/newview/skins/default/xui/ja/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/ja/panel_preferences_graphics1.xml index b07652c7dd..5f68a691c1 100644 --- a/indra/newview/skins/default/xui/ja/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/ja/panel_preferences_graphics1.xml @@ -175,7 +175,6 @@ - diff --git a/indra/newview/skins/default/xui/pl/panel_preferences_graphics1.xml b/indra/newview/skins/default/xui/pl/panel_preferences_graphics1.xml index 3392908838..8278ad7fef 100644 --- a/indra/newview/skins/default/xui/pl/panel_preferences_graphics1.xml +++ b/indra/newview/skins/default/xui/pl/panel_preferences_graphics1.xml @@ -140,7 +140,15 @@ Rendering tekstur: - + + + + + + + + (wymagany restart) + Maks. 
jakość tekstur: From 2673522b20453965695b66d5e77346cf2ba42800 Mon Sep 17 00:00:00 2001 From: Beq Date: Sun, 15 Sep 2024 13:58:21 +0100 Subject: [PATCH 07/33] Revert back to Bugsplat-Git now that they've merged my fix We may still need to use mine, so let's test --- .github/workflows/build_viewer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_viewer.yml b/.github/workflows/build_viewer.yml index 63368a939e..70fe4a87c4 100644 --- a/.github/workflows/build_viewer.yml +++ b/.github/workflows/build_viewer.yml @@ -323,7 +323,7 @@ jobs: # npm install -g node-dump-syms - name: Post Bugsplat Symbols - uses: beqjanus/symbol-upload@main + uses: BugSplat-Git/symbol-upload@main with: clientId: ${{ steps.version.outputs.viewer_release_type == 'Release' && secrets.BUGSPLAT_RELEASE_ID || From ec9e5f06ec36f4bb67940d5c4c64dc64739a201f Mon Sep 17 00:00:00 2001 From: Beq Date: Sun, 15 Sep 2024 19:47:11 +0100 Subject: [PATCH 08/33] FIRE-34496 - additional fixes Previous fix was not correct. The underlying issue is that the cached `mAvatar` pointer is being invalidated and as a result crashing. using avatarp instead avoids this and removes the awkward cached pointer. Might need to review if the previous reason for mAvatar reappears. --- indra/newview/lldrawpoolavatar.cpp | 67 +++++++++++++++--------------- indra/newview/lldrawpoolavatar.h | 2 - indra/newview/llvoavatar.cpp | 1 - 3 files changed, 33 insertions(+), 37 deletions(-) diff --git a/indra/newview/lldrawpoolavatar.cpp b/indra/newview/lldrawpoolavatar.cpp index fd34be11bc..d4007cbf22 100644 --- a/indra/newview/lldrawpoolavatar.cpp +++ b/indra/newview/lldrawpoolavatar.cpp @@ -111,7 +111,6 @@ S32 cube_channel = -1; LLDrawPoolAvatar::LLDrawPoolAvatar(U32 type) : LLFacePool(type) - , mAvatar(nullptr) // Add avatar hitbox debug - remember avatar pointer in case avatar draw face breaks { } @@ -691,9 +690,37 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) return; } + + if (mDrawFace.empty() && !single_avatar) + { + return; + } + + LLVOAvatar *avatarp { nullptr }; + + if (single_avatar) + { + avatarp = single_avatar; + } + else + { + LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("Find avatarp"); // Tracy markup + const LLFace *facep = mDrawFace[0]; + if (!facep || !facep->getDrawable()) // trap possible null dereference + { + return; + } + avatarp = (LLVOAvatar *)facep->getDrawable()->getVObj().get(); + } + + if (!avatarp || avatarp->isDead() || avatarp->mDrawable.isNull()) // trap possible null dereference + { + return; + } + // Add avatar hitbox debug static LLCachedControl render_hitbox(gSavedSettings, "DebugRenderHitboxes", false); - if (render_hitbox && pass == 2 && (single_avatar || mAvatar) && !mAvatar->isControlAvatar()) + if (render_hitbox && pass == 2 && !avatarp->isControlAvatar()) { LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("render_hitbox"); @@ -705,13 +732,13 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) LLGLEnable blend(GL_BLEND); gGL.getTexUnit(0)->unbind(LLTexUnit::TT_TEXTURE); - LLColor4 avatar_color = LLNetMap::getAvatarColor(mAvatar->getID()); + LLColor4 avatar_color = LLNetMap::getAvatarColor(avatarp->getID()); gGL.diffuseColor4f(avatar_color.mV[VRED], avatar_color.mV[VGREEN], avatar_color.mV[VBLUE], avatar_color.mV[VALPHA]); gGL.setLineWidth(2.0f); - const LLQuaternion& rot = mAvatar->getRotationRegion(); - const LLVector3& pos = mAvatar->getPositionAgent(); - const LLVector3& size = mAvatar->getScale(); + const LLQuaternion& rot = 
avatarp->getRotationRegion(); + const LLVector3& pos = avatarp->getPositionAgent(); + const LLVector3& size = avatarp->getScale(); // drawBoxOutline partly copied from llspatialpartition.cpp below @@ -768,34 +795,6 @@ void LLDrawPoolAvatar::renderAvatars(LLVOAvatar* single_avatar, S32 pass) } } // - - if (mDrawFace.empty() && !single_avatar) - { - return; - } - - LLVOAvatar *avatarp { nullptr }; - - if (single_avatar) - { - avatarp = single_avatar; - } - else - { - LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("Find avatarp"); // Tracy markup - const LLFace *facep = mDrawFace[0]; - if (!facep || !facep->getDrawable()) // trap possible null dereference - { - return; - } - avatarp = (LLVOAvatar *)facep->getDrawable()->getVObj().get(); - } - - if (!avatarp || avatarp->isDead() || avatarp->mDrawable.isNull()) // trap possible null dereference - { - return; - } - // rendertime Tracy annotations { LL_PROFILE_ZONE_NAMED_CATEGORY_AVATAR("check fully_loaded"); diff --git a/indra/newview/lldrawpoolavatar.h b/indra/newview/lldrawpoolavatar.h index 8db0bf1831..704979def3 100644 --- a/indra/newview/lldrawpoolavatar.h +++ b/indra/newview/lldrawpoolavatar.h @@ -121,8 +121,6 @@ typedef enum void renderAvatars(LLVOAvatar *single_avatar, S32 pass = -1); // renders only one avatar if single_avatar is not null. - LLVOAvatar* mAvatar; // Add avatar hitbox debug - remember avatar pointer in case avatar draw face breaks - static bool sSkipOpaque; static bool sSkipTransparent; static S32 sShadowPass; diff --git a/indra/newview/llvoavatar.cpp b/indra/newview/llvoavatar.cpp index b2d6d6e141..d9a8551f80 100644 --- a/indra/newview/llvoavatar.cpp +++ b/indra/newview/llvoavatar.cpp @@ -8056,7 +8056,6 @@ LLDrawable *LLVOAvatar::createDrawable(LLPipeline *pipeline) mNumInitFaces = mDrawable->getNumFaces() ; dirtyMesh(2); - poolp->mAvatar = this; // Add avatar hitbox debug - remember avatar pointer in case avatar draw face breaks return mDrawable; } From c0896a239e4a8f73df87aabbaf8be1f9b9ed36dc Mon Sep 17 00:00:00 2001 From: Beq Date: Sun, 15 Sep 2024 21:06:09 +0100 Subject: [PATCH 09/33] Fix for merge oversight. take_separate/coalesced now respects param --- indra/newview/llviewermenu.cpp | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/indra/newview/llviewermenu.cpp b/indra/newview/llviewermenu.cpp index 0bf8a2e3f7..bd9387e22b 100644 --- a/indra/newview/llviewermenu.cpp +++ b/indra/newview/llviewermenu.cpp @@ -6483,7 +6483,17 @@ void handle_take(bool take_separate) // MAINT-290 // Reason: Showing the confirmation dialog resets object selection, thus there is nothing to derez. // Fix: pass selection to the confirm_take, so that selection doesn't "die" after confirmation dialog is opened - params.functor.function(boost::bind(confirm_take, _1, _2, LLSelectMgr::instance().getSelection())); + params.functor.function([take_separate](const LLSD ¬ification, const LLSD &response) + { + if (take_separate) + { + confirm_take_separate(notification, response, LLSelectMgr::instance().getSelection()); + } + else + { + confirm_take(notification, response, LLSelectMgr::instance().getSelection()); + } + }); if(locked_but_takeable_object || !you_own_everything) From 33dbff8d7c52a45f30e56f00362232ac11261377 Mon Sep 17 00:00:00 2001 From: Beq Date: Mon, 16 Sep 2024 02:25:56 +0100 Subject: [PATCH 10/33] Massive overhaul of build publishing This will doubtless break horribly. 
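For reference, a sketch of how the reorganise step now drives the script;
the values below are placeholders, and in the workflow they come from the
build_matrix job outputs, the artifact download path and repository secrets.

    # Hypothetical local invocation. FS_VERSION_MGR_KEY is read by
    # update_fs_version_mgr(); the -w webhook argument is optional.
    export FS_VIEWER_CHANNEL="Firestorm-Release"
    export FS_VIEWER_VERSION="7.1.11"
    export FS_VIEWER_BUILD="76000"
    export FS_VIEWER_RELEASE_TYPE="Nightly"
    export FS_VERSION_MGR_KEY="<secret>"
    python ./fsutils/download_list.py ./downloaded-artifacts -w "$FS_BUILD_WEBHOOK_URL"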
--- .github/workflows/build_viewer.yml | 22 +- fsutils/build_config.json | 47 ++++ fsutils/build_config.py | 20 ++ fsutils/download_list.py | 404 ++++++++++++++++++++--------- 4 files changed, 362 insertions(+), 131 deletions(-) create mode 100644 fsutils/build_config.json create mode 100644 fsutils/build_config.py diff --git a/.github/workflows/build_viewer.yml b/.github/workflows/build_viewer.yml index 70fe4a87c4..1067ddc7be 100644 --- a/.github/workflows/build_viewer.yml +++ b/.github/workflows/build_viewer.yml @@ -391,27 +391,30 @@ jobs: with: sparse-checkout: | fsutils/download_list.py + fsutils/build_config.json + fsutils/build_config.py sparse-checkout-cone-mode: false ref: ${{ github.head_ref || github.ref_name || 'master' }} fetch-depth: 1 - name: Install discord-webhook library run: pip install discord-webhook - - name: find channel and webhook from Branch name + - name: find channel and webhook from build_matrix outputs run: | - if [[ "${{ github.ref_name }}" == Firestorm* ]]; then + viewer_release_type=${{ needs.build_matrix.outputs.viewer_release_type }} + if [[ "$viewer_release_type" == "Release" ]]; then FS_RELEASE_FOLDER=release FS_BUILD_WEBHOOK_URL=${{ secrets.RELEASE_WEBHOOK_URL }} - elif [[ "${{ github.ref_name }}" == *review* ]]; then + elif [[ "$viewer_release_type" == "Beta" ]]; then FS_RELEASE_FOLDER=preview FS_BUILD_WEBHOOK_URL=${{ secrets.BETA_WEBHOOK_URL }} - elif [[ "${{ github.ref_name }}" == *alpha* ]]; then + elif [[ "$viewer_release_type" == "Alpha" ]]; then FS_RELEASE_FOLDER=test FS_BUILD_WEBHOOK_URL=${{ secrets.BETA_WEBHOOK_URL }} - elif [[ "${{ github.ref_name }}" == *nightly* ]] || [[ "${{ github.event_name }}" == 'schedule' ]]; then + elif [[ "$viewer_release_type" == "Nightly" ]] || [[ "${{ github.event_name }}" == 'schedule' ]]; then FS_RELEASE_FOLDER=nightly FS_BUILD_WEBHOOK_URL=${{ secrets.NIGHTLY_WEBHOOK_URL }} - elif [[ "${{github.event_name }}" == "workflow_dispatch" ]]; then + elif [[ "$viewer_release_type" == "Manual" ]]; then FS_RELEASE_FOLDER=test FS_BUILD_WEBHOOK_URL=${{ secrets.MANUAL_WEBHOOK_URL }} else @@ -430,7 +433,12 @@ jobs: working-directory: ${{steps.download.outputs.download-path}} - name: Reorganise artifacts ready for server upload. 
- run: python ./fsutils/download_list.py -u ${{steps.download.outputs.download-path}} -w ${{ env.FS_BUILD_WEBHOOK_URL }} + env: + FS_VIEWER_CHANNEL: ${{ needs.build_matrix.outputs.viewer_channel }} + FS_VIEWER_VERSION: ${{ needs.build_matrix.outputs.viewer_version }} + FS_VIEWER_BUILD: ${{ needs.build_matrix.outputs.viewer_build }} + FS_VIEWER_RELEASE_TYPE: ${{ needs.build_matrix.outputs.viewer_release_type }} + run: python ./fsutils/download_list.py ${{steps.download.outputs.download-path}} -w ${{ env.FS_BUILD_WEBHOOK_URL }} - name: Setup rclone and download the folder uses: beqjanus/setup-rclone@main diff --git a/fsutils/build_config.json b/fsutils/build_config.json new file mode 100644 index 0000000000..d52b4c65ff --- /dev/null +++ b/fsutils/build_config.json @@ -0,0 +1,47 @@ +{ + "os_download_dirs": [ + "windows", "mac", "linux" + ], + "fs_version_mgr_platform": { + "windows": "win", + "mac": "mac", + "linux": "lin" + }, + "build_type_hosted_folder": { + "Release": "release", + "Beta": "preview", + "Alpha": "test", + "Nightly": "nightly" + }, + "os_hosted_folder": { + "windows": "windows", + "darwin": "mac", + "linux": "linux" + }, + "platforms_printable": { + "windows": "MS Windows", + "mac": "MacOS", + "linux": "Linux" + }, + "grids_printable": { + "SL": "Second Life", + "OS": "OpenSim" + }, + "download_root": "https://downloads.firestormviewer.org", + "viewer_channel_mapping": { + "Release": "release", + "Beta": "beta", + "Alpha": "alpha", + "Nightly": "nightly" + }, + "grid_type_mapping": { + "SecondLife": "sl", + "OpenSim": "os" + }, + "build_type_mapping": { + "regular": "regular", + "avx": "avx", + "tracy": "tracy", + "arm": "arm" + } +} diff --git a/fsutils/build_config.py b/fsutils/build_config.py new file mode 100644 index 0000000000..1e37043b08 --- /dev/null +++ b/fsutils/build_config.py @@ -0,0 +1,20 @@ +# build_config.py + +import json + +class BuildConfig: + def __init__(self, config_file='config.json'): + with open(config_file, 'r') as f: + config_data = json.load(f) + + self.supported_os_dirs = config_data.get('os_download_dirs', []) + # channel_to_build_type is a map from Beta, Release and Nightly to folder names preview release and nightly + self.build_type_hosted_folder = config_data.get('build_types', {}) + self.fs_version_mgr_platform = config_data.get('fs_version_mgr_platform', {}) + self.os_hosted_folder = config_data.get('target_folder', {}) + self.platforms_printable = config_data.get('platforms_printable', {}) + self.grids_printable = config_data.get('grids_printable', {}) + self.download_root = config_data.get('download_root', '') + self.viewer_channel_mapping = config_data.get('viewer_channel_mapping', {}) + self.grid_type_mapping = config_data.get('grid_type_mapping', {}) + self.build_type_mapping = config_data.get('build_type_mapping', {}) diff --git a/fsutils/download_list.py b/fsutils/download_list.py index 1d5756b619..add4f3cf42 100644 --- a/fsutils/download_list.py +++ b/fsutils/download_list.py @@ -6,10 +6,37 @@ import time import zipfile import glob import shutil +import hashlib +import pytz +from datetime import datetime +import requests from discord_webhook import DiscordWebhook +from build_config import BuildConfig +def get_current_date_str(): + now = datetime.now(pytz.timezone('UTC')) + day = now.day + month = now.month + year = now.year + return f"{day}{month}{year}" +def generate_secret(secret_key): + current_date = get_current_date_str() + data = secret_key + current_date + secret_for_api = hashlib.sha1(data.encode()).hexdigest() + return 
secret_for_api + +def map_build_type(build_variant): + # Map your build_variant to the API's build_type values + build_type_mapping = { + 'Release': 'regular', + 'Beta': 'beta', + 'Alpha': 'alpha', + 'Nightly': 'nightly', + # Add other mappings if necessary + } + return build_type_mapping.get(build_variant, 'regular') # run a command line subshell and return the output @@ -103,171 +130,300 @@ def flatten_tree(tree_root): # Delete the subdirectory and its contents shutil.rmtree(subdir_path) +def get_build_variables(): + """ + Extracts initial build variables from environment variables. + In practice these are set from the outputs of the earlier matrix commands. + Returns: + dict: A dictionary containing 'version' and 'build_number'. + """ + import os -# parse args first arg optional -r (release) second arg mandatory string path_to_directory + version = os.environ.get('FS_VIEWER_VERSION') + build_number = os.environ.get('FS_VIEWER_BUILD') + release_type = os.environ.get('FS_VIEWER_RELEASE_TYPE') -parser = argparse.ArgumentParser( - prog="print_download_list", - description="Prints the list of files for download and their md5 checksums" - ) -parser.add_argument("-r", "--release", required=False, default=False, action="store_true", help="use the release folder in the target URL") -parser.add_argument("-u", "--unzip", required=False, default=False, action="store_true", help="unzip the github artifact first") -parser.add_argument("-w", "--webhook", help="post details to the webhook") + if not version or not build_number or not release_type: + raise ValueError("Environment variables 'FS_VIEWER_VERSION' and 'FS_VIEWER_BUILD' must be set.") -# add path_to_directory required parameter to parser -parser.add_argument("path_to_directory", help="path to the directory in which we'll look for the files") + return { + 'version': version, + 'build_number': build_number, + 'version_full': f"{version}.{build_number}", + 'release_type': release_type, + } -args = parser.parse_args() -path_to_directory = args.path_to_directory -release = args.release +def get_hosted_folder_for_build_type(build_type, config): + return config.build_type_hosted_folder.get( + build_type, + config.build_type_hosted_folder.get("Unknown") + ) -# Create a webhook object with the webhook URL -if args.webhook: - webhook = DiscordWebhook(url=args.webhook) +def get_supported_build_type(build_type, config): + if build_type in config.build_type_hosted_folder: + return build_type + else: + return None +def get_hosted_folder_for_os_type(os_type, config): + return config.build_type_hosted_folder.get( + os_type + ) -dirs = ["windows", "mac", "linux"] +def get_supported_os(os_name, config): + # throws for unexpected os_name + return config.os_hosted_folder.get(os_name) -# build_types is a map from Beta, Release and Nightly to folder names preview release and nightly -build_types = { - "Alpha": "test", - "Beta": "preview", - "Release": "release", - "Nightly": "nightly", - "Unknown": "test" -} - -target_folder = { - "ubuntu":"linux", - "windows":"windows", - "macos":"mac" -} - -# unzip the github artifact for this OS (`dir`) into the folder `dir` -# get the .zip files in args.path_to_directory using glob -print(f"Processing artifacts in {args.path_to_directory}") -build_types_created = set() -zips = glob.glob(f"{args.path_to_directory}/*.zip") -for file in zips: +def extract_vars_from_zipfile_name(file): + # File is an artifact file sometihng like Nightly-windows-2022-64-sl-artifacts.zip # print(f"unzipping {file}") #extract first word (delimited by 
'-' from the file name) # build_type is a fullpath but we only want the last folder, remove the leading part of the path leaving just the foldername using basename filename = os.path.basename(file) build_type = filename.split("-")[0] platform = filename.split("-")[1].lower() + return filename,build_type, platform - # print(f"build_type is {build_type}") - if build_type not in build_types: - print(f"Invalid build_type {build_type} from file {file} using 'Unknown'") - build_type = "Unknown" - build_folder = build_types[build_type] - - build_types_created.add(build_type) +def unpack_artifacts(path_to_artifacts_directory, config): + build_types_found = {} + zips = glob.glob(f"{path_to_artifacts_directory}/*.zip") + for file in zips: + filename, build_type, platform = extract_vars_from_zipfile_name(file) - build_type_dir = os.path.join(args.path_to_directory, build_folder) + build_type = get_supported_build_type( build_type, config) + if build_type == None: + print(f"Invalid build_type {build_type} from file {file} using 'Unknown'") + build_type = "Unknown" + build_folder = get_hosted_folder_for_build_type(build_type, config) + build_type_dir = os.path.join(path_to_artifacts_directory, build_folder) - if platform not in target_folder: - print(f"Invalid platform {platform} using file {file}") - continue - - unpack_folder = os.path.join(build_type_dir, target_folder[platform]) - print(f"unpacking {filename} to {unpack_folder}") + try: + os_folder = get_hosted_folder_for_os_type(platform, config) + except KeyError: + print(f"Invalid OS platform {platform} specified by file {file} - skipping") + continue + except Exception as e: + print(f"An error occurred while getting OS folder: {e}") + continue + + unpack_folder = os.path.join(build_type_dir, os_folder) + print(f"unpacking {filename} to {unpack_folder}") + try: + unzip_file(file, unpack_folder) + except zipfile.BadZipFile: + print(f"Skipping {file} as it is not a valid zip file") + continue + except Exception as e: + print(f"An error occurred while unpacking {file}: {e} , skipping file {filename}") + continue + + if build_type not in build_types_found: + build_types_found[build_type] = { + "build_type": build_type, + "build_type_folder": build_folder, + "build_type_fullpath": build_type_dir, + "os_folders": [], + } + build_types_found[build_type]["os_folders"].append(os_folder) + return build_types_found - if os.path.isfile(file): - # this is an actual zip file - unzip_file(file, unpack_folder) - else: - # Create the destination folder if it doesn't exist - # if not os.path.exists(unpack_folder): - # os.makedirs(unpack_folder) - # Copy the contents of the source folder to the destination folder recursively - shutil.copytree(file, unpack_folder, dirs_exist_ok=True) - -output = "" -for build_type in build_types_created: - build_type_dir = os.path.join(args.path_to_directory, build_types[build_type]) +def restructure_folders(build_type, config): + build_type_dir = build_type["build_type_fullpath"] if not os.path.exists(build_type_dir): - print(f"Unexpected error: {build_type_dir} does not exist, even though it was in the set.") - continue + print(f"Unexpected error: path {build_type_dir} does not exist, even though it was in the set.") + raise FileNotFoundError # loop over the folder in the build_type_dir - for dir in dirs: - print(f"Cleaning up {dir}") + for platform_folder in build_type["os_folders"]: + print(f"Cleaning up {platform_folder}") # Traverse the directory tree and move all of the files to the root directory - 
flatten_tree(os.path.join(build_type_dir, dir)) + flatten_tree(os.path.join(build_type_dir, platform_folder)) # Now move the symbols files to the symbols folder - # prep the symbols folder + # Define the folder for symbols symbols_folder = os.path.join(build_type_dir, "symbols") os.mkdir(symbols_folder) - symbol_archives = glob.glob(f"{build_type_dir}/**/*_hvk*", recursive=True) - for sym_file in symbol_archives: - print(f"Moving {sym_file} to {symbols_folder}") - shutil.move(sym_file, symbols_folder) - symbol_archives = glob.glob(f"{build_type_dir}/**/*_oss*", recursive=True) - for sym_file in symbol_archives: - print(f"Moving {sym_file} to {symbols_folder}") - shutil.move(sym_file, symbols_folder) + # prep the symbols folder + symbol_patterns = ["*_hvk*", "*_oss*"] + # Loop through each pattern, find matching files, and move them + for pattern in symbol_patterns: + symbol_archives = glob.glob(f"{build_type_dir}/**/{pattern}", recursive=True) + for sym_file in symbol_archives: + print(f"Moving {sym_file} to {symbols_folder}") + shutil.move(sym_file, symbols_folder) + +def gather_build_info(build_type, config): # While we're at it, let's print the md5 listing - file_dict = {} - md5_dict = {} - platforms_printable = {"windows":"MS Windows", "mac":"MacOS", "linux":"Linux"} - grids_printable = {"SL":"Second Life", "OS":"OpenSim"} - - download_root = f"https://downloads.firestormviewer.org/{build_types[build_type]}" - output += f''' -DOWNLOADS - {build_type} -------------------------------------------------------------------------------------------------------- -''' - for dir in dirs: - print(f"Getting files for {dir} in {build_type_dir}") - files = get_files(os.path.join(build_type_dir, dir)) + download_root = f"{config.download_root}/{build_type["build_type_folder"]}" + # for each os that we have built for + build_type_dir = build_type["build_type_fullpath"] + for platform_folder in build_type["os_folders"]: + print(f"Getting files for {platform_folder} in {build_type_dir}") + build_type_platform_folder = os.path.join(build_type_dir, platform_folder) + files = get_files(build_type_platform_folder) try: for file in files: - full_file = os.path.join(build_type_dir, dir, file) - md5 = get_md5(full_file) + full_file = os.path.join(build_type_platform_folder, file) base_name = os.path.basename(file) - wordsize = "64" + file_URI = f"{download_root}/{platform_folder}/{base_name}" + md5 = get_md5(full_file) if "FirestormOS-" in base_name: grid = "OS" else: grid = "SL" - if dir in dirs: - file_dict[f"{grid}{dir}{wordsize}"] = full_file - md5_dict[f"{grid}{dir}{wordsize}"] = md5 + file_key = f"{grid}-{platform_folder}" + + # if platform_folder in config.os_download_dirs: + if "downloadable_artifacts" not in build_type: + build_type["downloadable_artifacts"] = {} + + build_type["downloadable_artifacts"] = { f"{file_key}":{ + "file_path": full_file, + "file_download_URI": file_URI, + "grid": grid, + "fs_ver_mgr_platform": config.fs_version_mgr_platform.get(platform_folder), + "md5": md5, + }} except TypeError: - print(f"No files found for {dir} in {build_type_dir}") + print(f"Error processing files for {platform_folder} in {build_type_dir}") + continue + except Exception as e: + print(f"An error occurred while processing files for {platform_folder} in {build_type_dir}: {e}") + continue + return build_type - - - output += f''' -{platforms_printable[dir]} +def create_discord_message(build_info, config): +# Start with a header line + text_summary = f''' +DOWNLOADS - {build_info["build_type"]} 
+------------------------------------------------------------------------------------------------------- ''' - dir = dir.lower() - wordsize = "64" - platform = f"{platforms_printable[dir]}" +# for each platform we potentailly build for +# Append platform label in printable form + for platform_folder in config.supported_os_dirs: + platform_printable = config.platforms_printable[platform_folder] + text_summary += f''' +{platform_printable} +''' + platform_folder = platform_folder.lower() for grid in ["SL", "OS"]: - grid_printable = f"{grids_printable[grid]}" + grid_printable = f"{config.grids_printable[grid]}" try: - output += f"{platform} for {grid_printable} ({wordsize}-bit)\n" - output += f"{download_root}/{dir}/{os.path.basename(file_dict[f'{grid}{dir}{wordsize}'])}\n" - output += "\n" - output += f"MD5: {md5_dict[f'{grid}{dir}{wordsize}']}\n" - output += "\n" + file_key = f"{grid}-{platform_folder}" + text_summary += f"{platform_printable} for {grid_printable}\n" + text_summary += f"{build_info["downloadable_artifacts"][file_key]["file_download_URI"]}\n" + text_summary += "\n" + text_summary += f"MD5: {build_info["downloadable_artifacts"][file_key]["md5"]}\n" + text_summary += "\n" except KeyError: - output += f"{platform} for {grid_printable} ({wordsize}-bit) - NOT AVAILABLE\n" - output += "\n" - output += '''------------------------------------------------------------------------------------------------------- + text_summary += f"{platform_printable} for {grid_printable} - NOT AVAILABLE\n" + text_summary += "\n" + text_summary += ''' +------------------------------------------------------------------------------------------------------- ''' + return text_summary - if args.webhook: - # Add the message to the webhook - webhook.set_content(content=output) - # Send the webhook - response = webhook.execute() - # Print the response - if not response.ok: - print(f"Webhook Error {response.status_code}: {response.text}") - print(output) +def update_fs_version_mgr(build_info, config): + # Read the secret key from environment variables + secret_key = os.environ.get('FS_VERSION_MGR_KEY') + if not secret_key: + print("Error: FS_VERSION_MGR_KEY not set") + sys.exit(1) + secret_for_api = generate_secret(secret_key) + build_type = build_info["build_type"] + version = os.environ.get('FS_VIEWER_VERSION') + channel = os.environ.get('FS_VIEWER_CHANNEL') + build_number = os.environ.get('FS_VIEWER_BUILD') + + build_variant = "regular" + for file_key in build_info["downloadable_artifacts"]: + try: + download_link = build_info["downloadable_artifacts"][file_key]["file_download_URI"] + md5_checksum = build_info["downloadable_artifacts"][file_key]["md5"] + grid = build_info["downloadable_artifacts"][file_key]["grid"].lower() + os_name = build_info["downloadable_artifacts"][file_key]["fs_ver_mgr_platform"] + except KeyError: + print(f"Error: Could not find downloadable artifacts for {file_key}") + continue + + payload = { + "secret": secret_for_api, + "viewer_channel": channel, + "grid_type": grid, + "operating_system": os_name, + "build_type": build_type.lower(), + "viewer_version": version, + "build_number": int(build_number), + "download_link": download_link, + "md5_checksum": md5_checksum + } + + # Make the API call + url = "https://www.firestormviewer.org/set-fs-vrsns-jsn/" + headers = {"Content-Type": "application/json"} + + try: + response = requests.post(url, json=payload, headers=headers) + response.raise_for_status() + response_data = response.json() + result = response_data.get('result') + message = 
response_data.get('message') + if result == 'success': + print(f"Version manager updated successfully for {os_name} {build_variant}") + else: + print(f"Error updating version manager: {message}") + except requests.exceptions.RequestException as e: + print(f"API request failed: {e}") + except ValueError: + print("API response is not valid JSON") + +# parse args first arg optional -r (release) second arg mandatory string path_to_directory +def main(): + try: + # Initialise the build configuration + config = BuildConfig() + + parser = argparse.ArgumentParser( + prog="print_download_list", + description="Prints the list of files for download and their md5 checksums" + ) + parser.add_argument("-w", "--webhook", help="post details to the webhook") + + # add path_to_directory required parameter to parser + parser.add_argument("path_to_directory", help="path to the directory in which we'll look for the files") + + args = parser.parse_args() + + # Create a webhook object with the webhook URL + if args.webhook: + webhook = DiscordWebhook(url=args.webhook) + + # unzip the github artifact for this OS (`dir`) into the folder `dir` + # get the .zip files in args.path_to_directory using glob + print(f"Processing artifacts in {args.path_to_directory}") + build_types_created = unpack_artifacts(args.path_to_directory, config) + + for build_type in build_types_created: + restructure_folders(build_type, config) + build_info = gather_build_info(build_type, config) + update_fs_version_mgr(build_info, config) + + discord_text = create_discord_message(build_info, config) + if args.webhook: + # Add the message to the webhook + webhook.set_content(content=discord_text) + # Send the webhook + response = webhook.execute() + # Print the response + if not response.ok: + print(f"Webhook Error {response.status_code}: {response.text}") + print(discord_text) + except Exception as e: + print(f"An error occurred: {e}") + sys.exit(1) + +if __name__ == '__main__': + import sys + main() \ No newline at end of file From 3351dc71da82898c28638922f5b0ab28f8e2a69e Mon Sep 17 00:00:00 2001 From: Beq Date: Mon, 16 Sep 2024 08:30:13 +0100 Subject: [PATCH 11/33] fix quote usage --- fsutils/download_list.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/fsutils/download_list.py b/fsutils/download_list.py index add4f3cf42..9167dd5ad6 100644 --- a/fsutils/download_list.py +++ b/fsutils/download_list.py @@ -253,7 +253,7 @@ def restructure_folders(build_type, config): def gather_build_info(build_type, config): # While we're at it, let's print the md5 listing - download_root = f"{config.download_root}/{build_type["build_type_folder"]}" + download_root = f"{config.download_root}/{build_type['build_type_folder']}" # for each os that we have built for build_type_dir = build_type["build_type_fullpath"] for platform_folder in build_type["os_folders"]: @@ -312,9 +312,9 @@ DOWNLOADS - {build_info["build_type"]} try: file_key = f"{grid}-{platform_folder}" text_summary += f"{platform_printable} for {grid_printable}\n" - text_summary += f"{build_info["downloadable_artifacts"][file_key]["file_download_URI"]}\n" + text_summary += f"{build_info['downloadable_artifacts'][file_key]['file_download_URI']}\n" text_summary += "\n" - text_summary += f"MD5: {build_info["downloadable_artifacts"][file_key]["md5"]}\n" + text_summary += f"MD5: {build_info['downloadable_artifacts'][file_key]['md5']}\n" text_summary += "\n" except KeyError: text_summary += f"{platform_printable} for {grid_printable} - NOT AVAILABLE\n" From 
86522638a4b246617ff339881179827c50b08031 Mon Sep 17 00:00:00 2001 From: Beq Date: Mon, 16 Sep 2024 10:26:40 +0100 Subject: [PATCH 12/33] Fix incorrect default config file name. --- fsutils/build_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fsutils/build_config.py b/fsutils/build_config.py index 1e37043b08..0e02ceb837 100644 --- a/fsutils/build_config.py +++ b/fsutils/build_config.py @@ -3,7 +3,7 @@ import json class BuildConfig: - def __init__(self, config_file='config.json'): + def __init__(self, config_file='build_config.json'): with open(config_file, 'r') as f: config_data = json.load(f) From 00ddbf985a8a80db69fa41bff28692ce308d8d9d Mon Sep 17 00:00:00 2001 From: Beq Date: Mon, 16 Sep 2024 12:47:16 +0100 Subject: [PATCH 13/33] try again on path, and cleanup unused helper --- fsutils/build_config.py | 2 +- fsutils/download_list.py | 11 ----------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/fsutils/build_config.py b/fsutils/build_config.py index 0e02ceb837..6b8fe8c97f 100644 --- a/fsutils/build_config.py +++ b/fsutils/build_config.py @@ -3,7 +3,7 @@ import json class BuildConfig: - def __init__(self, config_file='build_config.json'): + def __init__(self, config_file='./fsutils/build_config.json'): with open(config_file, 'r') as f: config_data = json.load(f) diff --git a/fsutils/download_list.py b/fsutils/download_list.py index 9167dd5ad6..28edb31db9 100644 --- a/fsutils/download_list.py +++ b/fsutils/download_list.py @@ -27,17 +27,6 @@ def generate_secret(secret_key): secret_for_api = hashlib.sha1(data.encode()).hexdigest() return secret_for_api -def map_build_type(build_variant): - # Map your build_variant to the API's build_type values - build_type_mapping = { - 'Release': 'regular', - 'Beta': 'beta', - 'Alpha': 'alpha', - 'Nightly': 'nightly', - # Add other mappings if necessary - } - return build_type_mapping.get(build_variant, 'regular') - # run a command line subshell and return the output # We want to get the following output by looping over the files From 7c812767dd6539fdb0a00201a911de02d591ed76 Mon Sep 17 00:00:00 2001 From: Ansariel Date: Mon, 16 Sep 2024 13:54:45 +0200 Subject: [PATCH 14/33] FIRE-34503: Fix crash when running RLVa @setcam_eyeoffsetscale command --- indra/newview/rlvhelper.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indra/newview/rlvhelper.cpp b/indra/newview/rlvhelper.cpp index 5bc0aae4b6..c02ffabfc8 100644 --- a/indra/newview/rlvhelper.cpp +++ b/indra/newview/rlvhelper.cpp @@ -207,7 +207,7 @@ RlvBehaviourDictionary::RlvBehaviourDictionary() addEntry(new RlvBehaviourGenericToggleProcessor("setcam_eyeoffset")); addModifier(RLV_BHVR_SETCAM_EYEOFFSET, RLV_MODIFIER_SETCAM_EYEOFFSET, new RlvBehaviourModifierHandler("Camera - Eye Offset", LLVector3::zero, true, nullptr)); addEntry(new RlvBehaviourGenericToggleProcessor("setcam_eyeoffsetscale")); - addModifier(RLV_BHVR_SETCAM_EYEOFFSETSCALE, RLV_MODIFIER_SETCAM_EYEOFFSETSCALE, new RlvBehaviourModifierHandler("Camera - Eye Offset Scale", 0, true, nullptr)); + addModifier(RLV_BHVR_SETCAM_EYEOFFSETSCALE, RLV_MODIFIER_SETCAM_EYEOFFSETSCALE, new RlvBehaviourModifierHandler("Camera - Eye Offset Scale", 0.0f, true, nullptr)); addEntry(new RlvBehaviourGenericToggleProcessor("setcam_focusoffset")); addModifier(RLV_BHVR_SETCAM_FOCUSOFFSET, RLV_MODIFIER_SETCAM_FOCUSOFFSET, new RlvBehaviourModifierHandler("Camera - Focus Offset", LLVector3d::zero, true, nullptr)); addEntry(new RlvBehaviourProcessor("setcam_fovmin")); From 
cfe96f8c3fda839188943fe4cfb97f1d0c5afc2f Mon Sep 17 00:00:00 2001 From: Ansariel Date: Mon, 16 Sep 2024 14:04:32 +0200 Subject: [PATCH 15/33] FIRE-34396: Add 4K UHD option to window size floater --- indra/newview/skins/default/xui/de/floater_window_size.xml | 1 + indra/newview/skins/default/xui/en/floater_window_size.xml | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/indra/newview/skins/default/xui/de/floater_window_size.xml b/indra/newview/skins/default/xui/de/floater_window_size.xml index b1e94127a8..dfc7ba9aa1 100644 --- a/indra/newview/skins/default/xui/de/floater_window_size.xml +++ b/indra/newview/skins/default/xui/de/floater_window_size.xml @@ -20,6 +20,7 @@ +