diff --git a/.github/workflows/build_viewer.yml b/.github/workflows/build_viewer.yml
index 08872166db..8adec3ef10 100644
--- a/.github/workflows/build_viewer.yml
+++ b/.github/workflows/build_viewer.yml
@@ -1,8 +1,14 @@
name: Build viewer
-on: push
-env:
+on:
+ push:
+ branches:
+ - "*release"
+ - master
+ schedule:
+ - cron: '00 03 * * *' # Run every day at 3am UTC
+env:
AUTOBUILD_VARIABLES_FILE: ${{github.workspace}}/build-variables/variables
- EXTRA_ARGS: -DFMODSTUDIO=ON -DUSE_KDU=ON --crashreporting
+ EXTRA_ARGS: -DUSE_FMODSTUDIO=ON -DUSE_KDU=ON --crashreporting
build_secrets_checkout: ${{github.workspace}}/signing
@@ -67,15 +73,22 @@ jobs:
- name: find channel from Branch name
run: |
- if [[ "${{ github.ref_name }}" == *"Release"* ]]; then
- FS_RELEASE_CHAN="Release"
+ if [[ "${{ github.ref_name }}" == *Release* ]]; then
+ FS_RELEASE_TYPE=Release
else
- FS_RELEASE_CHAN="Beta"
+ if [[ "${{github.event_name}}" == 'schedule' ]]; then
+ FS_RELEASE_TYPE=Nightly
+ else
+ FS_RELEASE_TYPE=Beta
+ fi
fi
if [[ "${{ matrix.addrsize }}" == "64" ]]; then
- FS_RELEASE_CHAN="${FS_RELEASE_CHAN}x64"
+ FS_RELEASE_CHAN="${FS_RELEASE_TYPE}x64"
+ else
+ FS_RELEASE_CHAN=${FS_RELEASE_TYPE}
fi
- echo "FS_RELEASE_CHAN=\"${FS_RELEASE_CHAN}\"" >> $GITHUB_ENV
+ echo "FS_RELEASE_TYPE=${FS_RELEASE_TYPE}" >> $GITHUB_ENV
+ echo "FS_RELEASE_CHAN=${FS_RELEASE_CHAN}" >> $GITHUB_ENV
echo "Building for channel ${FS_RELEASE_CHAN}"
shell: bash
@@ -187,18 +200,18 @@ jobs:
run: rm *${{ env.fallback_platform }}*bz2
shell: bash
- name: Configure
- run: autobuild configure --debug -c ReleaseFS -A${{matrix.addrsize}} -- --package --chan ${{env.FS_RELEASE_CHAN}} ${{env.EXTRA_ARGS}} ${{env.FS_GRID}}
+ run: autobuild configure -c ReleaseFS -A${{matrix.addrsize}} -- --package --chan ${{env.FS_RELEASE_CHAN}} ${{env.EXTRA_ARGS}} ${{env.FS_GRID}}
shell: bash
- name: build
- run: autobuild build --debug -c ReleaseFS -A${{matrix.addrsize}} --no-configure
+ run: autobuild build -c ReleaseFS -A${{matrix.addrsize}} --no-configure
shell: bash
-
- - name: publish Windows artifacts
+
+ - name: Publish artifacts
if: runner.os == 'Windows'
uses: actions/upload-artifact@v3
with:
- name: ${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
+ name: ${{ env.FS_RELEASE_TYPE }}-${{ matrix.os }}-${{ matrix.addrsize }}-${{ matrix.grid }}-artifacts.zip
path: |
build-*/newview/Release/*Setup.exe
build-*/newview/Release/*.xz
@@ -207,7 +220,7 @@ jobs:
if: runner.os == 'Linux'
uses: actions/upload-artifact@v3
with:
- name: ${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
+ name: ${{ env.FS_RELEASE_TYPE }}-${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
path: |
build-linux-*/newview/*.xz
build-linux-*/newview/*.bz2
@@ -216,7 +229,53 @@ jobs:
if: runner.os == 'macOS'
uses: actions/upload-artifact@v3
with:
- name: ${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
+ name: ${{ env.FS_RELEASE_TYPE }}-${{ matrix.os }}-${{matrix.addrsize}}-${{matrix.grid}}-artifacts.zip
path: |
build-darwin-*/newview/*.dmg
build-darwin-*/newview/*.bz2
+ deploy:
+ runs-on: ubuntu-latest
+ needs: build_matrix
+ if: always()
+ steps:
+ - name: Checkout files
+ uses: Bhacaz/checkout-files@v2
+ with:
+ files: fsutils/download_list.py
+ branch: ${{ github.head_ref || github.ref_name || 'master' }}
+ - name: Install discord-webhook library
+ run: pip install discord-webhook
+
+ - name: Find release folder from branch name
+ run: |
+ if [[ "${{ github.ref_name }}" == *Release* ]]; then
+ FS_RELEASE_FOLDER=release
+ else
+ if [[ "${{github.event_name}}" == 'schedule' ]]; then
+ FS_RELEASE_FOLDER=nightly
+ else
+ FS_RELEASE_FOLDER=preview
+ fi
+ fi
+ echo "FS_RELEASE_FOLDER=${FS_RELEASE_FOLDER}" >> $GITHUB_ENV
+
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ id: download
+ with:
+ path: to_deploy
+ - name: List downloaded artifacts
+ run: ls -R
+ working-directory: ${{steps.download.outputs.download-path}}
+
+ - name: Reorganise artifacts ready for server upload.
+ run: python ./fsutils/download_list.py -u ${{steps.download.outputs.download-path}} -w ${{ secrets.RELEASE_WEBHOOK_URL }}
+
+ - name: Setup rclone and download the folder
+ uses: beqjanus/setup-rclone@main
+ with:
+ rclone_config: ${{ secrets.RCLONE_CONFIG }}
+
+ - name: Copy files to remote host
+ run: rclone copy ${{steps.download.outputs.download-path}}/${{ env.FS_RELEASE_FOLDER }} fs_deploy:${{ env.FS_RELEASE_FOLDER }}
+
diff --git a/fsutils/download_list.py b/fsutils/download_list.py
index 0747ae7e4c..b8534efeff 100644
--- a/fsutils/download_list.py
+++ b/fsutils/download_list.py
@@ -6,14 +6,9 @@ import time
import zipfile
import glob
import shutil
+from discord_webhook import DiscordWebhook
+
-# iterate over the files in a directory and pass them to a command line subshell
-def get_files(path):
- files = []
- for root, dirs, files in os.walk(path):
- # print(f"Found : {files}")
- return files
- return None
# run a command line subshell and return the output
@@ -65,6 +60,14 @@ def get_files(path):
# MD5: 9D5D8021F376194B42F6E7D8E537E45E
# -------------------------------------------------------------------------------------------------------
+# collect the names of all files found under a directory tree
+def get_files(path):
+ files = []
+ for root, dirs, filenames in os.walk(path):
+ for filename in filenames:
+ files.append(filename)
+ print(f"Found : {files} on {path}")
+ return files
def run_cmd(cmd):
# print(cmd)
@@ -73,12 +76,12 @@ def run_cmd(cmd):
#using the md5sum command get the md5 for the file
def get_md5(mdfile):
- # print(f"mdfile is {mdfile}")
md5sum = run_cmd(f"md5sum {mdfile}")
#split md5sum on space
md5sum = md5sum.split()[0]
#remove leading '\'
md5sum = md5sum[1:]
+ print(f"generating md5sum for {mdfile} as {md5sum}")
return md5sum
def unzip_file(zip_file, unzip_dir):
@@ -86,6 +89,7 @@ def unzip_file(zip_file, unzip_dir):
zip_ref.extractall(unzip_dir)
def flatten_tree(tree_root):
+ print(f"Flattening tree {tree_root}")
for root, flatten_dirs, files in os.walk(tree_root, topdown=False):
for file in files:
# Construct the full path to the file
@@ -107,6 +111,8 @@ parser = argparse.ArgumentParser(
)
parser.add_argument("-r", "--release", required=False, default=False, action="store_true", help="use the release folder in the target URL")
parser.add_argument("-u", "--unzip", required=False, default=False, action="store_true", help="unzip the github artifact first")
+parser.add_argument("-w", "--webhook", help="post details to the webhook")
+
# add path_to_directory required parameter to parser
parser.add_argument("path_to_directory", help="path to the directory in which we'll look for the files")
@@ -114,102 +120,162 @@ args = parser.parse_args()
path_to_directory = args.path_to_directory
release = args.release
+# Create a webhook object with the webhook URL
+if args.webhook:
+ webhook = DiscordWebhook(url=args.webhook)
+
dirs = ["windows", "mac", "linux"]
-if args.unzip:
- # unzip the github artifact for this OS (`dir`) into the folder `dir`
- # get the .zip files in args.path_to_directory using glob
- zips = glob.glob(f"{args.path_to_directory}/*.zip")
- for file in zips:
- # print(f"unzipping {file}")
- if "ubuntu" in file.lower():
- unzip_file(file, os.path.join(args.path_to_directory, "linux"))
- if "windows" in file.lower():
- unzip_file(file, os.path.join(args.path_to_directory, "windows"))
- if "macos" in file.lower():
- unzip_file(file, os.path.join(args.path_to_directory, "mac"))
+# build_types maps the build type names (Beta, Release, Nightly) to the deployment folder names (preview, release, nightly)
+build_types = {
+ "Beta": "preview",
+ "Release": "release",
+ "Nightly": "nightly"
+}
+
+target_folder = {
+ "ubuntu":"linux",
+ "windows":"windows",
+ "macos":"mac"
+}
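+# Illustrative layout (paths hypothetical): after unpacking, a Beta artifact for Windows ends up
+# under <path_to_directory>/preview/windows, and a Nightly artifact built on Ubuntu under <path_to_directory>/nightly/linux.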
+
+# unzip the github artifact for this OS (`dir`) into the folder `dir`
+# get the .zip files in args.path_to_directory using glob
+print(f"Processing artifacts in {args.path_to_directory}")
+build_types_created = set()
+zips = glob.glob(f"{args.path_to_directory}/*.zip")
+for file in zips:
+ # print(f"unzipping {file}")
+ # extract the build type and platform from the artifact file name:
+ # file is a full path, so basename() strips the leading directories first;
+ # the first '-'-delimited word is the build type and the second is the platform
+ filename = os.path.basename(file)
+ build_type = filename.split("-")[0]
+ platform = filename.split("-")[1].lower()
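+ # e.g. a hypothetical artifact named "Beta-windows-2022-64-SL-artifacts.zip" gives
+ # build_type == "Beta" and platform == "windows"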
+
+ # print(f"build_type is {build_type}")
+ if build_type not in build_types:
+ print(f"Invalid build_type {build_type} using file {file}")
+ continue
+ else:
+ build_folder = build_types[build_type]
+
+ build_types_created.add(build_type)
+
+ build_type_dir = os.path.join(args.path_to_directory, build_folder)
+
+ if platform not in target_folder:
+ print(f"Invalid platform {platform} using file {file}")
+ continue
+
+ unpack_folder = os.path.join(build_type_dir, target_folder[platform])
+ print(f"unpacking {filename} to {unpack_folder}")
+
+ if os.path.isfile(file):
+ # this is an actual zip file
+ unzip_file(file, unpack_folder)
+ else:
+ # Create the destination folder if it doesn't exist
+ # if not os.path.exists(unpack_folder):
+ # os.makedirs(unpack_folder)
+ # Copy the contents of the source folder to the destination folder recursively
+ shutil.copytree(file, unpack_folder, dirs_exist_ok=True)
+
+output = ""
+for build_type in build_types_created:
+ build_type_dir = os.path.join(args.path_to_directory, build_types[build_type])
+ if not os.path.exists(build_type_dir):
+ print(f"Unexpected error: {build_type_dir} does not exist, even though it was in the set.")
+ continue
+ # loop over the folder in the build_type_dir
for dir in dirs:
- flatten_tree(os.path.join(args.path_to_directory, dir))
+ print(f"Cleaning up {dir}")
+ # Traverse the directory tree and move all of the files to the root directory
+ flatten_tree(os.path.join(build_type_dir, dir))
# Now move the symbols files to the symbols folder
- symbols_folder = os.path.join(args.path_to_directory, "symbols")
+ # prep the symbols folder
+ symbols_folder = os.path.join(build_type_dir, "symbols")
os.mkdir(symbols_folder)
- # Traverse the directory tree and move all of the files to the root directory
- symbol_archives = glob.glob(f"{args.path_to_directory}/**/*_hvk*", recursive=True)
+ symbol_archives = glob.glob(f"{build_type_dir}/**/*_hvk*", recursive=True)
for sym_file in symbol_archives:
print(f"Moving {sym_file} to {symbols_folder}")
shutil.move(sym_file, symbols_folder)
- symbol_archives = glob.glob(f"{args.path_to_directory}/**/*_oss*", recursive=True)
+ symbol_archives = glob.glob(f"{build_type_dir}/**/*_oss*", recursive=True)
for sym_file in symbol_archives:
print(f"Moving {sym_file} to {symbols_folder}")
shutil.move(sym_file, symbols_folder)
-
-file_dict = {}
-md5_dict = {}
+ # While we're at it, let's print the md5 listing
+ file_dict = {}
+ md5_dict = {}
+ platforms_printable = {"windows":"MS Windows", "mac":"MacOS", "linux":"Linux"}
+ grids_printable = {"SL":"Second Life", "OS":"OpenSim"}
-for dir in dirs:
- dir = dir.lower()
- files = get_files(os.path.join(args.path_to_directory, dir))
- for file in files:
- full_file = os.path.join(args.path_to_directory, dir, file)
- md5 = get_md5(full_file)
- base_name = os.path.basename(file)
- if "-Release-" in base_name or "-Beta-" in base_name:
- wordsize = "32"
- else:
- wordsize = "64"
-
- if "FirestormOS-" in base_name:
- grid = "OS"
- else:
- grid = "SL"
-
- if dir in dirs:
- file_dict[f"{grid}{dir}{wordsize}"] = full_file
- md5_dict[f"{grid}{dir}{wordsize}"] = md5
-
-download_root_preview = "https://downloads.firestormviewer.org/preview"
-download_root_release = "https://downloads.firestormviewer.org/release"
-
-if args.release:
- download_root = download_root_release
-else:
- download_root = download_root_preview
-
-print('''
-DOWNLOADS''')
-
-platforms_printable = {"windows":"MS Windows", "mac":"MacOS", "linux":"Linux"}
-grids_printable = {"SL":"Second Life", "OS":"OpenSim"}
-
-for dir in dirs:
- print(f'''-------------------------------------------------------------------------------------------------------
-{platforms_printable[dir]}
-''')
- dir=dir.lower()
- wordsize = "64"
- platform = f"{platforms_printable[dir]}"
- for grid in ["SL", "OS"]:
- grid_printable = f"{grids_printable[grid]}"
+ download_root = f"https://downloads.firestormviewer.org/{build_types[build_type]}"
+ for dir in dirs:
+ print(f"Getting files for {dir} in {build_type_dir}")
+ files = get_files(os.path.join(build_type_dir, dir))
try:
- print (f"{platform} for {grid_printable} ({wordsize}-bit)")
- print ( "{}/{}/{}".format(download_root,dir,os.path.basename(file_dict[f"{grid}{dir}{wordsize}"])) )
- print ()
- print ( "MD5: {}".format(md5_dict[f"{grid}{dir}{wordsize}"]) )
- print ()
- if(dir == "windows"):
- # Need to do 32 bit as well
- wordsize = "32"
- print (f"{platform} for {grid_printable} ({wordsize}-bit)")
- print ( "{}/{}/{}".format(download_root,dir,os.path.basename(file_dict[f"{grid}{dir}{wordsize}"])) )
- print ()
- print ( "MD5: {}".format(md5_dict[f"{grid}{dir}{wordsize}"]) )
- print ()
- wordsize = "64"
- except KeyError:
- print (f"{platform} for {grid_printable} ({wordsize}-bit) - NOT AVAILABLE")
- print ()
+ for file in files:
+ full_file = os.path.join(build_type_dir, dir, file)
+ md5 = get_md5(full_file)
+ base_name = os.path.basename(file)
+ if "x64" in base_name:
+ wordsize = "64"
+ else:
+ wordsize = "32"
+
+ if "FirestormOS-" in base_name:
+ grid = "OS"
+ else:
+ grid = "SL"
-print('''
--------------------------------------------------------------------------------------------------------''')
+ if dir in dirs:
+ file_dict[f"{grid}{dir}{wordsize}"] = full_file
+ md5_dict[f"{grid}{dir}{wordsize}"] = md5
+ except TypeError:
+ print(f"No files found for {dir} in {build_type_dir}")
+
+
+ output += f'''
+DOWNLOADS - {build_type}
+'''
+
+ output += f'''-------------------------------------------------------------------------------------------------------
+{platforms_printable[dir]}
+'''
+ dir = dir.lower()
+ wordsize = "64"
+ platform = f"{platforms_printable[dir]}"
+ for grid in ["SL", "OS"]:
+ grid_printable = f"{grids_printable[grid]}"
+ try:
+ output += f"{platform} for {grid_printable} ({wordsize}-bit)\n"
+ output += f"{download_root}/{dir}/{os.path.basename(file_dict[f'{grid}{dir}{wordsize}'])}\n"
+ output += "\n"
+ output += f"MD5: {md5_dict[f'{grid}{dir}{wordsize}']}\n"
+ output += "\n"
+ if dir == "windows":
+ # Need to do 32 bit as well
+ wordsize = "32"
+ output += f"{platform} for {grid_printable} ({wordsize}-bit)\n"
+ output += f"{download_root}/{dir}/{os.path.basename(file_dict[f'{grid}{dir}{wordsize}'])}\n"
+ output += "\n"
+ output += f"MD5: {md5_dict[f'{grid}{dir}{wordsize}']}\n"
+ output += "\n"
+ wordsize = "64"
+ except KeyError:
+ output += f"{platform} for {grid_printable} ({wordsize}-bit) - NOT AVAILABLE\n"
+ output += "\n"
+ output += '''
+-------------------------------------------------------------------------------------------------------
+'''
+
+ if args.webhook:
+ # Add the message to the webhook
+ webhook.set_content(content=output)
+ # Send the webhook
+ response = webhook.execute()
+ # Print the response
+ print(f"Webhook response: {response}")
+ print(output)
diff --git a/indra/llinventory/llsettingswater.cpp b/indra/llinventory/llsettingswater.cpp
index 90f99e8198..29c6bf0000 100644
--- a/indra/llinventory/llsettingswater.cpp
+++ b/indra/llinventory/llsettingswater.cpp
@@ -290,6 +290,20 @@ F32 LLSettingsWater::getModifiedWaterFogDensity(bool underwater) const
if (underwater && underwater_fog_mod > 0.0f)
{
underwater_fog_mod = llclamp(underwater_fog_mod, 0.0f, 10.0f);
+ // BUG-233797/BUG-233798: a negative underwater fog density can cause an (unrecoverable) blackout.
+ // Raising a negative number to a non-integral power has no real result (which is NaN for our purposes).
+ // Two methods were tested; number 2 is being used:
+ // 1) Force the fog_mod to be integral. The effect is unlikely to be nice, but it is better than blackness.
+ //    With this method a few of the combinations are "usable", but the water colour is effectively inverted (blue becomes yellow),
+ //    which seems unlikely to be a desirable use case for the majority.
+ // 2) Force the density to an arbitrary non-negative value (i.e. 1) when underwater and the modifier is not an integer
+ //    (1 was arbitrarily chosen as it gives at least some notion of fog in the transition).
+ //    This is more restrictive, effectively forcing a density under certain conditions, but it allows the range of #1
+ //    and avoids blackness in the other cases, at the cost of overriding the fog density.
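+ // Illustrative sketch of the failure mode (values chosen for this comment only):
+ //   pow(-0.5f, 2.0f) ==  0.25f   // integral exponent: a real result exists
+ //   pow(-0.5f, 2.5f) --> NaN     // non-integral exponent: no real result, so the water renders black
+ // hence the guard below substitutes a density of 1.0f for that combination.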
+ if (fog_density < 0.0f && underwater_fog_mod != (F32)llround(underwater_fog_mod))
+ {
+ fog_density = 1.0f;
+ }
+ //
fog_density = pow(fog_density, underwater_fog_mod);
}
return fog_density;
diff --git a/indra/newview/NACLantispam.cpp b/indra/newview/NACLantispam.cpp
index a72bbbffc8..9b5e709f10 100644
--- a/indra/newview/NACLantispam.cpp
+++ b/indra/newview/NACLantispam.cpp
@@ -103,18 +103,18 @@ NACLAntiSpamQueueEntry* NACLAntiSpamQueue::getEntry(const LLUUID& source)
}
else
{
- return NULL;
+ return nullptr;
}
}
void NACLAntiSpamQueue::clearEntries()
{
- for (spam_queue_entry_map_t::iterator it = mEntries.begin(); it != mEntries.end(); ++it)
+ for (auto& [id, entry] : mEntries)
{
//AO: Only clear entries that are not blocked.
- if (!it->second->getBlocked())
+ if (!entry->getBlocked())
{
- it->second->clearEntry();
+ entry->clearEntry();
}
}
}
@@ -139,15 +139,14 @@ void NACLAntiSpamQueue::blockEntry(const LLUUID& source)
mEntries[source]->setBlocked();
}
-S32 NACLAntiSpamQueue::checkEntry(const LLUUID& name, U32 multiplier)
-// Returns 0 if unblocked, 1 if check results in a new block, 2 if by an existing block
+EAntispamCheckResult NACLAntiSpamQueue::checkEntry(const LLUUID& name, U32 multiplier)
{
spam_queue_entry_map_t::iterator it = mEntries.find(name);
if (it != mEntries.end())
{
if (it->second->getBlocked())
{
- return 2;
+ return EAntispamCheckResult::ExistingBlock;
}
U32 eTime = it->second->getEntryTime();
U32 currentTime = time(0);
@@ -158,11 +157,11 @@ S32 NACLAntiSpamQueue::checkEntry(const LLUUID& name, U32 multiplier)
if (eAmount > (mQueueAmount * multiplier))
{
it->second->setBlocked();
- return 1;
+ return EAntispamCheckResult::NewBlock;
}
else
{
- return 0;
+ return EAntispamCheckResult::Unblocked;
}
}
else
@@ -170,7 +169,7 @@ S32 NACLAntiSpamQueue::checkEntry(const LLUUID& name, U32 multiplier)
it->second->clearEntry();
it->second->updateEntryAmount();
it->second->updateEntryTime();
- return 0;
+ return EAntispamCheckResult::Unblocked;
}
}
else
@@ -180,7 +179,7 @@ S32 NACLAntiSpamQueue::checkEntry(const LLUUID& name, U32 multiplier)
entry->updateEntryAmount();
entry->updateEntryTime();
mEntries[name] = entry;
- return 0;
+ return EAntispamCheckResult::Unblocked;
}
}
@@ -219,7 +218,7 @@ NACLAntiSpamRegistry::~NACLAntiSpamRegistry()
const char* NACLAntiSpamRegistry::getQueueName(EAntispamQueue queue)
{
- if (queue >= ANTISPAM_QUEUE_MAX)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT)
{
return "Unknown";
}
@@ -228,7 +227,7 @@ const char* NACLAntiSpamRegistry::getQueueName(EAntispamQueue queue)
void NACLAntiSpamRegistry::setRegisteredQueueTime(EAntispamQueue queue, U32 time)
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return;
@@ -239,7 +238,7 @@ void NACLAntiSpamRegistry::setRegisteredQueueTime(EAntispamQueue queue, U32 time
void NACLAntiSpamRegistry::setRegisteredQueueAmount(EAntispamQueue queue, U32 amount)
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return;
@@ -283,7 +282,7 @@ void NACLAntiSpamRegistry::setAllQueueAmounts(U32 amount)
void NACLAntiSpamRegistry::clearRegisteredQueue(EAntispamQueue queue)
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return;
@@ -294,7 +293,7 @@ void NACLAntiSpamRegistry::clearRegisteredQueue(EAntispamQueue queue)
void NACLAntiSpamRegistry::purgeRegisteredQueue(EAntispamQueue queue)
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return;
@@ -317,7 +316,7 @@ void NACLAntiSpamRegistry::blockOnQueue(EAntispamQueue queue, const LLUUID& sour
}
else
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return;
@@ -362,14 +361,14 @@ bool NACLAntiSpamRegistry::checkQueue(EAntispamQueue queue, const LLUUID& source
}
}
- S32 result = 0;
+ EAntispamCheckResult result{ EAntispamCheckResult::Unblocked };
if (mGlobalQueue)
{
result = checkGlobalEntry(source, multiplier);
}
else
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return false;
@@ -377,17 +376,17 @@ bool NACLAntiSpamRegistry::checkQueue(EAntispamQueue queue, const LLUUID& source
result = mQueues[queue]->checkEntry(source, multiplier);
}
- if (result == 0) // safe
+ if (result == EAntispamCheckResult::Unblocked) // safe
{
return false;
}
- if (result == 2) // previously blocked
+ if (result == EAntispamCheckResult::ExistingBlock) // previously blocked
{
return true;
}
- if (result == 1) // newly blocked, result == 1
+ if (result == EAntispamCheckResult::NewBlock) // newly blocked
{
if (!LLMuteList::getInstance()->isMuted(source))
{
@@ -402,16 +401,14 @@ bool NACLAntiSpamRegistry::checkQueue(EAntispamQueue queue, const LLUUID& source
{
bool sent = false;
- for (LLWorld::region_list_t::const_iterator iter = LLWorld::getInstance()->getRegionList().begin();
- iter != LLWorld::getInstance()->getRegionList().end(); ++iter)
+ for (auto region : LLWorld::getInstance()->getRegionList())
{
- LLViewerRegion* region = *iter;
if (gMessageSystem && region && region->isAlive())
{
gMessageSystem->newMessage(_PREHASH_RequestObjectPropertiesFamily);
gMessageSystem->nextBlockFast(_PREHASH_AgentData);
gMessageSystem->addUUIDFast(_PREHASH_AgentID, gAgentID);
- gMessageSystem->addUUIDFast(_PREHASH_SessionID, gAgent.getSessionID());
+ gMessageSystem->addUUIDFast(_PREHASH_SessionID, gAgentSessionID);
gMessageSystem->nextBlockFast(_PREHASH_ObjectData);
gMessageSystem->addU32Fast(_PREHASH_RequestFlags, 0);
gMessageSystem->addUUIDFast(_PREHASH_ObjectID, source);
@@ -499,7 +496,7 @@ bool NACLAntiSpamRegistry::isBlockedOnQueue(EAntispamQueue queue, const LLUUID&
}
else
{
- if (queue >= ANTISPAM_QUEUE_MAX || mQueues[queue] == NULL)
+ if (queue >= ANTISPAM_QUEUE_MAX || queue < ANTISPAM_QUEUE_CHAT || mQueues[queue] == nullptr)
{
LL_ERRS("AntiSpam") << "CODE BUG: Attempting to use a antispam queue that was not created or was outside of the reasonable range of queues. Queue: " << getQueueName(queue) << LL_ENDL;
return false;
@@ -547,12 +544,11 @@ void NACLAntiSpamRegistry::clearAllQueues()
void NACLAntiSpamRegistry::purgeAllQueues()
{
- std::map::iterator it = mAvatarNameCallbackConnections.begin();
- for (; it != mAvatarNameCallbackConnections.end(); ++it)
+ for (auto& [avid, callback] : mAvatarNameCallbackConnections)
{
- if (it->second.connected())
+ if (callback.connected())
{
- it->second.disconnect();
+ callback.disconnect();
}
}
mAvatarNameCallbackConnections.clear();
@@ -574,14 +570,14 @@ void NACLAntiSpamRegistry::purgeAllQueues()
mObjectData.clear();
}
-S32 NACLAntiSpamRegistry::checkGlobalEntry(const LLUUID& source, U32 multiplier)
+EAntispamCheckResult NACLAntiSpamRegistry::checkGlobalEntry(const LLUUID& source, U32 multiplier)
{
spam_queue_entry_map_t::iterator it = mGlobalEntries.find(source);
if (it != mGlobalEntries.end())
{
if (it->second->getBlocked())
{
- return 2;
+ return EAntispamCheckResult::ExistingBlock;
}
U32 eTime = it->second->getEntryTime();
@@ -592,11 +588,11 @@ S32 NACLAntiSpamRegistry::checkGlobalEntry(const LLUUID& source, U32 multiplier)
U32 eAmount = it->second->getEntryAmount();
if (eAmount > (mGlobalAmount * multiplier))
{
- return 1;
+ return EAntispamCheckResult::NewBlock;
}
else
{
- return 0;
+ return EAntispamCheckResult::Unblocked;
}
}
else
@@ -604,7 +600,7 @@ S32 NACLAntiSpamRegistry::checkGlobalEntry(const LLUUID& source, U32 multiplier)
it->second->clearEntry();
it->second->updateEntryAmount();
it->second->updateEntryTime();
- return 0;
+ return EAntispamCheckResult::Unblocked;
}
}
else
@@ -613,15 +609,15 @@ S32 NACLAntiSpamRegistry::checkGlobalEntry(const LLUUID& source, U32 multiplier)
entry->updateEntryAmount();
entry->updateEntryTime();
mGlobalEntries[source] = entry;
- return 0;
+ return EAntispamCheckResult::Unblocked;
}
}
void NACLAntiSpamRegistry::clearGlobalEntries()
{
- for (spam_queue_entry_map_t::iterator it = mGlobalEntries.begin(); it != mGlobalEntries.end(); ++it)
+ for (auto& [id, entry] : mGlobalEntries)
{
- it->second->clearEntry();
+ entry->clearEntry();
}
}
diff --git a/indra/newview/NACLantispam.h b/indra/newview/NACLantispam.h
index d5cf7889a6..cf85aa3dca 100644
--- a/indra/newview/NACLantispam.h
+++ b/indra/newview/NACLantispam.h
@@ -1,8 +1,8 @@
#ifndef NACL_ANTISPAM_H
#define NACL_ANTISPAM_H
-#include <boost/unordered_map.hpp>
-#include <boost/unordered_set.hpp>
+#include <unordered_map>
+#include <unordered_set>
#include "llsingleton.h"
#include "llavatarnamecache.h"
@@ -25,6 +25,14 @@ typedef enum e_antispam_source_type
ANTISPAM_SOURCE_OBJECT
} EAntispamSource;
+
+enum class EAntispamCheckResult
+{
+ Unblocked,
+ NewBlock,
+ ExistingBlock
+};
+
struct AntispamObjectData
{
std::string mName;
@@ -58,8 +66,8 @@ private:
bool mBlocked;
};
-typedef boost::unordered_map spam_queue_entry_map_t;
-typedef boost::unordered_set collision_sound_set_t;
+typedef std::unordered_map spam_queue_entry_map_t;
+typedef std::unordered_set collision_sound_set_t;
class NACLAntiSpamQueue
{
@@ -77,7 +85,7 @@ protected:
void setTime(U32 time);
void blockEntry(const LLUUID& source);
- S32 checkEntry(const LLUUID& source, U32 multiplier);
+ EAntispamCheckResult checkEntry(const LLUUID& source, U32 multiplier);
NACLAntiSpamQueueEntry* getEntry(const LLUUID& source);
void clearEntries();
@@ -119,7 +127,7 @@ private:
const char* getQueueName(EAntispamQueue queue);
void blockGlobalEntry(const LLUUID& source);
- S32 checkGlobalEntry(const LLUUID& source, U32 multiplier);
+ EAntispamCheckResult checkGlobalEntry(const LLUUID& source, U32 multiplier);
void clearGlobalEntries();
void purgeGlobalEntries();
diff --git a/indra/newview/llfloaterimagepreview.cpp b/indra/newview/llfloaterimagepreview.cpp
index 45493cb6a5..551573ecda 100644
--- a/indra/newview/llfloaterimagepreview.cpp
+++ b/indra/newview/llfloaterimagepreview.cpp
@@ -164,6 +164,49 @@ BOOL LLFloaterImagePreview::postBuild()
}
getChild("temp_check")->setVisible(enable_temp_uploads);
//
+
+ // detect and strip empty alpha layers from images on upload
+ getChild("ok_btn")->setCommitCallback(boost::bind(&LLFloaterImagePreview::onBtnUpload, this));
+
+ getChild("uploaded_size_text")->setTextArg("[X_RES]", llformat("%d", mRawImagep->getWidth()));
+ getChild("uploaded_size_text")->setTextArg("[Y_RES]", llformat("%d", mRawImagep->getHeight()));
+
+ mEmptyAlphaCheck = getChild("strip_alpha_check");
+
+ if (mRawImagep->getComponents() != 4)
+ {
+ getChild("image_alpha_warning")->setVisible(false);
+ getChild("uploaded_size_text")->setTextArg("[ALPHA]", getString("no_alpha"));
+ return true;
+ }
+
+ U32 imageBytes = mRawImagep->getWidth() * mRawImagep->getHeight() * 4;
+
+ U32 emptyAlphaCount = 0;
+ U8* data = mRawImagep->getData();
+ for (U32 i = 3; i < imageBytes; i += 4)
+ {
+ if (data[i] > ALPHA_EMPTY_THRESHOLD)
+ {
+ emptyAlphaCount++;
+ }
+ }
+
+ if (emptyAlphaCount > (imageBytes / 4 * ALPHA_EMPTY_THRESHOLD_RATIO))
+ {
+ getChild("image_alpha_warning")->setVisible(true);
+
+ mEmptyAlphaCheck->setCommitCallback(boost::bind(&LLFloaterImagePreview::emptyAlphaCheckboxCallback, this));
+ mEmptyAlphaCheck->setValue(true);
+ }
+ else
+ {
+ getChild("image_alpha_warning")->setVisible(false);
+ mEmptyAlphaCheck->setValue(false);
+ }
+
+ getChild("uploaded_size_text")->setTextArg("[ALPHA]", getString(mEmptyAlphaCheck->getValue() ? "no_alpha" : "with_alpha"));
+ //
}
else
{
@@ -181,47 +224,6 @@ BOOL LLFloaterImagePreview::postBuild()
// detect and strip empty alpha layers from images on upload
// getChild("ok_btn")->setCommitCallback(boost::bind(&LLFloaterNameDesc::onBtnOK, this));
- getChild("ok_btn")->setCommitCallback(boost::bind(&LLFloaterImagePreview::onBtnUpload, this));
-
- getChild("uploaded_size_text")->setTextArg("[X_RES]", llformat("%d", mRawImagep->getWidth()));
- getChild("uploaded_size_text")->setTextArg("[Y_RES]", llformat("%d", mRawImagep->getHeight()));
-
- mEmptyAlphaCheck = getChild("strip_alpha_check");
-
- if (mRawImagep->getComponents() != 4)
- {
- getChild("image_alpha_warning")->setVisible(false);
- getChild("uploaded_size_text")->setTextArg("[ALPHA]", getString("no_alpha"));
- return true;
- }
-
- U32 imageBytes = mRawImagep->getWidth() * mRawImagep->getHeight() * 4;
-
- U32 emptyAlphaCount = 0;
- U8* data = mRawImagep->getData();
- for (U32 i = 3; i < imageBytes; i += 4)
- {
- if (data[i] > ALPHA_EMPTY_THRESHOLD)
- {
- emptyAlphaCount++;
- }
- }
-
- if (emptyAlphaCount > (imageBytes / 4 * ALPHA_EMPTY_THRESHOLD_RATIO))
- {
- getChild("image_alpha_warning")->setVisible(true);
-
- mEmptyAlphaCheck->setCommitCallback(boost::bind(&LLFloaterImagePreview::emptyAlphaCheckboxCallback, this));
- mEmptyAlphaCheck->setValue(true);
- }
- else
- {
- getChild("image_alpha_warning")->setVisible(false);
- mEmptyAlphaCheck->setValue(false);
- }
-
- getChild("uploaded_size_text")->setTextArg("[ALPHA]", getString(mEmptyAlphaCheck->getValue() ? "no_alpha" : "with_alpha"));
- //
return TRUE;
}
diff --git a/indra/newview/llpanelavatar.cpp b/indra/newview/llpanelavatar.cpp
index 3632d7c631..30c879f8ed 100644
--- a/indra/newview/llpanelavatar.cpp
+++ b/indra/newview/llpanelavatar.cpp
@@ -86,15 +86,18 @@ void LLPanelProfileTab::setAvatarId(const LLUUID& avatar_id)
mSelfProfile = (getAvatarId() == gAgentID);
// FIRE-32179: Make drag-n-drop sharing of items possible again
- LLProfileDropTarget* target = getChild<LLProfileDropTarget>("drop_target");
- if (avatar_id == gAgentID)
+ LLProfileDropTarget* target = findChild<LLProfileDropTarget>("drop_target");
+ if (target)
{
- // hide drop target on own profile
- target->setVisible(false);
- }
- else
- {
- target->setAgentID(avatar_id);
+ if (avatar_id == gAgentID)
+ {
+ // hide drop target on own profile
+ target->setVisible(false);
+ }
+ else
+ {
+ target->setAgentID(avatar_id);
+ }
}
//
}
diff --git a/indra/newview/llpanelprofile.cpp b/indra/newview/llpanelprofile.cpp
index 2940eb951d..430ed650b0 100644
--- a/indra/newview/llpanelprofile.cpp
+++ b/indra/newview/llpanelprofile.cpp
@@ -1201,7 +1201,9 @@ void LLPanelProfileSecondLife::resetData()
resetLoading();
// Set default image and 1:1 dimensions for it
- mSecondLifePic->setValue("Generic_Person_Large");
+ // Retain texture picker for profile images
+ //mSecondLifePic->setValue("Generic_Person_Large");
+ mSecondLifePic->setImageAssetID(LLUUID::null);
mImageId = LLUUID::null;
// Fix LL UI/UX design accident
@@ -3028,7 +3030,9 @@ void LLPanelProfileFirstLife::apply(LLAvatarData* data)
void LLPanelProfileFirstLife::resetData()
{
setDescriptionText(std::string());
- mPicture->setValue("Generic_Person_Large");
+ // Retain texture picker for profile images
+ //mPicture->setValue("Generic_Person_Large");
+ mPicture->setImageAssetID(LLUUID::null);
mImageId = LLUUID::null;
// remove the buttons and just have click image to update profile
diff --git a/indra/newview/skins/default/xui/en/panel_profile_firstlife.xml b/indra/newview/skins/default/xui/en/panel_profile_firstlife.xml
index dcb39241a9..a8076b8550 100644
--- a/indra/newview/skins/default/xui/en/panel_profile_firstlife.xml
+++ b/indra/newview/skins/default/xui/en/panel_profile_firstlife.xml
@@ -26,7 +26,7 @@
/>
+
+ Слышать мультимедиа и звуки из:
+
+
+
+
+
Автоматически включать звук после телепортации:
@@ -51,7 +58,7 @@
-Автовоспроизведение
+ Автовоспроизведение
diff --git a/indra/newview/skins/starlight/xui/en/panel_fs_profile_secondlife.xml b/indra/newview/skins/starlight/xui/en/panel_fs_profile_secondlife.xml
deleted file mode 100644
index a8468bc0e1..0000000000
--- a/indra/newview/skins/starlight/xui/en/panel_fs_profile_secondlife.xml
+++ /dev/null
@@ -1,574 +0,0 @@
-
-
-
-Online
-
-
-Offline
-
-
-[ACCTTYPE]
-[PAYMENTINFO]
-[FIRESTORM][FSDEV][FSSUPP][FSQA][FSGW]
-
-
-
-http://www.secondlife.com/account/billing.php?lang=en
-
-
-http://www.secondlife.com/account/partners.php?lang=en
-
-
-
-
-
-[REG_DATE]
-([AGE])
-
-
-[NAME]
-
-
-[DISPLAY_NAME]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Drop inventory item here.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/indra/newview/skins/starlightcui/xui/en/panel_fs_profile_secondlife.xml b/indra/newview/skins/starlightcui/xui/en/panel_fs_profile_secondlife.xml
deleted file mode 100644
index a8468bc0e1..0000000000
--- a/indra/newview/skins/starlightcui/xui/en/panel_fs_profile_secondlife.xml
+++ /dev/null
@@ -1,574 +0,0 @@
-
-
-
-Online
-
-
-Offline
-
-
-[ACCTTYPE]
-[PAYMENTINFO]
-[FIRESTORM][FSDEV][FSSUPP][FSQA][FSGW]
-
-
-
-http://www.secondlife.com/account/billing.php?lang=en
-
-
-http://www.secondlife.com/account/partners.php?lang=en
-
-
-
-
-
-[REG_DATE]
-([AGE])
-
-
-[NAME]
-
-
-[DISPLAY_NAME]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Drop inventory item here.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/indra/newview/skins/vintage/xui/en/panel_profile_secondlife.xml b/indra/newview/skins/vintage/xui/en/panel_profile_secondlife.xml
index 8f573d24c9..dbd4453f99 100644
--- a/indra/newview/skins/vintage/xui/en/panel_profile_secondlife.xml
+++ b/indra/newview/skins/vintage/xui/en/panel_profile_secondlife.xml
@@ -233,6 +233,7 @@
-import shlex
-import zipfile
-#
+sys.dont_write_bytecode = True # Prevents creating __pycache__ directory
from fs_viewer_manifest import FSViewerManifest # Manifest extensions for Firestorm