Merge branch 'DRTVWR-527-maint' of https://bitbucket.org/lindenlab/viewer
# Conflicts:
#	autobuild.xml
#	indra/cmake/Python.cmake
#	indra/lib/python/indra/util/llmanifest.py
#	indra/lib/python/indra/util/test_win32_manifest.py
#	indra/llaudio/llstreamingaudio_fmodstudio.cpp
#	indra/llaudio/llstreamingaudio_fmodstudio.h
#	indra/llcommon/llerror.cpp
#	indra/newview/llappviewer.cpp
#	indra/newview/lleventnotifier.cpp
#	indra/newview/llpanellandmarks.cpp
#	indra/newview/llpreviewanim.cpp
#	indra/newview/llpreviewanim.h
#	indra/newview/llviewerregion.cpp
#	indra/newview/skins/default/xui/de/menu_place_add_button.xml
#	indra/newview/skins/default/xui/it/menu_place_add_button.xml
#	indra/newview/skins/default/xui/it/menu_teleport_history_item.xml
#	indra/newview/skins/default/xui/ja/menu_place_add_button.xml
#	indra/newview/skins/default/xui/pl/menu_place_add_button.xml
#	indra/newview/skins/default/xui/pl/menu_teleport_history_item.xml
#	indra/newview/skins/default/xui/ru/menu_place_add_button.xml
#	indra/newview/skins/default/xui/ru/menu_teleport_history_item.xml
#	indra/newview/viewer_manifest.py
commit 2ba4a164e2

@@ -16,6 +16,9 @@ build_Linux_Doxygen = true
 # Need viewer-build-variables as well as other shared repositories
 buildscripts_shared_more_NAMEs="build_secrets build_variables git_hooks"
 
+# Python 3 / SL-15742
+BUILDSCRIPTS_PY3 = "true"
+
 ################################################################
 #### Examples of how to set the viewer_channel ####
 #

@@ -2449,9 +2449,9 @@
 <key>archive</key>
 <map>
 <key>hash</key>
-<string>14fac452271ebfba37ba5ddcf5bffa54</string>
+<string>da57838d80cf332f4a3026713a13f086</string>
 <key>url</key>
-<string>http://s3-proxy.lindenlab.com/private-builds-secondlife-com/ct2/54842/510078/llphysicsextensions_source-1.0.538972-darwin64-538972.tar.bz2</string>
+<string>https://s3-proxy.lindenlab.com/private-builds-secondlife-com/ct2/90708/824484/llphysicsextensions_source-1.0.565754-darwin64-565754.tar.bz2</string>
 </map>
 <key>name</key>
 <string>darwin64</string>
@@ -2473,16 +2473,16 @@
 <key>archive</key>
 <map>
 <key>hash</key>
-<string>f3c066c1aebed8a6519a3e5ce64b9a3c</string>
+<string>28ad884012aa0bb70cf4101853af2f9a</string>
 <key>url</key>
-<string>http://s3-proxy.lindenlab.com/private-builds-secondlife-com/ct2/54982/511796/llphysicsextensions_source-1.0.538972-windows-538972.tar.bz2</string>
+<string>https://s3-proxy.lindenlab.com/private-builds-secondlife-com/ct2/90733/824570/llphysicsextensions_source-1.0.565768-windows-565768.tar.bz2</string>
 </map>
 <key>name</key>
 <string>windows</string>
 </map>
 </map>
 <key>version</key>
-<string>1.0.538972</string>
+<string>1.0.565768</string>
 </map>
 <key>llphysicsextensions_stub</key>
 <map>
@@ -3466,9 +3466,9 @@ Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors</string>
 <key>archive</key>
 <map>
 <key>hash</key>
-<string>a3c8357a2f5a62cd7de43181b02553bc</string>
+<string>33438e15e609794233d88f2ca6f8e476</string>
 <key>url</key>
-<string>https://automated-builds-secondlife-com.s3.amazonaws.com/ct2/91396/829032/viewer_manager-2.0.566227-darwin64-566227.tar.bz2</string>
+<string>https://automated-builds-secondlife-com.s3.amazonaws.com/ct2/92307/834951/viewer_manager-2.0.566853-darwin64-566853.tar.bz2</string>
 </map>
 <key>name</key>
 <string>darwin64</string>
@@ -3502,9 +3502,9 @@ Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors</string>
 <key>archive</key>
 <map>
 <key>hash</key>
-<string>0654b449d9bdf3507664cf5caa67336f</string>
+<string>f83512f0ed35abf8b24ce66586099842</string>
 <key>url</key>
-<string>https://automated-builds-secondlife-com.s3.amazonaws.com/ct2/91397/829041/viewer_manager-2.0.566227-windows-566227.tar.bz2</string>
+<string>https://automated-builds-secondlife-com.s3.amazonaws.com/ct2/92304/834942/viewer_manager-2.0.566853-windows-566853.tar.bz2</string>
 </map>
 <key>name</key>
 <string>windows</string>
@@ -3515,7 +3515,7 @@ Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors</string>
 <key>source_type</key>
 <string>hg</string>
 <key>version</key>
-<string>2.0.566227</string>
+<string>2.0.566853</string>
 </map>
 <key>vlc-bin</key>
 <map>

@@ -1120,6 +1120,7 @@ Nicky Dasmijn
 SL-11072
 SL-13141
 SL-13642
+SL-16438
 Nicky Perian
 OPEN-1
 STORM-1087

@@ -14,50 +14,28 @@ if (WINDOWS)
 )
 else()
 find_program(PYTHON_EXECUTABLE
-NAMES python25.exe python23.exe python.exe
+NAMES python.exe
 NO_DEFAULT_PATH # added so that cmake does not find cygwin python
 PATHS
-[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.8\\InstallPath]
-[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.7\\InstallPath]
-[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.6\\InstallPath]
-[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.5\\InstallPath]
-[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.4\\InstallPath]
-[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.3\\InstallPath]
-[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.8\\InstallPath]
-[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.7\\InstallPath]
-[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.6\\InstallPath]
-[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.5\\InstallPath]
-[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.4\\InstallPath]
-[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.3\\InstallPath]
+[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.7\\InstallPath]
+[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.8\\InstallPath]
+[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.9\\InstallPath]
+[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.10\\InstallPath]
+[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.11\\InstallPath]
+[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.7\\InstallPath]
+[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.8\\InstallPath]
+[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.9\\InstallPath]
+[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.10\\InstallPath]
+[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.11\\InstallPath]
 )
 endif()
-elseif (EXISTS /etc/debian_version)
-# On Debian and Ubuntu, avoid Python 2.4 if possible.
-
-find_program(PYTHON_EXECUTABLE python PATHS /usr/bin)
-include(FindPythonInterp)
+else()
+find_program(PYTHON_EXECUTABLE python3)
+
+if (PYTHON_EXECUTABLE)
+set(PYTHONINTERP_FOUND ON)
+endif (PYTHON_EXECUTABLE)
-elseif (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
-# On MAC OS X be sure to search standard locations first
-
-string(REPLACE ":" ";" PATH_LIST "$ENV{PATH}")
-find_program(PYTHON_EXECUTABLE
-NAMES python python25 python24 python23
-NO_DEFAULT_PATH # Avoid searching non-standard locations first
-PATHS
-/bin
-/usr/bin
-/usr/local/bin
-${PATH_LIST}
-)
-
-if (PYTHON_EXECUTABLE)
-set(PYTHONINTERP_FOUND ON)
-endif (PYTHON_EXECUTABLE)
-else (WINDOWS)
-include(FindPythonInterp)
 endif (WINDOWS)
 
 if (NOT PYTHON_EXECUTABLE)

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file run_build_test.py
 @author Nat Goodspeed
@@ -17,7 +17,7 @@ line.
 
 Example:
 
-python run_build_test.py -DFOO=bar myprog somearg otherarg
+python3 run_build_test.py -DFOO=bar myprog somearg otherarg
 
 sets environment variable FOO=bar, then runs:
 myprog somearg otherarg
@@ -47,7 +47,7 @@ $/LicenseInfo$
 import os
 import sys
 import errno
-import HTMLParser
+import html.parser
 import re
 import signal
 import subprocess
@@ -111,10 +111,10 @@ def main(command, arguments=[], libpath=[], vars={}):
 # Now handle arbitrary environment variables. The tricky part is ensuring
 # that all the keys and values we try to pass are actually strings.
 if vars:
-for key, value in vars.items():
+for key, value in list(vars.items()):
 # As noted a few lines above, facilitate copy-paste rerunning.
 log.info("%s='%s' \\" % (key, value))
-os.environ.update(dict([(str(key), str(value)) for key, value in vars.iteritems()]))
+os.environ.update(dict([(str(key), str(value)) for key, value in vars.items()]))
 # Run the child process.
 command_list = [command]
 command_list.extend(arguments)
@@ -194,7 +194,7 @@ def translate_rc(rc):
 strc = str(rc)
 return "terminated by signal %s" % strc
 
-class TableParser(HTMLParser.HTMLParser):
+class TableParser(html.parser.HTMLParser):
 """
 This HTMLParser subclass is designed to parse the table we know exists
 in windows-rcs.html, hopefully without building in too much knowledge of
@@ -204,9 +204,7 @@ class TableParser(HTMLParser.HTMLParser):
 whitespace = re.compile(r'\s*$')
 
 def __init__(self):
-# Because Python 2.x's HTMLParser is an old-style class, we must use
-# old-style syntax to forward the __init__() call -- not super().
-HTMLParser.HTMLParser.__init__(self)
+super().__init__()
 # this will collect all the data, eventually
 self.table = []
 # Stack whose top (last item) indicates where to append current

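Note on the TableParser hunks above: Python 3 renames the HTMLParser module to html.parser and makes HTMLParser a new-style class, so plain super() replaces the explicit base-class __init__() call. A minimal runnable sketch of that pattern (the handle_data body and sample HTML are illustrative, not from the file):

    import html.parser

    class TableParser(html.parser.HTMLParser):
        def __init__(self):
            # Python 2 needed HTMLParser.HTMLParser.__init__(self);
            # under Python 3 plain super() works.
            super().__init__()
            self.table = []  # this will collect all the data, eventually

        def handle_data(self, data):
            # illustrative only: stash every non-blank text node
            if data.strip():
                self.table.append(data)

    parser = TableParser()
    parser.feed("<table><tr><td>0</td><td>ok</td></tr></table>")
    print(parser.table)  # ['0', 'ok']
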
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file start-client.py
 
@@ -28,12 +28,12 @@ import os
 import llstart
 
 def usage():
-print """start-client.py
+print("""start-client.py
 
 --grid <grid>
 --farm <grid>
 --region <starting region name>
-"""
+""")
 
 def start_client(grid, slurl, build_config, my_args):
 login_url = "https://login.%s.lindenlab.com/cgi-bin/login.cgi" % (grid)
@@ -42,7 +42,7 @@ def start_client(grid, slurl, build_config, my_args):
 "--loginuri" : login_url }
 viewer_args.update(my_args)
 # *sigh* We must put --url at the end of the argument list.
-if viewer_args.has_key("--url"):
+if "--url" in viewer_args:
 slurl = viewer_args["--url"]
 del(viewer_args["--url"])
 viewer_args = llstart.get_args_from_dict(viewer_args)
@@ -54,7 +54,7 @@ def start_client(grid, slurl, build_config, my_args):
 # but the exe is at indra/build-<xxx>/newview/<target>
 build_path = os.path.dirname(os.getcwd());
 f = open("start-client.log", "w")
-print >>f, "Viewer startup arguments:"
+print("Viewer startup arguments:", file=f)
 llstart.start("viewer", "../../newview",
 "%s/newview/%s/firestorm-bin.exe" % (build_path, build_config),
 viewer_args, f)

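The start-client.py hunks above collect two common 2-to-3 idioms: dict.has_key() is gone, and print is a function whose file= keyword replaces 'print >>f'. A small sketch with made-up values:

    viewer_args = {"--grid": "agni", "--url": "secondlife://region/128/128/0"}

    # Python 2: viewer_args.has_key("--url")
    if "--url" in viewer_args:
        slurl = viewer_args["--url"]
        del viewer_args["--url"]

    with open("start-client.log", "w") as f:
        # Python 2: print >>f, "Viewer startup arguments:"
        print("Viewer startup arguments:", file=f)
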
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 ##
 ## $LicenseInfo:firstyear=2011&license=viewerlgpl$
 ## Second Life Viewer Source Code
@@ -27,7 +27,7 @@ import glob
 
 def delete_file_types(path, filetypes):
 if os.path.exists(path):
-print 'Cleaning: ' + path
+print('Cleaning: ' + path)
 orig_dir = os.getcwd();
 os.chdir(path)
 filelist = []

@@ -26,8 +26,8 @@ THE SOFTWARE.
 $/LicenseInfo$
 """
 
-from compatibility import Incompatible, Older, Newer, Same
-from tokenstream import TokenStream
+from .compatibility import Incompatible, Older, Newer, Same
+from .tokenstream import TokenStream
 
 ###
 ### Message Template
@@ -42,8 +42,8 @@ class Template:
 
 def compatibleWithBase(self, base):
 messagenames = (
-frozenset(self.messages.keys())
-| frozenset(base.messages.keys())
+frozenset(list(self.messages.keys()))
+| frozenset(list(base.messages.keys()))
 )
 
 compatibility = Same()
@@ -142,7 +142,7 @@ class Message:
 baselen = len(base.blocks)
 samelen = min(selflen, baselen)
 
-for i in xrange(0, samelen):
+for i in range(0, samelen):
 selfblock = self.blocks[i]
 baseblock = base.blocks[i]
 
@@ -196,7 +196,7 @@ class Block(object):
 selflen = len(self.variables)
 baselen = len(base.variables)
 
-for i in xrange(0, min(selflen, baselen)):
+for i in range(0, min(selflen, baselen)):
 selfvar = self.variables[i]
 basevar = base.variables[i]
 

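The llmessage hunks above touch two migration points: Python 3 drops implicit relative imports (hence the leading dot in 'from .compatibility import ...'), and dict.keys() now returns a view, which frozenset() accepts directly, so the list() wrapper 2to3 inserted is redundant but harmless. Sketch of the view behavior (sample dicts invented here):

    self_messages = {"AgentUpdate": 1, "ChatFromViewer": 2}
    base_messages = {"AgentUpdate": 1}

    # equivalent to the commit's frozenset(list(...keys())) form
    messagenames = frozenset(self_messages.keys()) | frozenset(base_messages.keys())
    print(sorted(messagenames))  # ['AgentUpdate', 'ChatFromViewer']
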
@@ -60,7 +60,7 @@ class ParseError(Exception):
 return "line %d: %s @ ... %s" % (
 self.line, self.reason, self._contextString())
 
-def __nonzero__(self):
+def __bool__(self):
 return False
 
 

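The tokenstream hunk renames the truthiness hook: Python 3 calls __bool__ where Python 2 called __nonzero__, which is what lets a ParseError keep evaluating as False. Minimal sketch:

    class ParseError(Exception):
        def __bool__(self):       # Python 2 spelled this __nonzero__
            return False

    err = ParseError("line 1: unexpected token")
    print(bool(err))              # False
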
@@ -28,7 +28,7 @@ $/LicenseInfo$
 """
 
 from collections import namedtuple, defaultdict
-import commands
+import subprocess
 import errno
 import filecmp
 import fnmatch
@@ -164,20 +164,20 @@ BASE_ARGUMENTS=[
 
 def usage(arguments, srctree=""):
 nd = {'name':sys.argv[0]}
-print """Usage:
+print("""Usage:
 %(name)s [options] [destdir]
 Options:
-""" % nd
+""" % nd)
 for arg in arguments:
 default = arg['default']
 if hasattr(default, '__call__'):
 default = "(computed value) \"" + str(default(srctree)) + '"'
 elif default is not None:
 default = '"' + default + '"'
-print "\t--%s Default: %s\n\t%s\n" % (
+print("\t--%s Default: %s\n\t%s\n" % (
 arg['name'],
 default,
-arg['description'] % nd)
+arg['description'] % nd))
 
 def main(extra=[]):
 ## print ' '.join((("'%s'" % item) if ' ' in item else item)
@@ -202,10 +202,10 @@ def main(extra=[]):
 for k in 'artwork build dest source'.split():
 args[k] = os.path.normpath(args[k])
 
-print "Source tree:", args['source']
-print "Artwork tree:", args['artwork']
-print "Build tree:", args['build']
-print "Destination tree:", args['dest']
+print("Source tree:", args['source'])
+print("Artwork tree:", args['artwork'])
+print("Build tree:", args['build'])
+print("Destination tree:", args['dest'])
 
 # early out for help
 if 'help' in args:
@@ -228,7 +228,7 @@ def main(extra=[]):
 vf = open(args['versionfile'], 'r')
 args['version'] = vf.read().strip().split('.')
 except:
-print "Unable to read versionfile '%s'" % args['versionfile']
+print("Unable to read versionfile '%s'" % args['versionfile'])
 # <FS:ND> This will break copy_w_viewer_manifest on Windows 32 and 64 bit builds, the versionfile will not create until the firestorm project.
 # As copy_w_viewer_manifest does not seem to need the version attribute, we supress the exception for now.
 # raise
@@ -242,7 +242,7 @@ def main(extra=[]):
 
 # debugging
 for opt in args:
-print "Option:", opt, "=", args[opt]
+print("Option:", opt, "=", args[opt])
 
 # pass in sourceid as an argument now instead of an environment variable
 args['sourceid'] = os.environ.get("sourceid", "")
@@ -250,18 +250,18 @@ def main(extra=[]):
 # Build base package.
 touch = args.get('touch')
 if touch:
-print '================ Creating base package'
+print('================ Creating base package')
 else:
-print '================ Starting base copy'
+print('================ Starting base copy')
 wm = LLManifest.for_platform(args['platform'], args.get('arch'))(args)
 wm.do(*args['actions'])
 # Store package file for later if making touched file.
 base_package_file = ""
 if touch:
-print '================ Created base package ', wm.package_file
+print('================ Created base package ', wm.package_file)
 base_package_file = "" + wm.package_file
 else:
-print '================ Finished base copy'
+print('================ Finished base copy')
 
 # handle multiple packages if set
 # ''.split() produces empty list
@@ -288,26 +288,26 @@ def main(extra=[]):
 args['sourceid'] = os.environ.get(package_id + "_sourceid")
 args['dest'] = base_dest_template.format(package_id)
 if touch:
-print '================ Creating additional package for "', package_id, '" in ', args['dest']
+print('================ Creating additional package for "', package_id, '" in ', args['dest'])
 else:
-print '================ Starting additional copy for "', package_id, '" in ', args['dest']
+print('================ Starting additional copy for "', package_id, '" in ', args['dest'])
 try:
 wm = LLManifest.for_platform(args['platform'], args.get('arch'))(args)
 wm.do(*args['actions'])
 except Exception as err:
 sys.exit(str(err))
 if touch:
-print '================ Created additional package ', wm.package_file, ' for ', package_id
+print('================ Created additional package ', wm.package_file, ' for ', package_id)
 with open(base_touch_template.format(package_id), 'w') as fp:
 fp.write('set package_file=%s\n' % wm.package_file)
 else:
-print '================ Finished additional copy "', package_id, '" in ', args['dest']
+print('================ Finished additional copy "', package_id, '" in ', args['dest'])
 # Write out the package file in this format, so that it can easily be called
 # and used in a .bat file - yeah, it sucks, but this is the simplest...
 if touch:
 with open(touch, 'w') as fp:
 fp.write('set package_file=%s\n' % base_package_file)
-print 'touched', touch
+print('touched', touch)
 return 0
 
 class LLManifestRegistry(type):
@@ -319,8 +319,7 @@ class LLManifestRegistry(type):
 
 MissingFile = namedtuple("MissingFile", ("pattern", "tried"))
 
-class LLManifest(object):
-__metaclass__ = LLManifestRegistry
+class LLManifest(object, metaclass=LLManifestRegistry):
 manifests = {}
 def for_platform(self, platform, arch = None):
 if arch:
@@ -412,8 +411,8 @@ class LLManifest(object):
 def display_stacks(self):
 width = 1 + max(len(stack) for stack in self.PrefixManager.stacks)
 for stack in self.PrefixManager.stacks:
-print "{} {}".format((stack + ':').ljust(width),
-os.path.join(*getattr(self, stack)))
+print("{} {}".format((stack + ':').ljust(width),
+os.path.join(*getattr(self, stack))))
 
 class PrefixManager(object):
 # stack attributes we manage in this LLManifest (sub)class
@@ -430,7 +429,7 @@ class LLManifest(object):
 self.prevlen = { stack: len(getattr(self.manifest, stack)) - 1
 for stack in self.stacks }
 
-def __nonzero__(self):
+def __bool__(self):
 # If the caller wrote:
 # if self.prefix(...):
 # then a value of this class had better evaluate as 'True'.
@@ -456,7 +455,7 @@ class LLManifest(object):
 # if we restore the length of each stack to what it was before the
 # current prefix() block, it doesn't matter whether end_prefix()
 # was called or not.
-for stack, prevlen in self.prevlen.items():
+for stack, prevlen in list(self.prevlen.items()):
 # find the attribute in 'self.manifest' named by 'stack', and
 # truncate that list back to 'prevlen'
 del getattr(self.manifest, stack)[prevlen:]
@@ -475,7 +474,7 @@ class LLManifest(object):
 build = self.build_prefix.pop()
 dst = self.dst_prefix.pop()
 if descr and not(src == descr or build == descr or dst == descr):
-raise ValueError, "End prefix '" + descr + "' didn't match '" +src+ "' or '" +dst + "'"
+raise ValueError("End prefix '" + descr + "' didn't match '" +src+ "' or '" +dst + "'")
 
 def get_src_prefix(self):
 """ Returns the current source prefix."""
@@ -542,7 +541,7 @@ class LLManifest(object):
 Runs an external command.
 Raises ManifestError exception if the command returns a nonzero status.
 """
-print "Running command:", command
+print("Running command:", command)
 sys.stdout.flush()
 try:
 subprocess.check_call(command)
@@ -570,18 +569,15 @@ class LLManifest(object):
 a) verify that you really have created it
 b) schedule it for cleanup"""
 if not os.path.exists(path):
-raise ManifestError, "Should be something at path " + path
+raise ManifestError("Should be something at path " + path)
 self.created_paths.append(path)
 
 def put_in_file(self, contents, dst, src=None):
 # write contents as dst
 dst_path = self.dst_path_of(dst)
 self.cmakedirs(os.path.dirname(dst_path))
-f = open(dst_path, "wb")
-try:
+with open(dst_path, 'wb') as f:
 f.write(contents)
-finally:
-f.close()
 
 # Why would we create a file in the destination tree if not to include
 # it in the installer? The default src=None (plus the fact that the
@@ -594,13 +590,12 @@ class LLManifest(object):
 if dst == None:
 dst = src
 # read src
-f = open(self.src_path_of(src), "rbU")
-contents = f.read()
-f.close()
+with open(self.src_path_of(src), "r") as f:
+contents = f.read()
 # apply dict replacements
-for old, new in searchdict.iteritems():
+for old, new in searchdict.items():
 contents = contents.replace(old, new)
-self.put_in_file(contents, dst)
+self.put_in_file(contents.encode(), dst)
 self.created_paths.append(dst)
 
 def copy_action(self, src, dst):
@@ -610,7 +605,7 @@ class LLManifest(object):
 self.created_paths.append(dst)
 self.ccopymumble(src, dst)
 else:
-print "Doesn't exist:", src
+print("Doesn't exist:", src)
 
 def package_action(self, src, dst):
 pass
@@ -628,8 +623,8 @@ class LLManifest(object):
 # file error until all were resolved. This way permits the developer
 # to resolve them all at once.
 if self.missing:
-print '*' * 72
-print "Missing files:"
+print('*' * 72)
+print("Missing files:")
 # Instead of just dumping each missing file and all the places we
 # looked for it, group by common sets of places we looked. Use a
 # set to store the 'tried' directories, to avoid mismatches due to
@@ -640,13 +635,13 @@ class LLManifest(object):
 organize[frozenset(missingfile.tried)].add(missingfile.pattern)
 # Now dump all the patterns sought in each group of 'tried'
 # directories.
-for tried, patterns in organize.items():
-print " Could not find in:"
+for tried, patterns in list(organize.items()):
+print(" Could not find in:")
 for dir in sorted(tried):
-print " %s" % dir
+print(" %s" % dir)
 for pattern in sorted(patterns):
-print " %s" % pattern
-print '*' * 72
+print(" %s" % pattern)
+print('*' * 72)
 raise MissingError('%s patterns could not be found' % len(self.missing))
 
 def copy_finish(self):
@@ -659,7 +654,7 @@ class LLManifest(object):
 unpacked_file_name = "unpacked_%(plat)s_%(vers)s.tar" % {
 'plat':self.args['platform'],
 'vers':'_'.join(self.args['version'])}
-print "Creating unpacked file:", unpacked_file_name
+print("Creating unpacked file:", unpacked_file_name)
 # could add a gz here but that doubles the time it takes to do this step
 tf = tarfile.open(self.src_path_of(unpacked_file_name), 'w:')
 # add the entire installation package, at the very top level
@@ -670,7 +665,7 @@ class LLManifest(object):
 """ Delete paths that were specified to have been created by this script"""
 for c in self.created_paths:
 # *TODO is this gonna be useful?
-print "Cleaning up " + c
+print("Cleaning up " + c)
 
 def process_either(self, src, dst):
 # If it's a real directory, recurse through it --
@@ -719,7 +714,7 @@ class LLManifest(object):
 def remove(self, *paths):
 for path in paths:
 if os.path.exists(path):
-print "Removing path", path
+print("Removing path", path)
 if os.path.isdir(path):
 shutil.rmtree(path)
 else:
@@ -781,7 +776,7 @@ class LLManifest(object):
 except (IOError, os.error) as why:
 errors.append((srcname, dstname, why))
 if errors:
-raise ManifestError, errors
+raise ManifestError(errors)
 
 
 def cmakedirs(self, path):
@@ -893,13 +888,13 @@ class LLManifest(object):
 break
 else:
 # no more prefixes left to try
-print("\nunable to find '%s'; looked in:\n %s" % (src, '\n '.join(try_prefixes)))
+print(("\nunable to find '%s'; looked in:\n %s" % (src, '\n '.join(try_prefixes))))
 self.missing.append(MissingFile(pattern=src, tried=try_prefixes))
 # At this point 'count' might never have been successfully
 # assigned! Even if it was, though, we can be sure it is 0.
 return 0
 
-print "%d files" % count
+print("%d files" % count)
 
 # Let caller check whether we processed as many files as expected. In
 # particular, let caller notice 0.

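The llmanifest hunks above include the metaclass migration: Python 3 replaces the __metaclass__ class attribute with a metaclass= keyword in the class header. A simplified, runnable sketch (the registry body here is a stand-in, not the file's real bookkeeping):

    class LLManifestRegistry(type):
        manifests = {}
        def __init__(cls, name, bases, dct):
            super().__init__(name, bases, dct)
            if name.endswith('Manifest'):
                LLManifestRegistry.manifests[name.lower()] = cls

    # Python 2:  class LLManifest(object): __metaclass__ = LLManifestRegistry
    class LLManifest(object, metaclass=LLManifestRegistry):
        pass

    class WindowsManifest(LLManifest):
        pass

    print(sorted(LLManifestRegistry.manifests))  # ['llmanifest', 'windowsmanifest']
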
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file test_win32_manifest.py
 @brief Test an assembly binding version and uniqueness in a windows dll or exe.
@@ -44,10 +44,10 @@ class NoMatchingAssemblyException(AssemblyTestException):
 pass
 
 def get_HKLM_registry_value(key_str, value_str):
-import _winreg
-reg = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
-key = _winreg.OpenKey(reg, key_str)
-value = _winreg.QueryValueEx(key, value_str)[0]
+import winreg
+reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
+key = winreg.OpenKey(reg, key_str)
+value = winreg.QueryValueEx(key, value_str)[0]
 #print 'Found: %s' % value
 return value
 
@@ -64,12 +64,12 @@ def find_vc_dir():
 (product, version))
 try:
 return get_HKLM_registry_value(key_str, value_str)
-except WindowsError, err:
+except WindowsError as err:
 x64_key_str = (r'SOFTWARE\Wow6432Node\Microsoft\%s\%s\Setup\VC' % (product, version))
 try:
 return get_HKLM_registry_value(x64_key_str, value_str)
 except:
-print >> sys.stderr, "Didn't find MS %s version %s " % (product,version)
+print("Didn't find MS %s version %s " % (product,version), file=sys.stderr)
 
 raise
 
@@ -79,7 +79,7 @@ def find_mt_path():
 return mt_path
 
 def test_assembly_binding(src_filename, assembly_name, assembly_ver):
-print "checking %s dependency %s..." % (src_filename, assembly_name)
+print("checking %s dependency %s..." % (src_filename, assembly_name))
 
 (tmp_file_fd, tmp_file_name) = tempfile.mkstemp(suffix='.xml')
 tmp_file = os.fdopen(tmp_file_fd)
@@ -90,10 +90,10 @@ def test_assembly_binding(src_filename, assembly_name, assembly_ver):
 if os.path.splitext(src_filename)[1].lower() == ".dll":
 resource_id = ";#2"
 system_call = '%s -nologo -inputresource:%s%s -out:%s > NUL' % (mt_path, src_filename, resource_id, tmp_file_name)
-print "Executing: %s" % system_call
+print("Executing: %s" % system_call)
 mt_result = os.system(system_call)
 if mt_result == 31:
-print "No manifest found in %s" % src_filename
+print("No manifest found in %s" % src_filename)
 raise NoManifestException()
 
 manifest_dom = parse(tmp_file_name)
@@ -105,30 +105,30 @@ def test_assembly_binding(src_filename, assembly_name, assembly_ver):
 versions.append(node.getAttribute('version'))
 
 if len(versions) == 0:
-print "No matching assemblies found in %s" % src_filename
+print("No matching assemblies found in %s" % src_filename)
 raise NoMatchingAssemblyException()
 
 #elif len(versions) > 1:
-# print "Multiple bindings to %s found:" % assembly_name
-# print versions
-# print
+# print("Multiple bindings to %s found:" % assembly_name)
+# print(versions)
+# print()
 # raise MultipleBindingsException(versions)
 
 #elif versions[0] != assembly_ver:
-# print "Unexpected version found for %s:" % assembly_name
-# print "Wanted %s, found %s" % (assembly_ver, versions[0])
-# print
+# print("Unexpected version found for %s:" % assembly_name)
+# print("Wanted %s, found %s" % (assembly_ver, versions[0]))
+# print()
 # raise UnexpectedVersionException(assembly_ver, versions[0])
 
 os.remove(tmp_file_name)
 
-print "SUCCESS: %s OK!" % src_filename
-print
+print("SUCCESS: %s OK!" % src_filename)
+print()
 
 if __name__ == '__main__':
 
-print
-print "Running test_win32_manifest.py..."
+print()
+print("Running test_win32_manifest.py...")
 
 usage = 'test_win32_manfest <srcFileName> <assemblyName> <assemblyVersion>'
 
@@ -137,9 +137,9 @@ if __name__ == '__main__':
 assembly_name = sys.argv[2]
 assembly_ver = sys.argv[3]
 except:
-print "Usage:"
-print usage
-print
+print("Usage:")
+print(usage)
+print()
 raise
 
 test_assembly_binding(src_filename, assembly_name, assembly_ver)

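The registry hunks above are the _winreg → winreg rename; the API is otherwise unchanged. A sketch, guarded so it only queries the registry on Windows (the key and value queried here are examples, not the script's):

    import sys

    def get_HKLM_registry_value(key_str, value_str):
        import winreg                              # Python 2: import _winreg
        reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
        key = winreg.OpenKey(reg, key_str)
        return winreg.QueryValueEx(key, value_str)[0]

    if sys.platform == 'win32':
        print(get_HKLM_registry_value(
            r'SOFTWARE\Microsoft\Windows NT\CurrentVersion', 'ProductName'))
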
@@ -544,8 +544,6 @@ namespace
 protected:
 Globals();
 public:
-std::ostringstream messageStream;
-bool messageStreamInUse;
 std::string mFatalMessage;
 
 void addCallSite(LLError::CallSite&);
@@ -562,7 +560,9 @@ namespace
 };
 
 Globals::Globals()
-: mSettingsConfig(new SettingsConfig())
+:
+callSites(),
+mSettingsConfig(new SettingsConfig())
 {
 }
 

@@ -86,7 +86,7 @@ public:
 // notice Python specially: we provide Python LLSD serialization
 // support, so there's a pretty good reason to implement plugins
 // in that language.
-if (cparams.args.size() && (desclower == "python" || desclower == "python.exe"))
+if (cparams.args.size() && (desclower == "python" || desclower == "python3" || desclower == "python.exe"))
 {
 mDesc = LLProcess::basename(cparams.args()[0]);
 }

@@ -145,13 +145,13 @@ namespace tut
 " data = ''.join(parts)\n"
 " assert len(data) == length\n"
 " try:\n"
-" return llsd.parse(data)\n"
+" return llsd.parse(data.encode())\n"
 // Seems the old indra.base.llsd module didn't properly
 // convert IndexError (from running off end of string) to
 // LLSDParseError.
-" except (IndexError, llsd.LLSDParseError), e:\n"
+" except (IndexError, llsd.LLSDParseError) as e:\n"
 " msg = 'Bad received packet (%s)' % e\n"
-" print >>sys.stderr, '%s, %s bytes:' % (msg, len(data))\n"
+" print('%s, %s bytes:' % (msg, len(data)), file=sys.stderr)\n"
 " showmax = 40\n"
 // We've observed failures with very large packets;
 // dumping the entire packet wastes time and space.
@@ -167,12 +167,12 @@ namespace tut
 " data = data[:trunc]\n"
 " ellipsis = '... (%s more)' % (length - trunc)\n"
 " offset = -showmax\n"
-" for offset in xrange(0, len(data)-showmax, showmax):\n"
-" print >>sys.stderr, '%04d: %r +' % \\\n"
-" (offset, data[offset:offset+showmax])\n"
+" for offset in range(0, len(data)-showmax, showmax):\n"
+" print('%04d: %r +' % \\\n"
+" (offset, data[offset:offset+showmax]), file=sys.stderr)\n"
 " offset += showmax\n"
-" print >>sys.stderr, '%04d: %r%s' % \\\n"
-" (offset, data[offset:], ellipsis)\n"
+" print('%04d: %r%s' % \\\n"
+" (offset, data[offset:], ellipsis), file=sys.stderr)\n"
 " raise ParseError(msg, data)\n"
 "\n"
 "# deal with initial stdin message\n"
@@ -189,7 +189,7 @@ namespace tut
 " sys.stdout.flush()\n"
 "\n"
 "def send(pump, data):\n"
-" put(llsd.format_notation(dict(pump=pump, data=data)))\n"
+" put(llsd.format_notation(dict(pump=pump, data=data)).decode())\n"
 "\n"
 "def request(pump, data):\n"
 " # we expect 'data' is a dict\n"
@@ -253,7 +253,7 @@ namespace tut
 {
 set_test_name("bad stdout protocol");
 NamedTempFile script("py",
-"print 'Hello from Python!'\n");
+"print('Hello from Python!')\n");
 CaptureLog log(LLError::LEVEL_WARN);
 waitfor(LLLeap::create(get_test_name(),
 sv(list_of(PYTHON)(script.getName()))));
@@ -438,8 +438,8 @@ namespace tut
 // guess how many messages it will take to
 // accumulate BUFFERED_LENGTH
 "count = int(" << BUFFERED_LENGTH << "/samplen)\n"
-"print >>sys.stderr, 'Sending %s requests' % count\n"
-"for i in xrange(count):\n"
+"print('Sending %s requests' % count, file=sys.stderr)\n"
+"for i in range(count):\n"
 " request('" << api.getName() << "', dict(reqid=i))\n"
 // The assumption in this specific test that
 // replies will arrive in the same order as
@@ -450,7 +450,7 @@ namespace tut
 // arbitrary order, and we'd have to tick them
 // off from a set.
 "result = ''\n"
-"for i in xrange(count):\n"
+"for i in range(count):\n"
 " resp = get()\n"
 " if resp['data']['reqid'] != i:\n"
 " result = 'expected reqid=%s in %s' % (i, resp)\n"
@@ -476,13 +476,13 @@ namespace tut
 "desired = int(sys.argv[1])\n"
 // 7 chars per item: 6 digits, 1 comma
 "count = int((desired - 50)/7)\n"
-"large = ''.join('%06d,' % i for i in xrange(count))\n"
+"large = ''.join('%06d,' % i for i in range(count))\n"
 // Pass 'large' as reqid because we know the API
 // will echo reqid, and we want to receive it back.
 "request('" << api.getName() << "', dict(reqid=large))\n"
 "try:\n"
 " resp = get()\n"
-"except ParseError, e:\n"
+"except ParseError as e:\n"
 " # try to find where e.data diverges from expectation\n"
 // Normally we'd expect a 'pump' key in there,
 // too, with value replypump(). But Python
@@ -493,17 +493,18 @@ namespace tut
 // strange.
 " expect = llsd.format_notation(dict(data=dict(reqid=large)))\n"
 " chunk = 40\n"
-" for offset in xrange(0, max(len(e.data), len(expect)), chunk):\n"
+" for offset in range(0, max(len(e.data), len(expect)), chunk):\n"
 " if e.data[offset:offset+chunk] != \\\n"
 " expect[offset:offset+chunk]:\n"
-" print >>sys.stderr, 'Offset %06d: expect %r,\\n'\\\n"
+" print('Offset %06d: expect %r,\\n'\\\n"
 " ' get %r' %\\\n"
 " (offset,\n"
 " expect[offset:offset+chunk],\n"
-" e.data[offset:offset+chunk])\n"
+" e.data[offset:offset+chunk]),\n"
+" file=sys.stderr)\n"
 " break\n"
 " else:\n"
-" print >>sys.stderr, 'incoming data matches expect?!'\n"
+" print('incoming data matches expect?!', file=sys.stderr)\n"
 " send('" << result.getName() << "', '%s: %s' % (e.__class__.__name__, e))\n"
 " sys.exit(1)\n"
 "\n"
@@ -512,7 +513,7 @@ namespace tut
 " send('" << result.getName() << "', '')\n"
 " sys.exit(0)\n"
 // Here we know echoed did NOT match; try to find where
-"for i in xrange(count):\n"
+"for i in range(count):\n"
 " start = 7*i\n"
 " end = 7*(i+1)\n"
 " if end > len(echoed)\\\n"

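The embedded scripts in the llleap tests change in two ways: 'except X, e' becomes 'except X as e', and the llsd functions now traffic in bytes (parse() wants bytes, format_notation() returns bytes, hence the .encode()/.decode() calls above). A sketch assuming the standalone llsd package the tests rely on:

    import llsd

    blob = llsd.format_notation(dict(pump='lleap', data=dict(reqid=1)))  # bytes
    try:
        value = llsd.parse(blob)                     # Python 3: takes bytes
    except (IndexError, llsd.LLSDParseError) as e:   # was: except (...), e:
        print('Bad received packet (%s)' % e)
    else:
        print(value['data']['reqid'])                # 1
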
@@ -360,10 +360,10 @@ namespace tut
 "import time" EOL
 EOL
 "time.sleep(2)" EOL
-"print >>sys.stdout, 'stdout after wait'" EOL
+"print('stdout after wait', file=sys.stdout)" EOL
 "sys.stdout.flush()" EOL
 "time.sleep(2)" EOL
-"print >>sys.stderr, 'stderr after wait'" EOL
+"print('stderr after wait', file=sys.stderr)" EOL
 "sys.stderr.flush()" EOL
 );
 
@@ -381,7 +381,11 @@ namespace tut
 
 std::vector<const char*> argv;
 apr_proc_t child;
+#if defined(LL_WINDOWS)
 argv.push_back("python");
+#else
+argv.push_back("python3");
+#endif
 // Have to have a named copy of this std::string so its c_str() value
 // will persist.
 std::string scriptname(script.getName());
@@ -573,7 +577,7 @@ namespace tut
 // note nonstandard output-file arg!
 "with open(sys.argv[3], 'w') as f:\n"
 " for arg in sys.argv[1:]:\n"
-" print >>f, arg\n");
+" print(arg, file=f)\n");
 // We expect that PythonProcessLauncher has already appended
 // its own NamedTempFile to mParams.args (sys.argv[0]).
 py.mParams.args.add("first arg"); // sys.argv[1]
@@ -742,7 +746,7 @@ namespace tut
 "with open(sys.argv[1], 'w') as f:\n"
 " f.write('ok')\n"
 "# wait for 'go' from test program\n"
-"for i in xrange(60):\n"
+"for i in range(60):\n"
 " time.sleep(1)\n"
 " with open(sys.argv[2]) as f:\n"
 " go = f.read()\n"
@@ -804,7 +808,7 @@ namespace tut
 "with open(sys.argv[1], 'w') as f:\n"
 " f.write('ok')\n"
 "# wait for 'go' from test program\n"
-"for i in xrange(60):\n"
+"for i in range(60):\n"
 " time.sleep(1)\n"
 " with open(sys.argv[2]) as f:\n"
 " go = f.read()\n"
@@ -857,7 +861,7 @@ namespace tut
 set_test_name("'bogus' test");
 CaptureLog recorder;
 PythonProcessLauncher py(get_test_name(),
-"print 'Hello world'\n");
+"print('Hello world')\n");
 py.mParams.files.add(LLProcess::FileParam("bogus"));
 py.mPy = LLProcess::create(py.mParams);
 ensure("should have rejected 'bogus'", ! py.mPy);
@@ -872,7 +876,7 @@ namespace tut
 // Replace this test with one or more real 'file' tests when we
 // implement 'file' support
 PythonProcessLauncher py(get_test_name(),
-"print 'Hello world'\n");
+"print('Hello world')\n");
 py.mParams.files.add(LLProcess::FileParam());
 py.mParams.files.add(LLProcess::FileParam("file"));
 py.mPy = LLProcess::create(py.mParams);
@@ -887,7 +891,7 @@ namespace tut
 // implement 'tpipe' support
 CaptureLog recorder;
 PythonProcessLauncher py(get_test_name(),
-"print 'Hello world'\n");
+"print('Hello world')\n");
 py.mParams.files.add(LLProcess::FileParam());
 py.mParams.files.add(LLProcess::FileParam("tpipe"));
 py.mPy = LLProcess::create(py.mParams);
@@ -904,7 +908,7 @@ namespace tut
 // implement 'npipe' support
 CaptureLog recorder;
 PythonProcessLauncher py(get_test_name(),
-"print 'Hello world'\n");
+"print('Hello world')\n");
 py.mParams.files.add(LLProcess::FileParam());
 py.mParams.files.add(LLProcess::FileParam());
 py.mParams.files.add(LLProcess::FileParam("npipe"));
@@ -980,7 +984,7 @@ namespace tut
 {
 set_test_name("get*Pipe() validation");
 PythonProcessLauncher py(get_test_name(),
-"print 'this output is expected'\n");
+"print('this output is expected')\n");
 py.mParams.files.add(LLProcess::FileParam("pipe")); // pipe for stdin
 py.mParams.files.add(LLProcess::FileParam()); // inherit stdout
 py.mParams.files.add(LLProcess::FileParam("pipe")); // pipe for stderr
@@ -1001,13 +1005,13 @@ namespace tut
 set_test_name("talk to stdin/stdout");
 PythonProcessLauncher py(get_test_name(),
 "import sys, time\n"
-"print 'ok'\n"
+"print('ok')\n"
 "sys.stdout.flush()\n"
 "# wait for 'go' from test program\n"
 "go = sys.stdin.readline()\n"
 "if go != 'go\\n':\n"
 " sys.exit('expected \"go\", saw %r' % go)\n"
-"print 'ack'\n");
+"print('ack')\n");
 py.mParams.files.add(LLProcess::FileParam("pipe")); // stdin
 py.mParams.files.add(LLProcess::FileParam("pipe")); // stdout
 py.launch();
@@ -1118,7 +1122,7 @@ namespace tut
 {
 set_test_name("ReadPipe \"eof\" event");
 PythonProcessLauncher py(get_test_name(),
-"print 'Hello from Python!'\n");
+"print('Hello from Python!')\n");
 py.mParams.files.add(LLProcess::FileParam()); // stdin
 py.mParams.files.add(LLProcess::FileParam("pipe")); // stdout
 py.launch();

@@ -1795,7 +1795,7 @@ namespace tut
 set_test_name("verify NamedTempFile");
 python("platform",
 "import sys\n"
-"print 'Running on', sys.platform\n");
+"print('Running on', sys.platform)\n");
 }
 
 // helper for test<3>
@@ -1825,14 +1825,14 @@ namespace tut
 const char pydata[] =
 "def verify(iterable):\n"
 " it = iter(iterable)\n"
-" assert it.next() == 17\n"
-" assert abs(it.next() - 3.14) < 0.01\n"
-" assert it.next() == '''\\\n"
+" assert next(it) == 17\n"
+" assert abs(next(it) - 3.14) < 0.01\n"
+" assert next(it) == '''\\\n"
 "This string\n"
 "has several\n"
 "lines.'''\n"
 " try:\n"
-" it.next()\n"
+" next(it)\n"
 " except StopIteration:\n"
 " pass\n"
 " else:\n"
@@ -1855,7 +1855,7 @@ namespace tut
 " yield llsd.parse(item)\n" <<
 pydata <<
 // Don't forget raw-string syntax for Windows pathnames.
-"verify(parse_each(open(r'" << file.getName() << "')))\n");
+"verify(parse_each(open(r'" << file.getName() << "', 'rb')))\n");
 }
 
 template<> template<>
@@ -1870,7 +1870,6 @@ namespace tut
 
 python("write Python notation",
 placeholders::arg1 <<
-"from __future__ import with_statement\n" <<
 import_llsd <<
 "DATA = [\n"
 " 17,\n"
@@ -1884,7 +1883,7 @@ namespace tut
 // N.B. Using 'print' implicitly adds newlines.
 "with open(r'" << file.getName() << "', 'w') as f:\n"
 " for item in DATA:\n"
-" print >>f, llsd.format_notation(item)\n");
+" print(llsd.format_notation(item).decode(), file=f)\n");
 
 std::ifstream inf(file.getName().c_str());
 LLSD item;

@@ -135,7 +135,9 @@ public:
 }
 }
 std::ostringstream str;
-str << "Required header # " << i << " found in response";
+str << "Required header #" << i << " "
+<< mHeadersRequired[i].first << "=" << mHeadersRequired[i].second
+<< " not found in response";
 ensure(str.str(), found);
 }
 }
@@ -154,7 +156,9 @@ public:
 mHeadersDisallowed[i].second))
 {
 std::ostringstream str;
-str << "Disallowed header # " << i << " not found in response";
+str << "Disallowed header #" << i << " "
+<< mHeadersDisallowed[i].first << "=" << mHeadersDisallowed[i].second
+<< " found in response";
 ensure(str.str(), false);
 }
 }
@@ -2127,6 +2131,17 @@ void HttpRequestTestObjectType::test<18>()
 template <> template <>
 void HttpRequestTestObjectType::test<19>()
 {
+// It appears that HttpRequest is fully capable of sending duplicate header values in violation of
+// this test's expectations. Something needs to budge: is sending duplicate header values desired?
+//
+// Test server /reflect/ response headers (mirrored from request)
+//
+// X-Reflect-content-type: text/plain
+// X-Reflect-content-type: text/html
+// X-Reflect-content-type: application/llsd+xml
+//
+skip("FIXME: Bad assertions or broken functionality.");
+
 ScopedCurlInit ready;
 
 // Warmup boost::regex to pre-alloc memory for memory size tests
@@ -2307,6 +2322,17 @@ void HttpRequestTestObjectType::test<19>()
 template <> template <>
 void HttpRequestTestObjectType::test<20>()
 {
+// It appears that HttpRequest is fully capable of sending duplicate header values in violation of
+// this test's expectations. Something needs to budge: is sending duplicate header values desired?
+//
+// Test server /reflect/ response headers (mirrored from request)
+//
+// X-Reflect-content-type: text/plain
+// X-Reflect-content-type: text/html
+// X-Reflect-content-type: application/llsd+xml
+//
+skip("FIXME: Bad assertions or broken functionality.");
+
 ScopedCurlInit ready;
 
 // Warmup boost::regex to pre-alloc memory for memory size tests
@@ -2512,6 +2538,17 @@ void HttpRequestTestObjectType::test<20>()
 template <> template <>
 void HttpRequestTestObjectType::test<21>()
 {
+// It appears that HttpRequest is fully capable of sending duplicate header values in violation of
+// this test's expectations. Something needs to budge: is sending duplicate header values desired?
+//
+// Test server /reflect/ response headers (mirrored from request)
+//
+// X-Reflect-content-type: text/plain
+// X-Reflect-content-type: text/html
+// X-Reflect-content-type: application/llsd+xml
+//
+skip("FIXME: Bad assertions or broken functionality.");
+
 ScopedCurlInit ready;
 
 // Warmup boost::regex to pre-alloc memory for memory size tests

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file test_llsdmessage_peer.py
 @author Nat Goodspeed
@@ -34,11 +34,9 @@ import sys
 import time
 import select
 import getopt
-try:
-from cStringIO import StringIO
-except ImportError:
-from StringIO import StringIO
-from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
+from io import StringIO
+from http.server import HTTPServer, BaseHTTPRequestHandler
+
 
 from llbase.fastest_elementtree import parse as xml_parse
 from llbase import llsd
@@ -97,13 +95,13 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler):
 except (KeyError, ValueError):
 return ""
 max_chunk_size = 10*1024*1024
-L = []
+L = bytes()
 while size_remaining:
 chunk_size = min(size_remaining, max_chunk_size)
 chunk = self.rfile.read(chunk_size)
-L.append(chunk)
+L += chunk
 size_remaining -= len(chunk)
-return ''.join(L)
+return L.decode("utf-8")
 # end of swiped read() logic
 
 def read_xml(self):
@@ -127,8 +125,8 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler):
 try:
 self.answer(dict(reply="success", status=200,
 reason="Your GET operation worked"))
-except self.ignore_exceptions, e:
-print >> sys.stderr, "Exception during GET (ignoring): %s" % str(e)
+except self.ignore_exceptions as e:
+print("Exception during GET (ignoring): %s" % str(e), file=sys.stderr)
 
 def do_POST(self):
 # Read the provided POST data.
@@ -136,8 +134,8 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler):
 try:
 self.answer(dict(reply="success", status=200,
 reason=self.read()))
-except self.ignore_exceptions, e:
-print >> sys.stderr, "Exception during POST (ignoring): %s" % str(e)
+except self.ignore_exceptions as e:
+print("Exception during POST (ignoring): %s" % str(e), file=sys.stderr)
 
 def do_PUT(self):
 # Read the provided PUT data.
@@ -145,8 +143,8 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler):
 try:
 self.answer(dict(reply="success", status=200,
 reason=self.read()))
-except self.ignore_exceptions, e:
-print >> sys.stderr, "Exception during PUT (ignoring): %s" % str(e)
+except self.ignore_exceptions as e:
+print("Exception during PUT (ignoring): %s" % str(e), file=sys.stderr)
 
 def answer(self, data, withdata=True):
 debug("%s.answer(%s): self.path = %r", self.__class__.__name__, data, self.path)
@@ -221,7 +219,7 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler):
 self.send_header("Content-type", "text/plain")
 self.end_headers()
 if body:
-self.wfile.write(body)
+self.wfile.write(body.encode("utf-8"))
 elif "fail" not in self.path:
 data = data.copy() # we're going to modify
 # Ensure there's a "reply" key in data, even if there wasn't before
@@ -255,9 +253,9 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler):
 self.end_headers()
 
 def reflect_headers(self):
-for name in self.headers.keys():
-# print "Header: %s: %s" % (name, self.headers[name])
-self.send_header("X-Reflect-" + name, self.headers[name])
+for (name, val) in self.headers.items():
+# print("Header: %s %s" % (name, val), file=sys.stderr)
+self.send_header("X-Reflect-" + name, val)
 
 if not VERBOSE:
 # When VERBOSE is set, skip both these overrides because they exist to
@@ -283,10 +281,10 @@ class Server(HTTPServer):
 # default behavior which *shouldn't* cause the program to return
 # a failure status.
 def handle_error(self, request, client_address):
-print '-'*40
-print 'Ignoring exception during processing of request from',
-print client_address
-print '-'*40
+print('-'*40)
+print('Ignoring exception during processing of request from %s' % (client_address))
+print('-'*40)
 
 
 if __name__ == "__main__":
 do_valgrind = False
@@ -307,7 +305,7 @@ if __name__ == "__main__":
 # "Then there's Windows"
 # Instantiate a Server(TestHTTPRequestHandler) on the first free port
 # in the specified port range.
-httpd, port = freeport(xrange(8000, 8020), make_server)
+httpd, port = freeport(range(8000, 8020), make_server)
 
 # Pass the selected port number to the subject test program via the
 # environment. We don't want to impose requirements on the test program's

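In the read() hunk above, Python 3's rfile yields bytes, so the handler accumulates bytes and decodes once at the end instead of joining a list of str chunks. Sketch with io.BytesIO standing in for self.rfile (payload invented here):

    import io

    rfile = io.BytesIO(b'<llsd><map></map></llsd>')
    size_remaining = 24
    max_chunk_size = 10*1024*1024

    L = bytes()                      # Python 2: L = [] ... ''.join(L)
    while size_remaining:
        chunk = rfile.read(min(size_remaining, max_chunk_size))
        if not chunk:
            break                    # sketch-only guard against short input
        L += chunk
        size_remaining -= len(chunk)
    print(L.decode("utf-8"))
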
@@ -2458,7 +2458,14 @@ bool LLVolume::unpackVolumeFacesInternal(const LLSD& mdl)
 
 
 //copy out indices
-face.resizeIndices(idx.size()/2);
+S32 num_indices = idx.size() / 2;
+face.resizeIndices(num_indices);
+
+if (num_indices > 2 && !face.mIndices)
+{
+LL_WARNS() << "Failed to allocate " << num_indices << " indices for face index: " << i << " Total: " << face_count << LL_ENDL;
+continue;
+}
 
 if (idx.empty() || face.mNumIndices < 3)
 { //why is there an empty index list?
@@ -2477,6 +2484,13 @@ bool LLVolume::unpackVolumeFacesInternal(const LLSD& mdl)
 U32 num_verts = pos.size()/(3*2);
 face.resizeVertices(num_verts);
 
+if (num_verts > 0 && !face.mPositions)
+{
+LL_WARNS() << "Failed to allocate " << num_verts << " vertices for face index: " << i << " Total: " << face_count << LL_ENDL;
+face.resizeIndices(0);
+continue;
+}
+
 LLVector3 minp;
 LLVector3 maxp;
 LLVector2 min_tc;
@@ -2578,6 +2592,13 @@ bool LLVolume::unpackVolumeFacesInternal(const LLSD& mdl)
 if (mdl[i].has("Weights"))
 {
 face.allocateWeights(num_verts);
+if (!face.mWeights && num_verts)
+{
+LL_WARNS() << "Failed to allocate " << num_verts << " weights for face index: " << i << " Total: " << face_count << LL_ENDL;
+face.resizeIndices(0);
+face.resizeVertices(0);
+continue;
+}
 
 LLSD::Binary weights = mdl[i]["Weights"];
 
@@ -6435,8 +6456,18 @@ void LLVolumeFace::resizeVertices(S32 num_verts)
 mTexCoords = NULL;
 }
 
-mNumVertices = num_verts;
-mNumAllocatedVertices = num_verts;
+if (mPositions)
+{
+mNumVertices = num_verts;
+mNumAllocatedVertices = num_verts;
+}
+else
+{
+// Either num_verts is zero or allocation failure
+mNumVertices = 0;
+mNumAllocatedVertices = 0;
+}
 
 // Force update
 mJointRiggingInfoTab.clear();
@@ -6537,7 +6568,15 @@ void LLVolumeFace::resizeIndices(S32 num_indices)
 mIndices = NULL;
 }
 
-mNumIndices = num_indices;
+if (mIndices)
+{
+mNumIndices = num_indices;
+}
+else
+{
+// Either num_indices is zero or allocation failure
+mNumIndices = 0;
+}
 }
 
 void LLVolumeFace::pushIndex(const U16& idx)

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file test_llsdmessage_peer.py
 @author Nat Goodspeed
@@ -31,7 +31,7 @@ $/LicenseInfo$
 
 import os
 import sys
-from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
+from http.server import HTTPServer, BaseHTTPRequestHandler
 
 from llbase.fastest_elementtree import parse as xml_parse
 from llbase import llsd
@@ -165,7 +165,7 @@ if __name__ == "__main__":
 # "Then there's Windows"
 # Instantiate a Server(TestHTTPRequestHandler) on the first free port
 # in the specified port range.
-httpd, port = freeport(xrange(8000, 8020), make_server)
+httpd, port = freeport(range(8000, 8020), make_server)
 
 # Pass the selected port number to the subject test program via the
 # environment. We don't want to impose requirements on the test program's

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file testrunner.py
 @author Nat Goodspeed
@@ -41,7 +41,7 @@ VERBOSE = not re.match(r"(0|off|false|quiet)$", VERBOSE, re.IGNORECASE)
 
 if VERBOSE:
     def debug(fmt, *args):
-        print fmt % args
+        print(fmt % args)
         sys.stdout.flush()
 else:
     debug = lambda *args: None
@@ -99,14 +99,14 @@ def freeport(portlist, expr):
     # error because we can't return meaningful values. We have no 'port',
     # therefore no 'expr(port)'.
     portiter = iter(portlist)
-    port = portiter.next()
+    port = next(portiter)
 
     while True:
         try:
             # If this value of port works, return as promised.
             value = expr(port)
 
-        except socket.error, err:
+        except socket.error as err:
             # Anything other than 'Address already in use', propagate
             if err.args[0] != errno.EADDRINUSE:
                 raise
@@ -117,9 +117,9 @@ def freeport(portlist, expr):
             type, value, tb = sys.exc_info()
             try:
                 try:
-                    port = portiter.next()
+                    port = next(portiter)
                 except StopIteration:
-                    raise type, value, tb
+                    raise type(value).with_traceback(tb)
             finally:
                 # Clean up local traceback, see docs for sys.exc_info()
                 del tb
@@ -138,7 +138,7 @@ def freeport(portlist, expr):
        # If we've actually arrived at this point, portiter.next() delivered a
        # new port value. Loop back to pass that to expr(port).
 
-    except Exception, err:
+    except Exception as err:
         debug("*** freeport() raising %s: %s", err.__class__.__name__, err)
         raise
@@ -227,13 +227,13 @@ def test_freeport():
    def exc(exception_class, *args):
        try:
            yield
-        except exception_class, err:
+        except exception_class as err:
            for i, expected_arg in enumerate(args):
                assert expected_arg == err.args[i], \
                    "Raised %s, but args[%s] is %r instead of %r" % \
                    (err.__class__.__name__, i, err.args[i], expected_arg)
-            print "Caught expected exception %s(%s)" % \
-                  (err.__class__.__name__, ', '.join(repr(arg) for arg in err.args))
+            print("Caught expected exception %s(%s)" % \
+                  (err.__class__.__name__, ', '.join(repr(arg) for arg in err.args)))
        else:
            assert False, "Failed to raise " + exception_class.__class__.__name__
@@ -270,18 +270,18 @@ def test_freeport():
     # This is the magic exception that should prompt us to retry
     inuse = socket.error(errno.EADDRINUSE, 'Address already in use')
     # Get the iterator to our ports list so we can check later if we've used all
-    ports = iter(xrange(5))
+    ports = iter(range(5))
     with exc(socket.error, errno.EADDRINUSE):
         freeport(ports, lambda port: raiser(inuse))
     # did we entirely exhaust 'ports'?
     with exc(StopIteration):
-        ports.next()
+        next(ports)
 
-    ports = iter(xrange(2))
+    ports = iter(range(2))
     # Any exception but EADDRINUSE should quit immediately
     with exc(SomeError):
         freeport(ports, lambda port: raiser(SomeError()))
-    assert_equals(ports.next(), 1)
+    assert_equals(next(ports), 1)
 
     # ----------- freeport() with platform-dependent socket stuff ------------
     # This is what we should've had unit tests to begin with (see CHOP-661).
@@ -290,14 +290,14 @@ def test_freeport():
         sock.bind(('127.0.0.1', port))
         return sock
 
-    bound0, port0 = freeport(xrange(7777, 7780), newbind)
+    bound0, port0 = freeport(range(7777, 7780), newbind)
     assert_equals(port0, 7777)
-    bound1, port1 = freeport(xrange(7777, 7780), newbind)
+    bound1, port1 = freeport(range(7777, 7780), newbind)
     assert_equals(port1, 7778)
-    bound2, port2 = freeport(xrange(7777, 7780), newbind)
+    bound2, port2 = freeport(range(7777, 7780), newbind)
     assert_equals(port2, 7779)
     with exc(socket.error, errno.EADDRINUSE):
-        bound3, port3 = freeport(xrange(7777, 7780), newbind)
+        bound3, port3 = freeport(range(7777, 7780), newbind)
 
 if __name__ == "__main__":
     test_freeport()

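The freeport() changes above are the core Python 3 conversions in this file: `except E, err` becomes `except E as err`, and `iterator.next()` becomes the builtin `next(iterator)`. A minimal self-contained sketch of the same idioms (hypothetical helper, not part of this commit):

    import errno
    import socket

    def first_free(ports):
        # Bind to the first port in 'ports' that is free; mirrors freeport().
        it = iter(ports)
        port = next(it)                   # Py3: next(it), not it.next()
        while True:
            try:
                sock = socket.socket()
                sock.bind(('127.0.0.1', port))
                return sock, port
            except socket.error as err:   # Py3 syntax: 'as', not a comma
                if err.args[0] != errno.EADDRINUSE:
                    raise
                port = next(it)           # StopIteration once 'ports' is exhausted
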
@@ -340,7 +340,7 @@
     <string key="NSMaxSize">{10000000000000, 10000000000000}</string>
     <string key="NSFrameAutosaveName">Second Life</string>
     <int key="NSWindowCollectionBehavior">128</int>
-    <bool key="NSWindowIsRestorable">YES</bool>
+    <bool key="NSWindowIsRestorable">NO</bool>
 </object>
 <object class="NSWindowTemplate" id="979091056">
     <int key="NSWindowStyleMask">31</int>

@@ -38,7 +38,7 @@ def munge_binding_redirect_version(src_manifest_name, src_config_name, dst_confi
     comment = config_dom.createComment("This file is automatically generated by the build. see indra/newview/build_win32_appConfig.py")
     config_dom.insertBefore(comment, config_dom.childNodes[0])
 
-    print "Writing: " + dst_config_name
+    print("Writing: " + dst_config_name)
     f = open(dst_config_name, 'w')
     config_dom.writexml(f)
     f.close()

@@ -1391,6 +1391,7 @@ bool LLAppViewer::init()
 //    updater.executable = gDirUtilp->getExpandedFilename(LL_PATH_EXECUTABLE, updater_file);
 //#elif LL_DARWIN
 //    // explicitly run the system Python interpreter on SLVersionChecker.py
+//    // Keep using python2 until SLVersionChecker is converted to python3.
 //    updater.executable = "python";
 //    updater_file = "SLVersionChecker.py";
 //    updater.args.add(gDirUtilp->add(gDirUtilp->getAppRODataDir(), "updater", updater_file));
@@ -2199,6 +2200,8 @@ bool LLAppViewer::cleanup()
 
     if (gAudiop)
     {
+        LL_INFOS() << "Shutting down audio" << LL_ENDL;
+
         // be sure to stop the internet stream cleanly BEFORE destroying the interface to stop it.
         gAudiop->stopInternetStream();
         // shut down the streaming audio sub-subsystem first, in case it relies on not outliving the general audio subsystem.

@@ -36,6 +36,7 @@
 #include "llfloaterevent.h"
 #include "llagent.h"
 #include "llcommandhandler.h"	// secondlife:///app/... support
+#include "lltrans.h"
 // <FS:CR> FIRE-6310 - Legacy Search
 #include "fsfloatersearch.h"
 #include "llviewerfloaterreg.h"
@@ -269,8 +270,40 @@ void LLEventNotifier::load(const LLSD& event_options)
         end = event_options.endArray(); resp_it != end; ++resp_it)
     {
         LLSD response = *resp_it;
+        LLDate date;
+        bool is_iso8601_date = false;
 
-        add(response["event_id"].asInteger(), response["event_date_ut"], response["event_date"].asString(), response["event_name"].asString());
+        if (response["event_date"].isDate())
+        {
+            date = response["event_date"].asDate();
+            is_iso8601_date = true;
+        }
+        else if (date.fromString(response["event_date"].asString()))
+        {
+            is_iso8601_date = true;
+        }
+
+        if (is_iso8601_date)
+        {
+            std::string dateStr;
+
+            dateStr = "[" + LLTrans::getString("LTimeYear") + "]-["
+                + LLTrans::getString("LTimeMthNum") + "]-["
+                + LLTrans::getString("LTimeDay") + "] ["
+                + LLTrans::getString("LTimeHour") + "]:["
+                + LLTrans::getString("LTimeMin") + "]:["
+                + LLTrans::getString("LTimeSec") + "]";
+
+            LLSD substitution;
+            substitution["datetime"] = date;
+            LLStringUtil::format(dateStr, substitution);
+
+            add(response["event_id"].asInteger(), response["event_date_ut"], dateStr, response["event_name"].asString());
+        }
+        else
+        {
+            add(response["event_id"].asInteger(), response["event_date_ut"], response["event_date"].asString(), response["event_name"].asString());
+        }
     }
 }

@@ -107,7 +107,7 @@ protected:
 
    void onAssetLoaded(LLUUID asset_id, LLSettingsBase::ptr_t settins, S32 status);
 
-private:
+protected:
     LLUUID mExpectingAssetId; // for asset load confirmation
 };

@@ -194,8 +194,6 @@ private:
     std::string mLastFrameSlider;
     bool mShiftCopyEnabled;
 
-    LLUUID mExpectingAssetId;
-
     LLButton* mAddFrameButton;
     LLButton* mDeleteFrameButton;
     LLButton* mImportButton;

@@ -1930,7 +1930,20 @@ void LLInventoryModel::addChangedMask(U32 mask, const LLUUID& referent)
         mModifyMask |= mask;
     }
 
-    if (referent.notNull() && (mChangedItemIDs.find(referent) == mChangedItemIDs.end()))
+    bool needs_update = false;
+    if (referent.notNull())
+    {
+        if (mIsNotifyObservers)
+        {
+            needs_update = mChangedItemIDsBacklog.find(referent) == mChangedItemIDsBacklog.end();
+        }
+        else
+        {
+            needs_update = mChangedItemIDs.find(referent) == mChangedItemIDs.end();
+        }
+    }
+
+    if (needs_update)
     {
         if (mIsNotifyObservers)
         {
@@ -1941,6 +1954,8 @@ void LLInventoryModel::addChangedMask(U32 mask, const LLUUID& referent)
             mChangedItemIDs.insert(referent);
         }
 
+        // Fix me: From DD-81, probably shouldn't be here, instead
+        // should be somewhere in an observer
         update_marketplace_category(referent, false);
 
         if (mask & LLInventoryObserver::ADD)

@@ -486,8 +486,14 @@ void LLLandmarksPanel::initLandmarksPanel(LLPlacesInventoryPanel* inventory_list
     LLPlacesFolderView* root_folder = dynamic_cast<LLPlacesFolderView*>(inventory_list->getRootFolder());
     if (root_folder)
     {
-        root_folder->setupMenuHandle(LLInventoryType::IT_CATEGORY, mGearFolderMenu->getHandle());
-        root_folder->setupMenuHandle(LLInventoryType::IT_LANDMARK, mGearLandmarkMenu->getHandle());
+        if (mGearFolderMenu)
+        {
+            root_folder->setupMenuHandle(LLInventoryType::IT_CATEGORY, mGearFolderMenu->getHandle());
+        }
+        if (mGearLandmarkMenu)
+        {
+            root_folder->setupMenuHandle(LLInventoryType::IT_LANDMARK, mGearLandmarkMenu->getHandle());
+        }
 
         root_folder->setParentLandmarksPanel(this);
     }
@@ -510,13 +516,23 @@ void LLLandmarksPanel::initListCommandsHandlers()
     mSortingMenu = LLUICtrlFactory::getInstance()->createFromFile<LLToggleableMenu>("menu_places_gear_sorting.xml", gMenuHolder, LLViewerMenuHolderGL::child_registry_t::instance());
     mAddMenu = LLUICtrlFactory::getInstance()->createFromFile<LLToggleableMenu>("menu_place_add_button.xml", gMenuHolder, LLViewerMenuHolderGL::child_registry_t::instance());
 
-    mGearLandmarkMenu->setVisibilityChangeCallback(boost::bind(&LLLandmarksPanel::onMenuVisibilityChange, this, _1, _2));
-    mGearFolderMenu->setVisibilityChangeCallback(boost::bind(&LLLandmarksPanel::onMenuVisibilityChange, this, _1, _2));
+    if (mGearLandmarkMenu)
+    {
+        mGearLandmarkMenu->setVisibilityChangeCallback(boost::bind(&LLLandmarksPanel::onMenuVisibilityChange, this, _1, _2));
+        // show menus even if all items are disabled
+        mGearLandmarkMenu->setAlwaysShowMenu(TRUE);
+    } // Else corrupted files?
 
-    // show menus even if all items are disabled
-    mGearLandmarkMenu->setAlwaysShowMenu(TRUE);
-    mGearFolderMenu->setAlwaysShowMenu(TRUE);
-    mAddMenu->setAlwaysShowMenu(TRUE);
+    if (mGearFolderMenu)
+    {
+        mGearFolderMenu->setVisibilityChangeCallback(boost::bind(&LLLandmarksPanel::onMenuVisibilityChange, this, _1, _2));
+        mGearFolderMenu->setAlwaysShowMenu(TRUE);
+    }
+
+    if (mAddMenu)
+    {
+        mAddMenu->setAlwaysShowMenu(TRUE);
+    }
 }
 
 void LLLandmarksPanel::updateMenuVisibility(LLUICtrl* menu)
@@ -1132,7 +1148,10 @@ void LLLandmarksPanel::doShowOnMap(LLLandmark* landmark)
     }
 
     mShowOnMapBtn->setEnabled(TRUE); // <FS:Ansariel> FIRE-31033: Keep Teleport/Map/Profile buttons on places floater
-    mGearLandmarkMenu->setItemEnabled("show_on_map", TRUE);
+    if (mGearLandmarkMenu)
+    {
+        mGearLandmarkMenu->setItemEnabled("show_on_map", TRUE);
+    }
 }
 
 void LLLandmarksPanel::doProcessParcelInfo(LLLandmark* landmark,

@@ -44,8 +44,7 @@ extern LLAgent gAgent;
 //const S32 ADVANCED_VPAD = 3; // <FS:Ansariel> Improved animation preview
 
 LLPreviewAnim::LLPreviewAnim(const LLSD& key)
-    : LLPreview( key ),
-      pMotion(NULL)
+    : LLPreview( key )
 {
     mCommitCallbackRegistrar.add("PreviewAnim.Play", boost::bind(&LLPreviewAnim::play, this, _2));
 }
@@ -174,19 +173,19 @@ void LLPreviewAnim::refreshFromItem()
     }
 
     // Preload motion
-    pMotion = gAgentAvatarp->createMotion(item->getAssetUUID());
-
-    // <FS:Ansariel> Improved animation preview
-    if (pMotion)
+    //gAgentAvatarp->createMotion(item->getAssetUUID());
+    LLMotion *motion = gAgentAvatarp->createMotion(item->getAssetUUID());
+
+    // <FS:Ansariel> Improved animation preview
+    if (motion)
     {
         LLTextBox* stats_box_left = getChild<LLTextBox>("AdvancedStatsLeft");
         LLTextBox* stats_box_right = getChild<LLTextBox>("AdvancedStatsRight");
-        stats_box_left->setTextArg("[PRIORITY]", llformat("%d", pMotion->getPriority()));
-        stats_box_left->setTextArg("[DURATION]", llformat("%.2f", pMotion->getDuration()));
-        stats_box_left->setTextArg("[IS_LOOP]", (pMotion->getLoop() ? LLTrans::getString("PermYes") : LLTrans::getString("PermNo")));
-        stats_box_right->setTextArg("[EASE_IN]", llformat("%.2f", pMotion->getEaseInDuration()));
-        stats_box_right->setTextArg("[EASE_OUT]", llformat("%.2f", pMotion->getEaseOutDuration()));
-        stats_box_right->setTextArg("[NUM_JOINTS]", llformat("%d", pMotion->getNumJointMotions()));
+        stats_box_left->setTextArg("[PRIORITY]", llformat("%d", motion->getPriority()));
+        stats_box_left->setTextArg("[DURATION]", llformat("%.2f", motion->getDuration()));
+        stats_box_left->setTextArg("[IS_LOOP]", (motion->getLoop() ? LLTrans::getString("PermYes") : LLTrans::getString("PermNo")));
+        stats_box_right->setTextArg("[EASE_IN]", llformat("%.2f", motion->getEaseInDuration()));
+        stats_box_right->setTextArg("[EASE_OUT]", llformat("%.2f", motion->getEaseOutDuration()));
+        stats_box_right->setTextArg("[NUM_JOINTS]", llformat("%d", motion->getNumJointMotions()));
     }
     // </FS:Ansariel>
 
@@ -232,8 +231,17 @@ void LLPreviewAnim::onClose(bool app_quitting)
 //        LLRect rect = getRect();
 //        reshape(rect.getWidth(), rect.getHeight() + pAdvancedStatsTextBox->getRect().getHeight() + ADVANCED_VPAD, FALSE);
 //
+//        LLMotion *motion = NULL;
+//        const LLInventoryItem* item = getItem();
+//        if (item)
+//        {
+//            // if motion exists, will return existing one.
+//            // Needed because viewer can purge motions
+//            motion = gAgentAvatarp->createMotion(item->getAssetUUID());
+//        }
+//
 //        // set text
-//        if (pMotion)
+//        if (motion)
 //        {
 //            pAdvancedStatsTextBox->setTextArg("[PRIORITY]", llformat("%d", pMotion->getPriority()));
 //            pAdvancedStatsTextBox->setTextArg("[DURATION]", llformat("%.2f", pMotion->getDuration()));

@@ -52,7 +52,6 @@ protected:
 
     LLUUID mItemID; // Not an item id, but a playing asset id
     bool mDidStart;
-    LLMotion* pMotion;
     // <FS:Ansariel> Improved animation preview
     //LLTextBox* pAdvancedStatsTextBox;
 };

@@ -1797,6 +1797,10 @@ bool idle_startup()
 
         }
     }
+    else if (reason_response == "BadType")
+    {
+        LLNotificationsUtil::add("LoginFailedToParse", LLSD(), LLSD(), login_alert_done);
+    }
     else if (!message.empty())
     {
         // This wasn't a certificate error, so throw up the normal

@@ -104,10 +104,11 @@ public:
 /// LLViewerAssetStorage
 ///----------------------------------------------------------------------------
 
+S32 LLViewerAssetStorage::sAssetCoroCount = 0;
+
+// Unused?
 LLViewerAssetStorage::LLViewerAssetStorage(LLMessageSystem *msg, LLXferManager *xfer, const LLHost &upstream_host)
     : LLAssetStorage(msg, xfer, upstream_host),
-      mAssetCoroCount(0),
       mCountRequests(0),
       mCountStarted(0),
       mCountCompleted(0),
@@ -119,7 +120,6 @@ LLViewerAssetStorage::LLViewerAssetStorage(LLMessageSystem *msg, LLXferManager *
 
 LLViewerAssetStorage::LLViewerAssetStorage(LLMessageSystem *msg, LLXferManager *xfer)
     : LLAssetStorage(msg, xfer),
-      mAssetCoroCount(0),
       mCountRequests(0),
       mCountStarted(0),
       mCountCompleted(0),
@@ -484,8 +484,7 @@ void LLViewerAssetStorage::assetRequestCoro(
     LLGetAssetCallback callback,
     void *user_data)
 {
-    LLScopedIncrement coro_count_boost(mAssetCoroCount);
-    mCountStarted++;
+    LLScopedIncrement coro_count_boost(sAssetCoroCount); // static counter since corotine can outlive LLViewerAssetStorage
 
     S32 result_code = LL_ERR_NOERR;
     LLExtStat ext_status = LLExtStat::NONE;
@@ -495,6 +494,9 @@ void LLViewerAssetStorage::assetRequestCoro(
         LL_WARNS_ONCE("ViewerAsset") << "Asset request fails: asset storage no longer exists" << LL_ENDL;
         return;
     }
+
+    mCountStarted++;
+
     if (!gAgent.getRegion())
     {
         LL_WARNS_ONCE("ViewerAsset") << "Asset request fails: no region set" << LL_ENDL;
@@ -602,6 +604,18 @@ void LLViewerAssetStorage::assetRequestCoro(
         result_code = LL_ERR_ASSET_REQUEST_FAILED;
         ext_status = LLExtStat::NONE;
     }
+    else if (!result.has(LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS_RAW))
+    {
+        LL_DEBUGS("ViewerAsset") << "request failed, no data returned!" << LL_ENDL;
+        result_code = LL_ERR_ASSET_REQUEST_FAILED;
+        ext_status = LLExtStat::NONE;
+    }
+    else if (!result[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS_RAW].isBinary())
+    {
+        LL_DEBUGS("ViewerAsset") << "request failed, invalid data format!" << LL_ENDL;
+        result_code = LL_ERR_ASSET_REQUEST_FAILED;
+        ext_status = LLExtStat::NONE;
+    }
     else
     {
         LL_DEBUGS("ViewerAsset") << "request succeeded, url " << url << LL_ENDL;
@@ -661,7 +675,7 @@ std::string LLViewerAssetStorage::getAssetURL(const std::string& cap_url, const
 void LLViewerAssetStorage::logAssetStorageInfo()
 {
     LLMemory::logMemoryInfo(true);
-    LL_INFOS("AssetStorage") << "Active coros " << mAssetCoroCount << LL_ENDL;
+    LL_INFOS("AssetStorage") << "Active coros " << sAssetCoroCount << LL_ENDL;
     LL_INFOS("AssetStorage") << "mPendingDownloads size " << mPendingDownloads.size() << LL_ENDL;
     LL_INFOS("AssetStorage") << "mCountStarted " << mCountStarted << LL_ENDL;
     LL_INFOS("AssetStorage") << "mCountCompleted " << mCountCompleted << LL_ENDL;

@@ -122,12 +122,13 @@ protected:
     wait_list_t mCoroWaitList;
 
     std::string mViewerAssetUrl;
-    S32 mAssetCoroCount;
     S32 mCountRequests;
     S32 mCountStarted;
     S32 mCountCompleted;
     S32 mCountSucceeded;
     S64 mTotalBytesFetched;
+
+    static S32 sAssetCoroCount; // coroutine count, static since coroutines can outlive LLViewerAssetStorage
 };
 
 #endif

@@ -85,6 +85,8 @@ void LLViewerAudio::registerIdleListener()
 
 void LLViewerAudio::startInternetStreamWithAutoFade(const std::string &streamURI)
 {
+    LL_DEBUGS("AudioEngine") << "Start with outo fade: " << streamURI << LL_ENDL;
+
     // Old and new stream are identical
     if (mNextStreamURI == streamURI)
     {
@@ -180,6 +182,7 @@ bool LLViewerAudio::onIdleUpdate()
             if (gAudiop)
             {
                 // Clear URI
+                LL_DEBUGS("AudioEngine") << "Done with audio fade" << LL_ENDL;
                 gAudiop->startInternetStream(LLStringUtil::null);
                 gAudiop->stopInternetStream();
             }
@@ -190,6 +193,7 @@ bool LLViewerAudio::onIdleUpdate()
 
             if (gAudiop)
             {
+                LL_DEBUGS("AudioEngine") << "Audio fade in: " << mNextStreamURI << LL_ENDL;
                 LLStreamingAudioInterface *stream = gAudiop->getStreamingAudioImpl();
                 if(stream && stream->supportsAdjustableBufferSizes())
                     stream->setBufferSizes(gSavedSettings.getU32("FMODStreamBufferSize"),gSavedSettings.getU32("FMODDecodeBufferSize"));
@@ -242,6 +246,7 @@ void LLViewerAudio::stopInternetStreamWithAutoFade()
 
     if (gAudiop)
     {
+        LL_DEBUGS("AudioEngine") << "Stop audio fade" << LL_ENDL;
         gAudiop->startInternetStream(LLStringUtil::null);
         gAudiop->stopInternetStream();
     }

@@ -249,9 +249,9 @@ public:
     LLVector3 mLastCameraOrigin;
     U32       mLastCameraUpdate;
 
-    void requestBaseCapabilitiesCoro(U64 regionHandle);
-    void requestBaseCapabilitiesCompleteCoro(U64 regionHandle);
-    void requestSimulatorFeatureCoro(std::string url, U64 regionHandle);
+    static void requestBaseCapabilitiesCoro(U64 regionHandle);
+    static void requestBaseCapabilitiesCompleteCoro(U64 regionHandle);
+    static void requestSimulatorFeatureCoro(std::string url, U64 regionHandle);
 };
 
 void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
@@ -279,6 +279,7 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
         LL_WARNS("AppInit", "Capabilities") << "Attempting to get capabilities for region that no longer exists!" << LL_ENDL;
         return; // this error condition is not recoverable.
     }
+    LLViewerRegionImpl* impl = regionp->getRegionImplNC();
     LL_DEBUGS("AppInit", "Capabilities") << "requesting seed caps for handle " << regionHandle
                                          << " name " << regionp->getName() << LL_ENDL;
 
@@ -293,32 +294,33 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
         newRegionEntry(*regionp);
 
         // After a few attempts, continue login. But keep trying to get the caps:
-        if (mSeedCapAttempts >= mSeedCapMaxAttemptsBeforeLogin &&
+        if (impl->mSeedCapAttempts >= impl->mSeedCapMaxAttemptsBeforeLogin &&
             STATE_SEED_GRANTED_WAIT == LLStartUp::getStartupState())
         {
             LLStartUp::setStartupState(STATE_SEED_CAP_GRANTED);
         }
 
-        if (mSeedCapAttempts > mSeedCapMaxAttempts)
+        if (impl->mSeedCapAttempts > impl->mSeedCapMaxAttempts)
         {
             // *TODO: Give a user pop-up about this error?
-            LL_WARNS("AppInit", "Capabilities") << "Failed to get seed capabilities from '" << url << "' after " << mSeedCapAttempts << " attempts. Giving up!" << LL_ENDL;
+            LL_WARNS("AppInit", "Capabilities") << "Failed to get seed capabilities from '" << url << "' after " << impl->mSeedCapAttempts << " attempts. Giving up!" << LL_ENDL;
             return; // this error condition is not recoverable.
         }
 
-        S32 id = ++mHttpResponderID;
+        S32 id = ++(impl->mHttpResponderID);
 
         LLSD capabilityNames = LLSD::emptyArray();
-        buildCapabilityNames(capabilityNames);
+        impl->buildCapabilityNames(capabilityNames);
 
         LL_INFOS("AppInit", "Capabilities") << "Requesting seed from " << url
                                             << " region name " << regionp->getName()
                                             << " region id " << regionp->getRegionID()
                                             << " handle " << regionp->getHandle()
-                                            << " (attempt #" << mSeedCapAttempts + 1 << ")" << LL_ENDL;
+                                            << " (attempt #" << impl->mSeedCapAttempts + 1 << ")" << LL_ENDL;
         LL_DEBUGS("AppInit", "Capabilities") << "Capabilities requested: " << capabilityNames << LL_ENDL;
 
         regionp = NULL;
+        impl = NULL;
         result = httpAdapter->postAndSuspend(httpRequest, url, capabilityNames);
 
         if (STATE_WORLD_INIT > LLStartUp::getStartupState())
@@ -332,9 +334,6 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
             return;
         }
 
-        // <FS:Ansariel> Fix seed cap retry count
-        //++mSeedCapAttempts;
-
         regionp = LLWorld::getInstance()->getRegionFromHandle(regionHandle);
         if (!regionp) //region was removed
         {
@@ -342,11 +341,16 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
             return; // this error condition is not recoverable.
         }
 
-        if (id != mHttpResponderID) // region is no longer referring to this request
+        impl = regionp->getRegionImplNC();
+
+        // <FS:Ansariel> Fix seed cap retry count
+        //++impl->mSeedCapAttempts;
+
+        if (id != impl->mHttpResponderID) // region is no longer referring to this request
         {
             LL_WARNS("AppInit", "Capabilities") << "Received results for a stale capabilities request!" << LL_ENDL;
             // setup for retry.
-            ++mSeedCapAttempts; // <FS:Ansariel> Fix seed cap retry count
+            ++(impl->mSeedCapAttempts); // <FS:Ansariel> Fix seed cap retry count
             continue;
         }
@@ -354,7 +358,7 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
         {
             LL_WARNS("AppInit", "Capabilities") << "Malformed response" << LL_ENDL;
             // setup for retry.
-            ++mSeedCapAttempts; // <FS:Ansariel> Fix seed cap retry count
+            ++(impl->mSeedCapAttempts); // <FS:Ansariel> Fix seed cap retry count
             continue;
         }
@@ -364,7 +368,7 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
         {
             LL_WARNS("AppInit", "Capabilities") << "HttpStatus error " << LL_ENDL;
             // setup for retry.
-            ++mSeedCapAttempts; // <FS:Ansariel> Fix seed cap retry count
+            ++(impl->mSeedCapAttempts); // <FS:Ansariel> Fix seed cap retry count
             continue;
         }
@@ -402,7 +406,6 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCoro(U64 regionHandle)
         { // *HACK: we're waiting for the ServerReleaseNotes
             regionp->showReleaseNotes();
         }
-
     }
 
 
@@ -463,6 +466,7 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCompleteCoro(U64 regionHandle)
             LL_WARNS("AppInit", "Capabilities") << "Received capabilities for region that no longer exists!" << LL_ENDL;
             break; // this error condition is not recoverable.
         }
+        LLViewerRegionImpl* impl = regionp->getRegionImplNC();
 
         // remove the http_result from the llsd
         result.erase("http_result");
@@ -475,30 +479,30 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCompleteCoro(U64 regionHandle)
         }
 
 #if 0
-        log_capabilities(mCapabilities);
+        log_capabilities(impl->mCapabilities);
 #endif
 
-        if (mCapabilities.size() != mSecondCapabilitiesTracker.size())
+        if (impl->mCapabilities.size() != impl->mSecondCapabilitiesTracker.size())
         {
             LL_WARNS("AppInit", "Capabilities")
                 << "Sim sent duplicate base caps that differ in size from what we initially received - most likely content. "
-                << "mCapabilities == " << mCapabilities.size()
-                << " mSecondCapabilitiesTracker == " << mSecondCapabilitiesTracker.size()
+                << "mCapabilities == " << impl->mCapabilities.size()
+                << " mSecondCapabilitiesTracker == " << impl->mSecondCapabilitiesTracker.size()
                 << LL_ENDL;
 #ifdef DEBUG_CAPS_GRANTS
             LL_WARNS("AppInit", "Capabilities")
                 << "Initial Base capabilities: " << LL_ENDL;
 
-            log_capabilities(mCapabilities);
+            log_capabilities(impl->mCapabilities);
 
             LL_WARNS("AppInit", "Capabilities")
                 << "Latest base capabilities: " << LL_ENDL;
 
-            log_capabilities(mSecondCapabilitiesTracker);
+            log_capabilities(impl->mSecondCapabilitiesTracker);
 
 #endif
 
-            if (mSecondCapabilitiesTracker.size() > mCapabilities.size())
+            if (impl->mSecondCapabilitiesTracker.size() > impl->mCapabilities.size())
             {
                 // *HACK Since we were granted more base capabilities in this grant request than the initial, replace
                 // the old with the new. This shouldn't happen i.e. we should always get the same capabilities from a
@@ -506,19 +510,17 @@ void LLViewerRegionImpl::requestBaseCapabilitiesCompleteCoro(U64 regionHandle)
                 // inventory api capability grants.
 
                 // Need to clear a std::map before copying into it because old keys take precedence.
-                mCapabilities.clear();
-                mCapabilities = mSecondCapabilitiesTracker;
+                impl->mCapabilities.clear();
+                impl->mCapabilities = impl->mSecondCapabilitiesTracker;
             }
         }
         else
         {
            LL_DEBUGS("CrossingCaps") << "Sim sent multiple base cap grants with matching sizes." << LL_ENDL;
         }
-        mSecondCapabilitiesTracker.clear();
+        impl->mSecondCapabilitiesTracker.clear();
     }
     while (false);
-
-
 }
 
 void LLViewerRegionImpl::requestSimulatorFeatureCoro(std::string url, U64 regionHandle)
@@ -2408,7 +2410,7 @@ void LLViewerRegion::requestSimulatorFeatures()
     {
         std::string coroname =
             LLCoros::instance().launch("LLViewerRegionImpl::requestSimulatorFeatureCoro",
-                boost::bind(&LLViewerRegionImpl::requestSimulatorFeatureCoro, mImpl, url, getHandle()));
+                boost::bind(&LLViewerRegionImpl::requestSimulatorFeatureCoro, url, getHandle()));
 
         LL_INFOS("AppInit", "SimulatorFeatures") << "Launching " << coroname << " requesting simulator features from " << url << " for region " << getRegionID() << LL_ENDL;
     }
@@ -3286,7 +3288,7 @@ void LLViewerRegion::setSeedCapability(const std::string& url)
         //to the "original" seed cap received and determine why there is problem!
         std::string coroname =
             LLCoros::instance().launch("LLEnvironmentRequest::requestBaseCapabilitiesCompleteCoro",
-                boost::bind(&LLViewerRegionImpl::requestBaseCapabilitiesCompleteCoro, mImpl, getHandle()));
+                boost::bind(&LLViewerRegionImpl::requestBaseCapabilitiesCompleteCoro, getHandle()));
         return;
     }
@@ -3298,7 +3300,7 @@ void LLViewerRegion::setSeedCapability(const std::string& url)
 
     std::string coroname =
         LLCoros::instance().launch("LLViewerRegionImpl::requestBaseCapabilitiesCoro",
-            boost::bind(&LLViewerRegionImpl::requestBaseCapabilitiesCoro, mImpl, getHandle()));
+            boost::bind(&LLViewerRegionImpl::requestBaseCapabilitiesCoro, getHandle()));
 
     LL_INFOS("AppInit", "Capabilities") << "Launching " << coroname << " requesting seed capabilities from " << url << " for region " << getRegionID() << LL_ENDL;
 }

@@ -89,6 +89,7 @@ namespace {
 
     // Don't retry connecting to the daemon more frequently than this:
     const F32 DAEMON_CONNECT_THROTTLE_SECONDS = 1.0f;
+    const int DAEMON_CONNECT_RETRY_MAX = 3;
 
     // Don't send positional updates more frequently than this:
     const F32 UPDATE_THROTTLE_SECONDS = 0.5f;
@@ -765,6 +766,11 @@ void LLVivoxVoiceClient::voiceControlCoro()
 
 void LLVivoxVoiceClient::voiceControlStateMachine(S32 &coro_state)
 {
+    if (sShuttingDown)
+    {
+        return;
+    }
+
     LL_DEBUGS("Voice") << "starting" << LL_ENDL;
     mIsCoroutineActive = true;
     LLCoros::set_consuming(true);
@@ -920,6 +926,12 @@ void LLVivoxVoiceClient::voiceControlStateMachine(S32 &coro_state)
         }
     } while (coro_state > 0);
 
+    if (sShuttingDown)
+    {
+        // LLVivoxVoiceClient might be already dead
+        return;
+    }
+
     mIsCoroutineActive = false;
     LL_INFOS("Voice") << "exiting" << LL_ENDL;
 }
@@ -1149,8 +1161,9 @@ bool LLVivoxVoiceClient::startAndLaunchDaemon()
 
     LL_DEBUGS("Voice") << "Connecting to vivox daemon:" << mDaemonHost << LL_ENDL;
 
+    int retryCount(0);
     LLVoiceVivoxStats::getInstance()->reset();
-    while (!mConnected && !sShuttingDown)
+    while (!mConnected && !sShuttingDown && retryCount++ <= DAEMON_CONNECT_RETRY_MAX)
     {
         LLVoiceVivoxStats::getInstance()->connectionAttemptStart();
         LL_DEBUGS("Voice") << "Attempting to connect to vivox daemon: " << mDaemonHost << LL_ENDL;
@@ -1276,7 +1289,7 @@ bool LLVivoxVoiceClient::provisionVoiceAccount()
         {
             provisioned = true;
         }
-    } while (!provisioned && retryCount <= PROVISION_RETRY_MAX && !sShuttingDown);
+    } while (!provisioned && ++retryCount <= PROVISION_RETRY_MAX && !sShuttingDown);
 
     if (sShuttingDown && !provisioned)
     {
@@ -1459,6 +1472,12 @@ bool LLVivoxVoiceClient::loginToVivox()
         }
 
         LLSD result = llcoro::suspendUntilEventOnWithTimeout(mVivoxPump, LOGIN_ATTEMPT_TIMEOUT, timeoutResult);
+
+        if (sShuttingDown)
+        {
+            return false;
+        }
+
         LL_DEBUGS("Voice") << "event=" << ll_stream_notation_sd(result) << LL_ENDL;
 
         if (result.has("login"))
@@ -1521,6 +1540,11 @@ bool LLVivoxVoiceClient::loginToVivox()
 
     } while ((!response_ok || !account_login) && !sShuttingDown);
 
+    if (sShuttingDown)
+    {
+        return false;
+    }
+
     mRelogRequested = false;
     mIsLoggedIn = true;
     notifyStatusObservers(LLVoiceClientStatusObserver::STATUS_LOGGED_IN);

@@ -421,64 +421,109 @@ private:
             std::string key(XMLRPC_GetValueID(current));
             LL_DEBUGS("LLXMLRPCListener") << "key: " << key_pfx << key << LL_ENDL;
             XMLRPC_VALUE_TYPE_EASY type = XMLRPC_GetValueTypeEasy(current);
-            if (xmlrpc_type_string == type)
-            {
-                LLSD::String val(XMLRPC_GetValueString(current));
-                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
-                responses.insert(key, val);
-            }
-            else if (xmlrpc_type_int == type)
-            {
-                LLSD::Integer val(XMLRPC_GetValueInt(current));
-                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
-                responses.insert(key, val);
-            }
-            else if (xmlrpc_type_double == type)
-            {
-                LLSD::Real val(XMLRPC_GetValueDouble(current));
-                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
-                responses.insert(key, val);
-            }
-            else if (xmlrpc_type_array == type)
-            {
-                // We expect this to be an array of submaps. Walk the array,
-                // recursively parsing each submap and collecting them.
-                LLSD array;
-                int i = 0;          // for descriptive purposes
-                for (XMLRPC_VALUE row = XMLRPC_VectorRewind(current); row;
-                     row = XMLRPC_VectorNext(current), ++i)
-                {
-                    // Recursive call. For the lower-level key_pfx, if 'key'
-                    // is "foo", pass "foo[0]:", then "foo[1]:", etc. In the
-                    // nested call, a subkey "bar" will then be logged as
-                    // "foo[0]:bar", and so forth.
-                    // Parse the scalar subkey/value pairs from this array
-                    // entry into a temp submap. Collect such submaps in 'array'.
-                    array.append(parseValues(status_string,
-                                             STRINGIZE(key_pfx << key << '[' << i << "]:"),
-                                             row));
-                }
-                // Having collected an 'array' of 'submap's, insert that whole
-                // 'array' as the value of this 'key'.
-                responses.insert(key, array);
-            }
-            else if (xmlrpc_type_struct == type)
-            {
-                LLSD submap = parseValues(status_string,
-                                          STRINGIZE(key_pfx << key << ':'),
-                                          current);
-                responses.insert(key, submap);
-            }
-            else if (xmlrpc_type_empty == type)
+            switch (type)
             {
+            case xmlrpc_type_empty:
                 LL_INFOS("LLXMLRPCListener") << "Empty result for key " << key_pfx << key << LL_ENDL;
                 responses.insert(key, LLSD());
-            }
-            else
-            {
+                break;
+            case xmlrpc_type_base64:
+            {
+                S32 len = XMLRPC_GetValueStringLen(current);
+                const char* buf = XMLRPC_GetValueBase64(current);
+                if ((len > 0) && buf)
+                {
+                    // During implementation this code was not tested
+                    // If you encounter this, please make sure this is correct,
+                    // then remove llassert
+                    llassert(0);
+
+                    LLSD::Binary data;
+                    data.resize(len);
+                    memcpy((void*)&data[0], (void*)buf, len);
+                    responses.insert(key, data);
+                }
+                else
+                {
+                    LL_WARNS("LLXMLRPCListener") << "Potentially malformed xmlrpc_type_base64 for key "
+                                                 << key_pfx << key << LL_ENDL;
+                    responses.insert(key, LLSD());
+                }
+                break;
+            }
+            case xmlrpc_type_boolean:
+            {
+                LLSD::Boolean val(XMLRPC_GetValueBoolean(current));
+                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
+                responses.insert(key, val);
+                break;
+            }
+            case xmlrpc_type_datetime:
+            {
+                std::string iso8601_date(XMLRPC_GetValueDateTime_ISO8601(current));
+                LL_DEBUGS("LLXMLRPCListener") << "val: " << iso8601_date << LL_ENDL;
+                responses.insert(key, LLSD::Date(iso8601_date));
+                break;
+            }
+            case xmlrpc_type_double:
+            {
+                LLSD::Real val(XMLRPC_GetValueDouble(current));
+                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
+                responses.insert(key, val);
+                break;
+            }
+            case xmlrpc_type_int:
+            {
+                LLSD::Integer val(XMLRPC_GetValueInt(current));
+                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
+                responses.insert(key, val);
+                break;
+            }
+            case xmlrpc_type_string:
+            {
+                LLSD::String val(XMLRPC_GetValueString(current));
+                LL_DEBUGS("LLXMLRPCListener") << "val: " << val << LL_ENDL;
+                responses.insert(key, val);
+                break;
+            }
+            case xmlrpc_type_mixed:
+            case xmlrpc_type_array:
+            {
+                // We expect this to be an array of submaps. Walk the array,
+                // recursively parsing each submap and collecting them.
+                LLSD array;
+                int i = 0;          // for descriptive purposes
+                for (XMLRPC_VALUE row = XMLRPC_VectorRewind(current); row;
+                     row = XMLRPC_VectorNext(current), ++i)
+                {
+                    // Recursive call. For the lower-level key_pfx, if 'key'
+                    // is "foo", pass "foo[0]:", then "foo[1]:", etc. In the
+                    // nested call, a subkey "bar" will then be logged as
+                    // "foo[0]:bar", and so forth.
+                    // Parse the scalar subkey/value pairs from this array
+                    // entry into a temp submap. Collect such submaps in 'array'.
+                    array.append(parseValues(status_string,
+                                             STRINGIZE(key_pfx << key << '[' << i << "]:"),
+                                             row));
+                }
+                // Having collected an 'array' of 'submap's, insert that whole
+                // 'array' as the value of this 'key'.
+                responses.insert(key, array);
+                break;
+            }
+            case xmlrpc_type_struct:
+            {
+                LLSD submap = parseValues(status_string,
+                                          STRINGIZE(key_pfx << key << ':'),
+                                          current);
+                responses.insert(key, submap);
+                break;
+            }
+            case xmlrpc_type_none: // Not expected
+            default:
                 // whoops - unrecognized type
                 LL_WARNS("LLXMLRPCListener") << "Unhandled xmlrpc type " << type << " for key "
-                                             << key_pfx << key << LL_ENDL;
+                    << key_pfx << key << LL_ENDL;
                 responses.insert(key, STRINGIZE("<bad XMLRPC type " << type << '>'));
                 status_string = "BadType";
             }

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="Opret mappe" name="add_folder"/>
   <menu_item_call label="Tilføj landemærke" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="Teleportér" name="Teleport"/>
   <menu_item_call label="Mere information" name="More Information"/>
   <menu_item_call label="Kopiér til udklipsholder" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
 <toggleable_menu name="menu_create">
-  <menu_item_call label="Landmarke für aktuelle Position erstellen" name="add_landmark"/>
-  <menu_item_call label="Ordner erstellen" name="add_folder"/>
+  <menu_item_call label="Ordner hinzufügen" name="add_folder"/>
+  <menu_item_call label="Landmarke hinzufügen" name="add_landmark"/>
 </toggleable_menu>

@@ -213,6 +213,19 @@ Make sure your Internet connection is working properly.
     yestext="OK"/>
   </notification>
 
+  <notification
+   icon="alertmodal.tga"
+   name="LoginFailedToParse"
+   type="alertmodal">
+    <tag>fail</tag>
+Viewer received malformed response from server. Please, make sure your Internet connection is working properly and try again later.
+
+If you feel this is in error, please contact Support.
+    <usetemplate
+     name="okbutton"
+     yestext="OK"/>
+  </notification>
+
   <notification
    icon="alertmodal.tga"
    name="MessageTemplateNotFound"

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="Añadir una carpeta" name="add_folder"/>
   <menu_item_call label="Añadir este hito" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="Teleportar" name="Teleport"/>
   <menu_item_call label="Más información" name="More Information"/>
   <menu_item_call label="Copiar la SLurl" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="Ajouter un dossier" name="add_folder"/>
   <menu_item_call label="Ajouter un repère" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="Téléporter" name="Teleport"/>
   <menu_item_call label="Plus d'informations" name="More Information"/>
   <menu_item_call label="Copier la SLurl" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="Aggiungi cartella" name="add_folder"/>
   <menu_item_call label="Aggiungi segnaposto" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="Teleport" name="Teleport"/>
   <menu_item_call label="Maggiori informazioni" name="More Information"/>
   <menu_item_call label="Copia SLurl negli appunti" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
 <toggleable_menu name="menu_create">
-  <menu_item_call label="現在地のランドマークを作成" name="add_landmark"/>
-  <menu_item_call label="フォルダを作成" name="add_folder"/>
+  <menu_item_call label="フォルダを追加" name="add_folder"/>
+  <menu_item_call label="ランドマークを追加" name="add_landmark"/>
 </toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="Adicionar pasta" name="add_folder"/>
   <menu_item_call label="Adicionar marco" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="Teletransportar" name="Teleport"/>
   <menu_item_call label="Mais informações" name="More Information"/>
   <menu_item_call label="Copiar SLurl" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
 <toggleable_menu name="menu_create">
-  <menu_item_call label="Создать закладку текущего места" name="add_landmark"/>
-  <menu_item_call label="Создать папку" name="add_folder"/>
+  <menu_item_call label="Добавить папку" name="add_folder"/>
+  <menu_item_call label="Добавить закладку" name="add_landmark"/>
 </toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="Klasör Ekle" name="add_folder"/>
   <menu_item_call label="Yer İmi Ekle" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="Işınla" name="Teleport"/>
   <menu_item_call label="Ek Bilgi" name="More Information"/>
   <menu_item_call label="SLurl'i Kopyala" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<menu name="menu_folder_gear">
+<toggleable_menu name="menu_create">
   <menu_item_call label="添加資料夾" name="add_folder"/>
   <menu_item_call label="添加地標" name="add_landmark"/>
-</menu>
+</toggleable_menu>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<context_menu name="Teleport History Item Context Menu">
+<toggleable_menu name="Teleport History Item Menu">
   <menu_item_call label="瞬間傳送" name="Teleport"/>
   <menu_item_call label="更多資訊" name="More Information"/>
   <menu_item_call label="覆製 SLurl" name="CopyToClipboard"/>
-</context_menu>
+</toggleable_menu>

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file test_llxmlrpc_peer.py
 @author Nat Goodspeed
@@ -31,7 +31,7 @@ $/LicenseInfo$
 
 import os
 import sys
-from SimpleXMLRPCServer import SimpleXMLRPCServer
+from xmlrpc.server import SimpleXMLRPCServer
 
 mydir = os.path.dirname(__file__)        # expected to be .../indra/newview/tests/
 sys.path.insert(0, os.path.join(mydir, os.pardir, os.pardir, "llmessage", "tests"))
@@ -85,7 +85,7 @@ if __name__ == "__main__":
     # "Then there's Windows"
     # Instantiate a TestServer on the first free port in the specified
     # port range.
-    xmlrpcd, port = freeport(xrange(8000, 8020), make_server)
+    xmlrpcd, port = freeport(range(8000, 8020), make_server)
 
     # Pass the selected port number to the subject test program via the
     # environment. We don't want to impose requirements on the test program's

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file viewer_manifest.py
 @author Ryan Williams
@@ -93,7 +93,7 @@ class ViewerManifest(LLManifest,FSViewerManifest):
                 # include the extracted list of contributors
                 contributions_path = "../../doc/contributions.txt"
                 contributor_names = self.extract_names(contributions_path)
-                self.put_in_file(contributor_names, "contributors.txt", src=contributions_path)
+                self.put_in_file(contributor_names.encode(), "contributors.txt", src=contributions_path)
 
                 # ... and the default camera position settings
                 self.path("camera")

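put_in_file() evidently writes bytes under Python 3, so the str returned by extract_names() is now encoded first; str.encode() with no argument encodes as UTF-8. A minimal illustration of the str/bytes boundary (hypothetical file name):

    names = "Ryan Williams\nNat Goodspeed\n"
    with open("contributors.txt", "wb") as f:  # binary mode accepts bytes only
        f.write(names.encode())                # str -> UTF-8 bytes
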
@@ -145,17 +145,17 @@ class ViewerManifest(LLManifest,FSViewerManifest):
         if sourceid:
             settings_install['sourceid'] = settings_template['sourceid'].copy()
             settings_install['sourceid']['Value'] = sourceid
-            print ("Set sourceid in settings_install.xml to '%s'" % sourceid)
+            print("Set sourceid in settings_install.xml to '%s'" % sourceid)
 
         if self.args.get('channel_suffix'):
             settings_install['CmdLineChannel'] = settings_template['CmdLineChannel'].copy()
             settings_install['CmdLineChannel']['Value'] = self.channel_with_pkg_suffix()
-            print ("Set CmdLineChannel in settings_install.xml to '%s'" % self.channel_with_pkg_suffix())
+            print("Set CmdLineChannel in settings_install.xml to '%s'" % self.channel_with_pkg_suffix())
 
         if self.args.get('grid'):
             settings_install['CmdLineGridChoice'] = settings_template['CmdLineGridChoice'].copy()
             settings_install['CmdLineGridChoice']['Value'] = self.grid()
-            print ("Set CmdLineGridChoice in settings_install.xml to '%s'" % self.grid())
+            print("Set CmdLineGridChoice in settings_install.xml to '%s'" % self.grid())
 
         # put_in_file(src=) need not be an actual pathname; it
         # only needs to be non-empty
@@ -232,7 +232,7 @@ class ViewerManifest(LLManifest,FSViewerManifest):
         #we likely no longer need the test, since we will throw an exception above, but belt and suspenders and we get the
         #return code for free.
         if not self.path2basename(os.pardir, "build_data.json"):
-            print ("No build_data.json file")
+            print("No build_data.json file")
 
     def finish_build_data_dict(self, build_data_dict):
         return build_data_dict
@@ -351,13 +351,13 @@ class ViewerManifest(LLManifest,FSViewerManifest):
         return "icons/" + self.channel_type()
 
     def extract_names(self,src):
         """Extract contributor names from source file, returns string"""
         try:
-            contrib_file = open(src,'r')
+            with open(src, 'r') as contrib_file:
+                lines = contrib_file.readlines()
         except IOError:
-            print ("Failed to open '%s'" % src)
+            print("Failed to open '%s'" % src)
             raise
-        lines = contrib_file.readlines()
-        contrib_file.close()
 
         # All lines up to and including the first blank line are the file header; skip them
         lines.reverse() # so that pop will pull from first to last line
@@ -393,7 +393,7 @@ class ViewerManifest(LLManifest,FSViewerManifest):
         """
         Like ln -sf, but uses os.symlink() instead of running ln. This creates
         a symlink at 'dst' that points to 'src' -- see:
-        https://docs.python.org/2/library/os.html#os.symlink
+        https://docs.python.org/3/library/os.html#os.symlink
 
         If you omit 'dst', this creates a symlink with basename(src) at
         get_dst_prefix() -- in other words: put a symlink to this pathname
@@ -455,11 +455,11 @@ class ViewerManifest(LLManifest,FSViewerManifest):
                 os.remove(dst)
                 os.symlink(src, dst)
             elif os.path.isdir(dst):
-                print ("Requested symlink (%s) exists but is a directory; replacing" % dst)
+                print("Requested symlink (%s) exists but is a directory; replacing" % dst)
                 shutil.rmtree(dst)
                 os.symlink(src, dst)
             elif os.path.exists(dst):
-                print ("Requested symlink (%s) exists but is a file; replacing" % dst)
+                print("Requested symlink (%s) exists but is a file; replacing" % dst)
                 os.remove(dst)
                 os.symlink(src, dst)
             else:
@@ -467,7 +467,7 @@ class ViewerManifest(LLManifest,FSViewerManifest):
                 raise
         except Exception as err:
             # report
-            print ("Can't symlink %r -> %r: %s: %s" % \
+            print("Can't symlink %r -> %r: %s: %s" % \
                   (dst, src, err.__class__.__name__, err))
             # if caller asked us not to catch, re-raise this exception
             if not catch:
@@ -529,7 +529,7 @@ class WindowsManifest(ViewerManifest):
             else:
                 raise Exception("Directories are not supported by test_CRT_and_copy_action()")
         else:
-            print ("Doesn't exist:", src)
+            print("Doesn't exist:", src)
 
     def test_for_no_msvcrt_manifest_and_copy_action(self, src, dst):
         # This is used to test that no manifest for the msvcrt exists.
@@ -558,7 +558,7 @@ class WindowsManifest(ViewerManifest):
             else:
                 raise Exception("Directories are not supported by test_CRT_and_copy_action()")
         else:
-            print ("Doesn't exist:", src)
+            print("Doesn't exist:", src)
 
     def construct(self):
         super(WindowsManifest, self).construct()
@@ -599,8 +599,8 @@ class WindowsManifest(ViewerManifest):
         try:
             self.path("glod.dll")
         except RuntimeError as err:
-            print (err.message)
-            print ("Skipping GLOD library (assumming linked statically)")
+            print(err.message)
+            print("Skipping GLOD library (assumming linked statically)")
 
         # Get fmodstudio dll if needed
         if self.args['fmodstudio'] == 'ON':
@@ -799,8 +799,7 @@ class WindowsManifest(ViewerManifest):
         result = ""
         dest_files = [pair[1] for pair in self.file_list if pair[0] and os.path.isfile(pair[1]) and not pair[1].endswith(".pdb") ] #<FS:ND/> Don't include pdb files.
         # sort deepest hierarchy first
-        dest_files.sort(lambda a,b: cmp(a.count(os.path.sep),b.count(os.path.sep)) or cmp(a,b))
-        dest_files.reverse()
+        dest_files.sort(key=lambda f: (f.count(os.path.sep), f), reverse=True)
         out_path = None
         for pkg_file in dest_files:
             rel_file = os.path.normpath(pkg_file.replace(self.get_dst_prefix()+os.path.sep,''))
@@ -823,8 +822,7 @@ class WindowsManifest(ViewerManifest):
         for d in deleted_file_dirs:
             deleted_dirs.extend(path_ancestors(d))
         # sort deepest hierarchy first
-        deleted_dirs.sort(lambda a,b: cmp(a.count(os.path.sep),b.count(os.path.sep)) or cmp(a,b))
-        deleted_dirs.reverse()
+        deleted_dirs.sort(key=lambda f: (f.count(os.path.sep), f), reverse=True)
         prev = None
         for d in deleted_dirs:
             if d != prev: # skip duplicates

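Python 3 drops the cmp argument to list.sort(), so both hunks above rewrite "sort ascending by (depth, name), then reverse()" as a key function plus reverse=True, which produces the same ordering. A standalone sketch:

    import os

    paths = ["a/b/c.txt", "a/b.txt", "a/b/d/e.txt", "z.txt"]
    # Deepest hierarchy first; ties fall back to the path string itself.
    paths.sort(key=lambda f: (f.count(os.path.sep), f), reverse=True)
    print(paths)  # ['a/b/d/e.txt', 'a/b/c.txt', 'a/b.txt', 'z.txt']
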
@@ -927,25 +925,19 @@ class WindowsManifest(ViewerManifest):
         installer_created=False
         nsis_attempts=3
         nsis_retry_wait=15
-        # <FS:TS> A cute little Python 2/3 compatibility hack, thanks to ESR.
-        # Remove the next four lines if Python 2 compatibiltiy is not needed.
-        try:
-            xrange
-        except NameError:
-            xrange = range
-        for attempt in xrange(nsis_attempts):
+        for attempt in range(nsis_attempts):
             try:
                 self.run_command([NSIS_path, '/V2', self.dst_path_of(tempfile)])
             except ManifestError as err:
                 if attempt+1 < nsis_attempts:
-                    print ("nsis failed, waiting %d seconds before retrying" % nsis_retry_wait, file=sys.stderr)
+                    print("nsis failed, waiting %d seconds before retrying" % nsis_retry_wait, file=sys.stderr)
                     time.sleep(nsis_retry_wait)
                     nsis_retry_wait*=2
             else:
                 # NSIS worked! Done!
                 break
         else:
-            print ("Maximum nsis attempts exceeded; giving up", file=sys.stderr)
+            print("Maximum nsis attempts exceeded; giving up", file=sys.stderr)
             raise
 
         self.fs_sign_win_installer(substitution_strings) # <FS:ND/> Sign files, step two. Sign installer.
@@ -959,10 +951,10 @@ class WindowsManifest(ViewerManifest):
         python = os.environ.get('PYTHON', sys.executable)
         if os.path.exists(sign_py):
             dst_path = self.dst_path_of(exe)
-            print ("about to run signing of: ", dst_path)
+            print("about to run signing of: ", dst_path)
             self.run_command([python, sign_py, dst_path])
         else:
-            print ("Skipping code signing of %s %s: %s not found" % (self.dst_path_of(exe), exe, sign_py))
+            print("Skipping code signing of %s %s: %s not found" % (self.dst_path_of(exe), exe, sign_py))
 
     def escape_slashes(self, path):
         return path.replace('\\', '\\\\\\\\')
@@ -1345,14 +1337,15 @@ class DarwinManifest(ViewerManifest):
         if bugsplat_db:
             # Inject BugsplatServerURL into Info.plist if provided.
             Info_plist = self.dst_path_of("Info.plist")
-            Info = plistlib.readPlist(Info_plist)
-            # https://www.bugsplat.com/docs/platforms/os-x#configuration
-            Info["BugsplatServerURL"] = \
-                "https://{}.bugsplat.com/".format(bugsplat_db)
-            self.put_in_file(
-                plistlib.writePlistToString(Info),
-                os.path.basename(Info_plist),
-                "Info.plist")
+            with open(Info_plist, 'rb') as f:
+                Info = plistlib.load(f)
+            # https://www.bugsplat.com/docs/platforms/os-x#configuration
+            Info["BugsplatServerURL"] = \
+                "https://{}.bugsplat.com/".format(bugsplat_db)
+            self.put_in_file(
+                plistlib.dumps(Info),
+                os.path.basename(Info_plist),
+                "Info.plist")
 
         with self.prefix(dst="Contents"):  # everything goes in Contents
             # self.path("Info.plist", dst="Info.plist")

@ -1432,10 +1425,10 @@ class DarwinManifest(ViewerManifest):
|
|||
added = [os.path.relpath(d, self.get_dst_prefix())
|
||||
for s, d in self.file_list[oldlen:]]
|
||||
except MissingError as err:
|
||||
print ("Warning: "+err.msg, file=sys.stderr)
|
||||
print("Warning: "+err.msg, file=sys.stderr)
|
||||
added = []
|
||||
if not added:
|
||||
print ("Skipping %s" % dst)
|
||||
print("Skipping %s" % dst)
|
||||
return added
|
||||
|
||||
# dylibs is a list of all the .dylib files we expect to need
|
||||
|
|
@ -1635,7 +1628,7 @@ class DarwinManifest(ViewerManifest):
|
|||
|
||||
# mount the image and get the name of the mount point and device node
|
||||
try:
|
||||
hdi_output = subprocess.check_output(['hdiutil', 'attach', '-private', sparsename])
|
||||
hdi_output = subprocess.check_output(['hdiutil', 'attach', '-private', sparsename], text=True)
|
||||
except subprocess.CalledProcessError as err:
|
||||
sys.exit("failed to mount image at '%s'" % sparsename)
|
||||
|
||||
|
|
@ -1670,7 +1663,7 @@ class DarwinManifest(ViewerManifest):
|
|||
#os.path.join(dmg_template, "_VolumeIcon.icns"): ".VolumeIcon.icns",
|
||||
os.path.join(dmg_template, "background.png"): "background.png",
|
||||
os.path.join(dmg_template, "_DS_Store"): ".DS_Store"}.items()):
|
||||
print ("Copying to dmg", s, d)
|
||||
print("Copying to dmg", s, d)
|
||||
self.copy_action(self.src_path_of(s), os.path.join(volpath, d))
|
||||
|
||||
# <FS:TS> The next two commands *MUST* execute before the loop
|
||||
|
|
@ -1713,7 +1706,7 @@ class DarwinManifest(ViewerManifest):
|
|||
# and invalidate the signatures.
|
||||
if 'signature' in self.args:
|
||||
app_in_dmg=os.path.join(volpath,self.app_name()+".app")
|
||||
print ("Attempting to sign '%s'" % app_in_dmg)
|
||||
print("Attempting to sign '%s'" % app_in_dmg)
|
||||
identity = self.args['signature']
|
||||
if identity == '':
|
||||
identity = 'Developer ID Application'
|
||||
|
|
@ -1764,11 +1757,11 @@ class DarwinManifest(ViewerManifest):
|
|||
signed=True # if no exception was raised, the codesign worked
|
||||
except ManifestError as err:
|
||||
if sign_attempts:
|
||||
print ("codesign failed, waiting %d seconds before retrying" % sign_retry_wait, file=sys.stderr)
|
||||
print("codesign failed, waiting %d seconds before retrying" % sign_retry_wait, file=sys.stderr)
|
||||
time.sleep(sign_retry_wait)
|
||||
sign_retry_wait*=2
|
||||
else:
|
||||
print ("Maximum codesign attempts exceeded; giving up", file=sys.stderr)
|
||||
print("Maximum codesign attempts exceeded; giving up", file=sys.stderr)
|
||||
raise
|
||||
self.run_command(['spctl', '-a', '-texec', '-vvvv', app_in_dmg])
|
||||
self.run_command([self.src_path_of("installers/darwin/apple-notarize.sh"), app_in_dmg])
|
||||
|
|
@ -1777,7 +1770,7 @@ class DarwinManifest(ViewerManifest):
|
|||
# Unmount the image even if exceptions from any of the above
|
||||
self.run_command(['hdiutil', 'detach', '-force', devfile])
|
||||
|
||||
print ("Converting temp disk image to final disk image")
|
||||
print("Converting temp disk image to final disk image")
|
||||
self.run_command(['hdiutil', 'convert', sparsename, '-format', 'UDZO',
|
||||
'-imagekey', 'zlib-level=9', '-o', finalname])
|
||||
# get rid of the temp file
|
||||
|
|
@ -1840,7 +1833,7 @@ class LinuxManifest(ViewerManifest):
|
|||
|
||||
# Get the icons based on the channel type
|
||||
icon_path = self.icon_path()
|
||||
print ("DEBUG: icon_path '%s'" % icon_path)
|
||||
print("DEBUG: icon_path '%s'" % icon_path)
|
||||
with self.prefix(src=icon_path) :
|
||||
self.path("firestorm_256.png","firestorm_48.png")
|
||||
#with self.prefix(dst="res-sdl") :
|
||||
|
|
@ -1956,7 +1949,7 @@ class LinuxManifest(ViewerManifest):
|
|||
|
||||
# llcommon
|
||||
#if not self.path("../llcommon/libllcommon.so", "lib/libllcommon.so"):
|
||||
# print "Skipping llcommon.so (assuming llcommon was linked statically)"
|
||||
# print("Skipping llcommon.so (assuming llcommon was linked statically))"
|
||||
|
||||
self.path("featuretable_linux.txt")
|
||||
|
||||
|
|
@ -2069,14 +2062,14 @@ class LinuxManifest(ViewerManifest):
|
|||
'--numeric-owner', self.fs_linux_tar_excludes(), '-caf',
|
||||
tempname + '.tar.xz', installer_name])
|
||||
else:
|
||||
print ("Skipping %s.tar.xz for non-Release build (%s)" % \
|
||||
print("Skipping %s.tar.xz for non-Release build (%s)" % \
|
||||
(installer_name, self.args['buildtype']))
|
||||
finally:
|
||||
self.run_command(["mv", tempname, realname])
|
||||
|
||||
def strip_binaries(self):
|
||||
if self.args['buildtype'].lower() == 'release' and self.is_packaging_viewer():
|
||||
print ("* Going strip-crazy on the packaged binaries, since this is a RELEASE build")
|
||||
print("* Going strip-crazy on the packaged binaries, since this is a RELEASE build")
|
||||
# makes some small assumptions about our packaged dir structure
|
||||
self.run_command(
|
||||
["find"] +
|
||||
|
|
@ -2150,7 +2143,7 @@ class Linux_i686_Manifest(LinuxManifest):
|
|||
self.path("libtcmalloc.so*") #formerly called google perf tools
|
||||
pass
|
||||
except:
|
||||
print ("tcmalloc files not found, skipping")
|
||||
print("tcmalloc files not found, skipping")
|
||||
pass
|
||||
|
||||
if self.args['fmodstudio'] == 'ON':
|
||||
|
|
@ -2159,7 +2152,7 @@ class Linux_i686_Manifest(LinuxManifest):
|
|||
self.path("libfmod.so*")
|
||||
pass
|
||||
except:
|
||||
print ("Skipping libfmod.so - not found")
|
||||
print("Skipping libfmod.so - not found")
|
||||
pass
|
||||
|
||||
# Vivox runtimes
|
||||
|
|
@ -2256,9 +2249,9 @@ def symlinkf(src, dst):
|
|||
if __name__ == "__main__":
|
||||
# Report our own command line so that, in case of trouble, a developer can
|
||||
# manually rerun the same command.
|
||||
print('%s \\\n%s' %
|
||||
print(('%s \\\n%s' %
|
||||
(sys.executable,
|
||||
' '.join((("'%s'" % arg) if ' ' in arg else arg) for arg in sys.argv)))
|
||||
' '.join((("'%s'" % arg) if ' ' in arg else arg) for arg in sys.argv))))
|
||||
# fmodstudio and openal can be used simultaneously and controled by environment
|
||||
extra_arguments = [
|
||||
dict(name='bugsplat', description="""BugSplat database to which to post crashes,
|
||||
|
|
|
|||
|
|
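A note on the recurring sort rewrite above: Python 3 removed both the cmp() builtin and list.sort()'s cmp= argument, so the old comparator-plus-reverse() pair collapses into a single key-based reversed sort. A minimal sketch of the equivalence (paths invented for illustration; POSIX separators assumed):

    import os

    # Invented sample paths; the real lists come from the packaged tree.
    dest_files = ["a/b/c.txt", "a/b.txt", "a/b/d.txt", "z.txt"]

    # Python 2: dest_files.sort(lambda a,b: cmp(a.count(os.path.sep),b.count(os.path.sep)) or cmp(a,b))
    #           dest_files.reverse()
    # Python 3: sort on a (depth, name) tuple, deepest hierarchy first.
    dest_files.sort(key=lambda f: (f.count(os.path.sep), f), reverse=True)

    print(dest_files)   # ['a/b/d.txt', 'a/b/c.txt', 'a/b.txt', 'z.txt']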
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """
 @file test_llmanifest.py
 @author Ryan Williams

@@ -124,10 +124,10 @@ class TestLLManifest(unittest.TestCase):

 def testcmakedirs(self):
 self.m.cmakedirs("test_dir_DELETE/nested/dir")
-self.assert_(os.path.exists("test_dir_DELETE/nested/dir"))
-self.assert_(os.path.isdir("test_dir_DELETE"))
-self.assert_(os.path.isdir("test_dir_DELETE/nested"))
-self.assert_(os.path.isdir("test_dir_DELETE/nested/dir"))
+self.assertTrue(os.path.exists("test_dir_DELETE/nested/dir"))
+self.assertTrue(os.path.isdir("test_dir_DELETE"))
+self.assertTrue(os.path.isdir("test_dir_DELETE/nested"))
+self.assertTrue(os.path.isdir("test_dir_DELETE/nested/dir"))
 os.removedirs("test_dir_DELETE/nested/dir")

 if __name__ == '__main__':
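The assert_ renames are more than style: TestCase.assert_ is a long-deprecated alias of assertTrue and was removed from unittest in Python 3.12, so assertTrue is the only forward-compatible spelling. A tiny illustration (test name invented):

    import unittest

    class AliasDemo(unittest.TestCase):
        def test_truthiness(self):
            # self.assert_(1 + 1 == 2)   # Python 2 era alias, gone in 3.12+
            self.assertTrue(1 + 1 == 2)  # same semantics, still supported

    if __name__ == '__main__':
        unittest.main()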
@@ -287,6 +287,7 @@ void LLLogin::Impl::loginCoro(std::string uri, LLSD login_params)

 // If we don't recognize status at all, trouble
 if (! (status == "CURLError"
+|| status == "BadType"
 || status == "XMLRPCError"
 || status == "OtherError"))
 {

@@ -296,6 +297,15 @@ void LLLogin::Impl::loginCoro(std::string uri, LLSD login_params)
 return;
 }

+if (status == "BadType")
+{
+// Invalid xmlrpc type
+// Dump this response into logs
+LL_WARNS("LLLogin") << "Failed to parse response"
+<< " from " << xmlrpcPump.getName()
+<< " pump: " << mAuthResponse << LL_ENDL;
+}
+
 // Here status IS one of the errors tested above.
 // Tell caller this didn't work out so well.

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\

 This script scans the SL codebase for translation-related strings.

@@ -25,7 +25,7 @@ Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
 $/LicenseInfo$
 """

-from __future__ import print_function
+

 import xml.etree.ElementTree as ET
 import argparse

@@ -75,10 +75,10 @@ translate_attribs = [
 ]

 def codify_for_print(val):
-if isinstance(val, unicode):
+if isinstance(val, str):
 return val.encode("utf-8")
 else:
-return unicode(val, 'utf-8').encode("utf-8")
+return str(val, 'utf-8').encode("utf-8")

 # Returns a dict of { name => xml_node }
 def read_xml_elements(blob):

@@ -186,7 +186,7 @@ def make_translation_table(mod_tree, base_tree, lang, args):
 transl_dict = read_xml_elements(transl_blob)

 rows = 0
-for name in mod_dict.keys():
+for name in list(mod_dict.keys()):
 if not name in base_dict or mod_dict[name].text != base_dict[name].text or (args.missing and not name in transl_dict):
 elt = mod_dict[name]
 val = elt.text

@@ -307,7 +307,7 @@ def save_translation_file(per_lang_data, aux_data, outfile):
 print("Added", num_translations, "rows for language", lang)

 # Reference info, not for translation
-for aux, data in aux_data.items():
+for aux, data in list(aux_data.items()):
 df = pd.DataFrame(data, columns = ["Key", "Value"])
 df.to_excel(writer, index=False, sheet_name=aux)
 worksheet = writer.sheets[aux]
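One caveat on codify_for_print above: the change is a mechanical 2to3 rendering, and under Python 3 str.encode('utf-8') returns bytes, which print() renders as a b'...' literal. If printable text is the goal, a Python 3-native version (a sketch, not the commit's code) would decode instead:

    def codify_for_print(val):
        # Python 3: str is already text; only bytes need decoding.
        if isinstance(val, bytes):
            return val.decode("utf-8")
        return str(val)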
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
 """\
 @file anim_tool.py
 @author Brad Payne, Nat Goodspeed

@@ -39,7 +39,7 @@ $/LicenseInfo$
 import math
 import os
 import random
-from cStringIO import StringIO
+from io import StringIO
 import struct
 import sys
 from xml.etree import ElementTree

@@ -179,7 +179,7 @@ class RotKey(object):
 return this

 def dump(self, f):
-print >>f, " rot_key: t %.3f" % self.time,"st",self.time_short,"rot",",".join("%.3f" % f for f in self.rotation)
+print(" rot_key: t %.3f" % self.time,"st",self.time_short,"rot",",".join("%.3f" % f for f in self.rotation), file=f)

 def pack(self, fp):
 fp.pack("<H",self.time_short)

@@ -215,7 +215,7 @@ class PosKey(object):
 return this

 def dump(self, f):
-print >>f, " pos_key: t %.3f" % self.time,"pos ",",".join("%.3f" % f for f in self.position)
+print(" pos_key: t %.3f" % self.time,"pos ",",".join("%.3f" % f for f in self.position), file=f)

 def pack(self, fp):
 fp.pack("<H",self.time_short)

@@ -247,18 +247,18 @@ class Constraint(object):
 self.ease_out_start, self.ease_out_stop)

 def dump(self, f):
-print >>f, " constraint:"
-print >>f, " chain_length",self.chain_length
-print >>f, " constraint_type",self.constraint_type
-print >>f, " source_volume",self.source_volume
-print >>f, " source_offset",self.source_offset
-print >>f, " target_volume",self.target_volume
-print >>f, " target_offset",self.target_offset
-print >>f, " target_dir",self.target_dir
-print >>f, " ease_in_start",self.ease_in_start
-print >>f, " ease_in_stop",self.ease_in_stop
-print >>f, " ease_out_start",self.ease_out_start
-print >>f, " ease_out_stop",self.ease_out_stop
+print(" constraint:", file=f)
+print(" chain_length",self.chain_length, file=f)
+print(" constraint_type",self.constraint_type, file=f)
+print(" source_volume",self.source_volume, file=f)
+print(" source_offset",self.source_offset, file=f)
+print(" target_volume",self.target_volume, file=f)
+print(" target_offset",self.target_offset, file=f)
+print(" target_dir",self.target_dir, file=f)
+print(" ease_in_start",self.ease_in_start, file=f)
+print(" ease_in_stop",self.ease_in_stop, file=f)
+print(" ease_out_start",self.ease_out_start, file=f)
+print(" ease_out_stop",self.ease_out_stop, file=f)

 class Constraints(object):
 @staticmethod

@@ -266,7 +266,7 @@ class Constraints(object):
 this = Constraints()
 (num_constraints, ) = fup.unpack("<i")
 this.constraints = [Constraint.unpack(duration, fup)
-for i in xrange(num_constraints)]
+for i in range(num_constraints)]
 return this

 def pack(self, fp):

@@ -275,7 +275,7 @@ class Constraints(object):
 c.pack(fp)

 def dump(self, f):
-print >>f, "constraints:",len(self.constraints)
+print("constraints:",len(self.constraints), file=f)
 for c in self.constraints:
 c.dump(f)

@@ -296,7 +296,7 @@ class PositionCurve(object):
 this = PositionCurve()
 (num_pos_keys, ) = fup.unpack("<i")
 this.keys = [PosKey.unpack(duration, fup)
-for k in xrange(num_pos_keys)]
+for k in range(num_pos_keys)]
 return this

 def pack(self, fp):

@@ -305,8 +305,8 @@ class PositionCurve(object):
 k.pack(fp)

 def dump(self, f):
-print >>f, " position_curve:"
-print >>f, " num_pos_keys", len(self.keys)
+print(" position_curve:", file=f)
+print(" num_pos_keys", len(self.keys), file=f)
 for k in self.keys:
 k.dump(f)

@@ -327,7 +327,7 @@ class RotationCurve(object):
 this = RotationCurve()
 (num_rot_keys, ) = fup.unpack("<i")
 this.keys = [RotKey.unpack(duration, fup)
-for k in xrange(num_rot_keys)]
+for k in range(num_rot_keys)]
 return this

 def pack(self, fp):

@@ -336,8 +336,8 @@ class RotationCurve(object):
 k.pack(fp)

 def dump(self, f):
-print >>f, " rotation_curve:"
-print >>f, " num_rot_keys", len(self.keys)
+print(" rotation_curve:", file=f)
+print(" num_rot_keys", len(self.keys), file=f)
 for k in self.keys:
 k.dump(f)

@@ -364,9 +364,9 @@ class JointInfo(object):
 self.position_curve.pack(fp)

 def dump(self, f):
-print >>f, "joint:"
-print >>f, " joint_name:",self.joint_name
-print >>f, " joint_priority:",self.joint_priority
+print("joint:", file=f)
+print(" joint_name:",self.joint_name, file=f)
+print(" joint_priority:",self.joint_priority, file=f)
 self.rotation_curve.dump(f)
 self.position_curve.dump(f)

@@ -440,10 +440,10 @@ class Anim(object):
 fup.unpack("@ffiffII")

 self.joints = [JointInfo.unpack(self.duration, fup)
-for j in xrange(num_joints)]
+for j in range(num_joints)]
 if self.verbose:
 for joint_info in self.joints:
-print "unpacked joint",joint_info.joint_name
+print("unpacked joint",joint_info.joint_name)
 self.constraints = Constraints.unpack(self.duration, fup)
 self.buffer = fup.buffer

@@ -461,17 +461,17 @@ class Anim(object):
 f = sys.stdout
 else:
 f = open(filename,"w")
-print >>f, "versions: ", self.version, self.sub_version
-print >>f, "base_priority: ", self.base_priority
-print >>f, "duration: ", self.duration
-print >>f, "emote_name: ", self.emote_name
-print >>f, "loop_in_point: ", self.loop_in_point
-print >>f, "loop_out_point: ", self.loop_out_point
-print >>f, "loop: ", self.loop
-print >>f, "ease_in_duration: ", self.ease_in_duration
-print >>f, "ease_out_duration: ", self.ease_out_duration
-print >>f, "hand_pose", self.hand_pose
-print >>f, "num_joints", len(self.joints)
+print("versions: ", self.version, self.sub_version, file=f)
+print("base_priority: ", self.base_priority, file=f)
+print("duration: ", self.duration, file=f)
+print("emote_name: ", self.emote_name, file=f)
+print("loop_in_point: ", self.loop_in_point, file=f)
+print("loop_out_point: ", self.loop_out_point, file=f)
+print("loop: ", self.loop, file=f)
+print("ease_in_duration: ", self.ease_in_duration, file=f)
+print("ease_out_duration: ", self.ease_out_duration, file=f)
+print("hand_pose", self.hand_pose, file=f)
+print("num_joints", len(self.joints), file=f)
 for j in self.joints:
 j.dump(f)
 self.constraints.dump(f)

@@ -482,7 +482,7 @@ class Anim(object):
 fp.write(filename)

 def write_src_data(self, filename):
-print "write file",filename
+print("write file",filename)
 with open(filename,"wb") as f:
 f.write(self.buffer)

@@ -501,11 +501,11 @@ class Anim(object):
 j = self.find_joint(name)
 if j:
 if self.verbose:
-print "removing joint", name
+print("removing joint", name)
 self.joints.remove(j)
 else:
 if self.verbose:
-print "joint not found to remove", name
+print("joint not found to remove", name)

 def summary(self):
 nj = len(self.joints)

@@ -513,13 +513,13 @@ class Anim(object):
 nstatic = len([j for j in self.joints
 if j.rotation_curve.is_static()
 and j.position_curve.is_static()])
-print "summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic)
+print("summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic))

 def add_pos(self, joint_names, positions):
 js = [joint for joint in self.joints if joint.joint_name in joint_names]
 for j in js:
 if self.verbose:
-print "adding positions",j.joint_name,positions
+print("adding positions",j.joint_name,positions)
 j.joint_priority = 4
 j.position_curve.keys = [PosKey(self.duration * i / (len(positions) - 1),
 self.duration,

@@ -529,7 +529,7 @@ class Anim(object):
 def add_rot(self, joint_names, rotations):
 js = [joint for joint in self.joints if joint.joint_name in joint_names]
 for j in js:
-print "adding rotations",j.joint_name
+print("adding rotations",j.joint_name)
 j.joint_priority = 4
 j.rotation_curve.keys = [RotKey(self.duration * i / (len(rotations) - 1),
 self.duration,

@@ -539,8 +539,8 @@ class Anim(object):
 def twistify(anim, joint_names, rot1, rot2):
 js = [joint for joint in anim.joints if joint.joint_name in joint_names]
 for j in js:
-print "twisting",j.joint_name
-print len(j.rotation_curve.keys)
+print("twisting",j.joint_name)
+print(len(j.rotation_curve.keys))
 j.joint_priority = 4
 # Set the joint(s) to rot1 at time 0, rot2 at the full duration.
 j.rotation_curve.keys = [

@@ -563,7 +563,7 @@ def get_joint_by_name(tree,name):
 if len(matches)==1:
 return matches[0]
 elif len(matches)>1:
-print "multiple matches for name",name
+print("multiple matches for name",name)
 return None
 else:
 return None

@@ -577,7 +577,7 @@ def get_elt_pos(elt):
 return (0.0, 0.0, 0.0)

 def resolve_joints(names, skel_tree, lad_tree, no_hud=False):
-print "resolve joints, no_hud is",no_hud
+print("resolve joints, no_hud is",no_hud)
 if skel_tree and lad_tree:
 all_elts = [elt for elt in skel_tree.getroot().iter()]
 all_elts.extend([elt for elt in lad_tree.getroot().iter()])

@@ -641,12 +641,12 @@ def main(*argv):
 parser.add_argument("outfilename", nargs="?", help="name of a .anim file to output")
 args = parser.parse_args(argv)

-print "anim_tool.py: " + " ".join(argv)
-print "dump is", args.dump
-print "infilename",args.infilename,"outfilename",args.outfilename
-print "rot",args.rot
-print "pos",args.pos
-print "joints",args.joints
+print("anim_tool.py: " + " ".join(argv))
+print("dump is", args.dump)
+print("infilename",args.infilename,"outfilename",args.outfilename)
+print("rot",args.rot)
+print("pos",args.pos)
+print("joints",args.joints)

 anim = Anim(args.infilename, args.verbose)
 skel_tree = None

@@ -663,7 +663,7 @@ def main(*argv):
 if args.joints:
 joints = resolve_joints(args.joints, skel_tree, lad_tree, args.no_hud)
 if args.verbose:
-print "joints resolved to",joints
+print("joints resolved to",joints)
 for name in joints:
 anim.add_joint(name,0)
 if args.delete_joints:

@@ -677,8 +677,8 @@ def main(*argv):
 # pick a random sequence of positions for each joint specified
 for joint in joints:
 # generate a list of rand_pos triples
-pos_array = [tuple(random.uniform(-1,1) for i in xrange(3))
-for j in xrange(args.rand_pos)]
+pos_array = [tuple(random.uniform(-1,1) for i in range(3))
+for j in range(args.rand_pos)]
 # close the loop by cycling back to the first entry
 pos_array.append(pos_array[0])
 anim.add_pos([joint], pos_array)

@@ -688,26 +688,26 @@ def main(*argv):
 if elt is not None:
 anim.add_pos([joint], 2*[get_elt_pos(elt)])
 else:
-print "no elt or no pos data for",joint
+print("no elt or no pos data for",joint)
 if args.set_version:
 anim.version, anim.sub_version = args.set_version
 if args.base_priority is not None:
-print "set base priority",args.base_priority
+print("set base priority",args.base_priority)
 anim.base_priority = args.base_priority
 # --joint_priority sets priority for ALL joints, not just the explicitly-
 # specified ones
 if args.joint_priority is not None:
-print "set joint priority",args.joint_priority
+print("set joint priority",args.joint_priority)
 for joint in anim.joints:
 joint.joint_priority = args.joint_priority
 if args.duration is not None:
-print "set duration",args.duration
+print("set duration",args.duration)
 anim.duration = args.duration
 if args.loop_in is not None:
-print "set loop_in",args.loop_in
+print("set loop_in",args.loop_in)
 anim.loop_in_point = args.loop_in
 if args.loop_out is not None:
-print "set loop_out",args.loop_out
+print("set loop_out",args.loop_out)
 anim.loop_out_point = args.loop_out
 if args.dump:
 anim.dump("-")
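Most of the anim_tool.py hunks apply one mechanical pattern: Python 2's print >>f chevron syntax becomes a print() call with the stream moved into the file= keyword. A self-contained sketch of the pattern (class and values invented):

    import sys

    class RotKeyDemo:
        # Illustrative stand-in for the RotKey/PosKey dump() methods above.
        def __init__(self, time, rotation):
            self.time = time
            self.rotation = rotation

        def dump(self, f=sys.stdout):
            # Python 2: print >>f, "rot_key: t %.3f" % self.time, ...
            print("rot_key: t %.3f" % self.time,
                  "rot", ",".join("%.3f" % v for v in self.rotation),
                  file=f)

    RotKeyDemo(0.5, (0.0, 0.1, 0.2)).dump()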
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3

 """\

@@ -42,23 +42,23 @@ def node_key(e):
 def compare_matched_nodes(key,items,summary):
 tags = list(set([e.tag for e in items]))
 if len(tags) != 1:
-print "different tag types for key",key
+print("different tag types for key",key)
 summary.setdefault("tag_mismatch",0)
 summary["tag_mismatch"] += 1
 return
-all_attrib = list(set(chain.from_iterable([e.attrib.keys() for e in items])))
+all_attrib = list(set(chain.from_iterable([list(e.attrib.keys()) for e in items])))
 #print key,"all_attrib",all_attrib
 for attr in all_attrib:
 vals = [e.get(attr) for e in items]
 #print "key",key,"attr",attr,"vals",vals
 if len(set(vals)) != 1:
-print key,"- attr",attr,"multiple values",vals
+print(key,"- attr",attr,"multiple values",vals)
 summary.setdefault("attr",{})
 summary["attr"].setdefault(attr,0)
 summary["attr"][attr] += 1

 def compare_trees(file_trees):
-print "compare_trees"
+print("compare_trees")
 summary = {}
 all_keys = list(set([node_key(e) for tree in file_trees for e in tree.getroot().iter() if node_key(e)]))
 #print "keys",all_keys

@@ -70,14 +70,14 @@ def compare_trees(file_trees):
 items = []
 for nodes in tree_nodes:
 if not key in nodes:
-print "file",i,"missing item for key",key
+print("file",i,"missing item for key",key)
 summary.setdefault("missing",0)
 summary["missing"] += 1
 else:
 items.append(nodes[key])
 compare_matched_nodes(key,items,summary)
-print "Summary:"
-print summary
+print("Summary:")
+print(summary)

 def dump_appearance_params(tree):
 vals = []

@@ -88,7 +88,7 @@ def dump_appearance_params(tree):
 vals.append("{" + e.get("id") + "," +e.get("u8") + "}")
 #print e.get("id"), e.get("name"), e.get("group"), e.get("u8")
 if len(vals)==253:
-print ", ".join(vals)
+print(", ".join(vals))


 if __name__ == "__main__":

@@ -101,9 +101,9 @@ if __name__ == "__main__":
 args = parser.parse_args()


-print "files",args.files
+print("files",args.files)
 file_trees = [etree.parse(filename) for filename in args.files]
-print args
+print(args)
 if args.compare:
 compare_trees(file_trees)
 if args.appearance_params:
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3

 """\

@@ -35,14 +35,14 @@ from collada import *
 from lxml import etree

 def mesh_summary(mesh):
-print "scenes",mesh.scenes
+print("scenes",mesh.scenes)
 for scene in mesh.scenes:
-print "scene",scene
+print("scene",scene)
 for node in scene.nodes:
-print "node",node
+print("node",node)

 def mesh_lock_offsets(tree, joints):
-print "mesh_lock_offsets",tree,joints
+print("mesh_lock_offsets",tree,joints)
 for joint_node in tree.iter():
 if "node" not in joint_node.tag:
 continue

@@ -57,11 +57,11 @@ def mesh_lock_offsets(tree, joints):
 floats[7] += 0.0001
 floats[11] += 0.0001
 matrix_node.text = " ".join([str(f) for f in floats])
-print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats
+print(joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats)


 def mesh_random_offsets(tree, joints):
-print "mesh_random_offsets",tree,joints
+print("mesh_random_offsets",tree,joints)
 for joint_node in tree.iter():
 if "node" not in joint_node.tag:
 continue

@@ -73,13 +73,13 @@ def mesh_random_offsets(tree, joints):
 for matrix_node in list(joint_node):
 if "matrix" in matrix_node.tag:
 floats = [float(x) for x in matrix_node.text.split()]
-print "randomizing",floats
+print("randomizing",floats)
 if len(floats) == 16:
 floats[3] += random.uniform(-1.0,1.0)
 floats[7] += random.uniform(-1.0,1.0)
 floats[11] += random.uniform(-1.0,1.0)
 matrix_node.text = " ".join([str(f) for f in floats])
-print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats
+print(joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats)


 if __name__ == "__main__":

@@ -96,24 +96,24 @@ if __name__ == "__main__":
 tree = None

 if args.infilename:
-print "reading",args.infilename
+print("reading",args.infilename)
 mesh = Collada(args.infilename)
 tree = etree.parse(args.infilename)

 if args.summary:
-print "summarizing",args.infilename
+print("summarizing",args.infilename)
 mesh_summary(mesh)

 if args.lock_offsets:
-print "locking offsets for",args.lock_offsets
+print("locking offsets for",args.lock_offsets)
 mesh_lock_offsets(tree, args.lock_offsets)

 if args.random_offsets:
-print "adding random offsets for",args.random_offsets
+print("adding random offsets for",args.random_offsets)
 mesh_random_offsets(tree, args.random_offsets)

 if args.outfilename:
-print "writing",args.outfilename
+print("writing",args.outfilename)
 f = open(args.outfilename,"w")
-print >>f, etree.tostring(tree, pretty_print=True) #need update to get: , short_empty_elements=True)
+print(etree.tostring(tree, pretty_print=True), file=f) #need update to get: , short_empty_elements=True)

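A subtlety in the last hunk: under Python 3, lxml's etree.tostring() returns bytes by default, so printing it into a text-mode file writes a b'...' literal. If that bites in practice, passing encoding="unicode" makes tostring() return str instead. A hedged sketch (output path and XML invented):

    from lxml import etree

    root = etree.fromstring("<skeleton><bone name='mPelvis'/></skeleton>")
    with open("out.xml", "w") as f:
        # encoding="unicode" yields str rather than bytes.
        print(etree.tostring(root, pretty_print=True, encoding="unicode"), file=f)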
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3

 """\

@@ -32,14 +32,14 @@ from lxml import etree

 def get_joint_names(tree):
 joints = [element.get('name') for element in tree.getroot().iter() if element.tag in ['bone','collision_volume']]
-print "joints:",joints
+print("joints:",joints)
 return joints

 def get_aliases(tree):
 aliases = {}
 alroot = tree.getroot()
 for element in alroot.iter():
-for key in element.keys():
+for key in list(element.keys()):
 if key == 'aliases':
 name = element.get('name')
 val = element.get('aliases')

@@ -58,19 +58,19 @@ def float_tuple(str, n=3):
 if len(result)==n:
 return result
 else:
-print "tuple length wrong:", str,"gave",result,"wanted len",n,"got len",len(result)
+print("tuple length wrong:", str,"gave",result,"wanted len",n,"got len",len(result))
 raise Exception()
 except:
-print "convert failed for:",str
+print("convert failed for:",str)
 raise

 def check_symmetry(name, field, vec1, vec2):
 if vec1[0] != vec2[0]:
-print name,field,"x match fail"
+print(name,field,"x match fail")
 if vec1[1] != -vec2[1]:
-print name,field,"y mirror image fail"
+print(name,field,"y mirror image fail")
 if vec1[2] != vec2[2]:
-print name,field,"z match fail"
+print(name,field,"z match fail")

 def enforce_symmetry(tree, element, field, fix=False):
 name = element.get("name")

@@ -92,7 +92,7 @@ def get_element_by_name(tree,name):
 if len(matches)==1:
 return matches[0]
 elif len(matches)>1:
-print "multiple matches for name",name
+print("multiple matches for name",name)
 return None
 else:
 return None

@@ -100,7 +100,7 @@ def get_element_by_name(tree,name):
 def list_skel_tree(tree):
 for element in tree.getroot().iter():
 if element.tag == "bone":
-print element.get("name"),"-",element.get("support")
+print(element.get("name"),"-",element.get("support"))

 def validate_child_order(tree, ogtree, fix=False):
 unfixable = 0

@@ -116,12 +116,12 @@ def validate_child_order(tree, ogtree, fix=False):
 if og_element is not None:
 for echild,ochild in zip(list(element),list(og_element)):
 if echild.get("name") != ochild.get("name"):
-print "Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name")
+print("Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name"))
 if fix:
 tofix.add(element.get("name"))
 children = {}
 for name in tofix:
-print "FIX",name
+print("FIX",name)
 element = get_element_by_name(tree,name)
 og_element = get_element_by_name(ogtree,name)
 children = []

@@ -130,20 +130,20 @@ def validate_child_order(tree, ogtree, fix=False):
 elt = get_element_by_name(tree,og_elt.get("name"))
 if elt is not None:
 children.append(elt)
-print "b:",elt.get("name")
+print("b:",elt.get("name"))
 else:
-print "b missing:",og_elt.get("name")
+print("b missing:",og_elt.get("name"))
 # then add children that are not present in the original joints
 for elt in list(element):
 og_elt = get_element_by_name(ogtree,elt.get("name"))
 if og_elt is None:
 children.append(elt)
-print "e:",elt.get("name")
+print("e:",elt.get("name"))
 # if we've done this right, we have a rearranged list of the same length
 if len(children)!=len(element):
-print "children",[e.get("name") for e in children]
-print "element",[e.get("name") for e in element]
-print "children changes for",name,", cannot reconcile"
+print("children",[e.get("name") for e in children])
+print("element",[e.get("name") for e in element])
+print("children changes for",name,", cannot reconcile")
 else:
 element[:] = children

@@ -163,7 +163,7 @@ def validate_child_order(tree, ogtree, fix=False):
 # - digits of precision should be consistent (again, except for old joints)
 # - new bones should have pos, pivot the same
 def validate_skel_tree(tree, ogtree, reftree, fix=False):
-print "validate_skel_tree"
+print("validate_skel_tree")
 (num_bones,num_cvs) = (0,0)
 unfixable = 0
 defaults = {"connected": "false",

@@ -175,7 +175,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
 # Preserve values from og_file:
 for f in ["pos","rot","scale","pivot"]:
 if og_element is not None and og_element.get(f) and (str(element.get(f)) != str(og_element.get(f))):
-print element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f)
+print(element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f))
 if fix:
 element.set(f, og_element.get(f))

@@ -187,17 +187,17 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
 fields.extend(["end","connected"])
 for f in fields:
 if not element.get(f):
-print element.get("name"),"missing required field",f
+print(element.get("name"),"missing required field",f)
 if fix:
 if og_element is not None and og_element.get(f):
-print "fix from ogtree"
+print("fix from ogtree")
 element.set(f,og_element.get(f))
 elif ref_element is not None and ref_element.get(f):
-print "fix from reftree"
+print("fix from reftree")
 element.set(f,ref_element.get(f))
 else:
 if f in defaults:
-print "fix by using default value",f,"=",defaults[f]
+print("fix by using default value",f,"=",defaults[f])
 element.set(f,defaults[f])
 elif f == "support":
 if og_element is not None:

@@ -205,7 +205,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
 else:
 element.set(f,"extended")
 else:
-print "unfixable:",element.get("name"),"no value for field",f
+print("unfixable:",element.get("name"),"no value for field",f)
 unfixable += 1

 fix_name(element)

@@ -214,7 +214,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
 enforce_symmetry(tree, element, field, fix)
 if element.get("support")=="extended":
 if element.get("pos") != element.get("pivot"):
-print "extended joint",element.get("name"),"has mismatched pos, pivot"
+print("extended joint",element.get("name"),"has mismatched pos, pivot")


 if element.tag == "linden_skeleton":

@@ -223,19 +223,19 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
 all_bones = [e for e in tree.getroot().iter() if e.tag=="bone"]
 all_cvs = [e for e in tree.getroot().iter() if e.tag=="collision_volume"]
 if num_bones != len(all_bones):
-print "wrong bone count, expected",len(all_bones),"got",num_bones
+print("wrong bone count, expected",len(all_bones),"got",num_bones)
 if fix:
 element.set("num_bones", str(len(all_bones)))
 if num_cvs != len(all_cvs):
-print "wrong cv count, expected",len(all_cvs),"got",num_cvs
+print("wrong cv count, expected",len(all_cvs),"got",num_cvs)
 if fix:
 element.set("num_collision_volumes", str(len(all_cvs)))

-print "skipping child order code"
+print("skipping child order code")
 #unfixable += validate_child_order(tree, ogtree, fix)

 if fix and (unfixable > 0):
-print "BAD FILE:", unfixable,"errs could not be fixed"
+print("BAD FILE:", unfixable,"errs could not be fixed")


 def slider_info(ladtree,skeltree):

@@ -243,37 +243,37 @@ def slider_info(ladtree,skeltree):
 for skel_param in param.iter("param_skeleton"):
 bones = [b for b in skel_param.iter("bone")]
 if bones:
-print "param",param.get("name"),"id",param.get("id")
+print("param",param.get("name"),"id",param.get("id"))
 value_min = float(param.get("value_min"))
 value_max = float(param.get("value_max"))
 neutral = 100.0 * (0.0-value_min)/(value_max-value_min)
-print " neutral",neutral
+print(" neutral",neutral)
 for b in bones:
 scale = float_tuple(b.get("scale","0 0 0"))
 offset = float_tuple(b.get("offset","0 0 0"))
-print " bone", b.get("name"), "scale", scale, "offset", offset
+print(" bone", b.get("name"), "scale", scale, "offset", offset)
 scale_min = [value_min * s for s in scale]
 scale_max = [value_max * s for s in scale]
 offset_min = [value_min * t for t in offset]
 offset_max = [value_max * t for t in offset]
 if (scale_min != scale_max):
-print " Scale MinX", scale_min[0]
-print " Scale MinY", scale_min[1]
-print " Scale MinZ", scale_min[2]
-print " Scale MaxX", scale_max[0]
-print " Scale MaxY", scale_max[1]
-print " Scale MaxZ", scale_max[2]
+print(" Scale MinX", scale_min[0])
+print(" Scale MinY", scale_min[1])
+print(" Scale MinZ", scale_min[2])
+print(" Scale MaxX", scale_max[0])
+print(" Scale MaxY", scale_max[1])
+print(" Scale MaxZ", scale_max[2])
 if (offset_min != offset_max):
-print " Offset MinX", offset_min[0]
-print " Offset MinY", offset_min[1]
-print " Offset MinZ", offset_min[2]
-print " Offset MaxX", offset_max[0]
-print " Offset MaxY", offset_max[1]
-print " Offset MaxZ", offset_max[2]
+print(" Offset MinX", offset_min[0])
+print(" Offset MinY", offset_min[1])
+print(" Offset MinZ", offset_min[2])
+print(" Offset MaxX", offset_max[0])
+print(" Offset MaxY", offset_max[1])
+print(" Offset MaxZ", offset_max[2])

 # Check contents of avatar_lad file relative to a specified skeleton
 def validate_lad_tree(ladtree,skeltree,orig_ladtree):
-print "validate_lad_tree"
+print("validate_lad_tree")
 bone_names = [elt.get("name") for elt in skeltree.iter("bone")]
 bone_names.append("mScreen")
 bone_names.append("mRoot")

@@ -285,7 +285,7 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
 #print "attachment",att_name
 joint_name = att.get("joint")
 if not joint_name in bone_names:
-print "att",att_name,"linked to invalid joint",joint_name
+print("att",att_name,"linked to invalid joint",joint_name)
 for skel_param in ladtree.iter("param_skeleton"):
 skel_param_id = skel_param.get("id")
 skel_param_name = skel_param.get("name")

@@ -297,13 +297,13 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
 for bone in skel_param.iter("bone"):
 bone_name = bone.get("name")
 if not bone_name in bone_names:
-print "skel param references invalid bone",bone_name
-print etree.tostring(bone)
+print("skel param references invalid bone",bone_name)
+print(etree.tostring(bone))
 bone_scale = float_tuple(bone.get("scale","0 0 0"))
 bone_offset = float_tuple(bone.get("offset","0 0 0"))
 param = bone.getparent().getparent()
 if bone_scale==(0, 0, 0) and bone_offset==(0, 0, 0):
-print "no-op bone",bone_name,"in param",param.get("id","-1")
+print("no-op bone",bone_name,"in param",param.get("id","-1"))
 # check symmetry of sliders
 if "Right" in bone.get("name"):
 left_name = bone_name.replace("Right","Left")

@@ -312,12 +312,12 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
 if b.get("name")==left_name:
 left_bone = b
 if left_bone is None:
-print "left_bone not found",left_name,"in",param.get("id","-1")
+print("left_bone not found",left_name,"in",param.get("id","-1"))
 else:
 left_scale = float_tuple(left_bone.get("scale","0 0 0"))
 left_offset = float_tuple(left_bone.get("offset","0 0 0"))
 if left_scale != bone_scale:
-print "scale mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1")
+print("scale mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1"))
 param_id = int(param.get("id","-1"))
 if param_id in [661]: # shear
 expected_offset = tuple([bone_offset[0],bone_offset[1],-bone_offset[2]])

@@ -326,7 +326,7 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
 else:
 expected_offset = tuple([bone_offset[0],-bone_offset[1],bone_offset[2]])
 if left_offset != expected_offset:
-print "offset mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1")
+print("offset mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1"))

 drivers = {}
 for driven_param in ladtree.iter("driven"):

@@ -340,15 +340,15 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
 if (actual_param.get("value_min") != driver.get("value_min") or \
 actual_param.get("value_max") != driver.get("value_max")):
 if args.verbose:
-print "MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max")
+print("MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max"))

 for driven_id in drivers:
 dset = drivers[driven_id]
 if len(dset) != 1:
-print "driven_id",driven_id,"has multiple drivers",dset
+print("driven_id",driven_id,"has multiple drivers",dset)
 else:
 if args.verbose:
-print "driven_id",driven_id,"has one driver",dset
+print("driven_id",driven_id,"has one driver",dset)
 if orig_ladtree:
 # make sure expected message format is unchanged
 orig_message_params_by_id = dict((int(param.get("id")),param) for param in orig_ladtree.iter("param") if param.get("group") in ["0","3"])

@@ -358,25 +358,25 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
 message_ids = sorted(message_params_by_id.keys())
 #print "message_ids",message_ids
 if (set(message_ids) != set(orig_message_ids)):
-print "mismatch in message ids!"
-print "added",set(message_ids) - set(orig_message_ids)
-print "removed",set(orig_message_ids) - set(message_ids)
+print("mismatch in message ids!")
+print("added",set(message_ids) - set(orig_message_ids))
+print("removed",set(orig_message_ids) - set(message_ids))
 else:
-print "message ids OK"
+print("message ids OK")

 def remove_joint_by_name(tree, name):
-print "remove joint:",name
+print("remove joint:",name)
 elt = get_element_by_name(tree,name)
 while elt is not None:
 children = list(elt)
 parent = elt.getparent()
-print "graft",[e.get("name") for e in children],"into",parent.get("name")
-print "remove",elt.get("name")
+print("graft",[e.get("name") for e in children],"into",parent.get("name"))
+print("remove",elt.get("name"))
 #parent_children = list(parent)
 loc = parent.index(elt)
 parent[loc:loc+1] = children
 elt[:] = []
-print "parent now:",[e.get("name") for e in list(parent)]
+print("parent now:",[e.get("name") for e in list(parent)])
 elt = get_element_by_name(tree,name)

 def compare_skel_trees(atree,btree):

@@ -386,9 +386,9 @@ def compare_skel_trees(atree,btree):
 b_missing = set()
 a_names = set(e.get("name") for e in atree.getroot().iter() if e.get("name"))
 b_names = set(e.get("name") for e in btree.getroot().iter() if e.get("name"))
-print "a_names\n ",str("\n ").join(sorted(list(a_names)))
-print
-print "b_names\n ","\n ".join(sorted(list(b_names)))
+print("a_names\n ",str("\n ").join(sorted(list(a_names))))
+print()
+print("b_names\n ","\n ".join(sorted(list(b_names))))
 all_names = set.union(a_names,b_names)
 for name in all_names:
 if not name:

@@ -396,38 +396,38 @@ def compare_skel_trees(atree,btree):
 a_element = get_element_by_name(atree,name)
 b_element = get_element_by_name(btree,name)
 if a_element is None or b_element is None:
-print "something not found for",name,a_element,b_element
+print("something not found for",name,a_element,b_element)
 if a_element is not None and b_element is not None:
 all_attrib = set.union(set(a_element.attrib.keys()),set(b_element.attrib.keys()))
-print name,all_attrib
+print(name,all_attrib)
 for att in all_attrib:
 if a_element.get(att) != b_element.get(att):
 if not att in diffs:
 diffs[att] = set()
 diffs[att].add(name)
-print "tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att))
+print("tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att)))
 if float_tuple(a_element.get(att)) != float_tuple(b_element.get(att)):
-print "diff in",name,att
+print("diff in",name,att)
 if not att in realdiffs:
 realdiffs[att] = set()
 realdiffs[att].add(name)
 for att in diffs:
-print "Differences in",att
+print("Differences in",att)
 for name in sorted(diffs[att]):
-print " ",name
+print(" ",name)
 for att in realdiffs:
-print "Real differences in",att
+print("Real differences in",att)
 for name in sorted(diffs[att]):
-print " ",name
+print(" ",name)
 a_missing = b_names.difference(a_names)
 b_missing = a_names.difference(b_names)
 if len(a_missing) or len(b_missing):
-print "Missing from comparison"
+print("Missing from comparison")
 for name in a_missing:
-print " ",name
-print "Missing from infile"
+print(" ",name)
+print("Missing from infile")
 for name in b_missing:
-print " ",name
+print(" ",name)

 if __name__ == "__main__":

@@ -499,5 +499,5 @@ if __name__ == "__main__":

 if args.outfilename:
 f = open(args.outfilename,"w")
-print >>f, etree.tostring(tree, pretty_print=True) #need update to get: , short_empty_elements=True)
+print(etree.tostring(tree, pretty_print=True), file=f) #need update to get: , short_empty_elements=True)

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file md5check.py
 @brief Replacement for message template compatibility verifier.

@@ -29,14 +29,14 @@ import sys
 import hashlib

 if len(sys.argv) != 3:
-print """Usage: %s --create|<hash-digest> <file>
+print("""Usage: %s --create|<hash-digest> <file>

 Creates an md5sum hash digest of the specified file content
 and compares it with the given hash digest.

 If --create is used instead of a hash digest, it will simply
 print out the hash digest of specified file content.
-""" % sys.argv[0]
+""" % sys.argv[0])
 sys.exit(1)

 if sys.argv[2] == '-':

@@ -48,9 +48,9 @@ else:

 hexdigest = hashlib.md5(fh.read()).hexdigest()
 if sys.argv[1] == '--create':
-print hexdigest
+print(hexdigest)
 elif hexdigest == sys.argv[1]:
-print "md5sum check passed:", filename
+print("md5sum check passed:", filename)
 else:
-print "md5sum check FAILED:", filename
+print("md5sum check FAILED:", filename)
 sys.exit(1)
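Worth flagging for md5check.py: Python 3's hashlib.md5() accepts only bytes, so the handle behind fh.read() must be opened in binary mode for the hashing to keep working. A minimal sketch (path invented):

    import hashlib

    def md5_of(path):
        # Binary mode matters on Python 3: hashlib rejects str input.
        with open(path, "rb") as fh:
            return hashlib.md5(fh.read()).hexdigest()

    # md5_of("message_template.msg")  # illustrative use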
@@ -40,7 +40,7 @@ def get_metrics_record(infiles):
 context = iter(context)

 # get the root element
-event, root = context.next()
+event, root = next(context)
 try:
 for event, elem in context:
 if event == "end" and elem.tag == "llsd":

@@ -48,7 +48,7 @@ def get_metrics_record(infiles):
 sd = llsd.parse_xml(xmlstr)
 yield sd
 except etree.XMLSyntaxError:
-print "Fell off end of document"
+print("Fell off end of document")

 f.close()

@@ -56,7 +56,7 @@ def update_stats(stats,rec):
 for region in rec["regions"]:
 region_key = (region["grid_x"],region["grid_y"])
 #print "region",region_key
-for field, val in region.iteritems():
+for field, val in region.items():
 if field in ["duration","grid_x","grid_y"]:
 continue
 if field == "fps":

@@ -96,7 +96,7 @@ if __name__ == "__main__":
 for key in sorted(stats.keys()):
 val = stats[key]
 if val["count"] > 0:
-print key,"count",val["count"],"mean_time",val["sum"]/val["count"],"mean_bytes",val["sum_bytes"]/val["count"],"net bytes/sec",val["sum_bytes"]/val["sum"],"enqueued",val["enqueued"],"dequeued",val["dequeued"]
+print(key,"count",val["count"],"mean_time",val["sum"]/val["count"],"mean_bytes",val["sum_bytes"]/val["count"],"net bytes/sec",val["sum_bytes"]/val["sum"],"enqueued",val["enqueued"],"dequeued",val["dequeued"])
 else:
-print key,"count",val["count"],"enqueued",val["enqueued"],"dequeued",val["dequeued"]
+print(key,"count",val["count"],"enqueued",val["enqueued"],"dequeued",val["dequeued"])

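Two idioms in this file generalize across the port: iterators lose their .next() method (use the next() builtin instead) and dicts lose iteritems() (items() now returns a cheap view). A sketch with invented data:

    context = iter([("start", "llsd"), ("end", "llsd")])
    event, root = next(context)        # Python 2: context.next()

    region = {"grid_x": 1000, "grid_y": 1001, "fps": 45.0}
    for field, val in region.items():  # Python 2: region.iteritems()
        print(field, val)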
@@ -54,11 +54,11 @@ def show_stats_by_key(recs,indices,settings_sd = None):
 v = tuple(v)
 per_key_cnt[k][v] += 1
 except Exception as e:
-print "err", e
-print "d", d, "k", k, "v", v
+print("err", e)
+print("d", d, "k", k, "v", v)
 raise
 mc = cnt.most_common()
-print "========================="
+print("=========================")
 keyprefix = ""
 if len(indices)>0:
 keyprefix = ".".join(indices) + "."

@@ -67,32 +67,32 @@ def show_stats_by_key(recs,indices,settings_sd = None):
 bigc = m[1]
 unset_cnt = len(recs) - bigc
 kmc = per_key_cnt[k].most_common(5)
-print i, keyprefix+str(k), bigc
+print(i, keyprefix+str(k), bigc)
 if settings_sd is not None and k in settings_sd and "Value" in settings_sd[k]:
-print " ", "default",settings_sd[k]["Value"],"count",unset_cnt
+print(" ", "default",settings_sd[k]["Value"],"count",unset_cnt)
 for v in kmc:
-print " ", "value",v[0],"count",v[1]
+print(" ", "value",v[0],"count",v[1])
 if settings_sd is not None:
-print "Total keys in settings", len(settings_sd.keys())
+print("Total keys in settings", len(settings_sd.keys()))
 unused_keys = list(set(settings_sd.keys()) - set(cnt.keys()))
 unused_keys_non_str = [k for k in unused_keys if settings_sd[k]["Type"] != "String"]
 unused_keys_str = [k for k in unused_keys if settings_sd[k]["Type"] == "String"]

 # Things that no one in the sample has set to a non-default value. Possible candidates for removal.
-print "\nUnused_keys_non_str", len(unused_keys_non_str)
-print "======================"
-print "\n".join(sorted(unused_keys_non_str))
+print("\nUnused_keys_non_str", len(unused_keys_non_str))
+print( "======================")
+print("\n".join(sorted(unused_keys_non_str)))

 # Strings are not currently logged, so we have no info on usage.
-print "\nString keys (usage unknown)", len(unused_keys_str)
-print "======================"
-print "\n".join(sorted(unused_keys_str))
+print("\nString keys (usage unknown)", len(unused_keys_str))
+print( "======================")
+print("\n".join(sorted(unused_keys_str)))

 # Things that someone has set but that aren't recognized settings.
 unrec_keys = list(set(cnt.keys()) - set(settings_sd.keys()))
-print "\nUnrecognized keys", len(unrec_keys)
-print "======================"
-print "\n".join(sorted(unrec_keys))
+print("\nUnrecognized keys", len(unrec_keys))
+print( "======================")
+print("\n".join(sorted(unrec_keys)))

 result = (settings_sd.keys(), unused_keys_str, unused_keys_non_str, unrec_keys)
 return result

@@ -138,7 +138,7 @@ def get_used_strings(root_dir):
 for dir_name, sub_dir_list, file_list in os.walk(root_dir):
 for fname in file_list:
 if fname in ["settings.xml", "settings.xml.edit", "settings_per_account.xml"]:
-print "skip", fname
+print("skip", fname)
 continue
 (base,ext) = os.path.splitext(fname)
 #if ext not in [".cpp", ".hpp", ".h", ".xml"]:

@@ -155,8 +155,8 @@ def get_used_strings(root_dir):
 for m in ms:
 #print "used_str",m
 used_str.add(m)
-print "skipped extensions", skipped_ext
-print "got used_str", len(used_str)
+print("skipped extensions", skipped_ext)
+print("got used_str", len(used_str))
 return used_str


@@ -171,7 +171,7 @@ if __name__ == "__main__":
 args = parser.parse_args()

 for fname in args.infiles:
-print "process", fname
+print("process", fname)
 df = pd.read_csv(fname,sep='\t')
 #print "DF", df.describe()
 jstrs = df['RAW_LOG:BODY']

@@ -182,12 +182,12 @@ if __name__ == "__main__":
 show_stats_by_key(recs,[])
 show_stats_by_key(recs,["agent"])
 if args.preferences:
-print "\nSETTINGS.XML"
+print("\nSETTINGS.XML")
 settings_sd = parse_settings_xml("settings.xml")
 #for skey,svals in settings_sd.items():
 # print skey, "=>", svals
 (all_str,_,_,_) = show_stats_by_key(recs,["preferences","settings"],settings_sd)
-print
+print()

 #print "\nSETTINGS_PER_ACCOUNT.XML"
 #settings_pa_sd = parse_settings_xml("settings_per_account.xml")

@@ -201,19 +201,19 @@ if __name__ == "__main__":
 unref_strings = all_str_set-used_strings_set
 # Some settings names are generated by appending to a prefix. Need to look for this case.
 prefix_used = set()
-print "checking unref_strings", len(unref_strings)
+print("checking unref_strings", len(unref_strings))
 for u in unref_strings:
 for k in range(6,len(u)):
 prefix = u[0:k]
 if prefix in all_str_set and prefix in used_strings_set:
 prefix_used.add(u)
 #print "PREFIX_USED",u,prefix
-print "PREFIX_USED", len(prefix_used), ",".join(list(prefix_used))
-print
+print("PREFIX_USED", len(prefix_used), ",".join(list(prefix_used)))
+print()
 unref_strings = unref_strings - prefix_used

-print "\nUNREF_IN_CODE " + str(len(unref_strings)) + "\n"
-print "\n".join(list(unref_strings))
+print("\nUNREF_IN_CODE " + str(len(unref_strings)) + "\n")
+print("\n".join(list(unref_strings)))
 settings_str = read_raw_settings_xml("settings.xml")
 # Do this via direct string munging to generate minimal changeset
 settings_edited = remove_settings(settings_str,unref_strings)
@ -1,4 +1,4 @@
|
|||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
"""\
|
||||
This module formats the package version and copyright information for the
|
||||
viewer and its dependent packages.
|
||||
|
|
@@ -37,6 +37,9 @@ parser.add_argument('version', help='viewer version number')
 args = parser.parse_args()

 _autobuild=os.getenv('AUTOBUILD', 'autobuild')
+_autobuild_env=os.environ.copy()
+# Coerce stdout encoding to utf-8 as cygwin's will be detected as cp1252 otherwise.
+_autobuild_env["PYTHONIOENCODING"] = "utf-8"

 pkg_line=re.compile('^([\w-]+):\s+(.*)$')
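
The three added lines build a private environment for the autobuild child process: PYTHONIOENCODING overrides whatever stdout encoding the child Python would otherwise detect (cp1252 under cygwin, per the comment), so non-ASCII copyright text survives the pipe. A minimal sketch of the pattern, with sys.executable standing in for the real autobuild command:

import os
import subprocess
import sys

# Hedged sketch: force the child's stdio to UTF-8 via the environment,
# then decode the pipe explicitly with the same codec.
env = os.environ.copy()
env["PYTHONIOENCODING"] = "utf-8"
raw = subprocess.check_output(
    [sys.executable, "-c", "print(u'\\u00a9 2011')"],  # child emits a copyright sign
    env=env)
assert raw.decode("utf-8").strip() == u"\u00a9 2011"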
@@ -50,7 +53,7 @@ def autobuild(*args):
     try:
         child = subprocess.Popen(command,
                                  stdin=None, stdout=subprocess.PIPE,
-                                 universal_newlines=True)
+                                 universal_newlines=True, env=_autobuild_env)
     except OSError as err:
         if err.errno != errno.ENOENT:
             # Don't attempt to interpret anything but ENOENT
@@ -113,20 +116,20 @@ for key, rawdata in ("versions", versions), ("copyrights", copyrights):
             break

 # Now that we've run through all of both outputs -- are there duplicates?
-if any(pkgs for pkgs in dups.values()):
-    for key, pkgs in dups.items():
+if any(pkgs for pkgs in list(dups.values())):
+    for key, pkgs in list(dups.items()):
         if pkgs:
-            print >>sys.stderr, "Duplicate %s for %s" % (key, ", ".join(pkgs))
+            print("Duplicate %s for %s" % (key, ", ".join(pkgs)), file=sys.stderr)
     sys.exit(1)

-print "%s %s" % (args.channel, args.version)
-print viewer_copyright
+print("%s %s" % (args.channel, args.version))
+print(viewer_copyright)
 version = list(info['versions'].items())
 version.sort()
 for pkg, pkg_version in version:
-    print ': '.join([pkg, pkg_version])
+    print(': '.join([pkg, pkg_version]))
     try:
-        print info['copyrights'][pkg]
+        print(info['copyrights'][pkg])
     except KeyError:
         sys.exit("No copyright for %s" % pkg)
-    print
+    print()
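
The list() wrappers around dups.values() and dups.items() are the conservative 2to3-style transform: Python 3 returns lazy views, which iterate fine here, but behave differently from Python 2's snapshot lists if the dict changes underneath. The difference in one sketch:

# Hedged sketch: Python 3 dict views are live; list() restores the
# frozen-copy semantics Python 2 callers may have relied on.
d = {"versions": []}
view = d.items()             # live view
snapshot = list(d.items())   # frozen copy, the old Python 2 behavior
d["copyrights"] = []
assert ("copyrights", []) in view
assert ("copyrights", []) not in snapshot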
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file setup-path.py
 @brief Get the python library directory in the path, so we don't have
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file template_verifier.py
 @brief Message template compatibility verifier.
@@ -58,14 +58,14 @@ def add_indra_lib_path():
             sys.path.insert(0, dir)
             break
     else:
-        print >>sys.stderr, "This script is not inside a valid installation."
+        print("This script is not inside a valid installation.", file=sys.stderr)
         sys.exit(1)

 add_indra_lib_path()

 import optparse
 import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import hashlib

 from indra.ipc import compatibility
@@ -90,7 +90,7 @@ def getstatusoutput(command):


 def die(msg):
-    print >>sys.stderr, msg
+    print(msg, file=sys.stderr)
     sys.exit(1)

 MESSAGE_TEMPLATE = 'message_template.msg'
@@ -106,7 +106,7 @@ def retry(times, function, *args, **kwargs):
     for i in range(times):
         try:
             return function(*args, **kwargs)
-        except Exception, e:
+        except Exception as e:
             if i == times - 1:
                 raise e # we retried all the times we could
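
Only the except clause changes here; the comma spelling is a SyntaxError on Python 3. For reference, a hypothetical use of this retry() helper, mirroring the retry(3, ...) calls in cache_master() below:

# Hypothetical caller: three attempts, then the last exception propagates.
attempts = []
def flaky_fetch():
    attempts.append(1)
    raise IOError("transient network error")

try:
    retry(3, flaky_fetch)
except IOError as e:
    print("gave up after", len(attempts), "tries:", e)   # gave up after 3 tries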
@@ -138,10 +138,14 @@ def fetch(url):
     if url.startswith('file://'):
         # just open the file directly because urllib is dumb about these things
         file_name = url[len('file://'):]
-        return open(file_name).read()
+        with open(file_name, 'rb') as f:
+            return f.read()
     else:
         # *FIX: this doesn't throw an exception for a 404, and oddly enough the sl.com 404 page actually gets parsed successfully
-        return ''.join(urllib.urlopen(url).readlines())
+        with urllib.request.urlopen(url) as res:
+            body = res.read()
+            if res.status > 299:
+                sys.exit("ERROR: Unable to download %s. HTTP status %d.\n%s" % (url, res.status, body.decode("utf-8")))
+            return body

 def cache_master(master_url):
     """Using the url for the master, updates the local cache, and returns an url to the local cache."""
@@ -153,23 +157,22 @@ def cache_master(master_url):
         and time.time() - os.path.getmtime(master_cache) < MAX_MASTER_AGE):
         return master_cache_url # our cache is fresh
     # new master doesn't exist or isn't fresh
-    print "Refreshing master cache from %s" % master_url
+    print("Refreshing master cache from %s" % master_url)
     def get_and_test_master():
         new_master_contents = fetch(master_url)
-        llmessage.parseTemplateString(new_master_contents)
+        llmessage.parseTemplateString(new_master_contents.decode("utf-8"))
         return new_master_contents
     try:
         new_master_contents = retry(3, get_and_test_master)
-    except IOError, e:
+    except IOError as e:
         # the refresh failed, so we should just soldier on
-        print "WARNING: unable to download new master, probably due to network error. Your message template compatibility may be suspect."
-        print "Cause: %s" % e
+        print("WARNING: unable to download new master, probably due to network error. Your message template compatibility may be suspect.")
+        print("Cause: %s" % e)
         return master_cache_url
     try:
         tmpname = '%s.%d' % (master_cache, os.getpid())
-        mc = open(tmpname, 'wb')
-        mc.write(new_master_contents)
-        mc.close()
+        with open(tmpname, "wb") as mc:
+            mc.write(new_master_contents)
         try:
             os.rename(tmpname, master_cache)
         except OSError:
@@ -180,9 +183,9 @@ def cache_master(master_url):
             # a single day.
             os.unlink(master_cache)
             os.rename(tmpname, master_cache)
-    except IOError, e:
-        print "WARNING: Unable to write master message template to %s, proceeding without cache." % master_cache
-        print "Cause: %s" % e
+    except IOError as e:
+        print("WARNING: Unable to write master message template to %s, proceeding without cache." % master_cache)
+        print("Cause: %s" % e)
         return master_url
     return master_cache_url
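
The unlink-then-rename dance above exists because os.rename() refuses to overwrite an existing target on Windows. Since Python 3.3, os.replace() overwrites atomically on both POSIX and Windows; a hedged sketch of the same cache update using it (an alternative, not what this changeset does):

import os

def write_cache(master_cache, new_master_contents):
    # Write to a sibling temp file, then atomically swap it into place;
    # os.replace() overwrites on Windows too, so no unlink fallback is needed.
    tmpname = '%s.%d' % (master_cache, os.getpid())
    with open(tmpname, 'wb') as mc:
        mc.write(new_master_contents)
    os.replace(tmpname, master_cache)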
@@ -246,16 +249,16 @@ http://wiki.secondlife.com/wiki/Template_verifier.py
     # both current and master supplied in positional params
     if len(args) == 2:
         master_filename, current_filename = args
-        print "master:", master_filename
-        print "current:", current_filename
+        print("master:", master_filename)
+        print("current:", current_filename)
         master_url = 'file://%s' % master_filename
         current_url = 'file://%s' % current_filename
     # only current supplied in positional param
     elif len(args) == 1:
         master_url = None
         current_filename = args[0]
-        print "master:", options.master_url
-        print "current:", current_filename
+        print("master:", options.master_url)
+        print("current:", current_filename)
         current_url = 'file://%s' % current_filename
     # nothing specified, use defaults for everything
     elif len(args) == 0:
@@ -269,8 +272,8 @@ http://wiki.secondlife.com/wiki/Template_verifier.py

     if current_url is None:
         current_filename = local_template_filename()
-        print "master:", options.master_url
-        print "current:", current_filename
+        print("master:", options.master_url)
+        print("current:", current_filename)
         current_url = 'file://%s' % current_filename

     # retrieve the contents of the local template
@@ -281,42 +284,42 @@ http://wiki.secondlife.com/wiki/Template_verifier.py
     sha_url = "%s.sha1" % current_url
     current_sha = fetch(sha_url)
     if hexdigest == current_sha:
-        print "Message template SHA_1 has not changed."
+        print("Message template SHA_1 has not changed.")
         sys.exit(0)

     # and check for syntax
-    current_parsed = llmessage.parseTemplateString(current)
+    current_parsed = llmessage.parseTemplateString(current.decode("utf-8"))

     if options.cache_master:
         # optionally return a url to a locally-cached master so we don't hit the network all the time
         master_url = cache_master(master_url)

     def parse_master_url():
-        master = fetch(master_url)
+        master = fetch(master_url).decode("utf-8")
         return llmessage.parseTemplateString(master)
     try:
         master_parsed = retry(3, parse_master_url)
-    except (IOError, tokenstream.ParseError), e:
+    except (IOError, tokenstream.ParseError) as e:
         if options.mode == 'production':
             raise e
         else:
-            print "WARNING: problems retrieving the master from %s." % master_url
-            print "Syntax-checking the local template ONLY, no compatibility check is being run."
-            print "Cause: %s\n\n" % e
+            print("WARNING: problems retrieving the master from %s." % master_url)
+            print("Syntax-checking the local template ONLY, no compatibility check is being run.")
+            print("Cause: %s\n\n" % e)
             return 0

     acceptable, compat = compare(
         master_parsed, current_parsed, options.mode)

     def explain(header, compat):
-        print header
+        print(header)
         # indent compatibility explanation
-        print '\n\t'.join(compat.explain().split('\n'))
+        print('\n\t'.join(compat.explain().split('\n')))

     if acceptable:
         explain("--- PASS ---", compat)
         if options.force_verification == False:
-            print "Updating sha1 to %s" % hexdigest
+            print("Updating sha1 to %s" % hexdigest)
             sha_filename = "%s.sha1" % current_filename
             sha_file = open(sha_filename, 'w')
             sha_file.write(hexdigest)
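
One spot the hunk leaves in the old style: the .sha1 file is still written through a bare open(). Applying the same with-block treatment the cache_master() hunk adopted would close the handle even if write() raises; a hedged, functionally equivalent sketch using the hunk's own names:

with open(sha_filename, 'w') as sha_file:
    sha_file.write(hexdigest)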