Branch: master
Author: Rider Linden
Date: 2017-01-25 11:13:59 -08:00
Commit: 25cd2ed205
432 changed files with 17905 additions and 10677 deletions


@ -519,3 +519,6 @@ e9d350764dfbf5a46229e627547ef5c1b1eeef00 4.0.2-release
4070611edd95eb3a683d1cd97c4c07fe67793812 4.0.6-release
33981d8130f031597b4c7f4c981b18359afb61a0 4.0.7-release
45eaee56883df7a439ed3300c44d3126f7e3a41e 4.0.8-release
b280a1c797a3891e68dbc237e73de9cf19f426e9 4.1.1-release
bfbba2244320dc2ae47758cd7edd8fa3b67dc756 4.1.2-release
b41e1e7c7876f7656c505f552b5888b4e478f92b 5.0.0-release


@ -1,6 +1,6 @@
Second Life Viewer
====================
This project manages the source code for the
[Second Life](https://www.secondlife.com) Viewer.

autobuild.xml Executable file → Normal file

@ -1484,11 +1484,11 @@
<key>archive</key>
<map>
<key>hash</key>
<string>29a1f64df46094eda0d681821a98d17e</string>
<string>2845033912eb947a1401847ece1469ce</string>
<key>hash_algorithm</key>
<string>md5</string>
<key>url</key>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/311349/arch/Darwin/installer/llceflib-1.5.3.311349-darwin-311349.tar.bz2</string>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/321153/arch/Darwin/installer/llceflib-1.5.3.321153-darwin-321153.tar.bz2</string>
</map>
<key>name</key>
<string>darwin</string>
@ -1498,18 +1498,18 @@
<key>archive</key>
<map>
<key>hash</key>
<string>827b7c339a2cd401d9d23f9ee02cb83f</string>
<string>1156121b4ccbb4aa29bc01f15c589f98</string>
<key>hash_algorithm</key>
<string>md5</string>
<key>url</key>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/311349/arch/CYGWIN/installer/llceflib-1.5.3.311349-windows-311349.tar.bz2</string>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/321153/arch/CYGWIN/installer/llceflib-1.5.3.321153-windows-321153.tar.bz2</string>
</map>
<key>name</key>
<string>windows</string>
</map>
</map>
<key>version</key>
<string>1.5.3.311349</string>
<string>1.5.3.317959</string>
</map>
<key>llphysicsextensions_source</key>
<map>
@ -2135,6 +2135,46 @@
<key>version</key>
<string>0.8.0.1</string>
</map>
<key>vlc-bin</key>
<map>
<key>copyright</key>
<string>Copyright (C) 1998-2016 VLC authors and VideoLAN</string>
<key>license</key>
<string>GPL2</string>
<key>license_file</key>
<string>LICENSES/vlc.txt</string>
<key>name</key>
<string>vlc-bin</string>
<key>platforms</key>
<map>
<key>linux</key>
<map>
<key>archive</key>
<map>
<key>hash</key>
<string>2f410640df3f9812d1abff02a414cfa8</string>
<key>url</key>
<string>https://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-vlc-bin/rev/315283/arch/Linux/vlc_bin-2.2.3-linux-201606011750-r10.tar.bz2</string>
</map>
<key>name</key>
<string>linux</string>
</map>
<key>windows</key>
<map>
<key>archive</key>
<map>
<key>hash</key>
<string>04cff37070a5f65f3652b4ddcec7183f</string>
<key>url</key>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-vlc-bin/rev/317935/arch/CYGWIN/installer/vlc_bin-2.2.4.317935-windows-317935.tar.bz2</string>
</map>
<key>name</key>
<string>windows</string>
</map>
</map>
<key>version</key>
<string>2.2.4.317935</string>
</map>
<key>xmlrpc-epi</key>
<map>
<key>copyright</key>
@ -2617,14 +2657,6 @@
<key>arguments</key>
<array>
<string>..\indra</string>
<string>&amp;&amp;</string>
<string>..\indra\tools\vstool\VSTool.exe</string>
<string>--solution</string>
<string>SecondLife.sln</string>
<string>--config</string>
<string>RelWithDebInfo</string>
<string>--startup</string>
<string>secondlife-bin</string>
</array>
<key>options</key>
<array>
@ -2663,20 +2695,11 @@
<key>arguments</key>
<array>
<string>..\indra</string>
<string>&amp;&amp;</string>
<string>..\indra\tools\vstool\VSTool.exe</string>
<string>--solution</string>
<string>SecondLife.sln</string>
<string>--config</string>
<string>RelWithDebInfo</string>
<string>--startup</string>
<string>secondlife-bin</string>
</array>
<key>options</key>
<array>
<string>-G</string>
<string>"Visual Studio 12"</string>
<string>-DUNATTENDED:BOOL=ON</string>
<string>-DINSTALL_PROPRIETARY=FALSE</string>
<string>-DUSE_KDU=FALSE</string>
</array>
@ -2705,14 +2728,6 @@
<key>arguments</key>
<array>
<string>..\indra</string>
<string>&amp;&amp;</string>
<string>..\indra\tools\vstool\VSTool.exe</string>
<string>--solution</string>
<string>SecondLife.sln</string>
<string>--config</string>
<string>Release</string>
<string>--startup</string>
<string>secondlife-bin</string>
</array>
<key>options</key>
<array>
@ -2749,20 +2764,11 @@
<key>arguments</key>
<array>
<string>..\indra</string>
<string>&amp;&amp;</string>
<string>..\indra\tools\vstool\VSTool.exe</string>
<string>--solution</string>
<string>SecondLife.sln</string>
<string>--config</string>
<string>Release</string>
<string>--startup</string>
<string>secondlife-bin</string>
</array>
<key>options</key>
<array>
<string>-G</string>
<string>"Visual Studio 12"</string>
<string>-DUNATTENDED:BOOL=ON</string>
<string>-DINSTALL_PROPRIETARY=FALSE</string>
<string>-DUSE_KDU=FALSE</string>
</array>


@ -97,6 +97,7 @@ pre_build()
"$autobuild" configure --quiet -c $variant -- \
-DPACKAGE:BOOL=ON \
-DUNATTENDED:BOOL=ON \
-DRELEASE_CRASH_REPORTING:BOOL=ON \
-DVIEWER_CHANNEL:STRING="\"$viewer_channel\"" \
-DGRID:STRING="\"$viewer_grid\"" \


@ -12,6 +12,7 @@ Able Whitman
Adam Marker
VWR-2755
Adeon Writer
MAINT-1211
Aeonix Aeon
Agathos Frascati
CT-246
@ -190,9 +191,19 @@ Ansariel Hiller
STORM-2094
MAINT-5756
MAINT-4677
MAINT-6300
MAINT-6397
MAINT-6432
MAINT-6513
MAINT-6514
MAINT-6552
STORM-2133
MAINT-6511
MAINT-6612
MAINT-6637
MAINT-6636
MAINT-6744
MAINT-6752
Aralara Rajal
Arare Chantilly
CHUIBUG-191
@ -307,6 +318,7 @@ Catherine Pfeffer
VWR-1282
VWR-8624
VWR-10854
Cathy Foil
Cayu Cluny
Celierra Darling
VWR-1274
@ -790,6 +802,7 @@ Kitty Barnett
MAINT-6152
MAINT-6153
MAINT-6154
MAINT-6568
Kolor Fall
Komiko Okamoto
Korvel Noh
@ -859,6 +872,7 @@ MartinRJ Fayray
STORM-1845
STORM-1911
STORM-1934
Matrice Laville
Matthew Anthony
Matthew Dowd
VWR-1344
@ -1021,6 +1035,7 @@ Nicky Dasmijn
OPEN-187
STORM-2010
STORM-2082
MAINT-6665
Nicky Perian
OPEN-1
STORM-1087
@ -1232,6 +1247,7 @@ Shnurui Troughton
Shyotl Kuhr
MAINT-1138
MAINT-2334
MAINT-6913
Siana Gearz
STORM-960
STORM-1088
@ -1265,6 +1281,7 @@ Sovereign Engineer
MAINT-6107
STORM-2107
MAINT-6218
MAINT-6913
SpacedOut Frye
VWR-34
VWR-45


@ -83,8 +83,8 @@ if (WINDOWS)
add_definitions(/WX)
endif (NOT VS_DISABLE_FATAL_WARNINGS)
# configure win32 API for windows XP+ compatibility
set(WINVER "0x0501" CACHE STRING "Win32 API Target version (see http://msdn.microsoft.com/en-us/library/aa383745%28v=VS.85%29.aspx)")
# configure Win32 API for Windows Vista+ compatibility
set(WINVER "0x0600" CACHE STRING "Win32 API Target version (see http://msdn.microsoft.com/en-us/library/aa383745%28v=VS.85%29.aspx)")
add_definitions("/DWINVER=${WINVER}" "/D_WIN32_WINNT=${WINVER}")
endif (WINDOWS)


@ -24,7 +24,6 @@ set(cmake_SOURCE_FILES
DirectX.cmake
DragDrop.cmake
EXPAT.cmake
## ExamplePlugin.cmake
FindAPR.cmake
FindAutobuild.cmake
FindBerkeleyDB.cmake
@ -100,6 +99,7 @@ set(cmake_SOURCE_FILES
Variables.cmake
ViewerMiscLibs.cmake
VisualLeakDetector.cmake
LibVLCPlugin.cmake
XmlRpcEpi.cmake
ZLIB.cmake
)


@ -18,7 +18,7 @@ else (USESYSTEMLIBS)
use_prebuilt_binary(SDL)
set (SDL_FOUND TRUE)
set (SDL_INCLUDE_DIR ${LIBS_PREBUILT_DIR}/i686-linux)
set (SDL_LIBRARY SDL directfb fusion direct)
set (SDL_LIBRARY SDL directfb fusion direct X11)
endif (LINUX)
endif (USESYSTEMLIBS)


@ -0,0 +1,27 @@
# -*- cmake -*-
include(Linking)
include(Prebuilt)
if (USESYSTEMLIBS)
set(LIBVLCPLUGIN OFF CACHE BOOL
"LIBVLCPLUGIN support for the llplugin/llmedia test apps.")
else (USESYSTEMLIBS)
use_prebuilt_binary(vlc-bin)
set(LIBVLCPLUGIN ON CACHE BOOL
"LIBVLCPLUGIN support for the llplugin/llmedia test apps.")
set(VLC_INCLUDE_DIR ${LIBS_PREBUILT_DIR}/include/vlc)
endif (USESYSTEMLIBS)
if (WINDOWS)
set(VLC_PLUGIN_LIBRARIES
libvlc.lib
libvlccore.lib
)
elseif (DARWIN)
elseif (LINUX)
# Specify a full path to make sure we get a static link
set(VLC_PLUGIN_LIBRARIES
${LIBS_PREBUILT_DIR}/lib/libvlc.a
${LIBS_PREBUILT_DIR}/lib/libvlccore.a
)
endif (WINDOWS)


@ -1,3 +1,4 @@
2014-02-25 10:34


@ -1,27 +0,0 @@
"""\
@file __init__.py
@brief Initialization file for the indra.base module.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""


@ -1,73 +0,0 @@
#!/usr/bin/python
##
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
from indra.base import llsd, lluuid
from datetime import datetime
import cllsd
import time, sys
class myint(int):
pass
values = (
'&<>',
u'\u81acj',
llsd.uri('http://foo<'),
lluuid.UUID(),
llsd.LLSD(['thing']),
1,
myint(31337),
sys.maxint + 10,
llsd.binary('foo'),
[],
{},
{u'f&\u1212': 3},
3.1,
True,
None,
datetime.fromtimestamp(time.time()),
)
def valuator(values):
for v in values:
yield v
longvalues = () # (values, list(values), iter(values), valuator(values))
for v in values + longvalues:
print '%r => %r' % (v, cllsd.llsd_to_xml(v))
a = [[{'a':3}]] * 1000000
s = time.time()
print hash(cllsd.llsd_to_xml(a))
e = time.time()
t1 = e - s
print t1
s = time.time()
print hash(llsd.LLSDXMLFormatter()._format(a))
e = time.time()
t2 = e - s
print t2
print 'Speedup:', t2 / t1


@ -1,266 +0,0 @@
"""\
@file config.py
@brief Utility module for parsing and accessing the indra.xml config file.
$LicenseInfo:firstyear=2006&license=mit$
Copyright (c) 2006-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import copy
import errno
import os
import traceback
import time
import types
from os.path import dirname, getmtime, join, realpath
from indra.base import llsd
_g_config = None
class IndraConfig(object):
"""
IndraConfig loads an 'indra' xml configuration file into memory.
The in-memory representation can be updated to overwrite values
or add new values. The xml configuration file is treated as a
live file: it is periodically checked for changes and reloaded.
If a value has been overwritten via the update or set method, the
loaded values from the file are ignored (the values from the
update/set methods take precedence).
def __init__(self, indra_config_file):
self._indra_config_file = indra_config_file
self._reload_check_interval = 30 # seconds
self._last_check_time = 0
self._last_mod_time = 0
self._config_overrides = {}
self._config_file_dict = {}
self._combined_dict = {}
self._load()
def _load(self):
# if you initialize the IndraConfig with None, no attempt
# is made to load any files
if self._indra_config_file is None:
return
config_file = open(self._indra_config_file)
self._config_file_dict = llsd.parse(config_file.read())
self._combine_dictionaries()
config_file.close()
self._last_mod_time = self._get_last_modified_time()
self._last_check_time = time.time() # now
def _get_last_modified_time(self):
"""
Returns the mtime (last modified time) of the config file,
if such exists.
"""
if self._indra_config_file is not None:
return os.path.getmtime(self._indra_config_file)
return 0
def _combine_dictionaries(self):
self._combined_dict = {}
self._combined_dict.update(self._config_file_dict)
self._combined_dict.update(self._config_overrides)
def _reload_if_necessary(self):
now = time.time()
if (now - self._last_check_time) > self._reload_check_interval:
self._last_check_time = now
try:
modtime = self._get_last_modified_time()
if modtime > self._last_mod_time:
self._load()
except OSError, e:
if e.errno == errno.ENOENT: # file not found
# someone messed with our internal state
# or removed the file
print 'WARNING: Configuration file has been removed ' + (self._indra_config_file)
print 'Disabling reloading of configuration file.'
traceback.print_exc()
self._indra_config_file = None
self._last_check_time = 0
self._last_mod_time = 0
else:
raise # pass the exception along to the caller
def __getitem__(self, key):
self._reload_if_necessary()
return self._combined_dict[key]
def get(self, key, default = None):
try:
return self.__getitem__(key)
except KeyError:
return default
def __setitem__(self, key, value):
"""
Sets the value of the config setting of key to be newval
Once any key/value pair is changed via the set method,
that key/value pair will remain set with that value until
change via the update or set method
"""
self._config_overrides[key] = value
self._combine_dictionaries()
def set(self, key, newval):
return self.__setitem__(key, newval)
def update(self, new_conf):
"""
Load an XML file and apply its map as overrides or additions
to the existing config. Update can be a file or a dict.
Once any key/value pair is changed via the update method,
that key/value pair will remain set with that value until
changed via the update or set method
"""
if isinstance(new_conf, dict):
overrides = new_conf
else:
# assuming that it is a filename
config_file = open(new_conf)
overrides = llsd.parse(config_file.read())
config_file.close()
self._config_overrides.update(overrides)
self._combine_dictionaries()
def as_dict(self):
"""
Returns immutable copy of the IndraConfig as a dictionary
"""
return copy.deepcopy(self._combined_dict)
def load(config_xml_file = None):
global _g_config
load_default_files = config_xml_file is None
if load_default_files:
## going from:
## "/opt/linden/indra/lib/python/indra/base/config.py"
## to:
## "/opt/linden/etc/indra.xml"
config_xml_file = realpath(
dirname(realpath(__file__)) + "../../../../../../etc/indra.xml")
try:
_g_config = IndraConfig(config_xml_file)
except IOError:
# Failure to load passed in file
# or indra.xml default file
if load_default_files:
try:
config_xml_file = realpath(
dirname(realpath(__file__)) + "../../../../../../etc/globals.xml")
_g_config = IndraConfig(config_xml_file)
return
except IOError:
# Failure to load globals.xml
# fall to code below
pass
# Either failed to load passed in file
# or failed to load all default files
_g_config = IndraConfig(None)
def dump(indra_xml_file, indra_cfg = None, update_in_mem=False):
'''
Dump config contents into a file
Roughly the reverse of load.
Optionally takes a new config to dump.
Does NOT update global config unless requested.
'''
global _g_config
if not indra_cfg:
if _g_config is None:
return
indra_cfg = _g_config.as_dict()
if not indra_cfg:
return
config_file = open(indra_xml_file, 'w')
_config_xml = llsd.format_xml(indra_cfg)
config_file.write(_config_xml)
config_file.close()
if update_in_mem:
update(indra_cfg)
def update(new_conf):
global _g_config
if _g_config is None:
# To keep with how this function behaved
# previously, a call to update
# before the global is defined
makes a new global config which does not
# load data from a file.
_g_config = IndraConfig(None)
return _g_config.update(new_conf)
def get(key, default = None):
global _g_config
if _g_config is None:
load()
return _g_config.get(key, default)
def set(key, newval):
"""
Sets the value of the config setting of key to be newval
Once any key/value pair is changed via the set method,
that key/value pair will remain set with that value until
changed via the update or set method or program termination
"""
global _g_config
if _g_config is None:
_g_config = IndraConfig(None)
_g_config.set(key, newval)
def get_config():
global _g_config
return _g_config
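For reference, a minimal usage sketch of the indra.base.config module removed above; the file paths and the 'grid-name' key are placeholders rather than values taken from this commit:

from indra.base import config

# Load a specific config file (calling load() with no argument falls back
# to the default etc/indra.xml location resolved above).
config.load('/local/linden/etc/indra.xml')   # placeholder path

# Read a value, supplying a default for missing keys.
grid_name = config.get('grid-name', 'unknown')   # placeholder key

# Override a value in memory; set/update overrides win over reloaded file values.
config.set('grid-name', 'aditi')

# Merge further overrides from a dict (a filename works too).
config.update({'services-base-url': 'http://example.invalid'})

# Write the combined configuration back out as LLSD XML.
config.dump('/tmp/indra-dump.xml')   # placeholder path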

File diff suppressed because it is too large


@ -1,319 +0,0 @@
"""\
@file lluuid.py
@brief UUID parser/generator.
$LicenseInfo:firstyear=2004&license=mit$
Copyright (c) 2004-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import random, socket, string, time, re
import uuid
try:
# Python 2.6
from hashlib import md5
except ImportError:
# Python 2.5 and earlier
from md5 import new as md5
def _int2binstr(i,l):
s=''
for a in range(l):
s=chr(i&0xFF)+s
i>>=8
return s
def _binstr2int(s):
i = long(0)
for c in s:
i = (i<<8) + ord(c)
return i
class UUID(object):
"""
A class which represents a 16 byte integer. Stored as a 16 byte 8
bit character string.
The string version is to be of the form:
AAAAAAAA-AAAA-BBBB-BBBB-BBBBBBCCCCCC (a 128-bit number in hex)
where A=network address, B=timestamp, C=random.
"""
NULL_STR = "00000000-0000-0000-0000-000000000000"
# the UUIDREGEX_STRING is helpful for parsing UUID's in text
hex_wildcard = r"[0-9a-fA-F]"
word = hex_wildcard + r"{4,4}-"
long_word = hex_wildcard + r"{8,8}-"
very_long_word = hex_wildcard + r"{12,12}"
UUID_REGEX_STRING = long_word + word + word + word + very_long_word
uuid_regex = re.compile(UUID_REGEX_STRING)
rand = random.Random()
ip = ''
try:
ip = socket.gethostbyname(socket.gethostname())
except(socket.gaierror, socket.error):
# no ip address, so just default to somewhere in 10.x.x.x
ip = '10'
for i in range(3):
ip += '.' + str(rand.randrange(1,254))
hexip = ''.join(["%04x" % long(i) for i in ip.split('.')])
lastid = ''
def __init__(self, possible_uuid=None):
"""
Initialize to first valid UUID in argument (if a string),
or to null UUID if none found or argument is not supplied.
If the argument is a UUID, the constructed object will be a copy of it.
"""
self._bits = "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
if possible_uuid is None:
return
if isinstance(possible_uuid, type(self)):
self.set(possible_uuid)
return
uuid_match = UUID.uuid_regex.search(possible_uuid)
if uuid_match:
uuid_string = uuid_match.group()
s = string.replace(uuid_string, '-', '')
self._bits = _int2binstr(string.atol(s[:8],16),4) + \
_int2binstr(string.atol(s[8:16],16),4) + \
_int2binstr(string.atol(s[16:24],16),4) + \
_int2binstr(string.atol(s[24:],16),4)
def __len__(self):
"""
Used by the len() builtin.
"""
return 36
def __nonzero__(self):
return self._bits != "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
def __str__(self):
uuid_string = self.toString()
return uuid_string
__repr__ = __str__
def __getitem__(self, index):
return str(self)[index]
def __eq__(self, other):
if isinstance(other, (str, unicode)):
return other == str(self)
return self._bits == getattr(other, '_bits', '')
def __ne__(self, other):
return not self.__eq__(other)
def __le__(self, other):
return self._bits <= other._bits
def __ge__(self, other):
return self._bits >= other._bits
def __lt__(self, other):
return self._bits < other._bits
def __gt__(self, other):
return self._bits > other._bits
def __hash__(self):
return hash(self._bits)
def set(self, uuid):
self._bits = uuid._bits
def setFromString(self, uuid_string):
"""
Given a string version of a uuid, set self bits
appropriately. Returns self.
"""
s = string.replace(uuid_string, '-', '')
self._bits = _int2binstr(string.atol(s[:8],16),4) + \
_int2binstr(string.atol(s[8:16],16),4) + \
_int2binstr(string.atol(s[16:24],16),4) + \
_int2binstr(string.atol(s[24:],16),4)
return self
def setFromMemoryDump(self, gdb_string):
"""
We expect to get gdb_string as four hex units. eg:
0x147d54db 0xc34b3f1b 0x714f989b 0x0a892fd2
Which will be translated to:
db547d14-1b3f4bc3-9b984f71-d22f890a
Returns self.
"""
s = string.replace(gdb_string, '0x', '')
s = string.replace(s, ' ', '')
t = ''
for i in range(8,40,8):
for j in range(0,8,2):
t = t + s[i-j-2:i-j]
self.setFromString(t)
def toString(self):
"""
Return as a string matching the LL standard
AAAAAAAA-AAAA-BBBB-BBBB-BBBBBBCCCCCC (a 128-bit number in hex)
where A=network address, B=timestamp, C=random.
"""
return uuid_bits_to_string(self._bits)
def getAsString(self):
"""
Return a different string representation of the form
AAAAAAAA-AAAABBBB-BBBBBBBB-BBCCCCCC (a 128-bit number in hex)
where A=network address, B=timestamp, C=random.
"""
i1 = _binstr2int(self._bits[0:4])
i2 = _binstr2int(self._bits[4:8])
i3 = _binstr2int(self._bits[8:12])
i4 = _binstr2int(self._bits[12:16])
return '%08lx-%08lx-%08lx-%08lx' % (i1,i2,i3,i4)
def generate(self):
"""
Generate a new uuid. This algorithm is slightly different
from the C++ implementation, for portability reasons.
Returns self.
"""
m = md5()
m.update(uuid.uuid1().bytes)
self._bits = m.digest()
return self
def isNull(self):
"""
Returns 1 if the uuid is null - ie, equal to default uuid.
"""
return (self._bits == "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0")
def xor(self, rhs):
"""
xors self with rhs.
"""
v1 = _binstr2int(self._bits[0:4]) ^ _binstr2int(rhs._bits[0:4])
v2 = _binstr2int(self._bits[4:8]) ^ _binstr2int(rhs._bits[4:8])
v3 = _binstr2int(self._bits[8:12]) ^ _binstr2int(rhs._bits[8:12])
v4 = _binstr2int(self._bits[12:16]) ^ _binstr2int(rhs._bits[12:16])
self._bits = _int2binstr(v1,4) + \
_int2binstr(v2,4) + \
_int2binstr(v3,4) + \
_int2binstr(v4,4)
# module-level null constant
NULL = UUID()
def printTranslatedMemory(four_hex_uints):
"""
We expect to get the string as four hex units. eg:
0x147d54db 0xc34b3f1b 0x714f989b 0x0a892fd2
Which will be translated to:
db547d14-1b3f4bc3-9b984f71-d22f890a
"""
uuid = UUID()
uuid.setFromMemoryDump(four_hex_uints)
print uuid.toString()
def isUUID(id_str):
"""
This function returns:
- 1 if the string passed is a UUID
- 0 if the string passed is not a UUID
- None if neither of the checks below is satisfied
"""
if not id_str or len(id_str) < 5 or len(id_str) > 36:
return 0
if isinstance(id_str, UUID) or UUID.uuid_regex.match(id_str):
return 1
return None
def isPossiblyID(id_str):
"""
This function returns 1 if the string passed has some uuid-like
characteristics. Otherwise returns 0.
"""
is_uuid = isUUID(id_str)
if is_uuid is not None:
return is_uuid
# build a string which matches every character.
hex_wildcard = r"[0-9a-fA-F]"
chars = len(id_str)
next = min(chars, 8)
matcher = hex_wildcard+"{"+str(next)+","+str(next)+"}"
chars = chars - next
if chars > 0:
matcher = matcher + "-"
chars = chars - 1
for block in range(3):
next = max(min(chars, 4), 0)
if next:
matcher = matcher + hex_wildcard+"{"+str(next)+","+str(next)+"}"
chars = chars - next
if chars > 0:
matcher = matcher + "-"
chars = chars - 1
if chars > 0:
next = min(chars, 12)
matcher = matcher + hex_wildcard+"{"+str(next)+","+str(next)+"}"
#print matcher
uuid_matcher = re.compile(matcher)
if uuid_matcher.match(id_str):
return 1
return 0
def uuid_bits_to_string(bits):
i1 = _binstr2int(bits[0:4])
i2 = _binstr2int(bits[4:6])
i3 = _binstr2int(bits[6:8])
i4 = _binstr2int(bits[8:10])
i5 = _binstr2int(bits[10:12])
i6 = _binstr2int(bits[12:16])
return '%08lx-%04lx-%04lx-%04lx-%04lx%08lx' % (i1,i2,i3,i4,i5,i6)
def uuid_bits_to_uuid(bits):
return UUID(uuid_bits_to_string(bits))
try:
from mulib import stacked
stacked.NoProducer() # just to exercise stacked
except:
#print "Couldn't import mulib.stacked, not registering UUID converter"
pass
else:
def convertUUID(uuid, req):
req.write(str(uuid))
stacked.add_producer(UUID, convertUUID, "*/*")
stacked.add_producer(UUID, convertUUID, "text/html")
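A short usage sketch of the indra.base.lluuid module removed above; the UUID literal is an arbitrary example value:

from indra.base import lluuid

# Generate a fresh UUID (an md5 of uuid.uuid1() bytes, per generate() above).
fresh = lluuid.UUID().generate()
print(fresh.toString())

# The constructor picks out the first UUID found in a string,
# or leaves the null UUID if none is found.
parsed = lluuid.UUID('agent id: 67153d5b-3659-afb4-8510-adda2c034649')
print(parsed.isNull())                  # False
print(parsed == lluuid.NULL)            # False

# Validation helper: 1 for a well-formed UUID string.
print(lluuid.isUUID('67153d5b-3659-afb4-8510-adda2c034649'))   # 1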


@ -1,121 +0,0 @@
"""\
@file metrics.py
@author Phoenix
@date 2007-11-27
@brief simple interface for logging metrics
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys
try:
import syslog
except ImportError:
# Windows
import sys
class syslog(object):
# wrap to a lame syslog for windows
_logfp = sys.stderr
def syslog(msg):
_logfp.write(msg)
if not msg.endswith('\n'):
_logfp.write('\n')
syslog = staticmethod(syslog)
from indra.base.llsd import format_notation
def record_metrics(table, stats):
"Write a standard metrics log"
_log("LLMETRICS", table, stats)
def record_event(table, data):
"Write a standard logmessage log"
_log("LLLOGMESSAGE", table, data)
def set_destination(dest):
"""Set the destination of metrics logs for this process.
If you do not call this function prior to calling a logging
method, that function will open sys.stdout as a destination.
Attempts to set dest to None will throw a RuntimeError.
@param dest a file-like object which will be the destination for logs."""
if dest is None:
raise RuntimeError("Attempt to unset metrics destination.")
global _destination
_destination = dest
def destination():
"""Get the destination of the metrics logs for this process.
Returns None if no destination is set"""
global _destination
return _destination
class SysLogger(object):
"A file-like object which writes to syslog."
def __init__(self, ident='indra', logopt = None, facility = None):
try:
if logopt is None:
logopt = syslog.LOG_CONS | syslog.LOG_PID
if facility is None:
facility = syslog.LOG_LOCAL0
syslog.openlog(ident, logopt, facility)
import atexit
atexit.register(syslog.closelog)
except AttributeError:
# No syslog module on Windows
pass
def write(str):
syslog.syslog(str)
write = staticmethod(write)
def flush():
pass
flush = staticmethod(flush)
#
# internal API
#
_sequence_id = 0
_destination = None
def _next_id():
global _sequence_id
next = _sequence_id
_sequence_id += 1
return next
def _dest():
global _destination
if _destination is None:
# this default behavior is documented in the metrics functions above.
_destination = sys.stdout
return _destination
def _log(header, table, data):
log_line = "%s (%d) %s %s" \
% (header, _next_id(), table, format_notation(data))
dest = _dest()
dest.write(log_line)
dest.flush()
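A minimal sketch of the indra.base.metrics interface removed above; the table names and payload dicts are illustrative only:

from indra.base import metrics

# The default destination is sys.stdout; route output to syslog
# (or any file-like object) instead.
metrics.set_destination(metrics.SysLogger())

# Emit an LLMETRICS record and an LLLOGMESSAGE record in LLSD notation.
metrics.record_metrics('frame_stats', {'fps': 42, 'agents': 7})
metrics.record_event('login', {'result': 'success'})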


@ -1,30 +0,0 @@
#!/usr/bin/python
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
import warnings
warnings.warn("indra.ipc.httputil has been deprecated; use eventlet.httpc instead", DeprecationWarning, 2)
from eventlet.httpc import *
makeConnection = make_connection
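The shim removed above only re-exports eventlet.httpc, so importing it was enough to trigger the warning; a tiny illustrative snippet:

import warnings
warnings.simplefilter('always', DeprecationWarning)

from indra.ipc import httputil   # emits the DeprecationWarning declared above
# httputil.makeConnection is simply an alias for eventlet.httpc.make_connection.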


@ -1,100 +0,0 @@
"""\
@file llsdhttp.py
@brief Functions to ease moving llsd over http
$LicenseInfo:firstyear=2006&license=mit$
Copyright (c) 2006-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import os.path
import os
import urlparse
from indra.base import llsd
from eventlet import httpc
suite = httpc.HttpSuite(llsd.format_xml, llsd.parse, 'application/llsd+xml')
delete = suite.delete
delete_ = suite.delete_
get = suite.get
get_ = suite.get_
head = suite.head
head_ = suite.head_
post = suite.post
post_ = suite.post_
put = suite.put
put_ = suite.put_
request = suite.request
request_ = suite.request_
# import every httpc error exception into our namespace for convenience
for x in httpc.status_to_error_map.itervalues():
globals()[x.__name__] = x
ConnectionError = httpc.ConnectionError
Retriable = httpc.Retriable
for x in (httpc.ConnectionError,):
globals()[x.__name__] = x
def postFile(url, filename):
f = open(filename)
body = f.read()
f.close()
llsd_body = llsd.parse(body)
return post_(url, llsd_body)
# deprecated in favor of get_
def getStatus(url, use_proxy=False):
status, _headers, _body = get_(url, use_proxy=use_proxy)
return status
# deprecated in favor of put_
def putStatus(url, data):
status, _headers, _body = put_(url, data)
return status
# deprecated in favor of delete_
def deleteStatus(url):
status, _headers, _body = delete_(url)
return status
# deprecated in favor of post_
def postStatus(url, data):
status, _headers, _body = post_(url, data)
return status
def postFileStatus(url, filename):
status, _headers, body = postFile(url, filename)
return status, body
def getFromSimulator(path, use_proxy=False):
return get('http://' + simulatorHostAndPort + path, use_proxy=use_proxy)
def postToSimulator(path, data=None):
return post('http://' + simulatorHostAndPort + path, data)
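For context, a small usage sketch of the indra.ipc.llsdhttp wrapper removed above; the URLs and file name are placeholders:

from indra.ipc import llsdhttp

# GET an LLSD resource; the suite parses the response body with llsd.parse.
caps = llsdhttp.get('http://example.invalid/agent/caps')

# POST a Python value; it is serialized as application/llsd+xml on the wire.
llsdhttp.post('http://example.invalid/agent/caps', {'key': 'value'})

# The underscore variants also return status and headers.
status, headers, body = llsdhttp.get_('http://example.invalid/agent/caps')

# Convenience helper: parse a local LLSD file and POST its contents.
status, body = llsdhttp.postFileStatus('http://example.invalid/upload', 'payload.xml')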


@ -1,81 +0,0 @@
"""\
@file mysql_pool.py
@brief Thin wrapper around eventlet.db_pool that chooses MySQLdb and Tpool.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import MySQLdb
from eventlet import db_pool
class DatabaseConnector(db_pool.DatabaseConnector):
def __init__(self, credentials, *args, **kwargs):
super(DatabaseConnector, self).__init__(MySQLdb, credentials,
conn_pool=db_pool.ConnectionPool,
*args, **kwargs)
# get is extended relative to eventlet.db_pool to accept a port argument
def get(self, host, dbname, port=3306):
key = (host, dbname, port)
if key not in self._databases:
new_kwargs = self._kwargs.copy()
new_kwargs['db'] = dbname
new_kwargs['host'] = host
new_kwargs['port'] = port
new_kwargs.update(self.credentials_for(host))
dbpool = ConnectionPool(*self._args, **new_kwargs)
self._databases[key] = dbpool
return self._databases[key]
class ConnectionPool(db_pool.TpooledConnectionPool):
"""A pool which gives out saranwrapped MySQLdb connections from a pool
"""
def __init__(self, *args, **kwargs):
super(ConnectionPool, self).__init__(MySQLdb, *args, **kwargs)
def get(self):
conn = super(ConnectionPool, self).get()
# annotate the connection object with the details on the
# connection; this is used elsewhere to check that you haven't
# suddenly changed databases in midstream while making a
# series of queries on a connection.
arg_names = ['host','user','passwd','db','port','unix_socket','conv','connect_timeout',
'compress', 'named_pipe', 'init_command', 'read_default_file', 'read_default_group',
'cursorclass', 'use_unicode', 'charset', 'sql_mode', 'client_flag', 'ssl',
'local_infile']
# you could have constructed this connectionpool with a mix of
# keyword and non-keyword arguments, but we want to annotate
# the connection object with a dict so it's easy to check
# against so here we are converting the list of non-keyword
# arguments (in self._args) into a dict of keyword arguments,
# and merging that with the actual keyword arguments
# (self._kwargs). The arg_names variable lists the
# constructor arguments for MySQLdb Connection objects.
converted_kwargs = dict([ (arg_names[i], arg) for i, arg in enumerate(self._args) ])
converted_kwargs.update(self._kwargs)
conn.connection_parameters = converted_kwargs
return conn
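A brief sketch of how the indra.ipc.mysql_pool module removed above was used; the host, database name, and credentials are placeholders, and the exact credentials structure is whatever eventlet.db_pool expects:

from indra.ipc import mysql_pool

# Credentials are handed straight to eventlet.db_pool; the dict shape here is only illustrative.
connector = mysql_pool.DatabaseConnector(
    {'db.example.invalid': {'user': 'indra', 'passwd': 'secret'}})

# Pools are created lazily and cached per (host, dbname, port).
pool = connector.get('db.example.invalid', 'agents', port=3306)

conn = pool.get()                      # a tpool-wrapped MySQLdb connection
print(conn.connection_parameters)      # annotated with the connect() arguments
pool.put(conn)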


@ -1,165 +0,0 @@
"""\
@file russ.py
@brief Recursive URL Substitution Syntax helpers
@author Phoenix
Many details on how this should work are available on the wiki:
https://wiki.secondlife.com/wiki/Recursive_URL_Substitution_Syntax
Adding features to this should be reflected in that page in the
implementations section.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import urllib
from indra.ipc import llsdhttp
class UnbalancedBraces(Exception):
pass
class UnknownDirective(Exception):
pass
class BadDirective(Exception):
pass
def format_value_for_path(value):
if type(value) in [list, tuple]:
# *NOTE: treat lists as unquoted path components so that the quoting
# doesn't get out-of-hand. This is a workaround for the fact that
# russ always quotes, even if the data it's given is already quoted,
# and it's not safe to simply unquote a path directly, so if we want
# russ to substitute urls parts inside other url parts we always
# have to do so via lists of unquoted path components.
return '/'.join([urllib.quote(str(item)) for item in value])
else:
return urllib.quote(str(value))
def format(format_str, context):
"""@brief Format format string according to rules for RUSS.
@see https://osiris.lindenlab.com/mediawiki/index.php/Recursive_URL_Substitution_Syntax
@param format_str The input string to format.
@param context A map used for string substitutions.
@return Returns the formatted string. If no match, the braces remain intact.
"""
while True:
#print "format_str:", format_str
all_matches = _find_sub_matches(format_str)
if not all_matches:
break
substitutions = 0
while True:
matches = all_matches.pop()
# we work from right to left to make sure we do not
# invalidate positions earlier in format_str
matches.reverse()
for pos in matches:
# Use index since _find_sub_matches should have raised
# an exception, and failure to find now is an exception.
end = format_str.index('}', pos)
#print "directive:", format_str[pos+1:pos+5]
if format_str[pos + 1] == '$':
value = context[format_str[pos + 2:end]]
if value is not None:
value = format_value_for_path(value)
elif format_str[pos + 1] == '%':
value = _build_query_string(
context.get(format_str[pos + 2:end]))
elif format_str[pos+1:pos+5] == 'http' or format_str[pos+1:pos+5] == 'file':
value = _fetch_url_directive(format_str[pos + 1:end])
else:
raise UnknownDirective, format_str[pos:end + 1]
if value is not None:
format_str = format_str[:pos]+str(value)+format_str[end+1:]
substitutions += 1
# If there were any substitutions at this depth, re-parse
# since this may have revealed new things to substitute
if substitutions:
break
if not all_matches:
break
# If there were no substitutions at all, and we have exhausted
# the possible matches, bail.
if not substitutions:
break
return format_str
def _find_sub_matches(format_str):
"""@brief Find all of the substitution matches.
@param format_str the RUSS conformant format string.
@return Returns an array of depths of arrays of positional matches in input.
"""
depth = 0
matches = []
for pos in range(len(format_str)):
if format_str[pos] == '{':
depth += 1
if not len(matches) == depth:
matches.append([])
matches[depth - 1].append(pos)
continue
if format_str[pos] == '}':
depth -= 1
continue
if not depth == 0:
raise UnbalancedBraces, format_str
return matches
def _build_query_string(query_dict):
"""\
@brief Given a dict, return a query string; a utility wrapper for urllib.
@param query_dict input query dict
@returns Returns an urlencoded query string including leading '?'.
"""
if query_dict:
keys = query_dict.keys()
keys.sort()
def stringize(value):
if type(value) in (str,unicode):
return value
else:
return str(value)
query_list = [urllib.quote(str(key)) + '=' + urllib.quote(stringize(query_dict[key])) for key in keys]
return '?' + '&'.join(query_list)
else:
return ''
def _fetch_url_directive(directive):
"*FIX: This only supports GET"
commands = directive.split('|')
resource = llsdhttp.get(commands[0])
if len(commands) == 3:
resource = _walk_resource(resource, commands[2])
return resource
def _walk_resource(resource, path):
path = path.split('/')
for child in path:
if not child:
continue
resource = resource[child]
return resource
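A tiny usage sketch of the RUSS formatter removed above; the format string and context values are made up for illustration:

from indra.ipc import russ

context = {
    'channel': 'Second Life Release',     # substituted by {$channel}, path-quoted
    'version': '1.18.2.0',
    'opts': {'lang': 'en', 'page': 2},    # expanded by {%opts} into a query string
}

url = russ.format('/channel/{$channel}/{$version}{%opts}', context)
print(url)   # '/channel/Second%20Life%20Release/1.18.2.0?lang=en&page=2'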


@ -1,134 +0,0 @@
"""\
@file servicebuilder.py
@author Phoenix
@brief Class which will generate service urls.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from indra.base import config
from indra.ipc import llsdhttp
from indra.ipc import russ
# *NOTE: agent presence relies on this variable existing and being current, it is a huge hack
services_config = {}
try:
services_config = llsdhttp.get(config.get('services-config'))
except:
pass
_g_builder = None
def _builder():
global _g_builder
if _g_builder is None:
_g_builder = ServiceBuilder()
return _g_builder
def build(name, context={}, **kwargs):
""" Convenience method for using a global, singleton, service builder. Pass arguments either via a dict or via python keyword arguments, or both!
Example use:
> context = {'channel':'Second Life Release', 'version':'1.18.2.0'}
> servicebuilder.build('version-manager-version', context)
'http://int.util.vaak.lindenlab.com/channel/Second%20Life%20Release/1.18.2.0'
> servicebuilder.build('version-manager-version', channel='Second Life Release', version='1.18.2.0')
'http://int.util.vaak.lindenlab.com/channel/Second%20Life%20Release/1.18.2.0'
> servicebuilder.build('version-manager-version', context, version='1.18.1.2')
'http://int.util.vaak.lindenlab.com/channel/Second%20Life%20Release/1.18.1.2'
"""
global _g_builder
if _g_builder is None:
_g_builder = ServiceBuilder()
return _g_builder.buildServiceURL(name, context, **kwargs)
def build_path(name, context={}, **kwargs):
context = context.copy() # shouldn't modify the caller's dictionary
context.update(kwargs)
return _builder().buildPath(name, context)
class ServiceBuilder(object):
def __init__(self, services_definition = services_config):
"""\
@brief Create a ServiceBuilder.
@param services_definition Complete services definition, services.xml.
"""
# no need to keep a copy of the services section of the
# complete services definition, but it doesn't hurt much.
self.services = services_definition['services']
self.builders = {}
for service in self.services:
service_builder = service.get('service-builder')
if not service_builder:
continue
if isinstance(service_builder, dict):
# We will be constructing several builders
for name, builder in service_builder.iteritems():
full_builder_name = service['name'] + '-' + name
self.builders[full_builder_name] = builder
else:
self.builders[service['name']] = service_builder
def buildPath(self, name, context):
"""\
@brief given the environment on construction, return a service path.
@param name The name of the service.
@param context A dict of name value lookups for the service.
@returns Returns the service path.
"""
return russ.format(self.builders[name], context)
def buildServiceURL(self, name, context={}, **kwargs):
"""\
@brief given the environment on construction, return a service URL.
@param name The name of the service.
@param context A dict of name value lookups for the service.
@param kwargs Any keyword arguments are treated as members of the
context, this allows you to be all 31337 by writing shit like:
servicebuilder.build('name', param=value)
@returns Returns the full service URL.
"""
context = context.copy() # shouldn't modify the caller's dictionary
context.update(kwargs)
base_url = config.get('services-base-url')
svc_path = russ.format(self.builders[name], context)
return base_url + svc_path
def on_in(query_name, host_key, schema_key):
"""\
@brief Constructs an on/in snippet (for running named queries)
from a schema name and two keys referencing values stored in
indra.xml.
@param query_name Name of the query.
@param host_key Logical name of destination host. Will be
looked up in indra.xml.
@param schema_key Logical name of destination schema. Will
be looked up in indra.xml.
"""
return "on/config:%s/in/config:%s/%s" % (host_key.strip('/'),
schema_key.strip('/'),
query_name.lstrip('/'))
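For completeness, a sketch of the two helpers above that the build() docstring does not itself demonstrate; build_path() assumes a services definition was successfully fetched at import time, and the query/host/schema names are illustrative:

from indra.ipc import servicebuilder

# Format only the path portion, without the services-base-url prefix.
path = servicebuilder.build_path('version-manager-version',
                                 channel='Second Life Release', version='1.18.2.0')

# Compose the "on/<host>/in/<schema>" snippet used when running named queries.
snippet = servicebuilder.on_in('get_agent', 'agent-db-host', 'agent-db-schema')
# -> 'on/config:agent-db-host/in/config:agent-db-schema/get_agent'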


@ -1,468 +0,0 @@
"""\
@file siesta.py
@brief A tiny llsd based RESTful web services framework
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from indra.base import config
from indra.base import llsd
from webob import exc
import webob
import re, socket
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
import cjson
json_decode = cjson.decode
json_encode = cjson.encode
JsonDecodeError = cjson.DecodeError
JsonEncodeError = cjson.EncodeError
except ImportError:
import simplejson
json_decode = simplejson.loads
json_encode = simplejson.dumps
JsonDecodeError = ValueError
JsonEncodeError = TypeError
llsd_parsers = {
'application/json': json_decode,
llsd.BINARY_MIME_TYPE: llsd.parse_binary,
'application/llsd+notation': llsd.parse_notation,
llsd.XML_MIME_TYPE: llsd.parse_xml,
'application/xml': llsd.parse_xml,
}
def mime_type(content_type):
'''Given a Content-Type header, return only the MIME type.'''
return content_type.split(';', 1)[0].strip().lower()
class BodyLLSD(object):
'''Give a webob Request or Response an llsd based "content" property.
Getting the content property parses the body, and caches the result.
Setting the content property formats a payload, and the body property
is set.'''
def _llsd__get(self):
'''Get, set, or delete the LLSD value stored in this object.'''
try:
return self._llsd
except AttributeError:
if not self.body:
raise AttributeError('No llsd attribute has been set')
else:
mtype = mime_type(self.content_type)
try:
parser = llsd_parsers[mtype]
except KeyError:
raise exc.HTTPUnsupportedMediaType(
'Content type %s not supported' % mtype).exception
try:
self._llsd = parser(self.body)
except (llsd.LLSDParseError, JsonDecodeError, TypeError), err:
raise exc.HTTPBadRequest(
'Could not parse body: %r' % err.args).exception
return self._llsd
def _llsd__set(self, val):
req = getattr(self, 'request', None)
if req is not None:
formatter, ctype = formatter_for_request(req)
self.content_type = ctype
else:
formatter, ctype = formatter_for_mime_type(
mime_type(self.content_type))
self.body = formatter(val)
def _llsd__del(self):
if hasattr(self, '_llsd'):
del self._llsd
content = property(_llsd__get, _llsd__set, _llsd__del)
class Response(webob.Response, BodyLLSD):
'''Response class with LLSD support.
A sensible default content type is used.
Setting the llsd property also sets the body. Getting the llsd
property parses the body if necessary.
If you set the body property directly, the llsd property will be
deleted.'''
default_content_type = 'application/llsd+xml'
def _body__set(self, body):
if hasattr(self, '_llsd'):
del self._llsd
super(Response, self)._body__set(body)
def cache_forever(self):
self.cache_expires(86400 * 365)
body = property(webob.Response._body__get, _body__set,
webob.Response._body__del,
webob.Response._body__get.__doc__)
class Request(webob.Request, BodyLLSD):
'''Request class with LLSD support.
Sensible content type and accept headers are used by default.
Setting the content property also sets the body. Getting the content
property parses the body if necessary.
If you set the body property directly, the content property will be
deleted.'''
default_content_type = 'application/llsd+xml'
default_accept = ('application/llsd+xml; q=0.5, '
'application/llsd+notation; q=0.3, '
'application/llsd+binary; q=0.2, '
'application/xml; q=0.1, '
'application/json; q=0.0')
def __init__(self, environ=None, *args, **kwargs):
if environ is None:
environ = {}
else:
environ = environ.copy()
if 'CONTENT_TYPE' not in environ:
environ['CONTENT_TYPE'] = self.default_content_type
if 'HTTP_ACCEPT' not in environ:
environ['HTTP_ACCEPT'] = self.default_accept
super(Request, self).__init__(environ, *args, **kwargs)
def _body__set(self, body):
if hasattr(self, '_llsd'):
del self._llsd
super(Request, self)._body__set(body)
def path_urljoin(self, *parts):
return '/'.join([path_url.rstrip('/')] + list(parts))
body = property(webob.Request._body__get, _body__set,
webob.Request._body__del, webob.Request._body__get.__doc__)
def create_response(self, content=None, status='200 OK',
conditional_response=webob.NoDefault):
resp = self.ResponseClass(status=status, request=self,
conditional_response=conditional_response)
resp.content = content
return resp
def curl(self):
'''Create and fill out a pycurl easy object from this request.'''
import pycurl
c = pycurl.Curl()
c.setopt(pycurl.URL, self.url())
if self.headers:
c.setopt(pycurl.HTTPHEADER,
['%s: %s' % (k, self.headers[k]) for k in self.headers])
c.setopt(pycurl.FOLLOWLOCATION, True)
c.setopt(pycurl.AUTOREFERER, True)
c.setopt(pycurl.MAXREDIRS, 16)
c.setopt(pycurl.NOSIGNAL, True)
c.setopt(pycurl.READFUNCTION, self.body_file.read)
c.setopt(pycurl.SSL_VERIFYHOST, 2)
if self.method == 'POST':
c.setopt(pycurl.POST, True)
post301 = getattr(pycurl, 'POST301', None)
if post301 is not None:
# Added in libcurl 7.17.1.
c.setopt(post301, True)
elif self.method == 'PUT':
c.setopt(pycurl.PUT, True)
elif self.method != 'GET':
c.setopt(pycurl.CUSTOMREQUEST, self.method)
return c
Request.ResponseClass = Response
Response.RequestClass = Request
llsd_formatters = {
'application/json': json_encode,
'application/llsd+binary': llsd.format_binary,
'application/llsd+notation': llsd.format_notation,
'application/llsd+xml': llsd.format_xml,
'application/xml': llsd.format_xml,
}
formatter_qualities = (
('application/llsd+xml', 1.0),
('application/llsd+notation', 0.5),
('application/llsd+binary', 0.4),
('application/xml', 0.3),
('application/json', 0.2),
)
def formatter_for_mime_type(mime_type):
'''Return a formatter that encodes to the given MIME type.
The result is a pair of function and MIME type.'''
try:
return llsd_formatters[mime_type], mime_type
except KeyError:
raise exc.HTTPInternalServerError(
'Could not use MIME type %r to format response' %
mime_type).exception
def formatter_for_request(req):
'''Return a formatter that encodes to the preferred type of the client.
The result is a pair of function and actual MIME type.'''
ctype = req.accept.best_match(formatter_qualities)
try:
return llsd_formatters[ctype], ctype
except KeyError:
raise exc.HTTPNotAcceptable().exception
def wsgi_adapter(func, environ, start_response):
'''Adapt a Siesta callable to act as a WSGI application.'''
# Process the request as appropriate.
try:
req = Request(environ)
#print req.urlvars
resp = func(req, **req.urlvars)
if not isinstance(resp, webob.Response):
try:
formatter, ctype = formatter_for_request(req)
resp = req.ResponseClass(formatter(resp), content_type=ctype)
resp._llsd = resp
except (JsonEncodeError, TypeError), err:
resp = exc.HTTPInternalServerError(
detail='Could not format response')
except exc.HTTPException, e:
resp = e
except socket.error, e:
resp = exc.HTTPInternalServerError(detail=e.args[1])
return resp(environ, start_response)
def llsd_callable(func):
'''Turn a callable into a Siesta application.'''
def replacement(environ, start_response):
return wsgi_adapter(func, environ, start_response)
return replacement
def llsd_method(http_method, func):
def replacement(environ, start_response):
if environ['REQUEST_METHOD'] == http_method:
return wsgi_adapter(func, environ, start_response)
return exc.HTTPMethodNotAllowed()(environ, start_response)
return replacement
http11_methods = 'OPTIONS GET HEAD POST PUT DELETE TRACE CONNECT'.split()
http11_methods.sort()
def llsd_class(cls):
'''Turn a class into a Siesta application.
A new instance is created for each request. A HTTP method FOO is
turned into a call to the handle_foo method of the instance.'''
def foo(req, **kwargs):
instance = cls()
method = req.method.lower()
try:
handler = getattr(instance, 'handle_' + method)
except AttributeError:
allowed = [m for m in http11_methods
if hasattr(instance, 'handle_' + m.lower())]
raise exc.HTTPMethodNotAllowed(
headers={'Allow': ', '.join(allowed)}).exception
#print "kwargs: ", kwargs
return handler(req, **kwargs)
def replacement(environ, start_response):
return wsgi_adapter(foo, environ, start_response)
return replacement
def curl(reqs):
import pycurl
m = pycurl.CurlMulti()
curls = [r.curl() for r in reqs]
io = {}
for c in curls:
fp = StringIO()
hdr = StringIO()
c.setopt(pycurl.WRITEFUNCTION, fp.write)
c.setopt(pycurl.HEADERFUNCTION, hdr.write)
io[id(c)] = fp, hdr
m.handles = curls
try:
while True:
ret, num_handles = m.perform()
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
finally:
m.close()
resps = []
for req, c in zip(reqs, curls):
fp, hdr = io[id(c)]
hdr.seek(0)
status = hdr.readline().rstrip()
headers = []
name, values = None, None
# XXX We don't currently handle bogus header data.
for line in hdr.readlines():
if not line[0].isspace():
if name:
headers.append((name, ' '.join(values)))
name, value = line.strip().split(':', 1)
values = [value]
else:
values.append(line.strip())
if name:
headers.append((name, ' '.join(values)))
resps.append(req.ResponseClass(fp.getvalue(), status, headers, request=req))
return resps
route_re = re.compile(r'''
\{ # exact character "{"
(\w*) # "config" or variable (restricted to a-z, 0-9, _)
(?:([:~])([^}]+))? # optional :type or ~regex part
\} # exact character "}"
''', re.VERBOSE)
predefined_regexps = {
'uuid': r'[a-f0-9][a-f0-9-]{31,35}',
'int': r'\d+',
'host': r'[a-z0-9][a-z0-9\-\.]*',
}
def compile_route(route):
fp = StringIO()
last_pos = 0
for match in route_re.finditer(route):
#print "matches: ", match.groups()
fp.write(re.escape(route[last_pos:match.start()]))
var_name = match.group(1)
sep = match.group(2)
expr = match.group(3)
if var_name == 'config':
expr = re.escape(str(config.get(var_name)))
else:
if expr:
if sep == ':':
expr = predefined_regexps[expr]
# otherwise, treat what follows '~' as a regexp
else:
expr = '[^/]+'
if var_name != '':
expr = '(?P<%s>%s)' % (var_name, expr)
else:
expr = '(%s)' % (expr,)
fp.write(expr)
last_pos = match.end()
fp.write(re.escape(route[last_pos:]))
compiled_route = '^%s$' % fp.getvalue()
#print route, "->", compiled_route
return compiled_route
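# Illustrative sketch of what compile_route() produces (values are indicative,
# not copied from real output): each template variable becomes a named group
# built from predefined_regexps, and the whole route is anchored with ^...$.
#
#     pattern = re.compile(compile_route('/agent/{agent_id:uuid}/presence'))
#     match = pattern.match('/agent/3d6181b0-6a4b-97ef-18d8-722652995cf1/presence')
#     if match is not None:
#         print match.groupdict()    # {'agent_id': '3d6181b0-...-722652995cf1'}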
class Router(object):
'''WSGI routing class. Parses a URL and hands off a request to
some other WSGI application. If no suitable application is found,
responds with a 404.'''
def __init__(self):
self._new_routes = []
self._routes = []
self._paths = []
def add(self, route, app, methods=None):
self._new_routes.append((route, app, methods))
def _create_routes(self):
for route, app, methods in self._new_routes:
self._paths.append(route)
self._routes.append(
(re.compile(compile_route(route)),
app,
methods and dict.fromkeys(methods)))
self._new_routes = []
def __call__(self, environ, start_response):
# load up the config from the config file. Only needs to be
# done once per interpreter. This is the entry point of all
# siesta applications, so this is where we trap it.
_conf = config.get_config()
if _conf is None:
import os.path
fname = os.path.join(
environ.get('ll.config_dir', '/local/linden/etc'),
'indra.xml')
config.load(fname)
# proceed with handling the request
self._create_routes()
path_info = environ['PATH_INFO']
request_method = environ['REQUEST_METHOD']
allowed = []
for regex, app, methods in self._routes:
m = regex.match(path_info)
if m:
#print "groupdict:",m.groupdict()
if not methods or request_method in methods:
environ['paste.urlvars'] = m.groupdict()
return app(environ, start_response)
else:
allowed += methods
if allowed:
allowed = dict.fromkeys(allowed).keys()
allowed.sort()
resp = exc.HTTPMethodNotAllowed(
headers={'Allow': ', '.join(allowed)})
else:
resp = exc.HTTPNotFound()
return resp(environ, start_response)
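# Illustrative usage sketch (handler names are hypothetical): routes are added
# with add() and matched in insertion order; unmatched paths get a 404, and a
# matched path with a disallowed method gets a 405 with an Allow header.
#
#     router = Router()
#     router.add('/agent/{agent_id:uuid}', llsd_class(AgentHandler))
#     router.add('/status', llsd_callable(status_handler), methods=['GET'])
#     # router is itself a WSGI application and can be served directly.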

View File

@ -1,235 +0,0 @@
#!/usr/bin/python
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
from indra.base import llsd, lluuid
from indra.ipc import siesta
import datetime, math, unittest
from webob import exc
class ClassApp(object):
def handle_get(self, req):
pass
def handle_post(self, req):
return req.llsd
def callable_app(req):
if req.method == 'UNDERPANTS':
raise exc.HTTPMethodNotAllowed()
elif req.method == 'GET':
return None
return req.llsd
class TestBase:
def test_basic_get(self):
req = siesta.Request.blank('/')
self.assertEquals(req.get_response(self.server).body,
llsd.format_xml(None))
def test_bad_method(self):
req = siesta.Request.blank('/')
req.environ['REQUEST_METHOD'] = 'UNDERPANTS'
self.assertEquals(req.get_response(self.server).status_int,
exc.HTTPMethodNotAllowed.code)
json_safe = {
'none': None,
'bool_true': True,
'bool_false': False,
'int_zero': 0,
'int_max': 2147483647,
'int_min': -2147483648,
'long_zero': 0,
'long_max': 2147483647L,
'long_min': -2147483648L,
'float_zero': 0,
'float': math.pi,
'float_huge': 3.14159265358979323846e299,
'str_empty': '',
'str': 'foo',
u'unic\u1e51de_empty': u'',
u'unic\u1e51de': u'\u1e4exx\u10480',
}
json_safe['array'] = json_safe.values()
json_safe['tuple'] = tuple(json_safe.values())
json_safe['dict'] = json_safe.copy()
json_unsafe = {
'uuid_empty': lluuid.UUID(),
'uuid_full': lluuid.UUID('dc61ab0530200d7554d23510559102c1a98aab1b'),
'binary_empty': llsd.binary(),
'binary': llsd.binary('f\0\xff'),
'uri_empty': llsd.uri(),
'uri': llsd.uri('http://www.secondlife.com/'),
'datetime_empty': datetime.datetime(1970,1,1),
'datetime': datetime.datetime(1999,9,9,9,9,9),
}
json_unsafe.update(json_safe)
json_unsafe['array'] = json_unsafe.values()
json_unsafe['tuple'] = tuple(json_unsafe.values())
json_unsafe['dict'] = json_unsafe.copy()
json_unsafe['iter'] = iter(json_unsafe.values())
def _test_client_content_type_good(self, content_type, ll):
def run(ll):
req = siesta.Request.blank('/')
req.environ['REQUEST_METHOD'] = 'POST'
req.content_type = content_type
req.llsd = ll
req.accept = content_type
resp = req.get_response(self.server)
self.assertEquals(resp.status_int, 200)
return req, resp
if False and isinstance(ll, dict):
def fixup(v):
if isinstance(v, float):
return '%.5f' % v
if isinstance(v, long):
return int(v)
if isinstance(v, (llsd.binary, llsd.uri)):
return v
if isinstance(v, (tuple, list)):
return [fixup(i) for i in v]
if isinstance(v, dict):
return dict([(k, fixup(i)) for k, i in v.iteritems()])
return v
for k, v in ll.iteritems():
l = [k, v]
req, resp = run(l)
self.assertEquals(fixup(resp.llsd), fixup(l))
run(ll)
def test_client_content_type_json_good(self):
self._test_client_content_type_good('application/json', self.json_safe)
def test_client_content_type_llsd_xml_good(self):
self._test_client_content_type_good('application/llsd+xml',
self.json_unsafe)
def test_client_content_type_llsd_notation_good(self):
self._test_client_content_type_good('application/llsd+notation',
self.json_unsafe)
def test_client_content_type_llsd_binary_good(self):
self._test_client_content_type_good('application/llsd+binary',
self.json_unsafe)
def test_client_content_type_xml_good(self):
self._test_client_content_type_good('application/xml',
self.json_unsafe)
def _test_client_content_type_bad(self, content_type):
req = siesta.Request.blank('/')
req.environ['REQUEST_METHOD'] = 'POST'
req.body = '\0invalid nonsense under all encodings'
req.content_type = content_type
self.assertEquals(req.get_response(self.server).status_int,
exc.HTTPBadRequest.code)
def test_client_content_type_json_bad(self):
self._test_client_content_type_bad('application/json')
def test_client_content_type_llsd_xml_bad(self):
self._test_client_content_type_bad('application/llsd+xml')
def test_client_content_type_llsd_notation_bad(self):
self._test_client_content_type_bad('application/llsd+notation')
def test_client_content_type_llsd_binary_bad(self):
self._test_client_content_type_bad('application/llsd+binary')
def test_client_content_type_xml_bad(self):
self._test_client_content_type_bad('application/xml')
def test_client_content_type_bad(self):
req = siesta.Request.blank('/')
req.environ['REQUEST_METHOD'] = 'POST'
req.body = 'XXX'
req.content_type = 'application/nonsense'
self.assertEquals(req.get_response(self.server).status_int,
exc.HTTPUnsupportedMediaType.code)
def test_request_default_content_type(self):
req = siesta.Request.blank('/')
self.assertEquals(req.content_type, req.default_content_type)
def test_request_default_accept(self):
req = siesta.Request.blank('/')
from webob import acceptparse
self.assertEquals(str(req.accept).replace(' ', ''),
req.default_accept.replace(' ', ''))
def test_request_llsd_auto_body(self):
req = siesta.Request.blank('/')
req.llsd = {'a': 2}
self.assertEquals(req.body, '<?xml version="1.0" ?><llsd><map>'
'<key>a</key><integer>2</integer></map></llsd>')
def test_request_llsd_mod_body_changes_llsd(self):
req = siesta.Request.blank('/')
req.llsd = {'a': 2}
req.body = '<?xml version="1.0" ?><llsd><integer>1337</integer></llsd>'
self.assertEquals(req.llsd, 1337)
def test_request_bad_llsd_fails(self):
def crashme(ctype):
def boom():
class foo(object): pass
req = siesta.Request.blank('/')
req.content_type = ctype
req.llsd = foo()
return boom
for mime_type in siesta.llsd_parsers:
self.assertRaises(TypeError, crashme(mime_type))
class ClassServer(TestBase, unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.server = siesta.llsd_class(ClassApp)
class CallableServer(TestBase, unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.server = siesta.llsd_callable(callable_app)
class RouterServer(unittest.TestCase):
def test_router(self):
def foo(req, quux):
print quux
r = siesta.Router()
r.add('/foo/{quux:int}', siesta.llsd_callable(foo), methods=['GET'])
req = siesta.Request.blank('/foo/33')
req.get_response(r)
req = siesta.Request.blank('/foo/bar')
self.assertEquals(req.get_response(r).status_int,
exc.HTTPNotFound.code)
if __name__ == '__main__':
unittest.main()

View File

@ -1,597 +0,0 @@
"""
@file webdav.py
@brief Classes to make manipulation of a webdav store easier.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys, os, httplib, urlparse
import socket, time
import xml.dom.minidom
import syslog
# import signal
__revision__ = '0'
dav_debug = False
# def urlsafe_b64decode (enc):
# return base64.decodestring (enc.replace ('_', '/').replace ('-', '+'))
# def urlsafe_b64encode (str):
# return base64.encodestring (str).replace ('+', '-').replace ('/', '_')
class DAVError (Exception):
""" Base class for exceptions in this module. """
def __init__ (self, status=0, message='', body='', details=''):
self.status = status
self.message = message
self.body = body
self.details = details
Exception.__init__ (self, '%d:%s:%s%s' % (self.status, self.message,
self.body, self.details))
def print_to_stderr (self):
""" print_to_stderr docstring """
print >> sys.stderr, str (self.status) + ' ' + self.message
print >> sys.stderr, str (self.details)
class Timeout (Exception):
""" Timeout docstring """
def __init__ (self, arg=''):
Exception.__init__ (self, arg)
def alarm_handler (signum, frame):
""" alarm_handler docstring """
raise Timeout ('caught alarm')
class WebDAV:
""" WebDAV docstring """
def __init__ (self, url, proxy=None, retries_before_fail=6):
self.init_url = url
self.init_proxy = proxy
self.retries_before_fail = retries_before_fail
url_parsed = urlparse.urlsplit (url)
self.top_path = url_parsed[ 2 ]
# make sure top_path has a trailing /
if self.top_path == None or self.top_path == '':
self.top_path = '/'
elif len (self.top_path) > 1 and self.top_path[-1:] != '/':
self.top_path += '/'
if dav_debug:
syslog.syslog ('new WebDAV %s : %s' % (str (url), str (proxy)))
if proxy:
proxy_parsed = urlparse.urlsplit (proxy)
self.host_header = url_parsed[ 1 ]
host_and_port = proxy_parsed[ 1 ].split (':')
self.host = host_and_port[ 0 ]
if len (host_and_port) > 1:
self.port = int(host_and_port[ 1 ])
else:
self.port = 80
else: # no proxy
host_and_port = url_parsed[ 1 ].split (':')
self.host_header = None
self.host = host_and_port[ 0 ]
if len (host_and_port) > 1:
self.port = int(host_and_port[ 1 ])
else:
self.port = 80
self.connection = False
self.connect ()
def log (self, msg, depth=0):
""" log docstring """
if dav_debug and depth == 0:
host = str (self.init_url)
if host == 'http://int.tuco.lindenlab.com:80/asset/':
host = 'tuco'
if host == 'http://harriet.lindenlab.com/asset-keep/':
host = 'harriet/asset-keep'
if host == 'http://harriet.lindenlab.com/asset-flag/':
host = 'harriet/asset-flag'
if host == 'http://harriet.lindenlab.com/asset/':
host = 'harriet/asset'
if host == 'http://ozzy.lindenlab.com/asset/':
host = 'ozzy/asset'
if host == 'http://station11.lindenlab.com:12041/:':
host = 'station11:12041'
proxy = str (self.init_proxy)
if proxy == 'None':
proxy = ''
if proxy == 'http://int.tuco.lindenlab.com:3128/':
proxy = 'tuco'
syslog.syslog ('WebDAV (%s:%s) %s' % (host, proxy, str (msg)))
def connect (self):
""" connect docstring """
self.log ('connect')
self.connection = httplib.HTTPConnection (self.host, self.port)
def __err (self, response, details):
""" __err docstring """
raise DAVError (response.status, response.reason, response.read (),
str (self.init_url) + ':' + \
str (self.init_proxy) + ':' + str (details))
def request (self, method, path, body=None, headers=None,
read_all=True, body_hook = None, recurse=0, allow_cache=True):
""" request docstring """
# self.log ('request %s %s' % (method, path))
if headers == None:
headers = {}
if not allow_cache:
headers['Pragma'] = 'no-cache'
headers['cache-control'] = 'no-cache'
try:
if method.lower () != 'purge':
if path.startswith ('/'):
path = path[1:]
if self.host_header: # use proxy
headers[ 'host' ] = self.host_header
fullpath = 'http://%s%s%s' % (self.host_header,
self.top_path, path)
else: # no proxy
fullpath = self.top_path + path
else:
fullpath = path
self.connection.request (method, fullpath, body, headers)
if body_hook:
body_hook ()
# signal.signal (signal.SIGALRM, alarm_handler)
# try:
# signal.alarm (120)
# signal.alarm (0)
# except Timeout, e:
# if recurse < 6:
# return self.retry_request (method, path, body, headers,
# read_all, body_hook, recurse)
# else:
# raise DAVError (0, 'timeout', self.host,
# (method, path, body, headers, recurse))
response = self.connection.getresponse ()
if read_all:
while len (response.read (1024)) > 0:
pass
if (response.status == 500 or \
response.status == 503 or \
response.status == 403) and \
recurse < self.retries_before_fail:
return self.retry_request (method, path, body, headers,
read_all, body_hook, recurse)
return response
except (httplib.ResponseNotReady,
httplib.BadStatusLine,
socket.error):
# if the server hangs up on us (keepalive off, broken pipe),
# we need to reconnect and try again.
if recurse < self.retries_before_fail:
return self.retry_request (method, path, body, headers,
read_all, body_hook, recurse)
raise DAVError (0, 'reconnect failed', self.host,
(method, path, body, headers, recurse))
def retry_request (self, method, path, body, headers,
read_all, body_hook, recurse):
""" retry_request docstring """
time.sleep (10.0 * recurse)
self.connect ()
return self.request (method, path, body, headers,
read_all, body_hook, recurse+1)
def propfind (self, path, body=None, depth=1):
""" propfind docstring """
# self.log ('propfind %s' % path)
headers = {'Content-Type':'text/xml; charset="utf-8"',
'Depth':str(depth)}
response = self.request ('PROPFIND', path, body, headers, False)
if response.status == 207:
return response # Multi-Status
self.__err (response, ('PROPFIND', path, body, headers, 0))
def purge (self, path):
""" issue a squid purge command """
headers = {'Accept':'*/*'}
response = self.request ('PURGE', path, None, headers)
if response.status == 200 or response.status == 404:
# 200 if it was purge, 404 if it wasn't there.
return response
self.__err (response, ('PURGE', path, None, headers))
def get_file_size (self, path):
"""
Use propfind to ask a webdav server what the size of
a file is. If used on a directory (collection) return 0
"""
self.log ('get_file_size %s' % path)
# "getcontentlength" property
# 8.1.1 Example - Retrieving Named Properties
# http://docs.python.org/lib/module-xml.dom.html
nsurl = 'http://apache.org/dav/props/'
doc = xml.dom.minidom.Document ()
propfind_element = doc.createElementNS (nsurl, "D:propfind")
propfind_element.setAttributeNS (nsurl, 'xmlns:D', 'DAV:')
doc.appendChild (propfind_element)
prop_element = doc.createElementNS (nsurl, "D:prop")
propfind_element.appendChild (prop_element)
con_len_element = doc.createElementNS (nsurl, "D:getcontentlength")
prop_element.appendChild (con_len_element)
response = self.propfind (path, doc.toxml ())
doc.unlink ()
resp_doc = xml.dom.minidom.parseString (response.read ())
cln = resp_doc.getElementsByTagNameNS ('DAV:','getcontentlength')[ 0 ]
try:
content_length = int (cln.childNodes[ 0 ].nodeValue)
except IndexError:
return 0
resp_doc.unlink ()
return content_length
def file_exists (self, path):
"""
do an http head on the given file. return True if it succeeds
"""
self.log ('file_exists %s' % path)
expect_gzip = path.endswith ('.gz')
response = self.request ('HEAD', path)
got_gzip = response.getheader ('Content-Encoding', '').strip ()
if got_gzip.lower () == 'x-gzip' and expect_gzip == False:
# the asset server fakes us out if we ask for the non-gzipped
# version of an asset, but the server has the gzipped version.
return False
return response.status == 200
def mkdir (self, path):
""" mkdir docstring """
self.log ('mkdir %s' % path)
headers = {}
response = self.request ('MKCOL', path, None, headers)
if response.status == 201:
return # success
if response.status == 405:
return # directory already existed?
self.__err (response, ('MKCOL', path, None, headers, 0))
def delete (self, path):
""" delete docstring """
self.log ('delete %s' % path)
headers = {'Depth':'infinity'} # collections require infinity
response = self.request ('DELETE', path, None, headers)
if response.status == 204:
return # no content
if response.status == 404:
return # hmm
self.__err (response, ('DELETE', path, None, headers, 0))
def list_directory (self, path, dir_filter=None, allow_cache=True,
minimum_cache_time=False):
"""
Request an http directory listing and parse the filenames out of lines
like: '<LI><A HREF="X"> X</A>'. If a filter function is provided,
only return filenames that the filter returns True for.
This is sort of grody, but it seems faster than other ways of getting
this information from an isilon.
"""
self.log ('list_directory %s' % path)
def try_match (lline, before, after):
""" try_match docstring """
try:
blen = len (before)
asset_start_index = lline.index (before)
asset_end_index = lline.index (after, asset_start_index + blen)
asset = line[ asset_start_index + blen : asset_end_index ]
if not dir_filter or dir_filter (asset):
return [ asset ]
return []
except ValueError:
return []
if len (path) > 0 and path[-1:] != '/':
path += '/'
response = self.request ('GET', path, None, {}, False,
allow_cache=allow_cache)
if allow_cache and minimum_cache_time: # XXX
print response.getheader ('Date')
# s = "2005-12-06T12:13:14"
# from datetime import datetime
# from time import strptime
# datetime(*strptime(s, "%Y-%m-%dT%H:%M:%S")[0:6])
# datetime.datetime(2005, 12, 6, 12, 13, 14)
if response.status != 200:
self.__err (response, ('GET', path, None, {}, 0))
assets = []
for line in response.read ().split ('\n'):
lline = line.lower ()
if lline.find ("parent directory") == -1:
# isilon file
assets += try_match (lline, '<li><a href="', '"> ')
# apache dir
assets += try_match (lline, 'alt="[dir]"> <a href="', '/">')
# apache file
assets += try_match (lline, 'alt="[ ]"> <a href="', '">')
return assets
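# Illustrative sample of the listing lines try_match() looks for (markup shapes
# only, not captured from a real server):
#
#     <li><a href="foo.gz"> foo.gz</a>                isilon-style entry
#     alt="[dir]"> <a href="textures/">textures</a>   apache directory entry
#     alt="[ ]"> <a href="foo.gz">foo.gz</a>          apache file entry
#
# The text between the matched delimiters is collected as an asset name and,
# if dir_filter is given, only names the filter accepts are returned.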
def __tmp_filename (self, path_and_file):
""" __tmp_filename docstring """
head, tail = os.path.split (path_and_file)
if head != '':
return head + '/.' + tail + '.' + str (os.getpid ())
else:
return head + '.' + tail + '.' + str (os.getpid ())
def __put__ (self, filesize, body_hook, remotefile):
""" __put__ docstring """
headers = {'Content-Length' : str (filesize)}
remotefile_tmp = self.__tmp_filename (remotefile)
response = self.request ('PUT', remotefile_tmp, None,
headers, True, body_hook)
if not response.status in (201, 204): # created, no content
self.__err (response, ('PUT', remotefile, None, headers, 0))
if filesize != self.get_file_size (remotefile_tmp):
try:
self.delete (remotefile_tmp)
except:
pass
raise DAVError (0, 'tmp upload error', remotefile_tmp)
# move the file to its final location
try:
self.rename (remotefile_tmp, remotefile)
except DAVError, exc:
if exc.status == 403: # try to clean up the tmp file
try:
self.delete (remotefile_tmp)
except:
pass
raise
if filesize != self.get_file_size (remotefile):
raise DAVError (0, 'file upload error', str (remotefile_tmp))
def put_string (self, strng, remotefile):
""" put_string docstring """
self.log ('put_string %d -> %s' % (len (strng), remotefile))
filesize = len (strng)
def body_hook ():
""" body_hook docstring """
self.connection.send (strng)
self.__put__ (filesize, body_hook, remotefile)
def put_file (self, localfile, remotefile):
"""
Send a local file to a remote webdav store. First, upload to
a temporary filename. Next make sure the file is the size we
expected. Next, move the file to its final location. Next,
check the file size at the final location.
"""
self.log ('put_file %s -> %s' % (localfile, remotefile))
filesize = os.path.getsize (localfile)
def body_hook ():
""" body_hook docstring """
handle = open (localfile)
while True:
data = handle.read (1300)
if len (data) == 0:
break
self.connection.send (data)
handle.close ()
self.__put__ (filesize, body_hook, remotefile)
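# Illustrative usage sketch (host and paths are made up):
#
#     dav = WebDAV('http://assets.example.com/asset/')
#     dav.put_file('/tmp/texture.j2c', 'textures/texture.j2c')
#     if dav.file_exists('textures/texture.j2c'):
#         dav.get_file('textures/texture.j2c', '/tmp/texture_copy.j2c')
#
# The upload goes to a dotted temporary name first, is size-checked, then
# MOVEd into place, mirroring the steps described in the docstring above.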
def create_empty_file (self, remotefile):
""" create an empty file """
self.log ('touch_file %s' % (remotefile))
headers = {'Content-Length' : '0'}
response = self.request ('PUT', remotefile, None, headers)
if not response.status in (201, 204): # created, no content
self.__err (response, ('PUT', remotefile, None, headers, 0))
if self.get_file_size (remotefile) != 0:
raise DAVError (0, 'file upload error', str (remotefile))
def __get_file_setup (self, remotefile, check_size=True):
""" __get_file_setup docstring """
if check_size:
remotesize = self.get_file_size (remotefile)
response = self.request ('GET', remotefile, None, {}, False)
if response.status != 200:
self.__err (response, ('GET', remotefile, None, {}, 0))
try:
content_length = int (response.getheader ("Content-Length"))
except TypeError:
content_length = None
if check_size:
if content_length != remotesize:
raise DAVError (0, 'file DL size error', remotefile)
return (response, content_length)
def __get_file_read (self, writehandle, response, content_length):
""" __get_file_read docstring """
if content_length != None:
so_far_length = 0
while so_far_length < content_length:
data = response.read (content_length - so_far_length)
if len (data) == 0:
raise DAVError (0, 'short file download')
so_far_length += len (data)
writehandle.write (data)
while len (response.read ()) > 0:
pass
else:
while True:
data = response.read ()
if (len (data) < 1):
break
writehandle.write (data)
def get_file (self, remotefile, localfile, check_size=True):
"""
Get a remote file from a webdav server. Download to a local
tmp file, then move into place. Sanity check file sizes as
we go.
"""
self.log ('get_file %s -> %s' % (remotefile, localfile))
(response, content_length) = \
self.__get_file_setup (remotefile, check_size)
localfile_tmp = self.__tmp_filename (localfile)
handle = open (localfile_tmp, 'w')
self.__get_file_read (handle, response, content_length)
handle.close ()
if check_size:
if content_length != os.path.getsize (localfile_tmp):
raise DAVError (0, 'file DL size error',
remotefile+','+localfile)
os.rename (localfile_tmp, localfile)
def get_file_as_string (self, remotefile, check_size=True):
"""
download a file from a webdav server and return it as a string.
"""
self.log ('get_file_as_string %s' % remotefile)
(response, content_length) = \
self.__get_file_setup (remotefile, check_size)
# (tmp_handle, tmp_filename) = tempfile.mkstemp ()
tmp_handle = os.tmpfile ()
self.__get_file_read (tmp_handle, response, content_length)
tmp_handle.seek (0)
ret = tmp_handle.read ()
tmp_handle.close ()
# os.unlink (tmp_filename)
return ret
def get_post_as_string (self, remotefile, body):
"""
Do an http POST, send body, get response and return it.
"""
self.log ('get_post_as_string %s' % remotefile)
# headers = {'Content-Type':'application/x-www-form-urlencoded'}
headers = {'Content-Type':'text/xml; charset="utf-8"'}
# b64body = urlsafe_b64encode (asset_url)
response = self.request ('POST', remotefile, body, headers, False)
if response.status != 200:
self.__err (response, ('POST', remotefile, body, headers, 0))
try:
content_length = int (response.getheader ('Content-Length'))
except TypeError:
content_length = None
tmp_handle = os.tmpfile ()
self.__get_file_read (tmp_handle, response, content_length)
tmp_handle.seek (0)
ret = tmp_handle.read ()
tmp_handle.close ()
return ret
def __destination_command (self, verb, remotesrc, dstdav, remotedst):
"""
self and dstdav should point to the same http server.
"""
if len (remotedst) > 0 and remotedst[ 0 ] == '/':
remotedst = remotedst[1:]
headers = {'Destination': 'http://%s:%d%s%s' % (dstdav.host,
dstdav.port,
dstdav.top_path,
remotedst)}
response = self.request (verb, remotesrc, None, headers)
if response.status == 201:
return # created
if response.status == 204:
return # no content
self.__err (response, (verb, remotesrc, None, headers, 0))
def rename (self, remotesrc, remotedst):
""" rename a file on a webdav server """
self.log ('rename %s -> %s' % (remotesrc, remotedst))
self.__destination_command ('MOVE', remotesrc, self, remotedst)
def xrename (self, remotesrc, dstdav, remotedst):
""" rename a file on a webdav server """
self.log ('xrename %s -> %s' % (remotesrc, remotedst))
self.__destination_command ('MOVE', remotesrc, dstdav, remotedst)
def copy (self, remotesrc, remotedst):
""" copy a file on a webdav server """
self.log ('copy %s -> %s' % (remotesrc, remotedst))
self.__destination_command ('COPY', remotesrc, self, remotedst)
def xcopy (self, remotesrc, dstdav, remotedst):
""" copy a file on a webdav server """
self.log ('xcopy %s -> %s' % (remotesrc, remotedst))
self.__destination_command ('COPY', remotesrc, dstdav, remotedst)
def put_string (data, url):
"""
upload string s to a url
"""
url_parsed = urlparse.urlsplit (url)
dav = WebDAV ('%s://%s/' % (url_parsed[ 0 ], url_parsed[ 1 ]))
dav.put_string (data, url_parsed[ 2 ])
def get_string (url, check_size=True):
"""
return the contents of a url as a string
"""
url_parsed = urlparse.urlsplit (url)
dav = WebDAV ('%s://%s/' % (url_parsed[ 0 ], url_parsed[ 1 ]))
return dav.get_file_as_string (url_parsed[ 2 ], check_size)
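# Illustrative sketch of the module-level helpers (URL is made up):
#
#     put_string('hello', 'http://dav.example.com/notes/hello.txt')
#     text = get_string('http://dav.example.com/notes/hello.txt')
#
# Each call builds a throwaway WebDAV instance from the URL's scheme and host
# and operates on the path component.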

View File

@ -1,273 +0,0 @@
"""\
@file xml_rpc.py
@brief An implementation of a parser/generator for the XML-RPC xml format.
$LicenseInfo:firstyear=2006&license=mit$
Copyright (c) 2006-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import base64
from greenlet import greenlet
from mulib import mu
from xml.sax import handler
from xml.sax import parseString
# States
class Expected(object):
def __init__(self, tag):
self.tag = tag
def __getattr__(self, name):
return type(self)(name)
def __repr__(self):
return '%s(%r)' % (
type(self).__name__, self.tag)
class START(Expected):
pass
class END(Expected):
pass
class STR(object):
tag = ''
START = START('')
END = END('')
class Malformed(Exception):
pass
class XMLParser(handler.ContentHandler):
def __init__(self, state_machine, next_states):
handler.ContentHandler.__init__(self)
self.state_machine = state_machine
if not isinstance(next_states, tuple):
next_states = (next_states, )
self.next_states = next_states
self._character_buffer = ''
def assertState(self, state, name, *rest):
if not isinstance(self.next_states, tuple):
self.next_states = (self.next_states, )
for next in self.next_states:
if type(state) == type(next):
if next.tag and next.tag != name:
raise Malformed(
"Expected %s, got %s %s %s" % (
next, state, name, rest))
break
else:
raise Malformed(
"Expected %s, got %s %s %s" % (
self.next_states, state, name, rest))
def startElement(self, name, attrs):
self.assertState(START, name.lower(), attrs)
self.next_states = self.state_machine.switch(START, (name.lower(), dict(attrs)))
def endElement(self, name):
if self._character_buffer.strip():
characters = self._character_buffer.strip()
self._character_buffer = ''
self.assertState(STR, characters)
self.next_states = self.state_machine.switch(characters)
self.assertState(END, name.lower())
self.next_states = self.state_machine.switch(END, name.lower())
def error(self, exc):
self.bozo = 1
self.exc = exc
def fatalError(self, exc):
self.error(exc)
raise exc
def characters(self, characters):
self._character_buffer += characters
def parse(what):
child = greenlet(xml_rpc)
me = greenlet.getcurrent()
startup_states = child.switch(me)
parser = XMLParser(child, startup_states)
try:
parseString(what, parser)
except Malformed:
print what
raise
return child.switch()
def xml_rpc(yielder):
yielder.switch(START.methodcall)
yielder.switch(START.methodname)
methodName = yielder.switch(STR)
yielder.switch(END.methodname)
yielder.switch(START.params)
root = None
params = []
while True:
state, _ = yielder.switch(START.param, END.params)
if state == END:
break
yielder.switch(START.value)
params.append(
handle(yielder))
yielder.switch(END.value)
yielder.switch(END.param)
yielder.switch(END.methodcall)
## Resume parse
yielder.switch()
## Return result to parse
return methodName.strip(), params
def handle(yielder):
_, (tag, attrs) = yielder.switch(START)
if tag in ['int', 'i4']:
result = int(yielder.switch(STR))
elif tag == 'boolean':
result = bool(int(yielder.switch(STR)))
elif tag == 'string':
result = yielder.switch(STR)
elif tag == 'double':
result = float(yielder.switch(STR))
elif tag == 'datetime.iso8601':
result = yielder.switch(STR)
elif tag == 'base64':
result = base64.b64decode(yielder.switch(STR))
elif tag == 'struct':
result = {}
while True:
state, _ = yielder.switch(START.member, END.struct)
if state == END:
break
yielder.switch(START.name)
key = yielder.switch(STR)
yielder.switch(END.name)
yielder.switch(START.value)
result[key] = handle(yielder)
yielder.switch(END.value)
yielder.switch(END.member)
## We already handled </struct> above, don't want to handle it below
return result
elif tag == 'array':
result = []
yielder.switch(START.data)
while True:
state, _ = yielder.switch(START.value, END.data)
if state == END:
break
result.append(handle(yielder))
yielder.switch(END.value)
yielder.switch(getattr(END, tag))
return result
VALUE = mu.tag_factory('value')
BOOLEAN = mu.tag_factory('boolean')
INT = mu.tag_factory('int')
STRUCT = mu.tag_factory('struct')
MEMBER = mu.tag_factory('member')
NAME = mu.tag_factory('name')
ARRAY = mu.tag_factory('array')
DATA = mu.tag_factory('data')
STRING = mu.tag_factory('string')
DOUBLE = mu.tag_factory('double')
METHODRESPONSE = mu.tag_factory('methodResponse')
PARAMS = mu.tag_factory('params')
PARAM = mu.tag_factory('param')
mu.inline_elements['string'] = True
mu.inline_elements['boolean'] = True
mu.inline_elements['name'] = True
def _generate(something):
if isinstance(something, dict):
result = STRUCT()
for key, value in something.items():
result[
MEMBER[
NAME[key], _generate(value)]]
return VALUE[result]
elif isinstance(something, list):
result = DATA()
for item in something:
result[_generate(item)]
return VALUE[ARRAY[[result]]]
elif isinstance(something, basestring):
return VALUE[STRING[something]]
elif isinstance(something, bool):
if something:
return VALUE[BOOLEAN['1']]
return VALUE[BOOLEAN['0']]
elif isinstance(something, int):
return VALUE[INT[something]]
elif isinstance(something, float):
return VALUE[DOUBLE[something]]
def generate(*args):
params = PARAMS()
for arg in args:
params[PARAM[_generate(arg)]]
return METHODRESPONSE[params]
if __name__ == '__main__':
print parse("""<?xml version="1.0"?> <methodCall> <methodName>examples.getStateName</methodName> <params> <param> <value><i4>41</i4></value> </param> </params> </methodCall>
""")

View File

@ -1,64 +0,0 @@
"""\
@file fastest_elementtree.py
@brief Concealing some gnarly import logic in here. This should export the interface of elementtree.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
# The parsing exception raised by the underlying library depends
# on the ElementTree implementation we're using, so we provide an
# alias here.
#
# Use ElementTreeError as the exception type for catching parsing
# errors.
# Using cElementTree might cause some unforeseen problems, so here's a
# convenient off switch.
use_celementree = True
try:
if not use_celementree:
raise ImportError()
# Python 2.3 and 2.4.
from cElementTree import *
ElementTreeError = SyntaxError
except ImportError:
try:
if not use_celementree:
raise ImportError()
# Python 2.5 and above.
from xml.etree.cElementTree import *
ElementTreeError = SyntaxError
except ImportError:
# Pure Python code.
try:
# Python 2.3 and 2.4.
from elementtree.ElementTree import *
except ImportError:
# Python 2.5 and above.
from xml.etree.ElementTree import *
# The pure Python ElementTree module uses Expat for parsing.
from xml.parsers.expat import ExpatError as ElementTreeError

View File

@ -1,52 +0,0 @@
"""\
@file helpformatter.py
@author Phoenix
@brief Class for formatting optparse descriptions.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import optparse
import textwrap
class Formatter(optparse.IndentedHelpFormatter):
def __init__(
self,
p_indentIncrement = 2,
p_maxHelpPosition = 24,
p_width = 79,
p_shortFirst = 1) :
optparse.HelpFormatter.__init__(
self,
p_indentIncrement,
p_maxHelpPosition,
p_width,
p_shortFirst)
def format_description(self, p_description):
t_descWidth = self.width - self.current_indent
t_indent = " " * (self.current_indent + 2)
return "\n".join(
[textwrap.fill(descr, t_descWidth, initial_indent = t_indent,
subsequent_indent = t_indent)
for descr in p_description.split("\n")] )

View File

@ -1,63 +0,0 @@
"""\
@file iterators.py
@brief Useful general-purpose iterators.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from __future__ import nested_scopes
def iter_chunks(rows, aggregate_size=100):
"""
Given an iterable set of items (@p rows), produces lists of up to @p
aggregate_size items at a time, for example:
iter_chunks([1,2,3,4,5,6,7,8,9,10], 3)
Values for @p aggregate_size < 1 will raise ValueError.
Will return a generator that produces, in the following order:
- [1, 2, 3]
- [4, 5, 6]
- [7, 8, 9]
- [10]
"""
if aggregate_size < 1:
raise ValueError()
def iter_chunks_inner():
row_iter = iter(rows)
done = False
agg = []
while not done:
try:
row = row_iter.next()
agg.append(row)
except StopIteration:
done = True
if agg and (len(agg) >= aggregate_size or done):
yield agg
agg = []
return iter_chunks_inner()
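# Illustrative usage sketch matching the docstring above:
#
#     for chunk in iter_chunks(range(1, 11), aggregate_size=3):
#         print chunk        # [1, 2, 3], then [4, 5, 6], [7, 8, 9], [10]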

View File

@ -1,72 +0,0 @@
"""\
@file iterators_test.py
@brief Test cases for iterators module.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import unittest
from indra.util.iterators import iter_chunks
class TestIterChunks(unittest.TestCase):
"""Unittests for iter_chunks"""
def test_bad_agg_size(self):
rows = [1,2,3,4]
self.assertRaises(ValueError, iter_chunks, rows, 0)
self.assertRaises(ValueError, iter_chunks, rows, -1)
try:
for i in iter_chunks(rows, 0):
pass
except ValueError:
pass
else:
self.fail()
try:
result = list(iter_chunks(rows, 0))
except ValueError:
pass
else:
self.fail()
def test_empty(self):
rows = []
result = list(iter_chunks(rows))
self.assertEqual(result, [])
def test_small(self):
rows = [[1]]
result = list(iter_chunks(rows, 2))
self.assertEqual(result, [[[1]]])
def test_size(self):
rows = [[1],[2]]
result = list(iter_chunks(rows, 2))
self.assertEqual(result, [[[1],[2]]])
def test_multi_agg(self):
rows = [[1],[2],[3],[4],[5]]
result = list(iter_chunks(rows, 2))
self.assertEqual(result, [[[1],[2]],[[3],[4]],[[5]]])
if __name__ == "__main__":
unittest.main()

View File

@ -1,182 +0,0 @@
#!/usr/bin/env python
"""\
@file llperformance.py
$LicenseInfo:firstyear=2010&license=viewerlgpl$
Second Life Viewer Source Code
Copyright (C) 2010-2011, Linden Research, Inc.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation;
version 2.1 of the License only.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
$/LicenseInfo$
"""
# ------------------------------------------------
# Sim metrics utility functions.
import glob, os, time, sys, stat, exceptions
from indra.base import llsd
gBlockMap = {} #Map of performance metric data with function hierarchy information.
gCurrentStatPath = ""
gIsLoggingEnabled=False
class LLPerfStat:
def __init__(self,key):
self.mTotalTime = 0
self.mNumRuns = 0
self.mName=key
self.mTimeStamp = int(time.time()*1000)
self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def __str__(self):
return "%f" % self.mTotalTime
def start(self):
self.mStartTime = int(time.time() * 1000000)
self.mNumRuns += 1
def stop(self):
execution_time = int(time.time() * 1000000) - self.mStartTime
self.mTotalTime += execution_time
def get_map(self):
results={}
results['name']=self.mName
results['utc_time']=self.mUTCTime
results['timestamp']=self.mTimeStamp
results['us']=self.mTotalTime
results['count']=self.mNumRuns
return results
class PerfError(exceptions.Exception):
def __init__(self):
return
def __str__(self):
return "Unfinished LLPerfBlock"
class LLPerfBlock:
def __init__( self, key ):
global gBlockMap
global gCurrentStatPath
global gIsLoggingEnabled
#Check to see if we're running metrics right now.
if gIsLoggingEnabled:
self.mRunning = True #Mark myself as running.
self.mPreviousStatPath = gCurrentStatPath
gCurrentStatPath += "/" + key
if gCurrentStatPath not in gBlockMap:
gBlockMap[gCurrentStatPath] = LLPerfStat(key)
self.mStat = gBlockMap[gCurrentStatPath]
self.mStat.start()
def finish( self ):
global gBlockMap
global gCurrentStatPath
global gIsLoggingEnabled
if gIsLoggingEnabled:
self.mStat.stop()
self.mRunning = False
gCurrentStatPath = self.mPreviousStatPath
# def __del__( self ):
# if self.mRunning:
# #SPATTERS FIXME
# raise PerfError
class LLPerformance:
#--------------------------------------------------
# Determine whether or not we want to log statistics
def __init__( self, process_name = "python" ):
self.process_name = process_name
self.init_testing()
self.mTimeStamp = int(time.time()*1000)
self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def init_testing( self ):
global gIsLoggingEnabled
host_performance_file = "/dev/shm/simperf/simperf_proc_config.llsd"
#If file exists, open
if os.path.exists(host_performance_file):
file = open (host_performance_file,'r')
#Read serialized LLSD from file.
body = llsd.parse(file.read())
#Calculate time since file last modified.
stats = os.stat(host_performance_file)
now = time.time()
mod = stats[stat.ST_MTIME]
age = now - mod
if age < ( body['duration'] ):
gIsLoggingEnabled = True
def get ( self ):
global gIsLoggingEnabled
return gIsLoggingEnabled
#def output(self,ptr,path):
# if 'stats' in ptr:
# stats = ptr['stats']
# self.mOutputPtr[path] = stats.get_map()
# if 'children' in ptr:
# children=ptr['children']
# curptr = self.mOutputPtr
# curchildren={}
# curptr['children'] = curchildren
# for key in children:
# curchildren[key]={}
# self.mOutputPtr = curchildren[key]
# self.output(children[key],path + '/' + key)
def done(self):
global gBlockMap
if not self.get():
return
output_name = "/dev/shm/simperf/%s_proc.%d.llsd" % (self.process_name, os.getpid())
output_file = open(output_name, 'w')
process_info = {
"name" : self.process_name,
"pid" : os.getpid(),
"ppid" : os.getppid(),
"timestamp" : self.mTimeStamp,
"utc_time" : self.mUTCTime,
}
output_file.write(llsd.format_notation(process_info))
output_file.write('\n')
for key in gBlockMap.keys():
gBlockMap[key] = gBlockMap[key].get_map()
output_file.write(llsd.format_notation(gBlockMap))
output_file.write('\n')
output_file.close()
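# Illustrative usage sketch (process name is arbitrary); logging only happens
# when /dev/shm/simperf/simperf_proc_config.llsd exists and is recent enough:
#
#     perf = LLPerformance('backbone')
#     block = LLPerfBlock('handle_message')
#     ...                      # timed work goes here
#     block.finish()
#     perf.done()              # writes /dev/shm/simperf/backbone_proc.<pid>.llsd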

View File

@ -1,117 +0,0 @@
"""\
@file llsubprocess.py
@author Phoenix
@date 2008-01-18
@brief The simplest possible wrapper for a common sub-process paradigm.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import os
import popen2
import time
import select
class Timeout(RuntimeError):
"Exception raised when a subprocess times out."
pass
def run(command, args=None, data=None, timeout=None):
"""\
@brief Run command with arguments
This is it. This is the function I want to run every time I launch a
subprocess, but I end up copying the code everywhere. None of the
standard helpers is both secure and able to feed input, capture all
the output, and return the result.
@param command A string specifying a process to launch.
@param args Arguments to be passed to command. Must be list, tuple or None.
@param data input to feed to the command.
@param timeout Maximum number of seconds to allow the command to run.
@return Returns (result, stdout, stderr) from process.
"""
cmd = [command]
if args:
cmd.extend([str(arg) for arg in args])
#print "cmd: ","' '".join(cmd)
child = popen2.Popen3(cmd, True)
#print child.pid
out = []
err = []
result = -1
time_left = timeout
tochild = [child.tochild.fileno()]
while True:
time_start = time.time()
#print "time:",time_left
p_in, p_out, p_err = select.select(
[child.fromchild.fileno(), child.childerr.fileno()],
tochild,
[],
time_left)
if p_in:
new_line = os.read(child.fromchild.fileno(), 32 * 1024)
if new_line:
#print "line:",new_line
out.append(new_line)
new_line = os.read(child.childerr.fileno(), 32 * 1024)
if new_line:
#print "error:", new_line
err.append(new_line)
if p_out:
if data:
#print "p_out"
bytes = os.write(child.tochild.fileno(), data)
data = data[bytes:]
if len(data) == 0:
data = None
tochild = []
child.tochild.close()
result = child.poll()
if result != -1:
# At this point, the child process has exited and result
# is the return value from the process. Between the time
# we called select() and poll() the process may have
# exited so read all the data left on the child process
# stdout and stderr.
last = child.fromchild.read()
if last:
out.append(last)
last = child.childerr.read()
if last:
err.append(last)
child.tochild.close()
child.fromchild.close()
child.childerr.close()
break
if time_left is not None:
time_left -= (time.time() - time_start)
if time_left < 0:
raise Timeout
#print "result:",result
out = ''.join(out)
#print "stdout:", out
err = ''.join(err)
#print "stderr:", err
return result, out, err
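# Illustrative usage sketch (command and input are arbitrary):
#
#     result, out, err = run('/bin/cat', data='hello', timeout=5)
#     # result is the child's exit status (0 on success), out == 'hello'
#
# A Timeout exception is raised if the child is still running after `timeout`
# seconds.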

View File

@ -1,592 +0,0 @@
"""\
@file named_query.py
@author Ryan Williams, Phoenix
@date 2007-07-31
@brief An API for running named queries.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import errno
import MySQLdb
import MySQLdb.cursors
import os
import os.path
import re
import time
from indra.base import llsd
from indra.base import config
DEBUG = False
NQ_FILE_SUFFIX = config.get('named-query-file-suffix', '.nq')
NQ_FILE_SUFFIX_LEN = len(NQ_FILE_SUFFIX)
_g_named_manager = None
def _init_g_named_manager(sql_dir = None):
"""Initializes a global NamedManager object to point at a
specified named queries hierarchy.
This function is intended entirely for testing purposes,
because it's tricky to control the config from inside a test."""
global NQ_FILE_SUFFIX
NQ_FILE_SUFFIX = config.get('named-query-file-suffix', '.nq')
global NQ_FILE_SUFFIX_LEN
NQ_FILE_SUFFIX_LEN = len(NQ_FILE_SUFFIX)
if sql_dir is None:
sql_dir = config.get('named-query-base-dir')
# extra fallback directory in case config doesn't return what we want
if sql_dir is None:
sql_dir = os.path.abspath(
os.path.join(
os.path.realpath(os.path.dirname(__file__)), "..", "..", "..", "..", "web", "dataservice", "sql"))
global _g_named_manager
_g_named_manager = NamedQueryManager(
os.path.abspath(os.path.realpath(sql_dir)))
def get(name, schema = None):
"Get the named query object to be used to perform queries"
if _g_named_manager is None:
_init_g_named_manager()
return _g_named_manager.get(name).for_schema(schema)
def sql(connection, name, params):
# use module-global NamedQuery object to perform default substitution
return get(name).sql(connection, params)
def run(connection, name, params, expect_rows = None):
"""\
@brief given a connection, run a named query with the params
Note that this function will fetch ALL rows.
@param connection The connection to use
@param name The name of the query to run
@param params The parameters passed into the query
@param expect_rows The number of rows expected. Set to 1 if return_as_map is true. Raises ExpectationFailed if the number of returned rows doesn't exactly match. Kind of a hack.
@return Returns the result set as a list of dicts.
"""
return get(name).run(connection, params, expect_rows)
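# Illustrative usage sketch (query name, parameters, and column are
# hypothetical; the .nq file must exist under the configured
# named-query-base-dir):
#
#     rows = run(connection, 'agent/last_login', {'agent_id': agent_id})
#     for row in rows:
#         print row['last_login']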
class ExpectationFailed(Exception):
""" Exception that is raised when an expectation for an sql query
is not met."""
def __init__(self, message):
Exception.__init__(self, message)
self.message = message
class NamedQuery(object):
def __init__(self, name, filename):
""" Construct a NamedQuery object. The name argument is an
arbitrary name as a handle for the query, and the filename is
a path to a file or a file-like object containing an llsd named
query document."""
self._stat_interval_seconds = 5 # 5 seconds
self._name = name
if (filename is not None and isinstance(filename, (str, unicode))
and NQ_FILE_SUFFIX != filename[-NQ_FILE_SUFFIX_LEN:]):
filename = filename + NQ_FILE_SUFFIX
self._location = filename
self._alternative = dict()
self._last_mod_time = 0
self._last_check_time = 0
self.deleted = False
self.load_contents()
def name(self):
""" The name of the query. """
return self._name
def get_modtime(self):
""" Returns the mtime (last modified time) of the named query
filename. For file-like objects, expect a modtime of 0"""
if self._location and isinstance(self._location, (str, unicode)):
return os.path.getmtime(self._location)
return 0
def load_contents(self):
""" Loads and parses the named query file into self. Does
nothing if self._location is nonexistent."""
if self._location:
if isinstance(self._location, (str, unicode)):
contents = llsd.parse(open(self._location).read())
else:
# we probably have a file-like object. Godspeed!
contents = llsd.parse(self._location.read())
self._reference_contents(contents)
# Check for alternative implementations
try:
for name, alt in self._contents['alternative'].items():
nq = NamedQuery(name, None)
nq._reference_contents(alt)
self._alternative[name] = nq
except KeyError, e:
pass
self._last_mod_time = self.get_modtime()
self._last_check_time = time.time()
def _reference_contents(self, contents):
"Helper method which builds internal structure from parsed contents"
self._contents = contents
self._ttl = int(self._contents.get('ttl', 0))
self._return_as_map = bool(self._contents.get('return_as_map', False))
self._legacy_dbname = self._contents.get('legacy_dbname', None)
# reset these before doing the sql conversion because we will
# read them there. reset these while loading so we pick up
# changes.
self._around = set()
self._append = set()
self._integer = set()
self._options = self._contents.get('dynamic_where', {})
for key in self._options:
if isinstance(self._options[key], basestring):
self._options[key] = self._convert_sql(self._options[key])
elif isinstance(self._options[key], list):
lines = []
for line in self._options[key]:
lines.append(self._convert_sql(line))
self._options[key] = lines
else:
moreopt = {}
for kk in self._options[key]:
moreopt[kk] = self._convert_sql(self._options[key][kk])
self._options[key] = moreopt
self._base_query = self._convert_sql(self._contents['base_query'])
self._query_suffix = self._convert_sql(
self._contents.get('query_suffix', ''))
def _convert_sql(self, sql):
"""convert the parsed sql into a useful internal structure.
This function has to turn the named query format into a pyformat
style. It also has to look for %:name% and :name% and
ready them for use in LIKE statements"""
if sql:
# This first sub is to properly escape any % signs that
# are meant to be literally passed through to mysql in the
# query. It leaves any %'s that are used for
# like-expressions.
expr = re.compile("(?<=[^a-zA-Z0-9_-])%(?=[^:])")
sql = expr.sub('%%', sql)
# This should tackle the rest of the %'s in the query, by
# converting them to LIKE clauses.
expr = re.compile("(%?):([a-zA-Z][a-zA-Z0-9_-]*)%")
sql = expr.sub(self._prepare_like, sql)
expr = re.compile("#:([a-zA-Z][a-zA-Z0-9_-]*)")
sql = expr.sub(self._prepare_integer, sql)
expr = re.compile(":([a-zA-Z][a-zA-Z0-9_-]*)")
sql = expr.sub("%(\\1)s", sql)
return sql
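# Illustrative note (not in the original source): given the named query text
#   "WHERE name LIKE :term% AND owner LIKE %:owner% AND id = #:id AND type = :type"
# _convert_sql produces the pyformat string
#   "WHERE name LIKE %(_term_append)s AND owner LIKE %(_owner_around)s AND id = %(_id_as_integer)s AND type = %(type)s"
# while recording 'term' in self._append, 'owner' in self._around and 'id' in
# self._integer so that _construct_sql can expand those parameters later.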
def _prepare_like(self, match):
"""This function changes LIKE statement replace behavior
It works by turning %:name% to %(_name_around)s and :name% to
%(_name_append)s. Since a leading '_' is not a valid keyname
input (enforced via unit tests), it will never clash with
existing keys. Then, when building the statement, the query
runner will generate corrected strings."""
if match.group(1) == '%':
# there is a leading % so this is treated as prefix/suffix
self._around.add(match.group(2))
return "%(" + self._build_around_key(match.group(2)) + ")s"
else:
# there is no leading %, so this is suffix only
self._append.add(match.group(2))
return "%(" + self._build_append_key(match.group(2)) + ")s"
def _build_around_key(self, key):
return "_" + key + "_around"
def _build_append_key(self, key):
return "_" + key + "_append"
def _prepare_integer(self, match):
"""This function adjusts the sql for #:name replacements
It works by turning #:name to %(_name_as_integer)s. Since a
leading '_' is not a valid keyname input (enforced via unit
tests), it will never clash with existing keys. Then, when
building the statement, the query runner will generate
corrected strings."""
self._integer.add(match.group(1))
return "%(" + self._build_integer_key(match.group(1)) + ")s"
def _build_integer_key(self, key):
return "_" + key + "_as_integer"
def _strip_wildcards_to_list(self, value):
"""Take string, and strip out the LIKE special characters.
Technically, this is database dependent, but postgresql and
mysql use the same wildcards, and I am not aware of a general
way to handle this. I think you need a sql statement of the
form:
LIKE_STRING( [ANY,ONE,str]... )
which would treat ANY as the database's any-sequence wildcard,
ONE as its single-character wildcard, and str as a literal that
needs database-specific encoding so that no % or _ can affect the query.
As it stands, I believe it's impossible to write a named query
style interface which uses like to search the entire space of
text available. Imagine the query:
% of brain used by average linden
In order to search for %, it must be escaped, so once you have
escaped the string to not do wildcard searches, and be escaped
for the database, and then prepended the wildcard you come
back with one of:
1) %\% of brain used by average linden
2) %%% of brain used by average linden
Then, when passed to the database to be escaped to be database
safe, you get back:
1) %\\% of brain used by average linden
: which means search for any character sequence, followed by a
backslash, followed by any sequence, followed by ' of
brain...'
2) %%% of brain used by average linden
: which (I believe) means search for a % followed by any
character sequence followed by 'of brain...'
Neither of which is what we want!
So, we need a vendor (or extension) for LIKE_STRING. Anyone
want to write it?"""
if isinstance(value, unicode):
utf8_value = value
else:
utf8_value = unicode(value, "utf-8")
esc_list = []
remove_chars = set(u"%_")
for glyph in utf8_value:
if glyph in remove_chars:
continue
esc_list.append(glyph.encode("utf-8"))
return esc_list
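# Example (illustrative, not in the original source): for the input
# u"50% off_sale" this returns ['5', '0', ' ', 'o', 'f', 'f', 's', 'a', 'l', 'e'];
# the '%' and '_' wildcards are dropped, and ''.join(...) in _construct_sql
# later rebuilds the literal "50 offsale" before the surrounding % is added.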
def delete(self):
""" Makes this query unusable by deleting all the members and
setting the deleted member. This is desired when the on-disk
query has been deleted but the in-memory copy remains."""
# blow away all members except _name, _location, and deleted
name, location = self._name, self._location
for key in self.__dict__.keys():
del self.__dict__[key]
self.deleted = True
self._name, self._location = name, location
def ttl(self):
""" Estimated time to live of this query. Used for web
services to set the Expires header."""
return self._ttl
def legacy_dbname(self):
return self._legacy_dbname
def return_as_map(self):
""" Returns true if this query is configured to return its
results as a single map (as opposed to a list of maps, the
normal behavior)."""
return self._return_as_map
def for_schema(self, db_name):
"Look trough the alternates and return the correct query"
if db_name is None:
return self
try:
return self._alternative[db_name]
except KeyError, e:
pass
return self
def run(self, connection, params, expect_rows = None, use_dictcursor = True):
"""given a connection, run a named query with the params
Note that this function will fetch ALL rows. We do this because it
opens and closes the cursor to generate the values, and this
isn't a generator so the cursor has no life beyond the method call.
@param connection The connection to use (this generates its own cursor for the query)
@param params The parameters passed into the query
@param expect_rows The number of rows expected. Set to 1 if return_as_map is true. Raises ExpectationFailed if the number of returned rows doesn't exactly match. Kind of a hack.
@param use_dictcursor Set to false to use a normal cursor and manually convert the rows to dicts.
@return Returns the result set as a list of dicts, or, if the named query has return_as_map set to true, returns a single dict.
"""
if use_dictcursor:
cursor = connection.cursor(MySQLdb.cursors.DictCursor)
else:
cursor = connection.cursor()
full_query, params = self._construct_sql(params)
if DEBUG:
print "SQL:", self.sql(connection, params)
rows = cursor.execute(full_query, params)
# *NOTE: the expect_rows argument is a very cheesy way to get some
# validation on the result set. If you want to add more expectation
# logic, do something more object-oriented and flexible. Or use an ORM.
if(self._return_as_map):
expect_rows = 1
if expect_rows is not None and rows != expect_rows:
cursor.close()
raise ExpectationFailed("Statement expected %s rows, got %s. Sql: '%s' %s" % (
expect_rows, rows, full_query, params))
# convert to dicts manually if we're not using a dictcursor
if use_dictcursor:
result_set = cursor.fetchall()
else:
if cursor.description is None:
# an insert or something
x = cursor.fetchall()
cursor.close()
return x
names = [x[0] for x in cursor.description]
result_set = []
for row in cursor.fetchall():
converted_row = {}
for idx, col_name in enumerate(names):
converted_row[col_name] = row[idx]
result_set.append(converted_row)
cursor.close()
if self._return_as_map:
return result_set[0]
return result_set
def _construct_sql(self, params):
""" Returns a query string and a dictionary of parameters,
suitable for directly passing to the execute() method."""
self.refresh()
# build the query from the options available and the params
base_query = []
base_query.append(self._base_query)
for opt, extra_where in self._options.items():
if type(extra_where) in (dict, list, tuple):
if opt in params:
base_query.append(extra_where[params[opt]])
else:
if opt in params and params[opt]:
base_query.append(extra_where)
if self._query_suffix:
base_query.append(self._query_suffix)
full_query = '\n'.join(base_query)
# Go through the query and rewrite all of the ones with the
# @:name syntax.
rewrite = _RewriteQueryForArray(params)
expr = re.compile("@%\(([a-zA-Z][a-zA-Z0-9_-]*)\)s")
full_query = expr.sub(rewrite.operate, full_query)
params.update(rewrite.new_params)
# build out the params for LIKE and integer substitution. We only
# have to do this for parameters which were detected to have used
# the wildcard or #: syntax during load.
#
# * treat the incoming string as utf-8
# * strip wildcards
# * append or prepend % as appropriate
new_params = {}
for key in params:
if key in self._around:
new_value = ['%']
new_value.extend(self._strip_wildcards_to_list(params[key]))
new_value.append('%')
new_params[self._build_around_key(key)] = ''.join(new_value)
if key in self._append:
new_value = self._strip_wildcards_to_list(params[key])
new_value.append('%')
new_params[self._build_append_key(key)] = ''.join(new_value)
if key in self._integer:
new_params[self._build_integer_key(key)] = int(params[key])
params.update(new_params)
return full_query, params
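# Hedged illustration (not part of the original file), with hypothetical keys:
# a named query whose document contains
#   base_query:    "SELECT * FROM inventory WHERE owner = :owner"
#   dynamic_where: { 'folder': "AND parent = :folder" }
# has both strings converted to pyformat at load time; _construct_sql({'owner':
# 'x', 'folder': 'y'}) then appends the folder clause because the 'folder'
# param is present and truthy, returning
#   "SELECT * FROM inventory WHERE owner = %(owner)s\nAND parent = %(folder)s"
# together with the params dict, ready for cursor.execute().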
def sql(self, connection, params):
""" Generates an SQL statement from the named query document
and a dictionary of parameters.
*NOTE: Only use for debugging, because it uses the
non-standard MySQLdb 'literal' method.
"""
if not DEBUG:
import warnings
warnings.warn("Don't use named_query.sql() when not debugging. Used on %s" % self._location)
# do substitution using the mysql (non-standard) 'literal'
# function to do the escaping.
full_query, params = self._construct_sql(params)
return full_query % connection.literal(params)
def refresh(self):
""" Refresh self from the file on the filesystem.
This is optimized to be callable as frequently as you wish,
without adding too much load. It does so by only stat-ing the
file every N seconds, where N defaults to 5 and is
configurable through the member _stat_interval_seconds. If the stat
reveals that the file has changed, refresh will re-parse the
contents of the file and use them to update the named query
instance. If the stat reveals that the file has been deleted,
refresh will call self.delete to make the in-memory
representation unusable."""
now = time.time()
if(now - self._last_check_time > self._stat_interval_seconds):
self._last_check_time = now
try:
modtime = self.get_modtime()
if(modtime > self._last_mod_time):
self.load_contents()
except OSError, e:
if e.errno == errno.ENOENT: # file not found
self.delete() # clean up self
raise # pass the exception along to the caller so they know that this query disappeared
class NamedQueryManager(object):
""" Manages the lifespan of NamedQuery objects, drawing from a
directory hierarchy of named query documents.
In practice this amounts to a memory cache of NamedQuery objects."""
def __init__(self, named_queries_dir):
""" Initializes a manager to look for named queries in a
directory."""
self._dir = os.path.abspath(os.path.realpath(named_queries_dir))
self._cached_queries = {}
def sql(self, connection, name, params):
nq = self.get(name)
return nq.sql(connection, params)
def get(self, name):
""" Returns a NamedQuery instance based on the name, either
from memory cache, or by parsing from disk.
The name is simply a relative path to the directory associated
with the manager object. Before returning the instance, the
NamedQuery object is cached in memory, so that subsequent
accesses don't have to read from disk or do any parsing. This
means that NamedQuery objects returned by this method are
shared across all users of the manager object.
NamedQuery.refresh is used to bring the NamedQuery objects in
sync with the actual files on disk."""
nq = self._cached_queries.get(name)
if nq is None:
nq = NamedQuery(name, os.path.join(self._dir, name))
self._cached_queries[name] = nq
else:
try:
nq.refresh()
except OSError, e:
if e.errno == errno.ENOENT: # file not found
del self._cached_queries[name]
raise # pass exception along to caller so they know that the query disappeared
return nq
class _RewriteQueryForArray(object):
"Helper class for rewriting queries with the @:name syntax"
def __init__(self, params):
self.params = params
self.new_params = dict()
def operate(self, match):
"Given a match, return the string that should be in use"
key = match.group(1)
value = self.params[key]
if type(value) in (list,tuple):
rv = []
for idx in range(len(value)):
# if the value@idx is array-like, we are
# probably dealing with a VALUES
new_key = "_%s_%s"%(key, str(idx))
val_item = value[idx]
if type(val_item) in (list, tuple, dict):
if type(val_item) is dict:
# this is because in Python, the order of
# key, value retrieval from the dict is not
# guaranteed to match what the input intended
# and for VALUES, order is important.
# TODO: Implement ordered dict in LLSD parser?
raise ExpectationFailed('Only lists/tuples allowed,\
received dict')
values_keys = []
for value_idx, item in enumerate(val_item):
# we want a key of the format :
# key_#replacement_#value_row_#value_col
# ugh... so if we are replacing 10 rows in user_note,
# the first values clause would read (for @:user_notes) :-
# ( :_user_notes_0_1_1, :_user_notes_0_1_2, :_user_notes_0_1_3 )
# the input LLSD for VALUES will look like:
# <llsd>...
# <map>
# <key>user_notes</key>
# <array>
# <array> <!-- row 1 for VALUES -->
# <string>...</string>
# <string>...</string>
# <string>...</string>
# </array>
# ...
# </array>
# </map>
# ... </llsd>
values_key = "%s_%s"%(new_key, value_idx)
self.new_params[values_key] = item
values_keys.append("%%(%s)s"%values_key)
# now collapse all these new place holders enclosed in ()
# from [':_key_0_1_1', ':_key_0_1_2', ':_key_0_1_3,...]
# rv will have [ '(:_key_0_1_1, :_key_0_1_2, :_key_0_1_3)', ]
# which is flattened a few lines below join(rv)
rv.append('(%s)' % ','.join(values_keys))
else:
self.new_params[new_key] = val_item
rv.append("%%(%s)s"%new_key)
return ','.join(rv)
else:
# not something that can be expanded, so just drop the
# leading @ in the front of the match. This will mean that
# the single value we have, be it a string, int, whatever
# (other than dict) will correctly show up, eg:
#
# where foo in (@:foobar) -- foobar is a string, so we get
# where foo in (:foobar)
return match.group(0)[1:]
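# Illustrative example (not in the original source): for the fragment
#   "WHERE agent_id IN (@:ids)"
# _convert_sql first yields "WHERE agent_id IN (@%(ids)s)"; _construct_sql then
# applies _RewriteQueryForArray with params {'ids': [1, 2, 3]}, producing
#   "WHERE agent_id IN (%(_ids_0)s,%(_ids_1)s,%(_ids_2)s)"
# and adding {'_ids_0': 1, '_ids_1': 2, '_ids_2': 3} to the params.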

View File

@@ -1,84 +0,0 @@
'''
@file shutil2.py
@brief a better shutil.copytree replacement
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
'''
#
# shutil2.py
# Taken from http://www.scons.org/wiki/AccumulateBuilder
# the stock copytree sucks because it insists that the
# target dir not exist
#
import os.path
import shutil
def copytree(src, dest, symlinks=False):
"""My own copyTree which does not fail if the directory exists.
Recursively copy a directory tree using copy2().
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied.
Behavior is meant to be identical to GNU 'cp -R'.
"""
def copyItems(src, dest, symlinks=False):
"""Function that does all the work.
It is necessary to handle the two 'cp' cases:
- destination does exist
- destination does not exist
See 'cp -R' documentation for more details
"""
for item in os.listdir(src):
srcPath = os.path.join(src, item)
if os.path.isdir(srcPath):
srcBasename = os.path.basename(srcPath)
destDirPath = os.path.join(dest, srcBasename)
if not os.path.exists(destDirPath):
os.makedirs(destDirPath)
copyItems(srcPath, destDirPath)
elif os.path.islink(srcPath) and symlinks:
# recreate the link inside the destination rather than on top of it
linkto = os.readlink(srcPath)
os.symlink(linkto, os.path.join(dest, item))
else:
shutil.copy2(srcPath, dest)
# case 'cp -R src/ dest/' where dest/ already exists
if os.path.exists(dest):
destPath = os.path.join(dest, os.path.basename(src))
if not os.path.exists(destPath):
os.makedirs(destPath)
# case 'cp -R src/ dest/' where dest/ does not exist
else:
os.makedirs(dest)
destPath = dest
# actually copy the files
copyItems(src, destPath)
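# Hedged usage sketch (not part of the original module); the paths are
# hypothetical. Unlike the stock shutil.copytree, the destination directory
# may already exist.
def _example_copytree():
    copytree('/tmp/staging/skins', '/opt/viewer/skins', symlinks=False)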

View File

@@ -1,338 +0,0 @@
#!/usr/bin/env python
"""\
@file simperf_host_xml_parser.py
@brief Digest collector's XML dump and convert to simple dict/list structure
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys, os, getopt, time
import simplejson
from xml import sax
def usage():
print "Usage:"
print sys.argv[0] + " [options]"
print " Convert RRD's XML dump to JSON. Script to convert the simperf_host_collector-"
print " generated RRD dump into JSON. Steps include converting selected named"
print " fields from GAUGE type to COUNTER type by computing delta with preceding"
print " values. Top-level named fields are:"
print
print " lastupdate Time (javascript timestamp) of last data sample"
print " step Time in seconds between samples"
print " ds Data specification (name/type) for each column"
print " database Table of data samples, one time step per row"
print
print "Options:"
print " -i, --in Input settings filename. (Default: stdin)"
print " -o, --out Output settings filename. (Default: stdout)"
print " -h, --help Print this message and exit."
print
print "Example: %s -i rrddump.xml -o rrddump.json" % sys.argv[0]
print
print "Interfaces:"
print " class SimPerfHostXMLParser() # SAX content handler"
print " def simperf_host_xml_fixup(parser) # post-parse value fixup"
class SimPerfHostXMLParser(sax.handler.ContentHandler):
def __init__(self):
pass
def startDocument(self):
self.rrd_last_update = 0 # public
self.rrd_step = 0 # public
self.rrd_ds = [] # public
self.rrd_records = [] # public
self._rrd_level = 0
self._rrd_parse_state = 0
self._rrd_chars = ""
self._rrd_capture = False
self._rrd_ds_val = {}
self._rrd_data_row = []
self._rrd_data_row_has_nan = False
def endDocument(self):
pass
# Nasty little ad-hoc state machine to extract the elements that are
# necessary from the 'rrdtool dump' XML output. The same element
# name '<ds>' is used for two different data sets so we need to pay
# some attention to the actual structure to get the ones we want
# and ignore the ones we don't.
def startElement(self, name, attrs):
self._rrd_level = self._rrd_level + 1
self._rrd_capture = False
if self._rrd_level == 1:
if name == "rrd" and self._rrd_parse_state == 0:
self._rrd_parse_state = 1 # In <rrd>
self._rrd_capture = True
self._rrd_chars = ""
elif self._rrd_level == 2:
if self._rrd_parse_state == 1:
if name == "lastupdate":
self._rrd_parse_state = 2 # In <rrd><lastupdate>
self._rrd_capture = True
self._rrd_chars = ""
elif name == "step":
self._rrd_parse_state = 3 # In <rrd><step>
self._rrd_capture = True
self._rrd_chars = ""
elif name == "ds":
self._rrd_parse_state = 4 # In <rrd><ds>
self._rrd_ds_val = {}
self._rrd_chars = ""
elif name == "rra":
self._rrd_parse_state = 5 # In <rrd><rra>
elif self._rrd_level == 3:
if self._rrd_parse_state == 4:
if name == "name":
self._rrd_parse_state = 6 # In <rrd><ds><name>
self._rrd_capture = True
self._rrd_chars = ""
elif name == "type":
self._rrd_parse_state = 7 # In <rrd><ds><type>
self._rrd_capture = True
self._rrd_chars = ""
elif self._rrd_parse_state == 5:
if name == "database":
self._rrd_parse_state = 8 # In <rrd><rra><database>
elif self._rrd_level == 4:
if self._rrd_parse_state == 8:
if name == "row":
self._rrd_parse_state = 9 # In <rrd><rra><database><row>
self._rrd_data_row = []
self._rrd_data_row_has_nan = False
elif self._rrd_level == 5:
if self._rrd_parse_state == 9:
if name == "v":
self._rrd_parse_state = 10 # In <rrd><rra><database><row><v>
self._rrd_capture = True
self._rrd_chars = ""
def endElement(self, name):
self._rrd_capture = False
if self._rrd_parse_state == 10:
self._rrd_capture = self._rrd_level == 6
if self._rrd_level == 5:
if self._rrd_chars == "NaN":
self._rrd_data_row_has_nan = True
else:
self._rrd_data_row.append(self._rrd_chars)
self._rrd_parse_state = 9 # In <rrd><rra><database><row>
elif self._rrd_parse_state == 9:
if self._rrd_level == 4:
if not self._rrd_data_row_has_nan:
self.rrd_records.append(self._rrd_data_row)
self._rrd_parse_state = 8 # In <rrd><rra><database>
elif self._rrd_parse_state == 8:
if self._rrd_level == 3:
self._rrd_parse_state = 5 # In <rrd><rra>
elif self._rrd_parse_state == 7:
if self._rrd_level == 3:
self._rrd_ds_val["type"] = self._rrd_chars
self._rrd_parse_state = 4 # In <rrd><ds>
elif self._rrd_parse_state == 6:
if self._rrd_level == 3:
self._rrd_ds_val["name"] = self._rrd_chars
self._rrd_parse_state = 4 # In <rrd><ds>
elif self._rrd_parse_state == 5:
if self._rrd_level == 2:
self._rrd_parse_state = 1 # In <rrd>
elif self._rrd_parse_state == 4:
if self._rrd_level == 2:
self.rrd_ds.append(self._rrd_ds_val)
self._rrd_parse_state = 1 # In <rrd>
elif self._rrd_parse_state == 3:
if self._rrd_level == 2:
self.rrd_step = long(self._rrd_chars)
self._rrd_parse_state = 1 # In <rrd>
elif self._rrd_parse_state == 2:
if self._rrd_level == 2:
self.rrd_last_update = long(self._rrd_chars)
self._rrd_parse_state = 1 # In <rrd>
elif self._rrd_parse_state == 1:
if self._rrd_level == 1:
self._rrd_parse_state = 0 # At top
if self._rrd_level:
self._rrd_level = self._rrd_level - 1
def characters(self, content):
if self._rrd_capture:
self._rrd_chars = self._rrd_chars + content.strip()
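# Illustrative result shape (not in the original source): after sax.parse()
# finishes, the handler exposes roughly
#   rrd_last_update -> 1200000000                   (hypothetical epoch seconds)
#   rrd_step        -> 300
#   rrd_ds          -> [{'name': 'cpu_user', 'type': 'GAUGE'}, ...]
#   rrd_records     -> [['1.23', '0.04', ...], ...]  (rows containing NaN dropped)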
def _make_numeric(value):
try:
value = float(value)
except:
value = ""
return value
def simperf_host_xml_fixup(parser, filter_start_time = None, filter_end_time = None):
# Fixup for GAUGE fields that are really COUNTS. They
# were forced to GAUGE to try to disable rrdtool's
# data interpolation/extrapolation for non-uniform time
# samples.
fixup_tags = [ "cpu_user",
"cpu_nice",
"cpu_sys",
"cpu_idle",
"cpu_waitio",
"cpu_intr",
# "file_active",
# "file_free",
# "inode_active",
# "inode_free",
"netif_in_kb",
"netif_in_pkts",
"netif_in_errs",
"netif_in_drop",
"netif_out_kb",
"netif_out_pkts",
"netif_out_errs",
"netif_out_drop",
"vm_page_in",
"vm_page_out",
"vm_swap_in",
"vm_swap_out",
#"vm_mem_total",
#"vm_mem_used",
#"vm_mem_active",
#"vm_mem_inactive",
#"vm_mem_free",
#"vm_mem_buffer",
#"vm_swap_cache",
#"vm_swap_total",
#"vm_swap_used",
#"vm_swap_free",
"cpu_interrupts",
"cpu_switches",
"cpu_forks" ]
col_count = len(parser.rrd_ds)
row_count = len(parser.rrd_records)
# Process the last row separately, just to make all values numeric.
for j in range(col_count):
parser.rrd_records[row_count - 1][j] = _make_numeric(parser.rrd_records[row_count - 1][j])
# Process all other row/columns.
last_different_row = row_count - 1
current_row = row_count - 2
while current_row >= 0:
# Check for a different value than the previous row. If everything is the same
# then this is probably just a filler/bogus entry.
is_different = False
for j in range(col_count):
parser.rrd_records[current_row][j] = _make_numeric(parser.rrd_records[current_row][j])
if parser.rrd_records[current_row][j] != parser.rrd_records[last_different_row][j]:
# We're good. This is a different row.
is_different = True
if not is_different:
# This is a filler/bogus entry. Just ignore it.
for j in range(col_count):
parser.rrd_records[current_row][j] = float('nan')
else:
# Some tags need to be converted into deltas.
for j in range(col_count):
if parser.rrd_ds[j]["name"] in fixup_tags:
parser.rrd_records[last_different_row][j] = \
parser.rrd_records[last_different_row][j] - parser.rrd_records[current_row][j]
last_different_row = current_row
current_row -= 1
# Set fixup_tags in the first row to 'nan' since they aren't useful anymore.
for j in range(col_count):
if parser.rrd_ds[j]["name"] in fixup_tags:
parser.rrd_records[0][j] = float('nan')
# Add a timestamp to each row and to the catalog. Format and name
# chosen to match other simulator logging (hopefully).
start_time = parser.rrd_last_update - (parser.rrd_step * (row_count - 1))
# Build a filtered list of rrd_records if we are limited to a time range.
filter_records = False
if filter_start_time is not None or filter_end_time is not None:
filter_records = True
filtered_rrd_records = []
if filter_start_time is None:
filter_start_time = start_time * 1000
if filter_end_time is None:
filter_end_time = parser.rrd_last_update * 1000
for i in range(row_count):
record_timestamp = (start_time + (i * parser.rrd_step)) * 1000
parser.rrd_records[i].insert(0, record_timestamp)
if filter_records:
if filter_start_time <= record_timestamp and record_timestamp <= filter_end_time:
filtered_rrd_records.append(parser.rrd_records[i])
if filter_records:
parser.rrd_records = filtered_rrd_records
parser.rrd_ds.insert(0, {"type": "GAUGE", "name": "javascript_timestamp"})
def main(argv=None):
opts, args = getopt.getopt(sys.argv[1:], "i:o:h", ["in=", "out=", "help"])
input_file = sys.stdin
output_file = sys.stdout
for o, a in opts:
if o in ("-i", "--in"):
input_file = open(a, 'r')
if o in ("-o", "--out"):
output_file = open(a, 'w')
if o in ("-h", "--help"):
usage()
sys.exit(0)
# Using the SAX parser as it is at least 4X faster and far, far
# smaller on this dataset than the DOM-based interface in xml.dom.minidom.
# With SAX and a 5.4MB xml file, this requires about seven seconds of
# wall-clock time and 32MB VSZ. With the DOM interface, about 22 seconds
# and over 270MB VSZ.
handler = SimPerfHostXMLParser()
sax.parse(input_file, handler)
if input_file != sys.stdin:
input_file.close()
# Various format fixups: string-to-num, gauge-to-counts, add
# a time stamp, etc.
simperf_host_xml_fixup(handler)
# Create JSONable dict with interesting data and format/print it
print >>output_file, simplejson.dumps({ "step" : handler.rrd_step,
"lastupdate": handler.rrd_last_update * 1000,
"ds" : handler.rrd_ds,
"database" : handler.rrd_records })
return 0
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,167 +0,0 @@
#!/usr/bin/env python
"""\
@file simperf_oprof_interface.py
@brief Manage OProfile data collection on a host
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys, os, getopt
import simplejson
def usage():
print "Usage:"
print sys.argv[0] + " [options]"
print " Digest the OProfile report forms that come out of the"
print " simperf_oprof_ctl program's -r/--report command. The result"
print " is an array of dictionaires with the following keys:"
print
print " symbol Name of sampled, calling, or called procedure"
print " file Executable or library where symbol resides"
print " percentage Percentage contribution to profile, calls or called"
print " samples Sample count"
print " calls Methods called by the method in question (full only)"
print " called_by Methods calling the method (full only)"
print
print " For 'full' reports the two keys 'calls' and 'called_by' are"
print " themselves arrays of dictionaries based on the first four keys."
print
print "Return Codes:"
print " None. Aggressively digests everything. Will likely mung results"
print " if a program or library has whitespace in its name."
print
print "Options:"
print " -i, --in Input settings filename. (Default: stdin)"
print " -o, --out Output settings filename. (Default: stdout)"
print " -h, --help Print this message and exit."
print
print "Interfaces:"
print " class SimPerfOProfileInterface()"
class SimPerfOProfileInterface:
def __init__(self):
self.isBrief = True # public
self.isValid = False # public
self.result = [] # public
def parse(self, input):
in_samples = False
for line in input:
if in_samples:
if line[0:6] == "------":
self.isBrief = False
self._parseFull(input)
else:
self._parseBrief(input, line)
self.isValid = True
return
try:
hd1, remain = line.split(None, 1)
if hd1 == "samples":
in_samples = True
except ValueError:
pass
def _parseBrief(self, input, line1):
try:
fld1, fld2, fld3, fld4 = line1.split(None, 3)
self.result.append({"samples" : fld1,
"percentage" : fld2,
"file" : fld3,
"symbol" : fld4.strip("\n")})
except ValueError:
pass
for line in input:
try:
fld1, fld2, fld3, fld4 = line.split(None, 3)
self.result.append({"samples" : fld1,
"percentage" : fld2,
"file" : fld3,
"symbol" : fld4.strip("\n")})
except ValueError:
pass
def _parseFull(self, input):
state = 0 # In 'called_by' section
calls = []
called_by = []
current = {}
for line in input:
if line[0:6] == "------":
if len(current):
current["calls"] = calls
current["called_by"] = called_by
self.result.append(current)
state = 0
calls = []
called_by = []
current = {}
else:
try:
fld1, fld2, fld3, fld4 = line.split(None, 3)
tmp = {"samples" : fld1,
"percentage" : fld2,
"file" : fld3,
"symbol" : fld4.strip("\n")}
except ValueError:
continue
if line[0] != " ":
current = tmp
state = 1 # In 'calls' section
elif state == 0:
called_by.append(tmp)
else:
calls.append(tmp)
if len(current):
current["calls"] = calls
current["called_by"] = called_by
self.result.append(current)
def main(argv=None):
opts, args = getopt.getopt(sys.argv[1:], "i:o:h", ["in=", "out=", "help"])
input_file = sys.stdin
output_file = sys.stdout
for o, a in opts:
if o in ("-i", "--in"):
input_file = open(a, 'r')
if o in ("-o", "--out"):
output_file = open(a, 'w')
if o in ("-h", "--help"):
usage()
sys.exit(0)
oprof = SimPerfOProfileInterface()
oprof.parse(input_file)
if input_file != sys.stdin:
input_file.close()
# Create JSONable dict with interesting data and format/print it
print >>output_file, simplejson.dumps(oprof.result)
return 0
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,191 +0,0 @@
#!/usr/bin/env python
"""\
@file simperf_proc_interface.py
@brief Utility to extract log messages from *.<pid>.llsd files containing performance statistics.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
# ----------------------------------------------------
# Utility to extract log messages from *.<pid>.llsd
# files that contain performance statistics.
# ----------------------------------------------------
import sys, os
if os.path.exists("setup-path.py"):
execfile("setup-path.py")
from indra.base import llsd
DEFAULT_PATH="/dev/shm/simperf/"
# ----------------------------------------------------
# Pull out the stats and return a single document
def parse_logfile(filename, target_column=None, verbose=False):
full_doc = []
# Open source temp log file. Let exceptions percolate up.
sourcefile = open( filename,'r')
if verbose:
print "Reading " + filename
# Parse and output all lines from the temp file
for line in sourcefile.xreadlines():
partial_doc = llsd.parse(line)
if partial_doc is not None:
if target_column is None:
full_doc.append(partial_doc)
else:
trim_doc = { target_column: partial_doc[target_column] }
if target_column != "fps":
trim_doc[ 'fps' ] = partial_doc[ 'fps' ]
trim_doc[ '/total_time' ] = partial_doc[ '/total_time' ]
trim_doc[ 'utc_time' ] = partial_doc[ 'utc_time' ]
full_doc.append(trim_doc)
sourcefile.close()
return full_doc
# Extract just the meta info line, and the timestamp of the first/last frame entry.
def parse_logfile_info(filename, verbose=False):
# Open source temp log file. Let exceptions percolate up.
sourcefile = open(filename, 'rU') # U is to open with Universal newline support
if verbose:
print "Reading " + filename
# The first line is the meta info line.
info_line = sourcefile.readline()
if not info_line:
sourcefile.close()
return None
# The rest of the lines are frames. Read the first and last to get the time range.
info = llsd.parse( info_line )
info['start_time'] = None
info['end_time'] = None
first_frame = sourcefile.readline()
if first_frame:
try:
info['start_time'] = int(llsd.parse(first_frame)['timestamp'])
except:
pass
# Read the file backwards to find the last two lines.
sourcefile.seek(0, 2)
file_size = sourcefile.tell()
offset = 1024
num_attempts = 0
end_time = None
if file_size < offset:
offset = file_size
while 1:
sourcefile.seek(-1*offset, 2)
read_str = sourcefile.read(offset)
# Remove newline at the end
if read_str[offset - 1] == '\n':
read_str = read_str[0:-1]
lines = read_str.split('\n')
full_line = None
if len(lines) > 2: # Got two complete lines
try:
end_time = llsd.parse(lines[-1])['timestamp']
except:
# We couldn't parse this line. Try once more.
try:
end_time = llsd.parse(lines[-2])['timestamp']
except:
# Nope. Just move on.
pass
break
if len(read_str) == file_size: # Reached the beginning
break
offset += 1024
info['end_time'] = int(end_time)
sourcefile.close()
return info
def parse_proc_filename(filename):
try:
name_as_list = filename.split(".")
cur_stat_type = name_as_list[0].split("_")[0]
cur_pid = name_as_list[1]
except (IndexError, ValueError):
return (None, None)
return (cur_pid, cur_stat_type)
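# Example (illustrative, not in the original source): for a collector file
# named "simstats_proc.12345.llsd", name_as_list is
# ['simstats_proc', '12345', 'llsd'], so this returns ('12345', 'simstats'),
# i.e. (pid, stat_type).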
# ----------------------------------------------------
def get_simstats_list(path=None):
""" Return stats (pid, type) listed in <type>_proc.<pid>.llsd """
if path is None:
path = DEFAULT_PATH
simstats_list = []
for file_name in os.listdir(path):
if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
simstats_info = parse_logfile_info(path + file_name)
if simstats_info is not None:
simstats_list.append(simstats_info)
return simstats_list
def get_log_info_list(pid=None, stat_type=None, path=None, target_column=None, verbose=False):
""" Return data from all llsd files matching the pid and stat type """
if path is None:
path = DEFAULT_PATH
log_info_list = {}
for file_name in os.listdir ( path ):
if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
(cur_pid, cur_stat_type) = parse_proc_filename(file_name)
if cur_pid is None:
continue
if pid is not None and pid != cur_pid:
continue
if stat_type is not None and stat_type != cur_stat_type:
continue
log_info_list[cur_pid] = parse_logfile(path + file_name, target_column, verbose)
return log_info_list
def delete_simstats_files(pid=None, stat_type=None, path=None):
""" Delete *.<pid>.llsd files """
if path is None:
path = DEFAULT_PATH
del_list = []
for file_name in os.listdir(path):
if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
(cur_pid, cur_stat_type) = parse_proc_filename(file_name)
if cur_pid is None:
continue
if pid is not None and pid != cur_pid:
continue
if stat_type is not None and stat_type != cur_stat_type:
continue
del_list.append(cur_pid)
# Allow delete related exceptions to percolate up if this fails.
os.unlink(os.path.join(DEFAULT_PATH, file_name))
return del_list
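# Hedged usage sketch (not part of the original module); the pid and stat
# type values are hypothetical.
def _example_simstats_usage():
    available = get_simstats_list()                       # scans DEFAULT_PATH
    fps_data = get_log_info_list(pid='12345', target_column='fps')
    removed = delete_simstats_files(stat_type='simstats')
    return available, fps_data, removed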

View File

@@ -1,222 +0,0 @@
'''
@file term.py
@brief a better shutil.copytree replacement
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
'''
#http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/475116
import sys, re
class TerminalController:
"""
A class that can be used to portably generate formatted output to
a terminal.
`TerminalController` defines a set of instance variables whose
values are initialized to the control sequence necessary to
perform a given action. These can be simply included in normal
output to the terminal:
>>> term = TerminalController()
>>> print 'This is '+term.GREEN+'green'+term.NORMAL
Alternatively, the `render()` method can be used, which replaces
'${action}' with the string required to perform 'action':
>>> term = TerminalController()
>>> print term.render('This is ${GREEN}green${NORMAL}')
If the terminal doesn't support a given action, then the value of
the corresponding instance variable will be set to ''. As a
result, the above code will still work on terminals that do not
support color, except that their output will not be colored.
Also, this means that you can test whether the terminal supports a
given action by simply testing the truth value of the
corresponding instance variable:
>>> term = TerminalController()
>>> if term.CLEAR_SCREEN:
... print 'This terminal supports clearing the screen.'
Finally, if the width and height of the terminal are known, then
they will be stored in the `COLS` and `LINES` attributes.
"""
# Cursor movement:
BOL = '' #: Move the cursor to the beginning of the line
UP = '' #: Move the cursor up one line
DOWN = '' #: Move the cursor down one line
LEFT = '' #: Move the cursor left one char
RIGHT = '' #: Move the cursor right one char
# Deletion:
CLEAR_SCREEN = '' #: Clear the screen and move to home position
CLEAR_EOL = '' #: Clear to the end of the line.
CLEAR_BOL = '' #: Clear to the beginning of the line.
CLEAR_EOS = '' #: Clear to the end of the screen
# Output modes:
BOLD = '' #: Turn on bold mode
BLINK = '' #: Turn on blink mode
DIM = '' #: Turn on half-bright mode
REVERSE = '' #: Turn on reverse-video mode
NORMAL = '' #: Turn off all modes
# Cursor display:
HIDE_CURSOR = '' #: Make the cursor invisible
SHOW_CURSOR = '' #: Make the cursor visible
# Terminal size:
COLS = None #: Width of the terminal (None for unknown)
LINES = None #: Height of the terminal (None for unknown)
# Foreground colors:
BLACK = BLUE = GREEN = CYAN = RED = MAGENTA = YELLOW = WHITE = ''
# Background colors:
BG_BLACK = BG_BLUE = BG_GREEN = BG_CYAN = ''
BG_RED = BG_MAGENTA = BG_YELLOW = BG_WHITE = ''
_STRING_CAPABILITIES = """
BOL=cr UP=cuu1 DOWN=cud1 LEFT=cub1 RIGHT=cuf1
CLEAR_SCREEN=clear CLEAR_EOL=el CLEAR_BOL=el1 CLEAR_EOS=ed BOLD=bold
BLINK=blink DIM=dim REVERSE=rev UNDERLINE=smul NORMAL=sgr0
HIDE_CURSOR=civis SHOW_CURSOR=cnorm""".split()
_COLORS = """BLACK BLUE GREEN CYAN RED MAGENTA YELLOW WHITE""".split()
_ANSICOLORS = "BLACK RED GREEN YELLOW BLUE MAGENTA CYAN WHITE".split()
def __init__(self, term_stream=sys.stdout):
"""
Create a `TerminalController` and initialize its attributes
with appropriate values for the current terminal.
`term_stream` is the stream that will be used for terminal
output; if this stream is not a tty, then the terminal is
assumed to be a dumb terminal (i.e., have no capabilities).
"""
# Curses isn't available on all platforms
try: import curses
except: return
# If the stream isn't a tty, then assume it has no capabilities.
if not term_stream.isatty(): return
# Check the terminal type. If we fail, then assume that the
# terminal has no capabilities.
try: curses.setupterm()
except: return
# Look up numeric capabilities.
self.COLS = curses.tigetnum('cols')
self.LINES = curses.tigetnum('lines')
# Look up string capabilities.
for capability in self._STRING_CAPABILITIES:
(attrib, cap_name) = capability.split('=')
setattr(self, attrib, self._tigetstr(cap_name) or '')
# Colors
set_fg = self._tigetstr('setf')
if set_fg:
for i,color in zip(range(len(self._COLORS)), self._COLORS):
setattr(self, color, curses.tparm(set_fg, i) or '')
set_fg_ansi = self._tigetstr('setaf')
if set_fg_ansi:
for i,color in zip(range(len(self._ANSICOLORS)), self._ANSICOLORS):
setattr(self, color, curses.tparm(set_fg_ansi, i) or '')
set_bg = self._tigetstr('setb')
if set_bg:
for i,color in zip(range(len(self._COLORS)), self._COLORS):
setattr(self, 'BG_'+color, curses.tparm(set_bg, i) or '')
set_bg_ansi = self._tigetstr('setab')
if set_bg_ansi:
for i,color in zip(range(len(self._ANSICOLORS)), self._ANSICOLORS):
setattr(self, 'BG_'+color, curses.tparm(set_bg_ansi, i) or '')
def _tigetstr(self, cap_name):
# String capabilities can include "delays" of the form "$<2>".
# For any modern terminal, we should be able to just ignore
# these, so strip them out.
import curses
cap = curses.tigetstr(cap_name) or ''
return re.sub(r'\$<\d+>[/*]?', '', cap)
def render(self, template):
"""
Replace each $-substitutions in the given template string with
the corresponding terminal control string (if it's defined) or
'' (if it's not).
"""
return re.sub(r'\$\$|\${\w+}', self._render_sub, template)
def _render_sub(self, match):
s = match.group()
if s == '$$': return s
else: return getattr(self, s[2:-1])
#######################################################################
# Example use case: progress bar
#######################################################################
class ProgressBar:
"""
A 3-line progress bar, which looks like::
Header
20% [===========----------------------------------]
progress message
The progress bar is colored, if the terminal supports color
output; and adjusts to the width of the terminal.
"""
BAR = '%3d%% ${GREEN}[${BOLD}%s%s${NORMAL}${GREEN}]${NORMAL}\n'
HEADER = '${BOLD}${CYAN}%s${NORMAL}\n\n'
def __init__(self, term, header):
self.term = term
if not (self.term.CLEAR_EOL and self.term.UP and self.term.BOL):
raise ValueError("Terminal isn't capable enough -- you "
"should use a simpler progress dispaly.")
self.width = self.term.COLS or 75
self.bar = term.render(self.BAR)
self.header = self.term.render(self.HEADER % header.center(self.width))
self.cleared = 1 #: true if we haven't drawn the bar yet.
self.update(0, '')
def update(self, percent, message):
if self.cleared:
sys.stdout.write(self.header)
self.cleared = 0
n = int((self.width-10)*percent)
sys.stdout.write(
self.term.BOL + self.term.UP + self.term.CLEAR_EOL +
(self.bar % (100*percent, '='*n, '-'*(self.width-10-n))) +
self.term.CLEAR_EOL + message.center(self.width))
def clear(self):
if not self.cleared:
sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL +
self.term.UP + self.term.CLEAR_EOL +
self.term.UP + self.term.CLEAR_EOL)
self.cleared = 1
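# Hedged usage sketch (not part of the original module): driving the bar from
# a hypothetical loop; 'work_items' and 'process' stand in for the caller's
# own data and work function.
def _example_progress(work_items, process):
    term = TerminalController()
    bar = ProgressBar(term, 'Processing')
    for i, item in enumerate(work_items):
        process(item)
        bar.update(float(i + 1) / len(work_items), str(item))
    bar.clear()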

View File

@@ -1,508 +0,0 @@
#!/usr/bin/python
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
r"""UUID objects (universally unique identifiers) according to RFC 4122.
This module provides immutable UUID objects (class UUID) and the functions
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
UUIDs as specified in RFC 4122.
If all you want is a unique ID, you should probably call uuid1() or uuid4().
Note that uuid1() may compromise privacy since it creates a UUID containing
the computer's network address. uuid4() creates a random UUID.
Typical usage:
>>> import uuid
# make a UUID based on the host ID and current time
>>> uuid.uuid1()
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
# make a UUID using an MD5 hash of a namespace UUID and a name
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
# make a random UUID
>>> uuid.uuid4()
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
# make a UUID using a SHA-1 hash of a namespace UUID and a name
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
# make a UUID from a string of hex digits (braces and hyphens ignored)
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
# convert a UUID to a string of hex digits in standard form
>>> str(x)
'00010203-0405-0607-0809-0a0b0c0d0e0f'
# get the raw 16 bytes of the UUID
>>> x.bytes
'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
# make a UUID from a 16-byte string
>>> uuid.UUID(bytes=x.bytes)
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
This module works with Python 2.3 or higher."""
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
__date__ = '$Date: 2006/06/12 23:15:40 $'.split()[1].replace('/', '-')
__version__ = '$Revision: 1.30 $'.split()[1]
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
'reserved for NCS compatibility', 'specified in RFC 4122',
'reserved for Microsoft compatibility', 'reserved for future definition']
class UUID(object):
"""Instances of the UUID class represent UUIDs as specified in RFC 4122.
UUID objects are immutable, hashable, and usable as dictionary keys.
Converting a UUID to a string with str() yields something in the form
'12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts
four possible forms: a similar string of hexadecimal digits, or a
string of 16 raw bytes as an argument named 'bytes', or a tuple of
six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
48-bit values respectively) as an argument named 'fields', or a single
128-bit integer as an argument named 'int'.
UUIDs have these read-only attributes:
bytes the UUID as a 16-byte string
fields a tuple of the six integer fields of the UUID,
which are also available as six individual attributes
and two derived attributes:
time_low the first 32 bits of the UUID
time_mid the next 16 bits of the UUID
time_hi_version the next 16 bits of the UUID
clock_seq_hi_variant the next 8 bits of the UUID
clock_seq_low the next 8 bits of the UUID
node the last 48 bits of the UUID
time the 60-bit timestamp
clock_seq the 14-bit sequence number
hex the UUID as a 32-character hexadecimal string
int the UUID as a 128-bit integer
urn the UUID as a URN as specified in RFC 4122
variant the UUID variant (one of the constants RESERVED_NCS,
RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
version the UUID version number (1 through 5, meaningful only
when the variant is RFC_4122)
"""
def __init__(self, hex=None, bytes=None, fields=None, int=None,
version=None):
r"""Create a UUID from either a string of 32 hexadecimal digits,
a string of 16 bytes as the 'bytes' argument, a tuple of six
integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
the 'fields' argument, or a single 128-bit integer as the 'int'
argument. When a string of hex digits is given, curly braces,
hyphens, and a URN prefix are all optional. For example, these
expressions all yield the same UUID:
UUID('{12345678-1234-5678-1234-567812345678}')
UUID('12345678123456781234567812345678')
UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
UUID(bytes='\x12\x34\x56\x78'*4)
UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
UUID(int=0x12345678123456781234567812345678)
Exactly one of 'hex', 'bytes', 'fields', or 'int' must be given.
The 'version' argument is optional; if given, the resulting UUID
will have its variant and version number set according to RFC 4122,
overriding bits in the given 'hex', 'bytes', 'fields', or 'int'.
"""
if [hex, bytes, fields, int].count(None) != 3:
raise TypeError('need just one of hex, bytes, fields, or int')
if hex is not None:
hex = hex.replace('urn:', '').replace('uuid:', '')
hex = hex.strip('{}').replace('-', '')
if len(hex) != 32:
raise ValueError('badly formed hexadecimal UUID string')
int = long(hex, 16)
if bytes is not None:
if len(bytes) != 16:
raise ValueError('bytes is not a 16-char string')
int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
if fields is not None:
if len(fields) != 6:
raise ValueError('fields is not a 6-tuple')
(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node) = fields
if not 0 <= time_low < 1<<32L:
raise ValueError('field 1 out of range (need a 32-bit value)')
if not 0 <= time_mid < 1<<16L:
raise ValueError('field 2 out of range (need a 16-bit value)')
if not 0 <= time_hi_version < 1<<16L:
raise ValueError('field 3 out of range (need a 16-bit value)')
if not 0 <= clock_seq_hi_variant < 1<<8L:
raise ValueError('field 4 out of range (need an 8-bit value)')
if not 0 <= clock_seq_low < 1<<8L:
raise ValueError('field 5 out of range (need an 8-bit value)')
if not 0 <= node < 1<<48L:
raise ValueError('field 6 out of range (need a 48-bit value)')
clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low
int = ((time_low << 96L) | (time_mid << 80L) |
(time_hi_version << 64L) | (clock_seq << 48L) | node)
if int is not None:
if not 0 <= int < 1<<128L:
raise ValueError('int is out of range (need a 128-bit value)')
if version is not None:
if not 1 <= version <= 5:
raise ValueError('illegal version number')
# Set the variant to RFC 4122.
int &= ~(0xc000 << 48L)
int |= 0x8000 << 48L
# Set the version number.
int &= ~(0xf000 << 64L)
int |= version << 76L
self.__dict__['int'] = int
def __cmp__(self, other):
if isinstance(other, UUID):
return cmp(self.int, other.int)
return NotImplemented
def __hash__(self):
return hash(self.int)
def __int__(self):
return self.int
def __repr__(self):
return 'UUID(%r)' % str(self)
def __setattr__(self, name, value):
raise TypeError('UUID objects are immutable')
def __str__(self):
hex = '%032x' % self.int
return '%s-%s-%s-%s-%s' % (
hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])
def get_bytes(self):
bytes = ''
for shift in range(0, 128, 8):
bytes = chr((self.int >> shift) & 0xff) + bytes
return bytes
bytes = property(get_bytes)
def get_fields(self):
return (self.time_low, self.time_mid, self.time_hi_version,
self.clock_seq_hi_variant, self.clock_seq_low, self.node)
fields = property(get_fields)
def get_time_low(self):
return self.int >> 96L
time_low = property(get_time_low)
def get_time_mid(self):
return (self.int >> 80L) & 0xffff
time_mid = property(get_time_mid)
def get_time_hi_version(self):
return (self.int >> 64L) & 0xffff
time_hi_version = property(get_time_hi_version)
def get_clock_seq_hi_variant(self):
return (self.int >> 56L) & 0xff
clock_seq_hi_variant = property(get_clock_seq_hi_variant)
def get_clock_seq_low(self):
return (self.int >> 48L) & 0xff
clock_seq_low = property(get_clock_seq_low)
def get_time(self):
return (((self.time_hi_version & 0x0fffL) << 48L) |
(self.time_mid << 32L) | self.time_low)
time = property(get_time)
def get_clock_seq(self):
return (((self.clock_seq_hi_variant & 0x3fL) << 8L) |
self.clock_seq_low)
clock_seq = property(get_clock_seq)
def get_node(self):
return self.int & 0xffffffffffff
node = property(get_node)
def get_hex(self):
return '%032x' % self.int
hex = property(get_hex)
def get_urn(self):
return 'urn:uuid:' + str(self)
urn = property(get_urn)
def get_variant(self):
if not self.int & (0x8000 << 48L):
return RESERVED_NCS
elif not self.int & (0x4000 << 48L):
return RFC_4122
elif not self.int & (0x2000 << 48L):
return RESERVED_MICROSOFT
else:
return RESERVED_FUTURE
variant = property(get_variant)
def get_version(self):
# The version bits are only meaningful for RFC 4122 UUIDs.
if self.variant == RFC_4122:
return int((self.int >> 76L) & 0xf)
version = property(get_version)
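# Worked example (illustrative, not in the original source) using the UUID
# from the module docstring:
#   x = UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
#   x.time_low        -> 0xa8098c1a
#   x.time_mid        -> 0xf86e
#   x.time_hi_version -> 0x11da      (top nibble 1 => version 1)
#   x.clock_seq       -> 0x3d1a      (hi_variant 0xbd masked to 0x3d, low 0x1a)
#   x.node            -> 0x112444be1e
#   x.variant         -> RFC_4122
#   x.version         -> 1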
def _ifconfig_getnode():
"""Get the hardware address on Unix by running ifconfig."""
import os
for dir in ['', '/sbin/', '/usr/sbin']:
try:
path = os.path.join(dir, 'ifconfig')
if os.path.exists(path):
pipe = os.popen(path)
else:
continue
except IOError:
continue
for line in pipe:
words = line.lower().split()
for i in range(len(words)):
if words[i] in ['hwaddr', 'ether']:
return int(words[i + 1].replace(':', ''), 16)
def _ipconfig_getnode():
"""Get the hardware address on Windows by running ipconfig.exe."""
import os, re
dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
try:
import ctypes
buffer = ctypes.create_string_buffer(300)
ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
dirs.insert(0, buffer.value.decode('mbcs'))
except:
pass
for dir in dirs:
try:
pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
except IOError:
continue
for line in pipe:
value = line.split(':')[-1].strip().lower()
if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
return int(value.replace('-', ''), 16)
def _netbios_getnode():
"""Get the hardware address on Windows using NetBIOS calls.
See http://support.microsoft.com/kb/118623 for details."""
import win32wnet, netbios
ncb = netbios.NCB()
ncb.Command = netbios.NCBENUM
ncb.Buffer = adapters = netbios.LANA_ENUM()
adapters._pack()
if win32wnet.Netbios(ncb) != 0:
return
adapters._unpack()
for i in range(adapters.length):
ncb.Reset()
ncb.Command = netbios.NCBRESET
ncb.Lana_num = ord(adapters.lana[i])
if win32wnet.Netbios(ncb) != 0:
continue
ncb.Reset()
ncb.Command = netbios.NCBASTAT
ncb.Lana_num = ord(adapters.lana[i])
ncb.Callname = '*'.ljust(16)
ncb.Buffer = status = netbios.ADAPTER_STATUS()
if win32wnet.Netbios(ncb) != 0:
continue
status._unpack()
bytes = map(ord, status.adapter_address)
return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) +
(bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5])
# Thanks to Thomas Heller for ctypes and for his help with its use here.
# If ctypes is available, use it to find system routines for UUID generation.
_uuid_generate_random = _uuid_generate_time = _UuidCreate = None
try:
import ctypes, ctypes.util
_buffer = ctypes.create_string_buffer(16)
# The uuid_generate_* routines are provided by libuuid on at least
# Linux and FreeBSD, and provided by libc on Mac OS X.
for libname in ['uuid', 'c']:
try:
lib = ctypes.CDLL(ctypes.util.find_library(libname))
except:
continue
if hasattr(lib, 'uuid_generate_random'):
_uuid_generate_random = lib.uuid_generate_random
if hasattr(lib, 'uuid_generate_time'):
_uuid_generate_time = lib.uuid_generate_time
# On Windows prior to 2000, UuidCreate gives a UUID containing the
# hardware address. On Windows 2000 and later, UuidCreate makes a
# random UUID and UuidCreateSequential gives a UUID containing the
# hardware address. These routines are provided by the RPC runtime.
try:
lib = ctypes.windll.rpcrt4
except:
lib = None
_UuidCreate = getattr(lib, 'UuidCreateSequential',
getattr(lib, 'UuidCreate', None))
except:
pass
def _unixdll_getnode():
"""Get the hardware address on Unix using ctypes."""
_uuid_generate_time(_buffer)
return UUID(bytes=_buffer.raw).node
def _windll_getnode():
"""Get the hardware address on Windows using ctypes."""
if _UuidCreate(_buffer) == 0:
return UUID(bytes=_buffer.raw).node
def _random_getnode():
"""Get a random node ID, with eighth bit set as suggested by RFC 4122."""
import random
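    # Illustrative note: 0x010000000000 sets the multicast bit (the least
    # significant bit of the first octet), so a randomly chosen node value can
    # never collide with a real IEEE 802 MAC address (RFC 4122, section 4.5).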
return random.randrange(0, 1<<48L) | 0x010000000000L
_node = None
def getnode():
"""Get the hardware address as a 48-bit integer. The first time this
runs, it may launch a separate program, which could be quite slow. If
all attempts to obtain the hardware address fail, we choose a random
48-bit number with its eighth bit set to 1 as recommended in RFC 4122."""
global _node
if _node is not None:
return _node
import sys
if sys.platform == 'win32':
getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
else:
getters = [_unixdll_getnode, _ifconfig_getnode]
for getter in getters + [_random_getnode]:
try:
_node = getter()
except:
continue
if _node is not None:
return _node
def uuid1(node=None, clock_seq=None):
"""Generate a UUID from a host ID, sequence number, and the current time.
If 'node' is not given, getnode() is used to obtain the hardware
address. If 'clock_seq' is given, it is used as the sequence number;
otherwise a random 14-bit sequence number is chosen."""
# When the system provides a version-1 UUID generator, use it (but don't
# use UuidCreate here because its UUIDs don't conform to RFC 4122).
if _uuid_generate_time and node is clock_seq is None:
_uuid_generate_time(_buffer)
return UUID(bytes=_buffer.raw)
import time
nanoseconds = int(time.time() * 1e9)
# 0x01b21dd213814000 is the number of 100-ns intervals between the
# UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
timestamp = int(nanoseconds/100) + 0x01b21dd213814000L
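    # Illustrative check of the constant above: the Gregorian interval from
    # 1582-10-15 to 1970-01-01 is 141427 days, and
    # 141427 * 86400 * 10**7 == 0x01b21dd213814000.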
if clock_seq is None:
import random
clock_seq = random.randrange(1<<14L) # instead of stable storage
time_low = timestamp & 0xffffffffL
time_mid = (timestamp >> 32L) & 0xffffL
time_hi_version = (timestamp >> 48L) & 0x0fffL
clock_seq_low = clock_seq & 0xffL
clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL
if node is None:
node = getnode()
return UUID(fields=(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node), version=1)
def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
try:
# Python 2.6
from hashlib import md5
except ImportError:
# Python 2.5 and earlier
from md5 import new as md5
hash = md5(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=3)
def uuid4():
"""Generate a random UUID."""
# When the system provides a version-4 UUID generator, use it.
if _uuid_generate_random:
_uuid_generate_random(_buffer)
return UUID(bytes=_buffer.raw)
# Otherwise, get randomness from urandom or the 'random' module.
try:
import os
return UUID(bytes=os.urandom(16), version=4)
except:
import random
        bytes = ''.join([chr(random.randrange(256)) for i in range(16)])
return UUID(bytes=bytes, version=4)
def uuid5(namespace, name):
"""Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
import sha
hash = sha.sha(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=5)
# The following standard UUIDs are for use with uuid3() or uuid5().
NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')
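# Illustrative usage sketch (an editorial addition, not part of the original
# module): uuid1() is time/MAC based, uuid3()/uuid5() are name based (MD5 and
# SHA-1 respectively), and uuid4() is random.  The name 'example.org' below is
# only a placeholder.
if __name__ == '__main__':
    print 'uuid1:', uuid1()
    print 'uuid3:', uuid3(NAMESPACE_DNS, 'example.org')
    print 'uuid4:', uuid4()
    print 'uuid5:', uuid5(NAMESPACE_DNS, 'example.org')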

View File

@ -44,6 +44,7 @@
#include "lltexglobalcolor.h"
#include "llwearabledata.h"
#include "boost/bind.hpp"
#include "boost/tokenizer.hpp"
#if LL_MSVC
@ -87,8 +88,11 @@ public:
private:
std::string mName;
std::string mSupport;
std::string mAliases;
BOOL mIsJoint;
LLVector3 mPos;
LLVector3 mEnd;
LLVector3 mRot;
LLVector3 mScale;
LLVector3 mPivot;
@ -118,6 +122,7 @@ public:
private:
S32 mNumBones;
S32 mNumCollisionVolumes;
LLAvatarAppearance::joint_alias_map_t mJointAliasMap;
typedef std::vector<LLAvatarBoneInfo*> bone_info_list_t;
bone_info_list_t mBoneInfoList;
};
@ -181,7 +186,11 @@ LLAvatarAppearance::LLAvatarAppearance(LLWearableData* wearable_data) :
mPelvisToFoot(0.f),
mHeadOffset(),
mRoot(NULL),
mWearableData(wearable_data)
mWearableData(wearable_data),
mNumBones(0),
mNumCollisionVolumes(0),
mCollisionVolumes(NULL),
mIsBuilt(FALSE)
{
llassert_always(mWearableData);
mBakedTextureDatas.resize(LLAvatarAppearanceDefines::BAKED_NUM_INDICES);
@ -194,11 +203,6 @@ LLAvatarAppearance::LLAvatarAppearance(LLWearableData* wearable_data) :
mBakedTextureDatas[i].mMaskTexName = 0;
mBakedTextureDatas[i].mTextureIndex = LLAvatarAppearanceDefines::LLAvatarAppearanceDictionary::bakedToLocalTextureIndex((LLAvatarAppearanceDefines::EBakedTextureIndex)i);
}
mIsBuilt = FALSE;
mNumCollisionVolumes = 0;
mCollisionVolumes = NULL;
}
// virtual
@ -323,36 +327,49 @@ LLAvatarAppearance::~LLAvatarAppearance()
//static
void LLAvatarAppearance::initClass()
{
std::string xmlFile;
initClass("","");
}
xmlFile = gDirUtilp->getExpandedFilename(LL_PATH_CHARACTER,AVATAR_DEFAULT_CHAR) + "_lad.xml";
BOOL success = sXMLTree.parseFile( xmlFile, FALSE );
//static
void LLAvatarAppearance::initClass(const std::string& avatar_file_name_arg, const std::string& skeleton_file_name_arg)
{
std::string avatar_file_name;
if (!avatar_file_name_arg.empty())
{
avatar_file_name = gDirUtilp->getExpandedFilename(LL_PATH_CHARACTER,avatar_file_name_arg);
}
else
{
avatar_file_name = gDirUtilp->getExpandedFilename(LL_PATH_CHARACTER,AVATAR_DEFAULT_CHAR + "_lad.xml");
}
BOOL success = sXMLTree.parseFile( avatar_file_name, FALSE );
if (!success)
{
LL_ERRS() << "Problem reading avatar configuration file:" << xmlFile << LL_ENDL;
LL_ERRS() << "Problem reading avatar configuration file:" << avatar_file_name << LL_ENDL;
}
// now sanity check xml file
LLXmlTreeNode* root = sXMLTree.getRoot();
if (!root)
{
LL_ERRS() << "No root node found in avatar configuration file: " << xmlFile << LL_ENDL;
LL_ERRS() << "No root node found in avatar configuration file: " << avatar_file_name << LL_ENDL;
return;
}
//-------------------------------------------------------------------------
// <linden_avatar version="1.0"> (root)
// <linden_avatar version="2.0"> (root)
//-------------------------------------------------------------------------
if( !root->hasName( "linden_avatar" ) )
{
LL_ERRS() << "Invalid avatar file header: " << xmlFile << LL_ENDL;
LL_ERRS() << "Invalid avatar file header: " << avatar_file_name << LL_ENDL;
}
std::string version;
static LLStdStringHandle version_string = LLXmlTree::addAttributeString("version");
if( !root->getFastAttributeString( version_string, version ) || (version != "1.0") )
if( !root->getFastAttributeString( version_string, version ) || ((version != "1.0") && (version != "2.0")))
{
LL_ERRS() << "Invalid avatar file version: " << version << " in file: " << xmlFile << LL_ENDL;
LL_ERRS() << "Invalid avatar file version: " << version << " in file: " << avatar_file_name << LL_ENDL;
}
S32 wearable_def_version = 1;
@ -365,16 +382,19 @@ void LLAvatarAppearance::initClass()
LLXmlTreeNode* skeleton_node = root->getChildByName( "skeleton" );
if (!skeleton_node)
{
LL_ERRS() << "No skeleton in avatar configuration file: " << xmlFile << LL_ENDL;
LL_ERRS() << "No skeleton in avatar configuration file: " << avatar_file_name << LL_ENDL;
return;
}
std::string skeleton_file_name;
static LLStdStringHandle file_name_string = LLXmlTree::addAttributeString("file_name");
if (!skeleton_node->getFastAttributeString(file_name_string, skeleton_file_name))
{
LL_ERRS() << "No file name in skeleton node in avatar config file: " << xmlFile << LL_ENDL;
}
std::string skeleton_file_name = skeleton_file_name_arg;
if (skeleton_file_name.empty())
{
static LLStdStringHandle file_name_string = LLXmlTree::addAttributeString("file_name");
if (!skeleton_node->getFastAttributeString(file_name_string, skeleton_file_name))
{
LL_ERRS() << "No file name in skeleton node in avatar config file: " << avatar_file_name << LL_ENDL;
}
}
std::string skeleton_path;
skeleton_path = gDirUtilp->getExpandedFilename(LL_PATH_CHARACTER,skeleton_file_name);
@ -437,11 +457,56 @@ void LLAvatarAppearance::cleanupClass()
using namespace LLAvatarAppearanceDefines;
void LLAvatarAppearance::compareJointStateMaps(joint_state_map_t& last_state,
joint_state_map_t& curr_state)
{
if (!last_state.empty() && (last_state != curr_state))
{
S32 diff_count = 0;
joint_state_map_t::iterator it;
for (it=last_state.begin(); it != last_state.end(); ++it)
{
const std::string& key = it->first;
if (last_state[key] != curr_state[key])
{
LL_DEBUGS("AvatarBodySize") << "BodySize change " << key << " " << last_state[key] << "->" << curr_state[key] << LL_ENDL;
diff_count++;
}
}
if (diff_count > 0)
{
LL_DEBUGS("AvatarBodySize") << "Total of BodySize changes " << diff_count << LL_ENDL;
}
}
}
//------------------------------------------------------------------------
// The viewer can only suggest a good size for the agent,
// the simulator will keep it inside a reasonable range.
void LLAvatarAppearance::computeBodySize()
{
mLastBodySizeState = mCurrBodySizeState;
mCurrBodySizeState["mPelvis scale"] = mPelvisp->getScale();
mCurrBodySizeState["mSkull pos"] = mSkullp->getPosition();
mCurrBodySizeState["mSkull scale"] = mSkullp->getScale();
mCurrBodySizeState["mNeck pos"] = mNeckp->getPosition();
mCurrBodySizeState["mNeck scale"] = mNeckp->getScale();
mCurrBodySizeState["mChest pos"] = mChestp->getPosition();
mCurrBodySizeState["mChest scale"] = mChestp->getScale();
mCurrBodySizeState["mHead pos"] = mHeadp->getPosition();
mCurrBodySizeState["mHead scale"] = mHeadp->getScale();
mCurrBodySizeState["mTorso pos"] = mTorsop->getPosition();
mCurrBodySizeState["mTorso scale"] = mTorsop->getScale();
mCurrBodySizeState["mHipLeft pos"] = mHipLeftp->getPosition();
mCurrBodySizeState["mHipLeft scale"] = mHipLeftp->getScale();
mCurrBodySizeState["mKneeLeft pos"] = mKneeLeftp->getPosition();
mCurrBodySizeState["mKneeLeft scale"] = mKneeLeftp->getScale();
mCurrBodySizeState["mAnkleLeft pos"] = mAnkleLeftp->getPosition();
mCurrBodySizeState["mAnkleLeft scale"] = mAnkleLeftp->getScale();
mCurrBodySizeState["mFootLeft pos"] = mFootLeftp->getPosition();
LLVector3 pelvis_scale = mPelvisp->getScale();
// some of the joints have not been cached
@ -501,6 +566,8 @@ void LLAvatarAppearance::computeBodySize()
if (new_body_size != mBodySize || old_offset != mAvatarOffset.mV[VZ])
{
mBodySize = new_body_size;
compareJointStateMaps(mLastBodySizeState, mCurrBodySizeState);
}
}
@ -536,7 +603,7 @@ BOOL LLAvatarAppearance::parseSkeletonFile(const std::string& filename)
std::string version;
static LLStdStringHandle version_string = LLXmlTree::addAttributeString("version");
if( !root->getFastAttributeString( version_string, version ) || (version != "1.0") )
if( !root->getFastAttributeString( version_string, version ) || ((version != "1.0") && (version != "2.0")))
{
LL_ERRS() << "Invalid avatar skeleton file version: " << version << " in file: " << filename << LL_ENDL;
return FALSE;
@ -552,6 +619,12 @@ BOOL LLAvatarAppearance::setupBone(const LLAvatarBoneInfo* info, LLJoint* parent
{
LLJoint* joint = NULL;
LL_DEBUGS("BVH") << "bone info: name " << info->mName
<< " isJoint " << info->mIsJoint
<< " volume_num " << volume_num
<< " joint_num " << joint_num
<< LL_ENDL;
if (info->mIsJoint)
{
joint = getCharacterJoint(joint_num);
@ -566,7 +639,7 @@ BOOL LLAvatarAppearance::setupBone(const LLAvatarBoneInfo* info, LLJoint* parent
{
if (volume_num >= (S32)mNumCollisionVolumes)
{
LL_WARNS() << "Too many bones" << LL_ENDL;
LL_WARNS() << "Too many collision volumes" << LL_ENDL;
return FALSE;
}
joint = (&mCollisionVolumes[volume_num]);
@ -574,26 +647,34 @@ BOOL LLAvatarAppearance::setupBone(const LLAvatarBoneInfo* info, LLJoint* parent
}
// add to parent
if (parent)
if (parent && (joint->getParent()!=parent))
{
parent->addChild( joint );
}
// SL-315
joint->setPosition(info->mPos);
joint->setDefaultPosition(info->mPos);
joint->setRotation(mayaQ(info->mRot.mV[VX], info->mRot.mV[VY],
info->mRot.mV[VZ], LLQuaternion::XYZ));
joint->setScale(info->mScale);
joint->setDefaultScale(info->mScale);
joint->setSupport(info->mSupport);
joint->setEnd(info->mEnd);
if (info->mIsJoint)
{
joint->setSkinOffset( info->mPivot );
joint->setJointNum(joint_num);
joint_num++;
}
else // collision volume
{
joint->setJointNum(mNumBones+volume_num);
volume_num++;
}
// setup children
LLAvatarBoneInfo::child_list_t::const_iterator iter;
for (iter = info->mChildList.begin(); iter != info->mChildList.end(); ++iter)
@ -613,12 +694,12 @@ BOOL LLAvatarAppearance::setupBone(const LLAvatarBoneInfo* info, LLJoint* parent
//-----------------------------------------------------------------------------
BOOL LLAvatarAppearance::allocateCharacterJoints( U32 num )
{
clearSkeleton();
for(S32 joint_num = 0; joint_num < (S32)num; joint_num++)
{
mSkeleton.push_back(createAvatarJoint(joint_num));
}
if (mSkeleton.size() != num)
{
clearSkeleton();
mSkeleton = avatar_joint_list_t(num,NULL);
mNumBones = num;
}
return TRUE;
}
@ -629,18 +710,16 @@ BOOL LLAvatarAppearance::allocateCharacterJoints( U32 num )
//-----------------------------------------------------------------------------
BOOL LLAvatarAppearance::buildSkeleton(const LLAvatarSkeletonInfo *info)
{
//-------------------------------------------------------------------------
LL_DEBUGS("BVH") << "numBones " << info->mNumBones << " numCollisionVolumes " << info->mNumCollisionVolumes << LL_ENDL;
// allocate joints
//-------------------------------------------------------------------------
if (!allocateCharacterJoints(info->mNumBones))
{
LL_ERRS() << "Can't allocate " << info->mNumBones << " joints" << LL_ENDL;
return FALSE;
}
//-------------------------------------------------------------------------
// allocate volumes
//-------------------------------------------------------------------------
if (info->mNumCollisionVolumes)
{
if (!allocateCollisionVolumes(info->mNumCollisionVolumes))
@ -655,8 +734,8 @@ BOOL LLAvatarAppearance::buildSkeleton(const LLAvatarSkeletonInfo *info)
LLAvatarSkeletonInfo::bone_info_list_t::const_iterator iter;
for (iter = info->mBoneInfoList.begin(); iter != info->mBoneInfoList.end(); ++iter)
{
LLAvatarBoneInfo *info = *iter;
if (!setupBone(info, NULL, current_volume_num, current_joint_num))
LLAvatarBoneInfo *bone_info = *iter;
if (!setupBone(bone_info, NULL, current_volume_num, current_joint_num))
{
LL_ERRS() << "Error parsing bone in skeleton file" << LL_ENDL;
return FALSE;
@ -820,6 +899,7 @@ void LLAvatarAppearance::buildCharacter()
//-------------------------------------------------------------------------
// initialize the pelvis
//-------------------------------------------------------------------------
// SL-315
mPelvisp->setPosition( LLVector3(0.0f, 0.0f, 0.0f) );
mIsBuilt = TRUE;
@ -834,21 +914,21 @@ BOOL LLAvatarAppearance::loadAvatar()
// avatar_skeleton.xml
if( !buildSkeleton(sAvatarSkeletonInfo) )
{
LL_WARNS() << "avatar file: buildSkeleton() failed" << LL_ENDL;
LL_ERRS() << "avatar file: buildSkeleton() failed" << LL_ENDL;
return FALSE;
}
// avatar_lad.xml : <skeleton>
if( !loadSkeletonNode() )
{
LL_WARNS() << "avatar file: loadNodeSkeleton() failed" << LL_ENDL;
LL_ERRS() << "avatar file: loadNodeSkeleton() failed" << LL_ENDL;
return FALSE;
}
// avatar_lad.xml : <mesh>
if( !loadMeshNodes() )
{
LL_WARNS() << "avatar file: loadNodeMesh() failed" << LL_ENDL;
LL_ERRS() << "avatar file: loadNodeMesh() failed" << LL_ENDL;
return FALSE;
}
@ -858,13 +938,13 @@ BOOL LLAvatarAppearance::loadAvatar()
mTexSkinColor = new LLTexGlobalColor( this );
if( !mTexSkinColor->setInfo( sAvatarXmlInfo->mTexSkinColorInfo ) )
{
LL_WARNS() << "avatar file: mTexSkinColor->setInfo() failed" << LL_ENDL;
LL_ERRS() << "avatar file: mTexSkinColor->setInfo() failed" << LL_ENDL;
return FALSE;
}
}
else
{
LL_WARNS() << "<global_color> name=\"skin_color\" not found" << LL_ENDL;
LL_ERRS() << "<global_color> name=\"skin_color\" not found" << LL_ENDL;
return FALSE;
}
if( sAvatarXmlInfo->mTexHairColorInfo )
@ -872,13 +952,13 @@ BOOL LLAvatarAppearance::loadAvatar()
mTexHairColor = new LLTexGlobalColor( this );
if( !mTexHairColor->setInfo( sAvatarXmlInfo->mTexHairColorInfo ) )
{
LL_WARNS() << "avatar file: mTexHairColor->setInfo() failed" << LL_ENDL;
LL_ERRS() << "avatar file: mTexHairColor->setInfo() failed" << LL_ENDL;
return FALSE;
}
}
else
{
LL_WARNS() << "<global_color> name=\"hair_color\" not found" << LL_ENDL;
LL_ERRS() << "<global_color> name=\"hair_color\" not found" << LL_ENDL;
return FALSE;
}
if( sAvatarXmlInfo->mTexEyeColorInfo )
@ -886,26 +966,26 @@ BOOL LLAvatarAppearance::loadAvatar()
mTexEyeColor = new LLTexGlobalColor( this );
if( !mTexEyeColor->setInfo( sAvatarXmlInfo->mTexEyeColorInfo ) )
{
LL_WARNS() << "avatar file: mTexEyeColor->setInfo() failed" << LL_ENDL;
LL_ERRS() << "avatar file: mTexEyeColor->setInfo() failed" << LL_ENDL;
return FALSE;
}
}
else
{
LL_WARNS() << "<global_color> name=\"eye_color\" not found" << LL_ENDL;
LL_ERRS() << "<global_color> name=\"eye_color\" not found" << LL_ENDL;
return FALSE;
}
// avatar_lad.xml : <layer_set>
if (sAvatarXmlInfo->mLayerInfoList.empty())
{
LL_WARNS() << "avatar file: missing <layer_set> node" << LL_ENDL;
LL_ERRS() << "avatar file: missing <layer_set> node" << LL_ENDL;
return FALSE;
}
if (sAvatarXmlInfo->mMorphMaskInfoList.empty())
{
LL_WARNS() << "avatar file: missing <morph_masks> node" << LL_ENDL;
LL_ERRS() << "avatar file: missing <morph_masks> node" << LL_ENDL;
return FALSE;
}
@ -1104,6 +1184,7 @@ BOOL LLAvatarAppearance::loadMeshNodes()
{
// This should never happen
LL_WARNS("Avatar") << "Could not find avatar mesh: " << info->mReferenceMeshName << LL_ENDL;
return FALSE;
}
}
else
@ -1240,6 +1321,10 @@ LLJoint *LLAvatarAppearance::getCharacterJoint( U32 num )
{
return NULL;
}
if (!mSkeleton[num])
{
mSkeleton[num] = createAvatarJoint();
}
return mSkeleton[num];
}
@ -1476,16 +1561,19 @@ LLTexLayerSet* LLAvatarAppearance::getAvatarLayerSet(EBakedTextureIndex baked_in
//-----------------------------------------------------------------------------
BOOL LLAvatarAppearance::allocateCollisionVolumes( U32 num )
{
delete_and_clear_array(mCollisionVolumes);
mNumCollisionVolumes = 0;
if (mNumCollisionVolumes !=num)
{
delete_and_clear_array(mCollisionVolumes);
mNumCollisionVolumes = 0;
mCollisionVolumes = new LLAvatarJointCollisionVolume[num];
if (!mCollisionVolumes)
{
return FALSE;
}
mNumCollisionVolumes = num;
mCollisionVolumes = new LLAvatarJointCollisionVolume[num];
if (!mCollisionVolumes)
{
return FALSE;
}
mNumCollisionVolumes = num;
}
return TRUE;
}
@ -1503,6 +1591,9 @@ BOOL LLAvatarBoneInfo::parseXml(LLXmlTreeNode* node)
LL_WARNS() << "Bone without name" << LL_ENDL;
return FALSE;
}
static LLStdStringHandle aliases_string = LLXmlTree::addAttributeString("aliases");
node->getFastAttributeString(aliases_string, mAliases ); //Aliases are not required.
}
else if (node->hasName("collision_volume"))
{
@ -1540,6 +1631,20 @@ BOOL LLAvatarBoneInfo::parseXml(LLXmlTreeNode* node)
return FALSE;
}
static LLStdStringHandle end_string = LLXmlTree::addAttributeString("end");
if (!node->getFastAttributeVector3(end_string, mEnd))
{
LL_WARNS() << "Bone without end " << mName << LL_ENDL;
mEnd = LLVector3(0.0f, 0.0f, 0.0f);
}
static LLStdStringHandle support_string = LLXmlTree::addAttributeString("support");
if (!node->getFastAttributeString(support_string,mSupport))
{
LL_WARNS() << "Bone without support " << mName << LL_ENDL;
mSupport = "base";
}
if (mIsJoint)
{
static LLStdStringHandle pivot_string = LLXmlTree::addAttributeString("pivot");
@ -1595,6 +1700,54 @@ BOOL LLAvatarSkeletonInfo::parseXml(LLXmlTreeNode* node)
return TRUE;
}
//Make aliases for joint and push to map.
void LLAvatarAppearance::makeJointAliases(LLAvatarBoneInfo *bone_info)
{
if (! bone_info->mIsJoint )
{
return;
}
std::string bone_name = bone_info->mName;
mJointAliasMap[bone_name] = bone_name; //Actual name is a valid alias.
std::string aliases = bone_info->mAliases;
boost::char_separator<char> sep(" ");
boost::tokenizer<boost::char_separator<char> > tok(aliases, sep);
for(boost::tokenizer<boost::char_separator<char> >::iterator i = tok.begin(); i != tok.end(); ++i)
{
if ( mJointAliasMap.find(*i) != mJointAliasMap.end() )
{
LL_WARNS() << "avatar skeleton: Joint alias \"" << *i << "\" remapped from " << mJointAliasMap[*i] << " to " << bone_name << LL_ENDL;
}
mJointAliasMap[*i] = bone_name;
}
LLAvatarBoneInfo::child_list_t::const_iterator iter;
for (iter = bone_info->mChildList.begin(); iter != bone_info->mChildList.end(); ++iter)
{
makeJointAliases( *iter );
}
}
const LLAvatarAppearance::joint_alias_map_t& LLAvatarAppearance::getJointAliases ()
{
LLAvatarAppearance::joint_alias_map_t alias_map;
if (mJointAliasMap.empty())
{
LLAvatarSkeletonInfo::bone_info_list_t::const_iterator iter;
for (iter = sAvatarSkeletonInfo->mBoneInfoList.begin(); iter != sAvatarSkeletonInfo->mBoneInfoList.end(); ++iter)
{
//LLAvatarBoneInfo *bone_info = *iter;
makeJointAliases( *iter );
}
}
return mJointAliasMap;
}
//-----------------------------------------------------------------------------
// parseXmlSkeletonNode(): parses <skeleton> nodes from XML tree
@ -1625,7 +1778,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlSkeletonNode(LLXmlTreeNode* ro
{
LL_WARNS() << "Unknown param type." << LL_ENDL;
}
continue;
return FALSE;
}
LLPolySkeletalDistortionInfo *info = new LLPolySkeletalDistortionInfo;
@ -1650,7 +1803,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlSkeletonNode(LLXmlTreeNode* ro
{
LL_WARNS() << "No name supplied for attachment point." << LL_ENDL;
delete info;
continue;
return FALSE;
}
static LLStdStringHandle joint_string = LLXmlTree::addAttributeString("joint");
@ -1658,7 +1811,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlSkeletonNode(LLXmlTreeNode* ro
{
LL_WARNS() << "No bone declared in attachment point " << info->mName << LL_ENDL;
delete info;
continue;
return FALSE;
}
static LLStdStringHandle position_string = LLXmlTree::addAttributeString("position");
@ -1684,7 +1837,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlSkeletonNode(LLXmlTreeNode* ro
{
LL_WARNS() << "No id supplied for attachment point " << info->mName << LL_ENDL;
delete info;
continue;
return FALSE;
}
static LLStdStringHandle slot_string = LLXmlTree::addAttributeString("pie_slice");
@ -1770,7 +1923,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlMeshNodes(LLXmlTreeNode* root)
{
LL_WARNS() << "Unknown param type." << LL_ENDL;
}
continue;
return FALSE;
}
LLPolyMorphTargetInfo *morphinfo = new LLPolyMorphTargetInfo();
@ -1931,7 +2084,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlMorphNodes(LLXmlTreeNode* root
{
LL_WARNS() << "No name supplied for morph mask." << LL_ENDL;
delete info;
continue;
return FALSE;
}
static LLStdStringHandle region_string = LLXmlTree::addAttributeString("body_region");
@ -1939,7 +2092,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlMorphNodes(LLXmlTreeNode* root
{
LL_WARNS() << "No region supplied for morph mask." << LL_ENDL;
delete info;
continue;
return FALSE;
}
static LLStdStringHandle layer_string = LLXmlTree::addAttributeString("layer");
@ -1947,7 +2100,7 @@ BOOL LLAvatarAppearance::LLAvatarXmlInfo::parseXmlMorphNodes(LLXmlTreeNode* root
{
LL_WARNS() << "No layer supplied for morph mask." << LL_ENDL;
delete info;
continue;
return FALSE;
}
// optional parameter. don't throw a warning if not present.

View File

@ -66,7 +66,8 @@ public:
LLAvatarAppearance(LLWearableData* wearable_data);
virtual ~LLAvatarAppearance();
static void initClass(); // initializes static members
static void initClass(const std::string& avatar_file_name, const std::string& skeleton_file_name); // initializes static members
static void initClass();
static void cleanupClass(); // Cleanup data that's only init'd once per class.
virtual void initInstance(); // Called after construction to initialize the instance.
virtual BOOL loadSkeletonNode();
@ -124,8 +125,11 @@ public:
protected:
virtual LLAvatarJoint* createAvatarJoint() = 0;
virtual LLAvatarJoint* createAvatarJoint(S32 joint_num) = 0;
virtual LLAvatarJoint* createAvatarJoint(S32 joint_num) = 0;
virtual LLAvatarJointMesh* createAvatarJointMesh() = 0;
void makeJointAliases(LLAvatarBoneInfo *bone_info);
public:
F32 getPelvisToFoot() const { return mPelvisToFoot; }
/*virtual*/ LLJoint* getRootJoint() { return mRoot; }
@ -135,9 +139,20 @@ public:
typedef std::map<std::string, LLJoint*> joint_map_t;
joint_map_t mJointMap;
typedef std::map<std::string, LLVector3> joint_state_map_t;
joint_state_map_t mLastBodySizeState;
joint_state_map_t mCurrBodySizeState;
void compareJointStateMaps(joint_state_map_t& last_state,
joint_state_map_t& curr_state);
void computeBodySize();
public:
typedef std::vector<LLAvatarJoint*> avatar_joint_list_t;
const avatar_joint_list_t& getSkeleton() { return mSkeleton; }
typedef std::map<std::string, std::string> joint_alias_map_t;
const joint_alias_map_t& getJointAliases();
protected:
static BOOL parseSkeletonFile(const std::string& filename);
@ -147,12 +162,12 @@ protected:
BOOL setupBone(const LLAvatarBoneInfo* info, LLJoint* parent, S32 &current_volume_num, S32 &current_joint_num);
BOOL allocateCharacterJoints(U32 num);
BOOL buildSkeleton(const LLAvatarSkeletonInfo *info);
protected:
void clearSkeleton();
BOOL mIsBuilt; // state of deferred character building
typedef std::vector<LLAvatarJoint*> avatar_joint_list_t;
avatar_joint_list_t mSkeleton;
LLPosOverrideMap mPelvisFixups;
LLVector3OverrideMap mPelvisFixups;
joint_alias_map_t mJointAliasMap;
//--------------------------------------------------------------------
// Pelvis height adjustment members.
@ -335,6 +350,7 @@ protected:
// Collision volumes
//--------------------------------------------------------------------
public:
S32 mNumBones;
S32 mNumCollisionVolumes;
LLAvatarJointCollisionVolume* mCollisionVolumes;
protected:

View File

@ -52,19 +52,18 @@ LLAvatarJoint::LLAvatarJoint() :
init();
}
LLAvatarJoint::LLAvatarJoint(S32 joint_num) :
LLJoint(joint_num)
{
init();
}
LLAvatarJoint::LLAvatarJoint(const std::string &name, LLJoint *parent) :
LLJoint(name, parent)
{
init();
}
LLAvatarJoint::LLAvatarJoint(S32 joint_num) :
LLJoint(joint_num)
{
init();
}
void LLAvatarJoint::init()
{
mValid = FALSE;

View File

@ -33,22 +33,7 @@
#include "llavatarjointmesh.h"
#include "llavatarappearance.h"
//#include "llapr.h"
//#include "llbox.h"
//#include "lldrawable.h"
//#include "lldrawpoolavatar.h"
//#include "lldrawpoolbump.h"
//#include "lldynamictexture.h"
//#include "llface.h"
//#include "llgldbg.h"
//#include "llglheaders.h"
#include "lltexlayer.h"
//#include "llviewercamera.h"
//#include "llviewercontrol.h"
//#include "llviewertexturelist.h"
//#include "llsky.h"
//#include "pipeline.h"
//#include "llviewershadermgr.h"
#include "llmath.h"
#include "v4math.h"
#include "m3math.h"
@ -56,6 +41,41 @@
#include "llmatrix4a.h"
// Utility functions added with Bento to simplify handling of extra
// spine joints, or other new joints internal to the original
// skeleton, and unknown to the system avatar.
//-----------------------------------------------------------------------------
// getBaseSkeletonAncestor()
//-----------------------------------------------------------------------------
LLAvatarJoint *getBaseSkeletonAncestor(LLAvatarJoint* joint)
{
LLJoint *ancestor = joint->getParent();
while (ancestor->getParent() && (ancestor->getSupport() != LLJoint::SUPPORT_BASE))
{
LL_DEBUGS("Avatar") << "skipping non-base ancestor " << ancestor->getName() << LL_ENDL;
ancestor = ancestor->getParent();
}
return (LLAvatarJoint*) ancestor;
}
//-----------------------------------------------------------------------------
// totalSkinOffset()
//-----------------------------------------------------------------------------
LLVector3 totalSkinOffset(LLAvatarJoint *joint)
{
LLVector3 totalOffset;
while (joint)
{
if (joint->getSupport() == LLJoint::SUPPORT_BASE)
{
totalOffset += joint->getSkinOffset();
}
joint = (LLAvatarJoint*)joint->getParent();
}
return totalOffset;
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// LLAvatarJointMesh::LLSkinJoint
@ -92,18 +112,12 @@ BOOL LLSkinJoint::setupSkinJoint( LLAvatarJoint *joint)
}
// compute the inverse root skin matrix
mRootToJointSkinOffset.clearVec();
mRootToJointSkinOffset = totalSkinOffset(joint);
mRootToJointSkinOffset = -mRootToJointSkinOffset;
LLVector3 rootSkinOffset;
while (joint)
{
rootSkinOffset += joint->getSkinOffset();
joint = (LLAvatarJoint*)joint->getParent();
}
mRootToJointSkinOffset = -rootSkinOffset;
mRootToParentJointSkinOffset = mRootToJointSkinOffset;
mRootToParentJointSkinOffset += mJoint->getSkinOffset();
//mRootToParentJointSkinOffset = totalSkinOffset((LLAvatarJoint*)joint->getParent());
mRootToParentJointSkinOffset = totalSkinOffset(getBaseSkeletonAncestor(joint));
mRootToParentJointSkinOffset = -mRootToParentJointSkinOffset;
return TRUE;
}
@ -289,6 +303,7 @@ void LLAvatarJointMesh::setMesh( LLPolyMesh *mesh )
}
// acquire the transform from the mesh object
// SL-315
setPosition( mMesh->getPosition() );
setRotation( mMesh->getRotation() );
setScale( mMesh->getScale() );
@ -314,9 +329,9 @@ void LLAvatarJointMesh::setMesh( LLPolyMesh *mesh )
if (!mMesh->isLOD())
{
setupJoint((LLAvatarJoint*)getRoot());
LL_DEBUGS("Avatar") << getName() << " joint render entries: " << mMesh->mJointRenderData.size() << LL_ENDL;
}
LL_DEBUGS() << "joint render entries: " << mMesh->mJointRenderData.size() << LL_ENDL;
}
//-----------------------------------------------------------------------------
@ -324,9 +339,6 @@ void LLAvatarJointMesh::setMesh( LLPolyMesh *mesh )
//-----------------------------------------------------------------------------
void LLAvatarJointMesh::setupJoint(LLAvatarJoint* current_joint)
{
LL_DEBUGS() << "Mesh: " << getName() << LL_ENDL;
S32 joint_count = 0;
U32 sj;
for (sj=0; sj<mNumSkinJoints; sj++)
@ -339,22 +351,30 @@ void LLAvatarJointMesh::setupJoint(LLAvatarJoint* current_joint)
}
// we've found a skinjoint for this joint..
LL_DEBUGS("Avatar") << "Mesh: " << getName() << " joint " << current_joint->getName() << " matches skinjoint " << sj << LL_ENDL;
// is the last joint in the array our parent?
if(mMesh->mJointRenderData.size() && mMesh->mJointRenderData[mMesh->mJointRenderData.size() - 1]->mWorldMatrix == &current_joint->getParent()->getWorldMatrix())
std::vector<LLJointRenderData*> &jrd = mMesh->mJointRenderData;
// SL-287 - need to update this so the results are the same if
// additional extended-skeleton joints lie between this joint
// and the original parent.
LLJoint *ancestor = getBaseSkeletonAncestor(current_joint);
if(jrd.size() && jrd.back()->mWorldMatrix == &ancestor->getWorldMatrix())
{
// ...then just add ourselves
LLAvatarJoint* jointp = js.mJoint;
mMesh->mJointRenderData.push_back(new LLJointRenderData(&jointp->getWorldMatrix(), &js));
LL_DEBUGS() << "joint " << joint_count++ << js.mJoint->getName() << LL_ENDL;
jrd.push_back(new LLJointRenderData(&jointp->getWorldMatrix(), &js));
LL_DEBUGS("Avatar") << "add joint[" << (jrd.size()-1) << "] = " << js.mJoint->getName() << LL_ENDL;
}
// otherwise add our parent and ourselves
// otherwise add our ancestor and ourselves
else
{
mMesh->mJointRenderData.push_back(new LLJointRenderData(&current_joint->getParent()->getWorldMatrix(), NULL));
LL_DEBUGS() << "joint " << joint_count++ << current_joint->getParent()->getName() << LL_ENDL;
mMesh->mJointRenderData.push_back(new LLJointRenderData(&current_joint->getWorldMatrix(), &js));
LL_DEBUGS() << "joint " << joint_count++ << current_joint->getName() << LL_ENDL;
jrd.push_back(new LLJointRenderData(&ancestor->getWorldMatrix(), NULL));
LL_DEBUGS("Avatar") << "add2 ancestor joint[" << (jrd.size()-1) << "] = " << ancestor->getName() << LL_ENDL;
jrd.push_back(new LLJointRenderData(&current_joint->getWorldMatrix(), &js));
LL_DEBUGS("Avatar") << "add2 joint[" << (jrd.size()-1) << "] = " << current_joint->getName() << LL_ENDL;
}
}

View File

@ -110,6 +110,14 @@ void LLDriverParamInfo::toStream(std::ostream &out)
out << std::endl;
// FIXME - this mDriverParam backlink makes no sense, because the
// LLDriverParamInfos are static objects - there's only one copy
// for each param type, so the backlink will just reference the
// corresponding param in the most recently created
// avatar. Apparently these toStream() methods are not currently
// used anywhere, so it's not an urgent problem.
LL_WARNS_ONCE() << "Invalid usage of mDriverParam." << LL_ENDL;
if(mDriverParam && mDriverParam->getAvatarAppearance()->isSelf() &&
mDriverParam->getAvatarAppearance()->isValid())
{

View File

@ -128,6 +128,10 @@ public:
S32 getDrivenParamsCount() const;
const LLViewerVisualParam* getDrivenParam(S32 index) const;
typedef std::vector<LLDrivenEntry> entry_list_t;
entry_list_t& getDrivenList() { return mDriven; }
void setDrivenList(entry_list_t& driven_list) { mDriven = driven_list; }
protected:
LLDriverParam(const LLDriverParam& pOther);
F32 getDrivenWeight(const LLDrivenEntry* driven, F32 input_weight);
@ -135,7 +139,6 @@ protected:
LL_ALIGN_16(LLVector4a mDefaultVec); // temp holder
typedef std::vector<LLDrivenEntry> entry_list_t;
entry_list_t mDriven;
LLViewerVisualParam* mCurrentDistortionParam;
// Backlink only; don't make this an LLPointer.

View File

@ -369,7 +369,8 @@ BOOL LLPolyMorphTarget::setInfo(LLPolyMorphTargetInfo* info)
{
if (avatarp->mCollisionVolumes[i].getName() == volume_info->mName)
{
mVolumeMorphs.push_back(LLPolyVolumeMorph(&avatarp->mCollisionVolumes[i],
mVolumeMorphs.push_back(
LLPolyVolumeMorph(&avatarp->mCollisionVolumes[i],
volume_info->mScale,
volume_info->mPos));
break;
@ -647,6 +648,7 @@ void LLPolyMorphTarget::apply( ESex avatar_sex )
LLVector3 pos_delta = volume_morph->mPos * delta_weight;
volume_morph->mVolume->setScale(volume_morph->mVolume->getScale() + scale_delta);
// SL-315
volume_morph->mVolume->setPosition(volume_morph->mVolume->getPosition() + pos_delta);
}
}
@ -730,6 +732,20 @@ void LLPolyMorphTarget::applyMask(U8 *maskTextureData, S32 width, S32 height, S3
apply(mLastSex);
}
void LLPolyMorphTarget::applyVolumeChanges(F32 delta_weight)
{
// now apply volume changes
for( volume_list_t::iterator iter = mVolumeMorphs.begin(); iter != mVolumeMorphs.end(); iter++ )
{
LLPolyVolumeMorph* volume_morph = &(*iter);
LLVector3 scale_delta = volume_morph->mScale * delta_weight;
LLVector3 pos_delta = volume_morph->mPos * delta_weight;
volume_morph->mVolume->setScale(volume_morph->mVolume->getScale() + scale_delta);
// SL-315
volume_morph->mVolume->setPosition(volume_morph->mVolume->getPosition() + pos_delta);
}
}
//-----------------------------------------------------------------------------
// LLPolyVertexMask()

View File

@ -182,6 +182,8 @@ public:
void applyMask(U8 *maskData, S32 width, S32 height, S32 num_components, BOOL invert);
void addPendingMorphMask() { mNumMorphMasksPending++; }
void applyVolumeChanges(F32 delta_weight); // SL-315 - for resetSkeleton()
void* operator new(size_t size)
{
return ll_aligned_malloc_16(size);

View File

@ -34,6 +34,7 @@
#include "llpolymorph.h"
#include "llwearable.h"
#include "llfasttimer.h"
#include "llcallstack.h"
#include "llpolyskeletaldistortion.h"
@ -134,55 +135,49 @@ LLPolySkeletalDistortion::~LLPolySkeletalDistortion()
BOOL LLPolySkeletalDistortion::setInfo(LLPolySkeletalDistortionInfo *info)
{
llassert(mInfo == NULL);
if (info->mID < 0)
return FALSE;
mInfo = info;
mID = info->mID;
setWeight(getDefaultWeight());
if (info->mID < 0)
{
return FALSE;
}
mInfo = info;
mID = info->mID;
setWeight(getDefaultWeight());
LLPolySkeletalDistortionInfo::bone_info_list_t::iterator iter;
for (iter = getInfo()->mBoneInfoList.begin(); iter != getInfo()->mBoneInfoList.end(); iter++)
LLPolySkeletalDistortionInfo::bone_info_list_t::iterator iter;
for (iter = getInfo()->mBoneInfoList.begin(); iter != getInfo()->mBoneInfoList.end(); iter++)
{
LLPolySkeletalBoneInfo *bone_info = &(*iter);
LLJoint* joint = mAvatar->getJoint(bone_info->mBoneName);
if (!joint)
{
LLPolySkeletalBoneInfo *bone_info = &(*iter);
LLJoint* joint = mAvatar->getJoint(bone_info->mBoneName);
if (!joint)
{
LL_WARNS() << "Joint " << bone_info->mBoneName << " not found." << LL_ENDL;
continue;
}
if (mJointScales.find(joint) != mJointScales.end())
{
LL_WARNS() << "Scale deformation already supplied for joint " << joint->getName() << "." << LL_ENDL;
}
// store it
mJointScales[joint] = bone_info->mScaleDeformation;
// apply to children that need to inherit it
for (LLJoint::child_list_t::iterator iter = joint->mChildren.begin();
iter != joint->mChildren.end(); ++iter)
{
LLAvatarJoint* child_joint = (LLAvatarJoint*)(*iter);
if (child_joint->inheritScale())
{
LLVector3 childDeformation = LLVector3(child_joint->getScale());
childDeformation.scaleVec(bone_info->mScaleDeformation);
mJointScales[child_joint] = childDeformation;
}
}
if (bone_info->mHasPositionDeformation)
{
if (mJointOffsets.find(joint) != mJointOffsets.end())
{
LL_WARNS() << "Offset deformation already supplied for joint " << joint->getName() << "." << LL_ENDL;
}
mJointOffsets[joint] = bone_info->mPositionDeformation;
}
// There's no point continuing after this error - means
// that either the skeleton or lad file is broken.
LL_WARNS() << "Joint " << bone_info->mBoneName << " not found." << LL_ENDL;
return FALSE;
}
return TRUE;
// store it
mJointScales[joint] = bone_info->mScaleDeformation;
// apply to children that need to inherit it
for (LLJoint::child_list_t::iterator iter = joint->mChildren.begin();
iter != joint->mChildren.end(); ++iter)
{
LLAvatarJoint* child_joint = (LLAvatarJoint*)(*iter);
if (child_joint->inheritScale())
{
LLVector3 childDeformation = LLVector3(child_joint->getScale());
childDeformation.scaleVec(bone_info->mScaleDeformation);
mJointScales[child_joint] = childDeformation;
}
}
if (bone_info->mHasPositionDeformation)
{
mJointOffsets[joint] = bone_info->mPositionDeformation;
}
}
return TRUE;
}
/*virtual*/ LLViewerVisualParam* LLPolySkeletalDistortion::cloneParam(LLWearable* wearable) const
@ -197,42 +192,52 @@ static LLTrace::BlockTimerStatHandle FTM_POLYSKELETAL_DISTORTION_APPLY("Skeletal
void LLPolySkeletalDistortion::apply( ESex avatar_sex )
{
LL_RECORD_BLOCK_TIME(FTM_POLYSKELETAL_DISTORTION_APPLY);
LL_RECORD_BLOCK_TIME(FTM_POLYSKELETAL_DISTORTION_APPLY);
F32 effective_weight = ( getSex() & avatar_sex ) ? mCurWeight : getDefaultWeight();
F32 effective_weight = ( getSex() & avatar_sex ) ? mCurWeight : getDefaultWeight();
LLJoint* joint;
joint_vec_map_t::iterator iter;
LLJoint* joint;
joint_vec_map_t::iterator iter;
for (iter = mJointScales.begin();
iter != mJointScales.end();
iter++)
{
joint = iter->first;
LLVector3 newScale = joint->getScale();
LLVector3 scaleDelta = iter->second;
newScale = newScale + (effective_weight * scaleDelta) - (mLastWeight * scaleDelta);
//An aspect of attached mesh objects (which contain joint offsets) that need to be cleaned up when detached
// needed? // joint->storeScaleForReset( newScale );
joint->setScale(newScale);
}
for (iter = mJointScales.begin();
iter != mJointScales.end();
iter++)
{
joint = iter->first;
LLVector3 newScale = joint->getScale();
LLVector3 scaleDelta = iter->second;
LLVector3 offset = (effective_weight - mLastWeight) * scaleDelta;
newScale = newScale + offset;
//An aspect of attached mesh objects (which contain joint offsets) that need to be cleaned up when detached
// needed?
// joint->storeScaleForReset( newScale );
for (iter = mJointOffsets.begin();
iter != mJointOffsets.end();
iter++)
{
joint = iter->first;
LLVector3 newPosition = joint->getPosition();
LLVector3 positionDelta = iter->second;
newPosition = newPosition + (effective_weight * positionDelta) - (mLastWeight * positionDelta);
joint->setPosition(newPosition);
}
// BENTO for detailed stack tracing of params.
std::stringstream ostr;
ostr << "LLPolySkeletalDistortion::apply, id " << getID() << " " << getName() << " effective wt " << effective_weight << " last wt " << mLastWeight << " scaleDelta " << scaleDelta << " offset " << offset;
LLScopedContextString str(ostr.str());
if (mLastWeight != mCurWeight && !mIsAnimating)
{
mAvatar->setSkeletonSerialNum(mAvatar->getSkeletonSerialNum() + 1);
}
mLastWeight = mCurWeight;
joint->setScale(newScale, true);
}
for (iter = mJointOffsets.begin();
iter != mJointOffsets.end();
iter++)
{
joint = iter->first;
LLVector3 newPosition = joint->getPosition();
LLVector3 positionDelta = iter->second;
newPosition = newPosition + (effective_weight * positionDelta) - (mLastWeight * positionDelta);
// SL-315
bool allow_attachment_pos_overrides = true;
joint->setPosition(newPosition, allow_attachment_pos_overrides);
}
if (mLastWeight != effective_weight && !mIsAnimating)
{
mAvatar->setSkeletonSerialNum(mAvatar->getSkeletonSerialNum() + 1);
}
mLastWeight = effective_weight;
}

View File

@ -173,7 +173,7 @@ void LLWearable::createVisualParams(LLAvatarAppearance *avatarp)
{
if( !param->linkDrivenParams(boost::bind(param_function,avatarp,_1 ), true))
{
LL_WARNS() << "could not link driven params for wearable " << getName() << " id: " << param->getID() << LL_ENDL;
LL_DEBUGS("Avatar") << "could not link driven params for wearable " << getName() << " id: " << param->getID() << LL_ENDL;
continue;
}
}

View File

@ -91,12 +91,12 @@ target_link_libraries(
# Add tests
if (LL_TESTS)
include(LLAddBuildTest)
# UNIT TESTS
SET(llcharacter_TEST_SOURCE_FILES
lljoint.cpp
)
LL_ADD_PROJECT_UNIT_TESTS(llcharacter "${llcharacter_TEST_SOURCE_FILES}")
endif (LL_TESTS)
#if (LL_TESTS)
# include(LLAddBuildTest)
# # UNIT TESTS
# SET(llcharacter_TEST_SOURCE_FILES
# lljoint.cpp
# )
# LL_ADD_PROJECT_UNIT_TESTS(llcharacter "${llcharacter_TEST_SOURCE_FILES}")
#endif (LL_TESTS)

View File

@ -11,7 +11,7 @@
* License as published by the Free Software Foundation;
* version 2.1 of the License only.
*
* This library is distributed in the hope that it will be useful,
* This library is distributed in the hope that it will be useful,7
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
@ -29,6 +29,7 @@
#include "llbvhloader.h"
#include <boost/tokenizer.hpp>
#include <boost/lexical_cast.hpp>
#include "lldatapacker.h"
#include "lldir.h"
@ -36,6 +37,7 @@
#include "llquantize.h"
#include "llstl.h"
#include "llapr.h"
#include "llsdserialize.h"
using namespace std;
@ -121,52 +123,16 @@ LLQuaternion::Order bvhStringToOrder( char *str )
// LLBVHLoader()
//-----------------------------------------------------------------------------
/*
LLBVHLoader::LLBVHLoader(const char* buffer)
{
reset();
mStatus = loadTranslationTable("anim.ini");
if (mStatus == LLBVHLoader::ST_NO_XLT_FILE)
{
LL_WARNS() << "NOTE: No translation table found." << LL_ENDL;
return;
}
else
{
if (mStatus != LLBVHLoader::ST_OK)
{
LL_WARNS() << "ERROR: [line: " << getLineNumber() << "] " << mStatus << LL_ENDL;
return;
}
}
char error_text[128]; // Flawfinder: ignore
S32 error_line;
mStatus = loadBVHFile(buffer, error_text, error_line);
if (mStatus != LLBVHLoader::ST_OK)
{
LL_WARNS() << "ERROR: [line: " << getLineNumber() << "] " << mStatus << LL_ENDL;
return;
}
applyTranslations();
optimize();
mInitialized = TRUE;
}
*/
LLBVHLoader::LLBVHLoader(const char* buffer, ELoadStatus &loadStatus, S32 &errorLine)
LLBVHLoader::LLBVHLoader(const char* buffer, ELoadStatus &loadStatus, S32 &errorLine, std::map<std::string, std::string>& joint_alias_map )
{
reset();
errorLine = 0;
mStatus = loadTranslationTable("anim.ini");
loadStatus = mStatus;
LL_INFOS()<<"Load Status 00 : "<< loadStatus << LL_ENDL;
LL_INFOS("BVH") << "Load Status 00 : " << loadStatus << LL_ENDL;
if (mStatus == E_ST_NO_XLT_FILE)
{
//LL_WARNS() << "NOTE: No translation table found." << LL_ENDL;
LL_WARNS("BVH") << "NOTE: No translation table found." << LL_ENDL;
loadStatus = mStatus;
return;
}
@ -174,28 +140,43 @@ LLBVHLoader::LLBVHLoader(const char* buffer, ELoadStatus &loadStatus, S32 &error
{
if (mStatus != E_ST_OK)
{
//LL_WARNS() << "ERROR: [line: " << getLineNumber() << "] " << mStatus << LL_ENDL;
LL_WARNS("BVH") << "ERROR: [line: " << getLineNumber() << "] " << mStatus << LL_ENDL;
errorLine = getLineNumber();
loadStatus = mStatus;
return;
}
}
// Recognize all names we've been told are legal.
std::map<std::string, std::string>::iterator iter;
for (iter = joint_alias_map.begin(); iter != joint_alias_map.end(); iter++)
{
makeTranslation( iter->first , iter->second );
}
char error_text[128]; /* Flawfinder: ignore */
S32 error_line;
mStatus = loadBVHFile(buffer, error_text, error_line);
mStatus = loadBVHFile(buffer, error_text, error_line); //Reads all joints in BVH file.
LL_DEBUGS("BVH") << "============================================================" << LL_ENDL;
LL_DEBUGS("BVH") << "Raw data from file" << LL_ENDL;
dumpBVHInfo();
if (mStatus != E_ST_OK)
{
//LL_WARNS() << "ERROR: [line: " << getLineNumber() << "] " << mStatus << LL_ENDL;
LL_WARNS("BVH") << "ERROR: [line: " << getLineNumber() << "] " << mStatus << LL_ENDL;
loadStatus = mStatus;
errorLine = getLineNumber();
return;
}
applyTranslations();
applyTranslations(); //Maps between joints found in file and the aliased names.
optimize();
LL_DEBUGS("BVH") << "============================================================" << LL_ENDL;
LL_DEBUGS("BVH") << "After translations and optimize" << LL_ENDL;
dumpBVHInfo();
mInitialized = TRUE;
}
@ -211,10 +192,6 @@ LLBVHLoader::~LLBVHLoader()
//------------------------------------------------------------------------
ELoadStatus LLBVHLoader::loadTranslationTable(const char *fileName)
{
mLineNumber = 0;
mTranslations.clear();
mConstraints.clear();
//--------------------------------------------------------------------
// open file
//--------------------------------------------------------------------
@ -226,7 +203,7 @@ ELoadStatus LLBVHLoader::loadTranslationTable(const char *fileName)
if (!fp)
return E_ST_NO_XLT_FILE;
LL_INFOS() << "NOTE: Loading translation table: " << fileName << LL_ENDL;
LL_INFOS("BVH") << "NOTE: Loading translation table: " << fileName << LL_ENDL;
//--------------------------------------------------------------------
// register file to be closed on function exit
@ -244,7 +221,6 @@ ELoadStatus LLBVHLoader::loadTranslationTable(const char *fileName)
// load data one line at a time
//--------------------------------------------------------------------
BOOL loadingGlobals = FALSE;
Translation *trans = NULL;
while ( getLine(fp) )
{
//----------------------------------------------------------------
@ -271,13 +247,6 @@ ELoadStatus LLBVHLoader::loadTranslationTable(const char *fileName)
loadingGlobals = TRUE;
continue;
}
else
{
loadingGlobals = FALSE;
Translation &newTrans = mTranslations[ name ];
trans = &newTrans;
continue;
}
}
//----------------------------------------------------------------
@ -499,173 +468,101 @@ ELoadStatus LLBVHLoader::loadTranslationTable(const char *fileName)
mConstraints.push_back(constraint);
continue;
}
//----------------------------------------------------------------
// at this point there must be a valid trans pointer
//----------------------------------------------------------------
if ( ! trans )
return E_ST_NO_XLT_NAME;
//----------------------------------------------------------------
// check for ignore flag
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "ignore")==0 )
{
char trueFalse[128]; /* Flawfinder: ignore */
if ( sscanf(mLine, " %*s = %127s", trueFalse) != 1 ) /* Flawfinder: ignore */
return E_ST_NO_XLT_IGNORE;
trans->mIgnore = (LLStringUtil::compareInsensitive(trueFalse, "true")==0);
continue;
}
//----------------------------------------------------------------
// check for relativepos flag
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "relativepos")==0 )
{
F32 x, y, z;
char relpos[128]; /* Flawfinder: ignore */
if ( sscanf(mLine, " %*s = %f %f %f", &x, &y, &z) == 3 )
{
trans->mRelativePosition.setVec( x, y, z );
}
else if ( sscanf(mLine, " %*s = %127s", relpos) == 1 ) /* Flawfinder: ignore */
{
if ( LLStringUtil::compareInsensitive(relpos, "firstkey")==0 )
{
trans->mRelativePositionKey = TRUE;
}
else
{
return E_ST_NO_XLT_RELATIVE;
}
}
else
{
return E_ST_NO_XLT_RELATIVE;
}
continue;
}
//----------------------------------------------------------------
// check for relativerot flag
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "relativerot")==0 )
{
//F32 x, y, z;
char relpos[128]; /* Flawfinder: ignore */
if ( sscanf(mLine, " %*s = %127s", relpos) == 1 ) /* Flawfinder: ignore */
{
if ( LLStringUtil::compareInsensitive(relpos, "firstkey")==0 )
{
trans->mRelativeRotationKey = TRUE;
}
else
{
return E_ST_NO_XLT_RELATIVE;
}
}
else
{
return E_ST_NO_XLT_RELATIVE;
}
continue;
}
//----------------------------------------------------------------
// check for outname value
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "outname")==0 )
{
char outName[128]; /* Flawfinder: ignore */
if ( sscanf(mLine, " %*s = %127s", outName) != 1 ) /* Flawfinder: ignore */
return E_ST_NO_XLT_OUTNAME;
trans->mOutName = outName;
continue;
}
//----------------------------------------------------------------
// check for frame matrix value
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "frame")==0 )
{
LLMatrix3 fm;
if ( sscanf(mLine, " %*s = %f %f %f, %f %f %f, %f %f %f",
&fm.mMatrix[0][0], &fm.mMatrix[0][1], &fm.mMatrix[0][2],
&fm.mMatrix[1][0], &fm.mMatrix[1][1], &fm.mMatrix[1][2],
&fm.mMatrix[2][0], &fm.mMatrix[2][1], &fm.mMatrix[2][2] ) != 9 )
return E_ST_NO_XLT_MATRIX;
trans->mFrameMatrix = fm;
continue;
}
//----------------------------------------------------------------
// check for offset matrix value
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "offset")==0 )
{
LLMatrix3 om;
if ( sscanf(mLine, " %*s = %f %f %f, %f %f %f, %f %f %f",
&om.mMatrix[0][0], &om.mMatrix[0][1], &om.mMatrix[0][2],
&om.mMatrix[1][0], &om.mMatrix[1][1], &om.mMatrix[1][2],
&om.mMatrix[2][0], &om.mMatrix[2][1], &om.mMatrix[2][2] ) != 9 )
return E_ST_NO_XLT_MATRIX;
trans->mOffsetMatrix = om;
continue;
}
//----------------------------------------------------------------
// check for mergeparent value
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "mergeparent")==0 )
{
char mergeParentName[128]; /* Flawfinder: ignore */
if ( sscanf(mLine, " %*s = %127s", mergeParentName) != 1 ) /* Flawfinder: ignore */
return E_ST_NO_XLT_MERGEPARENT;
trans->mMergeParentName = mergeParentName;
continue;
}
//----------------------------------------------------------------
// check for mergechild value
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "mergechild")==0 )
{
char mergeChildName[128]; /* Flawfinder: ignore */
if ( sscanf(mLine, " %*s = %127s", mergeChildName) != 1 ) /* Flawfinder: ignore */
return E_ST_NO_XLT_MERGECHILD;
trans->mMergeChildName = mergeChildName;
continue;
}
//----------------------------------------------------------------
// check for per-joint priority
//----------------------------------------------------------------
if ( LLStringUtil::compareInsensitive(token, "priority")==0 )
{
S32 priority;
if ( sscanf(mLine, " %*s = %d", &priority) != 1 )
return E_ST_NO_XLT_PRIORITY;
trans->mPriorityModifier = priority;
continue;
}
}
infile.close() ;
return E_ST_OK;
}
void LLBVHLoader::makeTranslation(std::string alias_name, std::string joint_name)
{
//Translation &newTrans = (foomap.insert(value_type(alias_name, Translation()))).first();
Translation &newTrans = mTranslations[ alias_name ]; //Uses []'s implicit call to ctor.
newTrans.mOutName = joint_name;
LLMatrix3 fm;
LLVector3 vect1(0, 1, 0);
LLVector3 vect2(0, 0, 1);
LLVector3 vect3(1, 0, 0);
fm.setRows(vect1, vect2, vect3);
newTrans.mFrameMatrix = fm;
if (joint_name == "mPelvis")
{
newTrans.mRelativePositionKey = TRUE;
newTrans.mRelativeRotationKey = TRUE;
}
}
ELoadStatus LLBVHLoader::loadAliases(const char * filename)
{
LLSD aliases_sd;
std::string fullpath = gDirUtilp->getExpandedFilename(LL_PATH_APP_SETTINGS,filename);
llifstream input_stream;
input_stream.open(fullpath.c_str(), std::ios::in | std::ios::binary);
if(input_stream.is_open())
{
if ( LLSDSerialize::fromXML(aliases_sd, input_stream) )
{
for(LLSD::map_iterator alias_iter = aliases_sd.beginMap();
alias_iter != aliases_sd.endMap();
++alias_iter)
{
LLSD::String alias_name = alias_iter->first;
LLSD::String joint_name = alias_iter->second;
makeTranslation(alias_name, joint_name);
}
}
else
{
return E_ST_NO_XLT_HEADER;
}
input_stream.close();
}
else
{
LL_WARNS("BVH") << "Can't open joint alias file " << fullpath << LL_ENDL;
return E_ST_NO_XLT_FILE;
}
return E_ST_OK;
}
void LLBVHLoader::dumpBVHInfo()
{
for (U32 j=0; j<mJoints.size(); j++)
{
Joint *joint = mJoints[j];
LL_DEBUGS("BVH") << joint->mName << LL_ENDL;
for (S32 i=0; i<mNumFrames; i++)
{
if (i<joint->mKeys.size()) // Check this in case file load failed.
{
Key &prevkey = joint->mKeys[llmax(i-1,0)];
Key &key = joint->mKeys[i];
if ((i==0) ||
(key.mPos[0] != prevkey.mPos[0]) ||
(key.mPos[1] != prevkey.mPos[1]) ||
(key.mPos[2] != prevkey.mPos[2]) ||
(key.mRot[0] != prevkey.mRot[0]) ||
(key.mRot[1] != prevkey.mRot[1]) ||
(key.mRot[2] != prevkey.mRot[2])
)
{
LL_DEBUGS("BVH") << "FRAME " << i
<< " POS " << key.mPos[0] << "," << key.mPos[1] << "," << key.mPos[2]
<< " ROT " << key.mRot[0] << "," << key.mRot[1] << "," << key.mRot[2] << LL_ENDL;
}
}
}
}
}
//------------------------------------------------------------------------
// LLBVHLoader::loadBVHFile()
@ -746,6 +643,7 @@ ELoadStatus LLBVHLoader::loadBVHFile(const char *buffer, char* error_text, S32 &
{
iter++; // {
iter++; // OFFSET
iter++; // }
S32 depth = 0;
for (S32 j = (S32)parent_joints.size() - 1; j >= 0; j--)
{
@ -777,12 +675,19 @@ ELoadStatus LLBVHLoader::loadBVHFile(const char *buffer, char* error_text, S32 &
//---------------------------------------------------------------
// we require the root joint be "hip" - DEV-26188
//---------------------------------------------------------------
const char* FORCED_ROOT_NAME = "hip";
if ( (mJoints.size() == 0 ) && ( !strstr(jointName, FORCED_ROOT_NAME) ) )
{
strncpy(error_text, line.c_str(), 127); /* Flawfinder: ignore */
return E_ST_BAD_ROOT;
}
if (mJoints.size() == 0 )
{
//The root joint of the BVH file must be hip (mPelvis) or an alias of mPelvis.
const char* FORCED_ROOT_NAME = "hip";
TranslationMap::iterator hip_joint = mTranslations.find( FORCED_ROOT_NAME );
TranslationMap::iterator root_joint = mTranslations.find( jointName );
if ( hip_joint == mTranslations.end() || root_joint == mTranslations.end() || root_joint->second.mOutName != hip_joint->second.mOutName )
{
strncpy(error_text, line.c_str(), 127); /* Flawfinder: ignore */
return E_ST_BAD_ROOT;
}
}
//----------------------------------------------------------------
@ -790,11 +695,14 @@ ELoadStatus LLBVHLoader::loadBVHFile(const char *buffer, char* error_text, S32 &
//----------------------------------------------------------------
mJoints.push_back( new Joint( jointName ) );
Joint *joint = mJoints.back();
LL_DEBUGS("BVH") << "Created joint " << jointName << LL_ENDL;
LL_DEBUGS("BVH") << "- index " << mJoints.size()-1 << LL_ENDL;
S32 depth = 1;
for (S32 j = (S32)parent_joints.size() - 1; j >= 0; j--)
{
Joint *pjoint = mJoints[parent_joints[j]];
LL_DEBUGS("BVH") << "- ancestor " << pjoint->mName << LL_ENDL;
if (depth > pjoint->mChildTreeMaxDepth)
{
pjoint->mChildTreeMaxDepth = depth;
@ -863,6 +771,21 @@ ELoadStatus LLBVHLoader::loadBVHFile(const char *buffer, char* error_text, S32 &
return E_ST_NO_CHANNELS;
}
// Animating position (via mNumChannels = 6) is only supported for mPelvis.
int res = sscanf(line.c_str(), " CHANNELS %d", &joint->mNumChannels);
if ( res != 1 )
{
// Assume default if not otherwise specified.
if (mJoints.size()==1)
{
joint->mNumChannels = 6;
}
else
{
joint->mNumChannels = 3;
}
}
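As a hedged sketch (the type and function names are illustrative, not the loader's API), the number of floats expected on each MOTION frame line follows directly from these channel counts: it is simply the sum of mNumChannels over all joints, which the later per-joint size check on the tokenized frame line effectively enforces.

#include <numeric>
#include <vector>

struct JointChannels { int numChannels; };   // 6 = position + rotation, 3 = rotation only

// Expected float count for one MOTION frame line.
int expectedFloatsPerFrame(const std::vector<JointChannels>& joints)
{
    return std::accumulate(joints.begin(), joints.end(), 0,
        [](int sum, const JointChannels& j) { return sum + j.numChannels; });
}
// e.g. a 6-channel root plus two 3-channel joints expects 12 floats per frame.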
//----------------------------------------------------------------
// get rotation order
//----------------------------------------------------------------
@ -961,57 +884,49 @@ ELoadStatus LLBVHLoader::loadBVHFile(const char *buffer, char* error_text, S32 &
line = (*(iter++));
err_line++;
// read and store values
const char *p = line.c_str();
// Split line into a collection of floats.
std::deque<F32> floats;
boost::char_separator<char> whitespace_sep("\t ");
tokenizer float_tokens(line, whitespace_sep);
tokenizer::iterator float_token_iter = float_tokens.begin();
while (float_token_iter != float_tokens.end())
{
try
{
F32 val = boost::lexical_cast<float>(*float_token_iter);
floats.push_back(val);
}
catch (const boost::bad_lexical_cast&)
{
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_POS;
}
float_token_iter++;
}
LL_DEBUGS("BVH") << "Got " << floats.size() << " floats " << LL_ENDL;
for (U32 j=0; j<mJoints.size(); j++)
{
Joint *joint = mJoints[j];
joint->mKeys.push_back( Key() );
Key &key = joint->mKeys.back();
// get 3 pos values for root joint only
if (j==0)
if (floats.size() < joint->mNumChannels)
{
if ( sscanf(p, "%f %f %f", key.mPos, key.mPos+1, key.mPos+2) != 3 )
{
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_POS;
}
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_POS;
}
// skip to next 3 values in the line
p = find_next_whitespace(p);
if (!p)
// assume either numChannels == 6, in which case we have pos + rot,
// or numChannels == 3, in which case we have only rot.
if (joint->mNumChannels == 6)
{
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_ROT;
key.mPos[0] = floats.front(); floats.pop_front();
key.mPos[1] = floats.front(); floats.pop_front();
key.mPos[2] = floats.front(); floats.pop_front();
}
p = find_next_whitespace(++p);
if (!p)
{
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_ROT;
}
p = find_next_whitespace(++p);
if (!p)
{
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_ROT;
}
// get 3 rot values for joint
F32 rot[3];
if ( sscanf(p, " %f %f %f", rot, rot+1, rot+2) != 3 )
{
strncpy(error_text, line.c_str(), 127); /*Flawfinder: ignore*/
return E_ST_NO_ROT;
}
p++;
key.mRot[ joint->mOrder[0]-'X' ] = rot[0];
key.mRot[ joint->mOrder[1]-'X' ] = rot[1];
key.mRot[ joint->mOrder[2]-'X' ] = rot[2];
key.mRot[ joint->mOrder[0]-'X' ] = floats.front(); floats.pop_front();
key.mRot[ joint->mOrder[1]-'X' ] = floats.front(); floats.pop_front();
key.mRot[ joint->mOrder[2]-'X' ] = floats.front(); floats.pop_front();
}
}
@ -1045,7 +960,7 @@ void LLBVHLoader::applyTranslations()
//----------------------------------------------------------------
if ( trans.mIgnore )
{
//LL_INFOS() << "NOTE: Ignoring " << joint->mName.c_str() << LL_ENDL;
//LL_INFOS() << "NOTE: Ignoring " << joint->mName.c_str() << LL_ENDL;
joint->mIgnore = TRUE;
continue;
}
@ -1059,13 +974,12 @@ void LLBVHLoader::applyTranslations()
joint->mOutName = trans.mOutName;
}
//----------------------------------------------------------------
// Set the ignorepos flag if necessary
//----------------------------------------------------------------
if ( joint->mOutName == std::string("mPelvis") )
{
joint->mIgnorePositions = FALSE;
}
//Allow joint position changes as of SL-318
joint->mIgnorePositions = FALSE;
if (joint->mNumChannels == 3)
{
joint->mIgnorePositions = TRUE;
}
//----------------------------------------------------------------
// Set the relativepos flags if necessary
@ -1334,6 +1248,9 @@ void LLBVHLoader::reset()
mInitialized = FALSE;
mEmoteName = "";
mLineNumber = 0;
mTranslations.clear();
mConstraints.clear();
}
//------------------------------------------------------------------------
@ -1508,8 +1425,8 @@ BOOL LLBVHLoader::serialize(LLDataPacker& dp)
frame++;
}
// output position keys (only for 1st joint)
if ( ji == mJoints.begin() && !joint->mIgnorePositions )
// output position keys if joint has motion.
if ( !joint->mIgnorePositions )
{
dp.packS32(joint->mNumPosKeys, "num_pos_keys");
@ -1539,6 +1456,7 @@ BOOL LLBVHLoader::serialize(LLDataPacker& dp)
outPos *= INCHES_TO_METERS;
//SL-318 Pelvis position can only move 5m. Limiting all joint position offsets to this dist.
outPos -= relPos;
outPos.clamp(-LL_MAX_PELVIS_OFFSET, LL_MAX_PELVIS_OFFSET);
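A hedged numeric sketch of the SL-318 clamp above; the offset values are illustrative, and LL_MAX_PELVIS_OFFSET is the 5 m constant defined in lljoint.h in this same change.

#include <algorithm>
#include <cstdio>

int main()
{
    const float LL_MAX_PELVIS_OFFSET = 5.f;           // meters
    float outPos[3] = { 0.f, 12.f, -7.f };            // illustrative post-subtraction offset
    for (float& v : outPos)
    {
        v = std::max(-LL_MAX_PELVIS_OFFSET, std::min(v, LL_MAX_PELVIS_OFFSET));
    }
    std::printf("%g %g %g\n", outPos[0], outPos[1], outPos[2]);   // prints: 0 5 -5
    return 0;
}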
@ -1586,5 +1504,6 @@ BOOL LLBVHLoader::serialize(LLDataPacker& dp)
dp.packF32(constraint_it->mEaseOutStop, "ease_out_stop");
}
return TRUE;
}

View File

@ -102,6 +102,7 @@ struct Joint
mNumRotKeys = 0;
mChildTreeMaxDepth = 0;
mPriority = 0;
mNumChannels = 3;
}
// Include aligned members first
@ -123,6 +124,7 @@ struct Joint
S32 mNumRotKeys;
S32 mChildTreeMaxDepth;
S32 mPriority;
S32 mNumChannels;
};
@ -225,8 +227,7 @@ class LLBVHLoader
friend class LLKeyframeMotion;
public:
// Constructor
// LLBVHLoader(const char* buffer);
LLBVHLoader(const char* buffer, ELoadStatus &loadStatus, S32 &errorLine);
LLBVHLoader(const char* buffer, ELoadStatus &loadStatus, S32 &errorLine, std::map<std::string, std::string>& joint_alias_map );
~LLBVHLoader();
/*
@ -265,13 +266,22 @@ public:
static const char *ST_NO_XLT_EMOTE;
static const char *ST_BAD_ROOT;
*/
// Loads the specified translation table.
ELoadStatus loadTranslationTable(const char *fileName);
//Create a new joint alias
void makeTranslation(std::string key, std::string value);
// Loads joint aliases from XML file.
ELoadStatus loadAliases(const char * filename);
// Load the specified BVH file.
// Returns status code.
ELoadStatus loadBVHFile(const char *buffer, char *error_text, S32 &error_line);
void dumpBVHInfo();
// Applies translations to BVH data loaded.
void applyTranslations();

View File

@ -117,6 +117,7 @@ LLMotion::LLMotionInitStatus LLEditingMotion::onInitialize(LLCharacter *characte
addJointState( mWristState );
// propagate joint positions to kinematic chain
// SL-315
mParentJoint.setPosition( mParentState->getJoint()->getWorldPosition() );
mShoulderJoint.setPosition( mShoulderState->getJoint()->getPosition() );
mElbowJoint.setPosition( mElbowState->getJoint()->getPosition() );
@ -143,6 +144,7 @@ LLMotion::LLMotionInitStatus LLEditingMotion::onInitialize(LLCharacter *characte
BOOL LLEditingMotion::onActivate()
{
// propagate joint positions to kinematic chain
// SL-315
mParentJoint.setPosition( mParentState->getJoint()->getWorldPosition() );
mShoulderJoint.setPosition( mShoulderState->getJoint()->getPosition() );
mElbowJoint.setPosition( mElbowState->getJoint()->getPosition() );
@ -181,6 +183,7 @@ BOOL LLEditingMotion::onUpdate(F32 time, U8* joint_mask)
focus_pt += mCharacter->getCharacterPosition();
// propagate joint positions to kinematic chain
// SL-315
mParentJoint.setPosition( mParentState->getJoint()->getWorldPosition() );
mShoulderJoint.setPosition( mShoulderState->getJoint()->getPosition() );
mElbowJoint.setPosition( mElbowState->getJoint()->getPosition() );
@ -217,7 +220,8 @@ BOOL LLEditingMotion::onUpdate(F32 time, U8* joint_mask)
" and focus point " << focus_pt << LL_ENDL;
target.setVec(1.f, 1.f, 1.f);
}
// SL-315
mTarget.setPosition( target + mParentJoint.getPosition());
// LL_INFOS() << "Point At: " << mTarget.getPosition() << LL_ENDL;

View File

@ -285,7 +285,10 @@ LLEyeMotion::LLEyeMotion(const LLUUID &id) : LLMotion(id)
mName = "eye_rot";
mLeftEyeState = new LLJointState;
mAltLeftEyeState = new LLJointState;
mRightEyeState = new LLJointState;
mAltRightEyeState = new LLJointState;
}
@ -318,18 +321,38 @@ LLMotion::LLMotionInitStatus LLEyeMotion::onInitialize(LLCharacter *character)
return STATUS_FAILURE;
}
mAltLeftEyeState->setJoint( character->getJoint("mFaceEyeAltLeft") );
if ( ! mAltLeftEyeState->getJoint() )
{
LL_INFOS() << getName() << ": Can't get alt left eyeball joint." << LL_ENDL;
return STATUS_FAILURE;
}
mRightEyeState->setJoint( character->getJoint("mEyeRight") );
if ( ! mRightEyeState->getJoint() )
{
LL_INFOS() << getName() << ": Can't get Right eyeball joint." << LL_ENDL;
LL_INFOS() << getName() << ": Can't get right eyeball joint." << LL_ENDL;
return STATUS_FAILURE;
}
mAltRightEyeState->setJoint( character->getJoint("mFaceEyeAltRight") );
if ( ! mAltRightEyeState->getJoint() )
{
LL_INFOS() << getName() << ": Can't get alt right eyeball joint." << LL_ENDL;
return STATUS_FAILURE;
}
mLeftEyeState->setUsage(LLJointState::ROT);
mAltLeftEyeState->setUsage(LLJointState::ROT);
mRightEyeState->setUsage(LLJointState::ROT);
mAltRightEyeState->setUsage(LLJointState::ROT);
addJointState( mLeftEyeState );
addJointState( mAltLeftEyeState );
addJointState( mRightEyeState );
addJointState( mAltRightEyeState );
return STATUS_SUCCESS;
}
@ -343,17 +366,98 @@ BOOL LLEyeMotion::onActivate()
return TRUE;
}
//-----------------------------------------------------------------------------
// LLEyeMotion::adjustEyeTarget()
//-----------------------------------------------------------------------------
void LLEyeMotion::adjustEyeTarget(LLVector3* targetPos, LLJointState& left_eye_state, LLJointState& right_eye_state)
{
// Compute eye rotation.
BOOL has_eye_target = FALSE;
LLQuaternion target_eye_rot;
LLVector3 eye_look_at;
F32 vergence;
if (targetPos)
{
LLVector3 skyward(0.f, 0.f, 1.f);
LLVector3 left;
LLVector3 up;
eye_look_at = *targetPos;
has_eye_target = TRUE;
F32 lookAtDistance = eye_look_at.normVec();
left.setVec(skyward % eye_look_at);
up.setVec(eye_look_at % left);
target_eye_rot = LLQuaternion(eye_look_at, left, up);
// convert target rotation to head-local coordinates
target_eye_rot *= ~mHeadJoint->getWorldRotation();
// eliminate any Euler roll - we're lucky that roll is applied last.
F32 roll, pitch, yaw;
target_eye_rot.getEulerAngles(&roll, &pitch, &yaw);
target_eye_rot.setQuat(0.0f, pitch, yaw);
// constrain target orientation to be in front of avatar's face
target_eye_rot.constrain(EYE_ROT_LIMIT_ANGLE);
// calculate vergence
F32 interocular_dist = (left_eye_state.getJoint()->getWorldPosition() - right_eye_state.getJoint()->getWorldPosition()).magVec();
vergence = -atan2((interocular_dist / 2.f), lookAtDistance);
vergence = llclamp(vergence, -F_PI_BY_TWO, 0.f); // llclamp returns its result; assign it back or the clamp is a no-op
}
else
{
target_eye_rot = LLQuaternion::DEFAULT;
vergence = 0.f;
}
//RN: subtract 4 degrees to account for foveal angular offset relative to pupil
vergence += 4.f * DEG_TO_RAD;
// calculate eye jitter
LLQuaternion eye_jitter_rot;
// vergence not too high...
if (vergence > -0.05f)
{
//...go ahead and jitter
eye_jitter_rot.setQuat(0.f, mEyeJitterPitch + mEyeLookAwayPitch, mEyeJitterYaw + mEyeLookAwayYaw);
}
else
{
//...or don't
eye_jitter_rot.loadIdentity();
}
// calculate vergence of eyes as an object gets closer to the avatar's head
LLQuaternion vergence_quat;
if (has_eye_target)
{
vergence_quat.setQuat(vergence, LLVector3(0.f, 0.f, 1.f));
}
else
{
vergence_quat.loadIdentity();
}
// calculate eye rotations
LLQuaternion left_eye_rot = target_eye_rot;
left_eye_rot = vergence_quat * eye_jitter_rot * left_eye_rot;
LLQuaternion right_eye_rot = target_eye_rot;
vergence_quat.transQuat();
right_eye_rot = vergence_quat * eye_jitter_rot * right_eye_rot;
left_eye_state.setRotation( left_eye_rot );
right_eye_state.setRotation( right_eye_rot );
}
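A hedged numeric sketch of the vergence formula used above; the 0.06 m eye separation and 0.5 m focus distance are illustrative assumptions, not values taken from the viewer.

#include <cmath>
#include <cstdio>

int main()
{
    const float DEG_TO_RAD = 3.14159265f / 180.f;
    const float interocular_dist = 0.06f;    // assumed eye separation (meters)
    const float look_at_distance = 0.5f;     // assumed distance to the focus point (meters)

    // Same formula as adjustEyeTarget(): each eye turns inward by about 3.4 degrees here.
    float vergence = -std::atan2(interocular_dist / 2.f, look_at_distance);

    // The motion then adds ~4 degrees to account for the foveal offset relative to the pupil.
    vergence += 4.f * DEG_TO_RAD;
    std::printf("per-eye vergence: %f rad\n", vergence);
    return 0;
}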
//-----------------------------------------------------------------------------
// LLEyeMotion::onUpdate()
//-----------------------------------------------------------------------------
BOOL LLEyeMotion::onUpdate(F32 time, U8* joint_mask)
{
// Compute eye rotation.
LLQuaternion target_eye_rot;
LLVector3 eye_look_at;
F32 vergence;
//calculate jitter
if (mEyeJitterTimer.getElapsedTimeF32() > mEyeJitterTime)
{
@ -426,83 +530,10 @@ BOOL LLEyeMotion::onUpdate(F32 time, U8* joint_mask)
}
}
BOOL has_eye_target = FALSE;
LLVector3* targetPos = (LLVector3*)mCharacter->getAnimationData("LookAtPoint");
if (targetPos)
{
LLVector3 skyward(0.f, 0.f, 1.f);
LLVector3 left;
LLVector3 up;
eye_look_at = *targetPos;
has_eye_target = TRUE;
F32 lookAtDistance = eye_look_at.normVec();
left.setVec(skyward % eye_look_at);
up.setVec(eye_look_at % left);
target_eye_rot = LLQuaternion(eye_look_at, left, up);
// convert target rotation to head-local coordinates
target_eye_rot *= ~mHeadJoint->getWorldRotation();
// eliminate any Euler roll - we're lucky that roll is applied last.
F32 roll, pitch, yaw;
target_eye_rot.getEulerAngles(&roll, &pitch, &yaw);
target_eye_rot.setQuat(0.0f, pitch, yaw);
// constrain target orientation to be in front of avatar's face
target_eye_rot.constrain(EYE_ROT_LIMIT_ANGLE);
// calculate vergence
F32 interocular_dist = (mLeftEyeState->getJoint()->getWorldPosition() - mRightEyeState->getJoint()->getWorldPosition()).magVec();
vergence = -atan2((interocular_dist / 2.f), lookAtDistance);
llclamp(vergence, -F_PI_BY_TWO, 0.f);
}
else
{
target_eye_rot = LLQuaternion::DEFAULT;
vergence = 0.f;
}
//RN: subtract 4 degrees to account for foveal angular offset relative to pupil
vergence += 4.f * DEG_TO_RAD;
// calculate eye jitter
LLQuaternion eye_jitter_rot;
// vergence not too high...
if (vergence > -0.05f)
{
//...go ahead and jitter
eye_jitter_rot.setQuat(0.f, mEyeJitterPitch + mEyeLookAwayPitch, mEyeJitterYaw + mEyeLookAwayYaw);
}
else
{
//...or don't
eye_jitter_rot.loadIdentity();
}
// calculate vergence of eyes as an object gets closer to the avatar's head
LLQuaternion vergence_quat;
if (has_eye_target)
{
vergence_quat.setQuat(vergence, LLVector3(0.f, 0.f, 1.f));
}
else
{
vergence_quat.loadIdentity();
}
// calculate eye rotations
LLQuaternion left_eye_rot = target_eye_rot;
left_eye_rot = vergence_quat * eye_jitter_rot * left_eye_rot;
LLQuaternion right_eye_rot = target_eye_rot;
vergence_quat.transQuat();
right_eye_rot = vergence_quat * eye_jitter_rot * right_eye_rot;
mLeftEyeState->setRotation( left_eye_rot );
mRightEyeState->setRotation( right_eye_rot );
adjustEyeTarget(targetPos, *mLeftEyeState, *mRightEyeState);
adjustEyeTarget(targetPos, *mAltLeftEyeState, *mAltRightEyeState);
return TRUE;
}
@ -519,11 +550,23 @@ void LLEyeMotion::onDeactivate()
joint->setRotation(LLQuaternion::DEFAULT);
}
joint = mAltLeftEyeState->getJoint();
if (joint)
{
joint->setRotation(LLQuaternion::DEFAULT);
}
joint = mRightEyeState->getJoint();
if (joint)
{
joint->setRotation(LLQuaternion::DEFAULT);
}
joint = mAltRightEyeState->getJoint();
if (joint)
{
joint->setRotation(LLQuaternion::DEFAULT);
}
}
// End

View File

@ -176,6 +176,8 @@ public:
// it will be deactivated
virtual BOOL onActivate();
void adjustEyeTarget(LLVector3* targetPos, LLJointState& left_eye_state, LLJointState& right_eye_state);
// called per time step
// must return TRUE while it is active, and
// must return FALSE when the motion is completed.
@ -193,6 +195,8 @@ public:
LLJoint *mHeadJoint;
LLPointer<LLJointState> mLeftEyeState;
LLPointer<LLJointState> mRightEyeState;
LLPointer<LLJointState> mAltLeftEyeState;
LLPointer<LLJointState> mAltRightEyeState;
LLFrameTimer mEyeJitterTimer;
F32 mEyeJitterTime;

View File

@ -32,6 +32,8 @@
#include "lljoint.h"
#include "llmath.h"
#include "llcallstack.h"
#include <boost/algorithm/string.hpp>
S32 LLJoint::sNumUpdates = 0;
S32 LLJoint::sNumTouches = 0;
@ -42,7 +44,7 @@ bool attachment_map_iter_compare_key(const T& a, const T& b)
return a.first < b.first;
}
bool LLPosOverrideMap::findActiveOverride(LLUUID& mesh_id, LLVector3& pos) const
bool LLVector3OverrideMap::findActiveOverride(LLUUID& mesh_id, LLVector3& pos) const
{
pos = LLVector3(0,0,0);
mesh_id = LLUUID();
@ -60,7 +62,7 @@ bool LLPosOverrideMap::findActiveOverride(LLUUID& mesh_id, LLVector3& pos) const
return found;
}
void LLPosOverrideMap::showJointPosOverrides( std::ostringstream& os ) const
void LLVector3OverrideMap::showJointVector3Overrides( std::ostringstream& os ) const
{
map_type::const_iterator max_it = std::max_element(m_map.begin(),
m_map.end(),
@ -73,23 +75,23 @@ void LLPosOverrideMap::showJointPosOverrides( std::ostringstream& os ) const
}
}
U32 LLPosOverrideMap::count() const
U32 LLVector3OverrideMap::count() const
{
return m_map.size();
}
void LLPosOverrideMap::add(const LLUUID& mesh_id, const LLVector3& pos)
void LLVector3OverrideMap::add(const LLUUID& mesh_id, const LLVector3& pos)
{
m_map[mesh_id] = pos;
}
bool LLPosOverrideMap::remove(const LLUUID& mesh_id)
bool LLVector3OverrideMap::remove(const LLUUID& mesh_id)
{
U32 remove_count = m_map.erase(mesh_id);
return (remove_count > 0);
}
void LLPosOverrideMap::clear()
void LLVector3OverrideMap::clear()
{
m_map.clear();
}
@ -108,6 +110,8 @@ void LLJoint::init()
mXform.setScale(LLVector3(1.0f, 1.0f, 1.0f));
mDirtyFlags = MATRIX_DIRTY | ROTATION_DIRTY | POSITION_DIRTY;
mUpdateXform = TRUE;
mSupport = SUPPORT_BASE;
mEnd = LLVector3(0.0f, 0.0f, 0.0f);
}
LLJoint::LLJoint() :
@ -124,13 +128,12 @@ LLJoint::LLJoint(S32 joint_num) :
touch();
}
//-----------------------------------------------------------------------------
// LLJoint()
// Class Constructor
//-----------------------------------------------------------------------------
LLJoint::LLJoint(const std::string &name, LLJoint *parent) :
mJointNum(0)
mJointNum(-2)
{
init();
mUpdateXform = FALSE;
@ -169,6 +172,27 @@ void LLJoint::setup(const std::string &name, LLJoint *parent)
}
}
//-----------------------------------------------------------------------------
// setSupport()
//-----------------------------------------------------------------------------
void LLJoint::setSupport(const std::string& support_name)
{
if (support_name == "extended")
{
setSupport(SUPPORT_EXTENDED);
}
else if (support_name == "base")
{
setSupport(SUPPORT_BASE);
}
else
{
LL_WARNS() << "unknown support string " << support_name << LL_ENDL;
setSupport(SUPPORT_BASE);
}
}
//-----------------------------------------------------------------------------
// touch()
// Sets all dirty flags for all children, recursively.
@ -194,6 +218,18 @@ void LLJoint::touch(U32 flags)
}
}
//-----------------------------------------------------------------------------
// setJointNum()
//-----------------------------------------------------------------------------
void LLJoint::setJointNum(S32 joint_num)
{
mJointNum = joint_num;
if (mJointNum + 2 >= LL_CHARACTER_MAX_ANIMATED_JOINTS)
{
LL_INFOS() << "LL_CHARACTER_MAX_ANIMATED_JOINTS needs to be increased" << LL_ENDL;
LL_ERRS() << "joint_num " << joint_num << " + 2 is too large for " << LL_CHARACTER_MAX_ANIMATED_JOINTS << LL_ENDL;
}
}
//-----------------------------------------------------------------------------
// getRoot()
//-----------------------------------------------------------------------------
@ -290,43 +326,119 @@ const LLVector3& LLJoint::getPosition()
bool do_debug_joint(const std::string& name)
{
return false;
if (std::find(LLJoint::s_debugJointNames.begin(), LLJoint::s_debugJointNames.end(),name) != LLJoint::s_debugJointNames.end())
{
return true;
}
return false;
}
//--------------------------------------------------------------------
// setPosition()
//--------------------------------------------------------------------
void LLJoint::setPosition( const LLVector3& pos )
void LLJoint::setPosition( const LLVector3& requested_pos, bool apply_attachment_overrides )
{
if (pos != getPosition())
LLVector3 pos(requested_pos);
LLVector3 active_override;
LLUUID mesh_id;
if (apply_attachment_overrides && m_attachmentPosOverrides.findActiveOverride(mesh_id,active_override))
{
if (pos != active_override && do_debug_joint(getName()))
{
LLScopedContextString str("setPosition");
LL_DEBUGS("Avatar") << " joint " << getName() << " requested_pos " << requested_pos
<< " overriden by attachment " << active_override << LL_ENDL;
}
pos = active_override;
}
if ((pos != getPosition()) && do_debug_joint(getName()))
{
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << " joint " << getName() << " set pos " << pos << LL_ENDL;
}
LLScopedContextString str("setPosition");
LLCallStack cs;
LLContextStatus con_status;
LL_DEBUGS("Avatar") << " joint " << getName() << " set pos " << pos << LL_ENDL;
LL_DEBUGS("Avatar") << "CONTEXT:\n" << "====================\n" << con_status << "====================" << LL_ENDL;
LL_DEBUGS("Avatar") << "STACK:\n" << "====================\n" << cs << "====================" << LL_ENDL;
}
mXform.setPosition(pos);
touch(MATRIX_DIRTY | POSITION_DIRTY);
if (pos != getPosition())
{
mXform.setPosition(pos);
touch(MATRIX_DIRTY | POSITION_DIRTY);
}
}
void LLJoint::setDefaultPosition( const LLVector3& pos )
{
mDefaultPosition = pos;
}
const LLVector3& LLJoint::getDefaultPosition() const
{
return mDefaultPosition;
}
void LLJoint::setDefaultScale( const LLVector3& scale )
{
mDefaultScale = scale;
}
const LLVector3& LLJoint::getDefaultScale() const
{
return mDefaultScale;
}
void showJointPosOverrides( const LLJoint& joint, const std::string& note, const std::string& av_info )
{
std::ostringstream os;
os << joint.m_posBeforeOverrides;
joint.m_attachmentOverrides.showJointPosOverrides(os);
joint.m_attachmentPosOverrides.showJointVector3Overrides(os);
LL_DEBUGS("Avatar") << av_info << " joint " << joint.getName() << " " << note << " " << os.str() << LL_ENDL;
}
void showJointScaleOverrides( const LLJoint& joint, const std::string& note, const std::string& av_info )
{
std::ostringstream os;
os << joint.m_scaleBeforeOverrides;
joint.m_attachmentScaleOverrides.showJointVector3Overrides(os);
LL_DEBUGS("Avatar") << av_info << " joint " << joint.getName() << " " << note << " " << os.str() << LL_ENDL;
}
bool LLJoint::aboveJointPosThreshold(const LLVector3& pos) const
{
LLVector3 diff = pos - getDefaultPosition();
const F32 max_joint_pos_offset = 0.0001f; // 0.1 mm
return diff.lengthSquared() > max_joint_pos_offset * max_joint_pos_offset;
}
bool LLJoint::aboveJointScaleThreshold(const LLVector3& scale) const
{
LLVector3 diff = scale - getDefaultScale();
const F32 max_joint_scale_offset = 0.0001f; // dimensionless; scale has no length unit
return diff.lengthSquared() > max_joint_scale_offset * max_joint_scale_offset;
}
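A hedged check of the 0.1 mm significance cutoff above; the offset values are illustrative.

#include <cstdio>

int main()
{
    const float max_joint_pos_offset = 0.0001f;        // 0.1 mm
    const float dx = 0.f, dy = 0.f, dz = 0.00005f;     // illustrative 0.05 mm offset
    const bool significant =
        (dx * dx + dy * dy + dz * dz) > max_joint_pos_offset * max_joint_pos_offset;
    std::printf("significant: %s\n", significant ? "yes" : "no");   // prints: no
    return 0;
}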
//--------------------------------------------------------------------
// addAttachmentPosOverride()
//--------------------------------------------------------------------
void LLJoint::addAttachmentPosOverride( const LLVector3& pos, const LLUUID& mesh_id, const std::string& av_info )
void LLJoint::addAttachmentPosOverride( const LLVector3& pos, const LLUUID& mesh_id, const std::string& av_info, bool& active_override_changed )
{
active_override_changed = false;
if (mesh_id.isNull())
{
return;
}
if (!m_attachmentOverrides.count())
// BENTO
// Not clear pelvis overrides are meaningful/useful.
//if (mName == "mPelvis")
//{
// return;
//}
LLVector3 before_pos;
LLUUID before_mesh_id;
bool has_active_override_before = hasAttachmentPosOverride( before_pos, before_mesh_id );
if (!m_attachmentPosOverrides.count())
{
if (do_debug_joint(getName()))
{
@ -334,32 +446,50 @@ void LLJoint::addAttachmentPosOverride( const LLVector3& pos, const LLUUID& mesh
}
m_posBeforeOverrides = getPosition();
}
m_attachmentOverrides.add(mesh_id,pos);
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " addAttachmentPosOverride for mesh " << mesh_id << " pos " << pos << LL_ENDL;
}
updatePos(av_info);
m_attachmentPosOverrides.add(mesh_id,pos);
LLVector3 after_pos;
LLUUID after_mesh_id;
hasAttachmentPosOverride(after_pos, after_mesh_id);
if (!has_active_override_before || (after_pos != before_pos))
{
active_override_changed = true;
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " addAttachmentPosOverride for mesh " << mesh_id << " pos " << pos << LL_ENDL;
}
updatePos(av_info);
}
}
//--------------------------------------------------------------------
// removeAttachmentPosOverride()
//--------------------------------------------------------------------
void LLJoint::removeAttachmentPosOverride( const LLUUID& mesh_id, const std::string& av_info )
void LLJoint::removeAttachmentPosOverride( const LLUUID& mesh_id, const std::string& av_info, bool& active_override_changed )
{
active_override_changed = false;
if (mesh_id.isNull())
{
return;
}
if (m_attachmentOverrides.remove(mesh_id))
LLVector3 before_pos;
LLUUID before_mesh_id;
hasAttachmentPosOverride( before_pos, before_mesh_id );
if (m_attachmentPosOverrides.remove(mesh_id))
{
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName()
<< " removeAttachmentPosOverride for " << mesh_id << LL_ENDL;
showJointPosOverrides(*this, "remove", av_info);
}
updatePos(av_info);
LLVector3 after_pos;
LLUUID after_mesh_id;
bool has_active_override_after = hasAttachmentPosOverride(after_pos, after_mesh_id);
if (!has_active_override_after || (after_pos != before_pos))
{
active_override_changed = true;
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName()
<< " removeAttachmentPosOverride for " << mesh_id << LL_ENDL;
showJointPosOverrides(*this, "remove", av_info);
}
updatePos(av_info);
}
}
}
@ -368,7 +498,7 @@ void LLJoint::removeAttachmentPosOverride( const LLUUID& mesh_id, const std::str
//--------------------------------------------------------------------
bool LLJoint::hasAttachmentPosOverride( LLVector3& pos, LLUUID& mesh_id ) const
{
return m_attachmentOverrides.findActiveOverride(mesh_id,pos);
return m_attachmentPosOverrides.findActiveOverride(mesh_id,pos);
}
//--------------------------------------------------------------------
@ -376,11 +506,81 @@ bool LLJoint::hasAttachmentPosOverride( LLVector3& pos, LLUUID& mesh_id ) const
//--------------------------------------------------------------------
void LLJoint::clearAttachmentPosOverrides()
{
if (m_attachmentOverrides.count())
if (m_attachmentPosOverrides.count())
{
m_attachmentOverrides.clear();
m_attachmentPosOverrides.clear();
setPosition(m_posBeforeOverrides);
setId( LLUUID::null );
}
}
//--------------------------------------------------------------------
// getAllAttachmentPosOverrides()
//--------------------------------------------------------------------
void LLJoint::getAllAttachmentPosOverrides(S32& num_pos_overrides,
std::set<LLVector3>& distinct_pos_overrides)
{
num_pos_overrides = m_attachmentPosOverrides.count();
LLVector3OverrideMap::map_type::const_iterator it = m_attachmentPosOverrides.getMap().begin();
for (; it != m_attachmentPosOverrides.getMap().end(); ++it)
{
distinct_pos_overrides.insert(it->second);
}
}
//--------------------------------------------------------------------
// getAllAttachmentScaleOverrides()
//--------------------------------------------------------------------
void LLJoint::getAllAttachmentScaleOverrides(S32& num_scale_overrides,
std::set<LLVector3>& distinct_scale_overrides)
{
num_scale_overrides = m_attachmentScaleOverrides.count();
LLVector3OverrideMap::map_type::const_iterator it = m_attachmentScaleOverrides.getMap().begin();
for (; it != m_attachmentScaleOverrides.getMap().end(); ++it)
{
distinct_scale_overrides.insert(it->second);
}
}
//--------------------------------------------------------------------
// showAttachmentPosOverrides()
//--------------------------------------------------------------------
void LLJoint::showAttachmentPosOverrides(const std::string& av_info) const
{
LLVector3 active_override;
bool has_active_override;
LLUUID mesh_id;
has_active_override = m_attachmentPosOverrides.findActiveOverride(mesh_id,active_override);
U32 count = m_attachmentPosOverrides.count();
if (count==1)
{
LLVector3OverrideMap::map_type::const_iterator it = m_attachmentPosOverrides.getMap().begin();
std::string highlight = (has_active_override && (it->second == active_override)) ? "*" : "";
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName()
<< " has single attachment pos override " << highlight << "" << it->second << " default " << mDefaultPosition << LL_ENDL;
}
else if (count>1)
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " has " << count << " attachment pos overrides" << LL_ENDL;
std::set<LLVector3> distinct_offsets;
LLVector3OverrideMap::map_type::const_iterator it = m_attachmentPosOverrides.getMap().begin();
for (; it != m_attachmentPosOverrides.getMap().end(); ++it)
{
distinct_offsets.insert(it->second);
}
if (distinct_offsets.size()>1)
{
LL_DEBUGS("Avatar") << "CONFLICTS, " << distinct_offsets.size() << " different values" << LL_ENDL;
}
else
{
LL_DEBUGS("Avatar") << "no conflicts" << LL_ENDL;
}
std::set<LLVector3>::iterator dit = distinct_offsets.begin();
for ( ; dit != distinct_offsets.end(); ++dit)
{
std::string highlight = (has_active_override && *dit == active_override) ? "*" : "";
LL_DEBUGS("Avatar") << " POS " << highlight << "" << (*dit) << " default " << mDefaultPosition << LL_ENDL;
}
}
}
@ -391,19 +591,177 @@ void LLJoint::updatePos(const std::string& av_info)
{
LLVector3 pos, found_pos;
LLUUID mesh_id;
if (m_attachmentOverrides.findActiveOverride(mesh_id,found_pos))
if (m_attachmentPosOverrides.findActiveOverride(mesh_id,found_pos))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " updatePos, winner of " << m_attachmentOverrides.count() << " is mesh " << mesh_id << " pos " << found_pos << LL_ENDL;
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " updatePos, winner of " << m_attachmentPosOverrides.count() << " is mesh " << mesh_id << " pos " << found_pos << LL_ENDL;
}
pos = found_pos;
}
else
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " updatePos, winner is posBeforeOverrides " << m_posBeforeOverrides << LL_ENDL;
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " updatePos, winner is posBeforeOverrides " << m_posBeforeOverrides << LL_ENDL;
}
pos = m_posBeforeOverrides;
}
setPosition(pos);
}
//--------------------------------------------------------------------
// updateScale()
//--------------------------------------------------------------------
void LLJoint::updateScale(const std::string& av_info)
{
LLVector3 scale, found_scale;
LLUUID mesh_id;
if (m_attachmentScaleOverrides.findActiveOverride(mesh_id,found_scale))
{
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " updateScale, winner of " << m_attachmentScaleOverrides.count() << " is mesh " << mesh_id << " scale " << found_scale << LL_ENDL;
}
scale = found_scale;
}
else
{
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " updateScale, winner is scaleBeforeOverrides " << m_scaleBeforeOverrides << LL_ENDL;
}
scale = m_scaleBeforeOverrides;
}
setScale(scale);
}
//--------------------------------------------------------------------
// addAttachmentScaleOverride()
//--------------------------------------------------------------------
void LLJoint::addAttachmentScaleOverride( const LLVector3& scale, const LLUUID& mesh_id, const std::string& av_info )
{
if (mesh_id.isNull())
{
return;
}
if (!m_attachmentScaleOverrides.count())
{
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " saving m_scaleBeforeOverrides " << getScale() << LL_ENDL;
}
m_scaleBeforeOverrides = getScale();
}
m_attachmentScaleOverrides.add(mesh_id,scale);
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " addAttachmentScaleOverride for mesh " << mesh_id << " scale " << scale << LL_ENDL;
}
updateScale(av_info);
}
//--------------------------------------------------------------------
// removeAttachmentScaleOverride()
//--------------------------------------------------------------------
void LLJoint::removeAttachmentScaleOverride( const LLUUID& mesh_id, const std::string& av_info )
{
if (mesh_id.isNull())
{
return;
}
if (m_attachmentScaleOverrides.remove(mesh_id))
{
if (do_debug_joint(getName()))
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName()
<< " removeAttachmentScaleOverride for " << mesh_id << LL_ENDL;
showJointScaleOverrides(*this, "remove", av_info);
}
updateScale(av_info);
}
}
//--------------------------------------------------------------------
// hasAttachmentScaleOverride()
//--------------------------------------------------------------------
bool LLJoint::hasAttachmentScaleOverride( LLVector3& scale, LLUUID& mesh_id ) const
{
return m_attachmentScaleOverrides.findActiveOverride(mesh_id,scale);
}
//--------------------------------------------------------------------
// clearAttachmentScaleOverrides()
//--------------------------------------------------------------------
void LLJoint::clearAttachmentScaleOverrides()
{
if (m_attachmentScaleOverrides.count())
{
m_attachmentScaleOverrides.clear();
setScale(m_scaleBeforeOverrides);
}
}
//--------------------------------------------------------------------
// showAttachmentScaleOverrides()
//--------------------------------------------------------------------
void LLJoint::showAttachmentScaleOverrides(const std::string& av_info) const
{
LLVector3 active_override;
bool has_active_override;
LLUUID mesh_id;
has_active_override = m_attachmentScaleOverrides.findActiveOverride(mesh_id,active_override);
U32 count = m_attachmentScaleOverrides.count();
if (count==1)
{
LLVector3OverrideMap::map_type::const_iterator it = m_attachmentScaleOverrides.getMap().begin();
std::string highlight = (has_active_override && (it->second == active_override)) ? "*" : "";
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName()
<< " has single attachment scale override " << highlight << "" << it->second << " default " << mDefaultScale << LL_ENDL;
}
else if (count>1)
{
LL_DEBUGS("Avatar") << "av " << av_info << " joint " << getName() << " has " << count << " attachment scale overrides" << LL_ENDL;
std::set<LLVector3> distinct_offsets;
LLVector3OverrideMap::map_type::const_iterator it = m_attachmentScaleOverrides.getMap().begin();
for (; it != m_attachmentScaleOverrides.getMap().end(); ++it)
{
distinct_offsets.insert(it->second);
}
if (distinct_offsets.size()>1)
{
LL_DEBUGS("Avatar") << "CONFLICTS, " << distinct_offsets.size() << " different values" << LL_ENDL;
}
else
{
LL_DEBUGS("Avatar") << "no conflicts" << LL_ENDL;
}
std::set<LLVector3>::iterator dit = distinct_offsets.begin();
for ( ; dit != distinct_offsets.end(); ++dit)
{
std::string highlight = (has_active_override && *dit == active_override) ? "*" : "";
LL_DEBUGS("Avatar") << " POS " << highlight << "" << (*dit) << " default " << mDefaultScale << LL_ENDL;
}
}
}
// init static
LLJoint::debug_joint_name_t LLJoint::s_debugJointNames = debug_joint_name_t();
//--------------------------------------------------------------------
// setDebugJointNames
//--------------------------------------------------------------------
void LLJoint::setDebugJointNames(const debug_joint_name_t& names)
{
s_debugJointNames = names;
}
void LLJoint::setDebugJointNames(const std::string& names_string)
{
debug_joint_name_t names;
boost::split(names, names_string, boost::is_any_of(" :,"));
setDebugJointNames(names);
}
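A hedged usage sketch of the debug-name parsing above, mirroring the boost::split call with " :," separators; the joint names are illustrative.

#include <boost/algorithm/string.hpp>
#include <iostream>
#include <set>
#include <string>

int main()
{
    std::set<std::string> names;
    std::string input("mPelvis mChest,mHead");
    boost::split(names, input, boost::is_any_of(" :,"));
    for (const std::string& n : names)
    {
        std::cout << n << "\n";   // prints mChest, mHead, mPelvis (set order)
    }
    return 0;
}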
//--------------------------------------------------------------------
// getWorldPosition()
//--------------------------------------------------------------------
@ -529,13 +887,32 @@ const LLVector3& LLJoint::getScale()
//--------------------------------------------------------------------
// setScale()
//--------------------------------------------------------------------
void LLJoint::setScale( const LLVector3& scale )
void LLJoint::setScale( const LLVector3& requested_scale, bool apply_attachment_overrides )
{
// if (mXform.getScale() != scale)
LLVector3 scale(requested_scale);
LLUUID mesh_id;
LLVector3 active_override;
if (apply_attachment_overrides && m_attachmentScaleOverrides.findActiveOverride(mesh_id,active_override))
{
if (scale != active_override && do_debug_joint(getName()))
{
LLScopedContextString str("setScale");
LL_DEBUGS("Avatar") << " joint " << getName() << " requested_scale " << requested_scale
<< " overriden by attachment " << active_override << LL_ENDL;
}
scale = active_override;
}
if ((mXform.getScale() != scale) && do_debug_joint(getName()))
{
mXform.setScale(scale);
touch();
LLScopedContextString str("setScale");
LLCallStack cs;
LLContextStatus con_status;
LL_DEBUGS("Avatar") << " joint " << getName() << " set scale " << scale << LL_ENDL;
LL_DEBUGS("Avatar") << "CONTEXT:\n" << "====================\n" << con_status << LL_ENDL;
LL_DEBUGS("Avatar") << "STACK:\n" << "====================\n" << cs << "====================" << LL_ENDL;
}
mXform.setScale(scale);
touch();
}

View File

@ -40,24 +40,33 @@
#include "xform.h"
const S32 LL_CHARACTER_MAX_JOINTS_PER_MESH = 15;
const U32 LL_CHARACTER_MAX_JOINTS = 32; // must be divisible by 4!
const U32 LL_HAND_JOINT_NUM = 31;
const U32 LL_FACE_JOINT_NUM = 30;
// Need to set this to count of animate-able joints,
// currently = #bones + #collision_volumes + #attachments + 2,
// rounded to next multiple of 4.
const U32 LL_CHARACTER_MAX_ANIMATED_JOINTS = 216; // must be divisible by 4!
const U32 LL_MAX_JOINTS_PER_MESH_OBJECT = 110;
// These should be higher than the joint_num of any
// other joint, to avoid conflicts in updateMotionsByType()
const U32 LL_HAND_JOINT_NUM = (LL_CHARACTER_MAX_ANIMATED_JOINTS-1);
const U32 LL_FACE_JOINT_NUM = (LL_CHARACTER_MAX_ANIMATED_JOINTS-2);
const S32 LL_CHARACTER_MAX_PRIORITY = 7;
const F32 LL_MAX_PELVIS_OFFSET = 5.f;
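A hedged sanity sketch of this numbering scheme: the top two animated-joint slots are reserved for the synthetic hand and face joints, which is why LLJoint::setJointNum() (later in this change) errors out when joint_num + 2 reaches the limit. The specific joint_num below is illustrative.

#include <cstdio>

int main()
{
    const int MAX_ANIMATED = 216;                     // LL_CHARACTER_MAX_ANIMATED_JOINTS
    const int HAND = MAX_ANIMATED - 1;                // LL_HAND_JOINT_NUM
    const int FACE = MAX_ANIMATED - 2;                // LL_FACE_JOINT_NUM
    const int joint_num = 213;                        // illustrative regular joint
    const bool ok = (joint_num + 2 < MAX_ANIMATED);   // same test as setJointNum()
    std::printf("hand %d face %d joint %d ok=%d\n", HAND, FACE, joint_num, ok);
    return 0;
}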
class LLPosOverrideMap
class LLVector3OverrideMap
{
public:
LLPosOverrideMap() {}
LLVector3OverrideMap() {}
bool findActiveOverride(LLUUID& mesh_id, LLVector3& pos) const;
void showJointPosOverrides(std::ostringstream& os) const;
void showJointVector3Overrides(std::ostringstream& os) const;
U32 count() const;
void add(const LLUUID& mesh_id, const LLVector3& pos);
bool remove(const LLUUID& mesh_id);
void clear();
private:
typedef std::map<LLUUID,LLVector3> map_type;
const map_type& getMap() const { return m_map; }
private:
map_type m_map;
};
@ -86,17 +95,26 @@ public:
POSITION_DIRTY = 0x1 << 2,
ALL_DIRTY = 0x7
};
public:
enum SupportCategory
{
SUPPORT_BASE,
SUPPORT_EXTENDED
};
protected:
std::string mName;
SupportCategory mSupport;
// parent joint
LLJoint *mParent;
// explicit transformation members
LLXformMatrix mXform;
LLUUID mId;
LLVector3 mDefaultPosition;
LLVector3 mDefaultScale;
public:
U32 mDirtyFlags;
BOOL mUpdateXform;
@ -104,6 +122,10 @@ public:
// describes the skin binding pose
LLVector3 mSkinOffset;
// Endpoint of the bone, if applicable. This is only relevant for
// external programs like Blender, and for diagnostic display.
LLVector3 mEnd;
S32 mJointNum;
// child joints
@ -113,15 +135,40 @@ public:
// debug statics
static S32 sNumTouches;
static S32 sNumUpdates;
typedef std::set<std::string> debug_joint_name_t;
static debug_joint_name_t s_debugJointNames;
static void setDebugJointNames(const debug_joint_name_t& names);
static void setDebugJointNames(const std::string& names_string);
LLPosOverrideMap m_attachmentOverrides;
// Position overrides
LLVector3OverrideMap m_attachmentPosOverrides;
LLVector3 m_posBeforeOverrides;
// Scale overrides
LLVector3OverrideMap m_attachmentScaleOverrides;
LLVector3 m_scaleBeforeOverrides;
void updatePos(const std::string& av_info);
void updateScale(const std::string& av_info);
public:
LLJoint();
LLJoint(S32 joint_num);
// Note: these joint_num constructors are a bad idea because there
// are only a couple of places in the code where it is useful to
// have a joint num for a joint (for joints that are used in
// animations), and including them as part of the constructor then
// forces us to maintain an alternate path through the entire
// large-ish class hierarchy of joint types. The only reason they
// are still here now is to avoid breaking the baking service
// (appearanceutility) builds; these constructors are not used in
// the viewer. Once the appearance utility is updated to remove
// these joint num references, which it shouldn't ever need, from
// its own classes, we can also remove all the joint_num
// constructors from LLJoint, LLViewerJoint, LLAvatarJoint, and
// createAvatarJoint.
LLJoint(S32 joint_num);
// *TODO: Only used for LLVOAvatarSelf::mScreenp. *DOES NOT INITIALIZE mResetAfterRestoreOldXform*
LLJoint( const std::string &name, LLJoint *parent=NULL );
virtual ~LLJoint();
@ -139,6 +186,19 @@ public:
const std::string& getName() const { return mName; }
void setName( const std::string &name ) { mName = name; }
// joint num
S32 getJointNum() const { return mJointNum; }
void setJointNum(S32 joint_num);
// get/set support
SupportCategory getSupport() const { return mSupport; }
void setSupport( const SupportCategory& support) { mSupport = support; }
void setSupport( const std::string& support_string);
// get/set end point
void setEnd( const LLVector3& end) { mEnd = end; }
const LLVector3& getEnd() const { return mEnd; }
// getParent
LLJoint *getParent() { return mParent; }
@ -155,10 +215,16 @@ public:
// get/set local position
const LLVector3& getPosition();
void setPosition( const LLVector3& pos );
void setPosition( const LLVector3& pos, bool apply_attachment_overrides = false );
// Tracks the default position defined by the skeleton
void setDefaultPosition( const LLVector3& pos );
const LLVector3& getDefaultPosition() const;
// Tracks the default scale defined by the skeleton
void setDefaultScale( const LLVector3& scale );
const LLVector3& getDefaultScale() const;
// get/set world position
LLVector3 getWorldPosition();
LLVector3 getLastWorldPosition();
@ -175,7 +241,7 @@ public:
// get/set local scale
const LLVector3& getScale();
void setScale( const LLVector3& scale );
void setScale( const LLVector3& scale, bool apply_attachment_overrides = false );
// get/set world matrix
const LLMatrix4 &getWorldMatrix();
@ -198,17 +264,26 @@ public:
virtual BOOL isAnimatable() const { return TRUE; }
S32 getJointNum() const { return mJointNum; }
void addAttachmentPosOverride( const LLVector3& pos, const LLUUID& mesh_id, const std::string& av_info );
void removeAttachmentPosOverride( const LLUUID& mesh_id, const std::string& av_info );
void addAttachmentPosOverride( const LLVector3& pos, const LLUUID& mesh_id, const std::string& av_info, bool& active_override_changed );
void removeAttachmentPosOverride( const LLUUID& mesh_id, const std::string& av_info, bool& active_override_changed );
bool hasAttachmentPosOverride( LLVector3& pos, LLUUID& mesh_id ) const;
void clearAttachmentPosOverrides();
void showAttachmentPosOverrides(const std::string& av_info) const;
//Accessor for the joint id
LLUUID getId( void ) { return mId; }
//Setter for the joints id
void setId( const LLUUID& id ) { mId = id;}
void addAttachmentScaleOverride( const LLVector3& scale, const LLUUID& mesh_id, const std::string& av_info );
void removeAttachmentScaleOverride( const LLUUID& mesh_id, const std::string& av_info );
bool hasAttachmentScaleOverride( LLVector3& scale, LLUUID& mesh_id ) const;
void clearAttachmentScaleOverrides();
void showAttachmentScaleOverrides(const std::string& av_info) const;
void getAllAttachmentPosOverrides(S32& num_pos_overrides,
std::set<LLVector3>& distinct_pos_overrides);
void getAllAttachmentScaleOverrides(S32& num_scale_overrides,
std::set<LLVector3>& distinct_scale_overrides);
// These are used in checks of whether a pos/scale override is considered significant.
bool aboveJointPosThreshold(const LLVector3& pos) const;
bool aboveJointScaleThreshold(const LLVector3& scale) const;
};
#endif // LL_LLJOINT_H

View File

@ -498,6 +498,7 @@ LLMotion::LLMotionInitStatus LLKeyframeMotion::onInitialize(LLCharacter *charact
// request asset
mAssetStatus = ASSET_FETCHED;
LL_DEBUGS("Animation") << "Requesting data fetch for: " << mID << LL_ENDL;
character_id = new LLUUID(mCharacter->getID());
gAssetStorage->getAssetData(mID,
LLAssetType::AT_ANIMATION,
@ -1380,7 +1381,7 @@ BOOL LLKeyframeMotion::deserialize(LLDataPacker& dp)
LL_WARNS() << "no joints in animation" << LL_ENDL;
return FALSE;
}
else if (num_motions > LL_CHARACTER_MAX_JOINTS)
else if (num_motions > LL_CHARACTER_MAX_ANIMATED_JOINTS)
{
LL_WARNS() << "too many joints in animation" << LL_ENDL;
return FALSE;
@ -1419,7 +1420,14 @@ BOOL LLKeyframeMotion::deserialize(LLDataPacker& dp)
LLJoint *joint = mCharacter->getJoint( joint_name );
if (joint)
{
S32 joint_num = joint->getJointNum();
// LL_INFOS() << " joint: " << joint_name << LL_ENDL;
if ((joint_num >= (S32)LL_CHARACTER_MAX_ANIMATED_JOINTS) || (joint_num < 0))
{
LL_WARNS() << "Joint will be omitted from animation: joint_num " << joint_num << " is outside of legal range [0-"
<< LL_CHARACTER_MAX_ANIMATED_JOINTS << ") for joint " << joint->getName() << LL_ENDL;
joint = NULL;
}
}
else
{
@ -1603,6 +1611,12 @@ BOOL LLKeyframeMotion::deserialize(LLDataPacker& dp)
if (old_version)
{
success = dp.unpackVector3(pos_key.mPosition, "pos");
//MAINT-6162
pos_key.mPosition.mV[VX] = llclamp( pos_key.mPosition.mV[VX], -LL_MAX_PELVIS_OFFSET, LL_MAX_PELVIS_OFFSET);
pos_key.mPosition.mV[VY] = llclamp( pos_key.mPosition.mV[VY], -LL_MAX_PELVIS_OFFSET, LL_MAX_PELVIS_OFFSET);
pos_key.mPosition.mV[VZ] = llclamp( pos_key.mPosition.mV[VZ], -LL_MAX_PELVIS_OFFSET, LL_MAX_PELVIS_OFFSET);
}
else
{
@ -1868,6 +1882,8 @@ BOOL LLKeyframeMotion::serialize(LLDataPacker& dp) const
{
BOOL success = TRUE;
LL_DEBUGS("BVH") << "serializing" << LL_ENDL;
success &= dp.packU16(KEYFRAME_MOTION_VERSION, "version");
success &= dp.packU16(KEYFRAME_MOTION_SUBVERSION, "sub_version");
success &= dp.packS32(mJointMotionList->mBasePriority, "base_priority");
@ -1881,6 +1897,19 @@ BOOL LLKeyframeMotion::serialize(LLDataPacker& dp) const
success &= dp.packU32(mJointMotionList->mHandPose, "hand_pose");
success &= dp.packU32(mJointMotionList->getNumJointMotions(), "num_joints");
LL_DEBUGS("BVH") << "version " << KEYFRAME_MOTION_VERSION << LL_ENDL;
LL_DEBUGS("BVH") << "sub_version " << KEYFRAME_MOTION_SUBVERSION << LL_ENDL;
LL_DEBUGS("BVH") << "base_priority " << mJointMotionList->mBasePriority << LL_ENDL;
LL_DEBUGS("BVH") << "duration " << mJointMotionList->mDuration << LL_ENDL;
LL_DEBUGS("BVH") << "emote_name " << mJointMotionList->mEmoteName << LL_ENDL;
LL_DEBUGS("BVH") << "loop_in_point " << mJointMotionList->mLoopInPoint << LL_ENDL;
LL_DEBUGS("BVH") << "loop_out_point " << mJointMotionList->mLoopOutPoint << LL_ENDL;
LL_DEBUGS("BVH") << "loop " << mJointMotionList->mLoop << LL_ENDL;
LL_DEBUGS("BVH") << "ease_in_duration " << mJointMotionList->mEaseInDuration << LL_ENDL;
LL_DEBUGS("BVH") << "ease_out_duration " << mJointMotionList->mEaseOutDuration << LL_ENDL;
LL_DEBUGS("BVH") << "hand_pose " << mJointMotionList->mHandPose << LL_ENDL;
LL_DEBUGS("BVH") << "num_joints " << mJointMotionList->getNumJointMotions() << LL_ENDL;
for (U32 i = 0; i < mJointMotionList->getNumJointMotions(); i++)
{
JointMotion* joint_motionp = mJointMotionList->getJointMotion(i);
@ -1888,6 +1917,7 @@ BOOL LLKeyframeMotion::serialize(LLDataPacker& dp) const
success &= dp.packS32(joint_motionp->mPriority, "joint_priority");
success &= dp.packS32(joint_motionp->mRotationCurve.mNumKeys, "num_rot_keys");
LL_DEBUGS("BVH") << "Joint " << joint_motionp->mJointName << LL_ENDL;
for (RotationCurve::key_map_t::iterator iter = joint_motionp->mRotationCurve.mKeys.begin();
iter != joint_motionp->mRotationCurve.mKeys.end(); ++iter)
{
@ -1905,6 +1935,8 @@ BOOL LLKeyframeMotion::serialize(LLDataPacker& dp) const
success &= dp.packU16(x, "rot_angle_x");
success &= dp.packU16(y, "rot_angle_y");
success &= dp.packU16(z, "rot_angle_z");
LL_DEBUGS("BVH") << " rot: t " << rot_key.mTime << " angles " << rot_angles.mV[VX] <<","<< rot_angles.mV[VY] <<","<< rot_angles.mV[VZ] << LL_ENDL;
}
success &= dp.packS32(joint_motionp->mPositionCurve.mNumKeys, "num_pos_keys");
@ -1923,37 +1955,54 @@ BOOL LLKeyframeMotion::serialize(LLDataPacker& dp) const
success &= dp.packU16(x, "pos_x");
success &= dp.packU16(y, "pos_y");
success &= dp.packU16(z, "pos_z");
LL_DEBUGS("BVH") << " pos: t " << pos_key.mTime << " pos " << pos_key.mPosition.mV[VX] <<","<< pos_key.mPosition.mV[VY] <<","<< pos_key.mPosition.mV[VZ] << LL_ENDL;
}
}
success &= dp.packS32(mJointMotionList->mConstraints.size(), "num_constraints");
LL_DEBUGS("BVH") << "num_constraints " << mJointMotionList->mConstraints.size() << LL_ENDL;
for (JointMotionList::constraint_list_t::const_iterator iter = mJointMotionList->mConstraints.begin();
iter != mJointMotionList->mConstraints.end(); ++iter)
{
JointConstraintSharedData* shared_constraintp = *iter;
success &= dp.packU8(shared_constraintp->mChainLength, "chain_length");
success &= dp.packU8(shared_constraintp->mConstraintType, "constraint_type");
char volume_name[16]; /* Flawfinder: ignore */
snprintf(volume_name, sizeof(volume_name), "%s", /* Flawfinder: ignore */
char source_volume[16]; /* Flawfinder: ignore */
snprintf(source_volume, sizeof(source_volume), "%s", /* Flawfinder: ignore */
mCharacter->findCollisionVolume(shared_constraintp->mSourceConstraintVolume)->getName().c_str());
success &= dp.packBinaryDataFixed((U8*)volume_name, 16, "source_volume");
success &= dp.packBinaryDataFixed((U8*)source_volume, 16, "source_volume");
success &= dp.packVector3(shared_constraintp->mSourceConstraintOffset, "source_offset");
char target_volume[16]; /* Flawfinder: ignore */
if (shared_constraintp->mConstraintTargetType == CONSTRAINT_TARGET_TYPE_GROUND)
{
snprintf(volume_name,sizeof(volume_name), "%s", "GROUND"); /* Flawfinder: ignore */
snprintf(target_volume,sizeof(target_volume), "%s", "GROUND"); /* Flawfinder: ignore */
}
else
{
snprintf(volume_name, sizeof(volume_name),"%s", /* Flawfinder: ignore */
snprintf(target_volume, sizeof(target_volume),"%s", /* Flawfinder: ignore */
mCharacter->findCollisionVolume(shared_constraintp->mTargetConstraintVolume)->getName().c_str());
}
success &= dp.packBinaryDataFixed((U8*)volume_name, 16, "target_volume");
success &= dp.packBinaryDataFixed((U8*)target_volume, 16, "target_volume");
success &= dp.packVector3(shared_constraintp->mTargetConstraintOffset, "target_offset");
success &= dp.packVector3(shared_constraintp->mTargetConstraintDir, "target_dir");
success &= dp.packF32(shared_constraintp->mEaseInStartTime, "ease_in_start");
success &= dp.packF32(shared_constraintp->mEaseInStopTime, "ease_in_stop");
success &= dp.packF32(shared_constraintp->mEaseOutStartTime, "ease_out_start");
success &= dp.packF32(shared_constraintp->mEaseOutStopTime, "ease_out_stop");
LL_DEBUGS("BVH") << " chain_length " << shared_constraintp->mChainLength << LL_ENDL;
LL_DEBUGS("BVH") << " constraint_type " << (S32)shared_constraintp->mConstraintType << LL_ENDL;
LL_DEBUGS("BVH") << " source_volume " << source_volume << LL_ENDL;
LL_DEBUGS("BVH") << " source_offset " << shared_constraintp->mSourceConstraintOffset << LL_ENDL;
LL_DEBUGS("BVH") << " target_volume " << target_volume << LL_ENDL;
LL_DEBUGS("BVH") << " target_offset " << shared_constraintp->mTargetConstraintOffset << LL_ENDL;
LL_DEBUGS("BVH") << " target_dir " << shared_constraintp->mTargetConstraintDir << LL_ENDL;
LL_DEBUGS("BVH") << " ease_in_start " << shared_constraintp->mEaseInStartTime << LL_ENDL;
LL_DEBUGS("BVH") << " ease_in_stop " << shared_constraintp->mEaseInStopTime << LL_ENDL;
LL_DEBUGS("BVH") << " ease_out_start " << shared_constraintp->mEaseOutStartTime << LL_ENDL;
LL_DEBUGS("BVH") << " ease_out_stop " << shared_constraintp->mEaseOutStopTime << LL_ENDL;
}
return success;
@ -1971,6 +2020,51 @@ U32 LLKeyframeMotion::getFileSize()
return dp.getCurrentSize();
}
//-----------------------------------------------------------------------------
// dumpToFile()
//-----------------------------------------------------------------------------
void LLKeyframeMotion::dumpToFile(const std::string& name)
{
if (isLoaded())
{
std::string outfile_base;
if (!name.empty())
{
outfile_base = name;
}
else if (!getName().empty())
{
outfile_base = getName();
}
else
{
const LLUUID& id = getID();
outfile_base = id.asString();
}
std::string outfilename = gDirUtilp->getExpandedFilename(LL_PATH_LOGS,outfile_base + ".anim");
if (LLFile::isfile(outfilename))
{
return;
}
S32 file_size = getFileSize();
U8* buffer = new U8[file_size];
LL_DEBUGS("BVH") << "Dumping " << outfilename << LL_ENDL;
LLDataPackerBinaryBuffer dp(buffer, file_size);
if (serialize(dp))
{
LLAPRFile outfile;
outfile.open(outfilename, LL_APR_WPB);
if (outfile.getFileHandle())
{
outfile.write(buffer, file_size);
}
}
delete [] buffer;
}
}
//-----------------------------------------------------------------------------
// getPelvisBBox()
//-----------------------------------------------------------------------------
@ -2149,7 +2243,7 @@ void LLKeyframeMotion::onLoadComplete(LLVFS *vfs,
LLCharacter* character = *char_iter;
// look for an existing instance of this motion
LLKeyframeMotion* motionp = (LLKeyframeMotion*) character->findMotion(asset_uuid);
LLKeyframeMotion* motionp = dynamic_cast<LLKeyframeMotion*> (character->findMotion(asset_uuid));
if (motionp)
{
if (0 == status)
@ -2164,8 +2258,8 @@ void LLKeyframeMotion::onLoadComplete(LLVFS *vfs,
U8* buffer = new U8[size];
file.read((U8*)buffer, size); /*Flawfinder: ignore*/
LL_DEBUGS() << "Loading keyframe data for: " << motionp->getName() << ":" << motionp->getID() << " (" << size << " bytes)" << LL_ENDL;
LL_DEBUGS("Animation") << "Loading keyframe data for: " << motionp->getName() << ":" << motionp->getID() << " (" << size << " bytes)" << LL_ENDL;
LLDataPackerBinaryBuffer dp(buffer, size);
if (motionp->deserialize(dp))

View File

@ -153,6 +153,7 @@ public:
BOOL serialize(LLDataPacker& dp) const;
BOOL deserialize(LLDataPacker& dp);
BOOL isLoaded() { return mJointMotionList != NULL; }
void dumpToFile(const std::string& name);
// setters for modifying a keyframe animation

View File

@ -201,10 +201,12 @@ BOOL LLKeyframeStandMotion::onUpdate(F32 time, U8* joint_mask)
//-------------------------------------------------------------------------
// propagate joint positions to internal versions
//-------------------------------------------------------------------------
// SL-315
mPelvisJoint.setPosition(
root_world_pos +
mPelvisState->getPosition() );
// SL-315
mHipLeftJoint.setPosition( mHipLeftState->getJoint()->getPosition() );
mKneeLeftJoint.setPosition( mKneeLeftState->getJoint()->getPosition() );
mAnkleLeftJoint.setPosition( mAnkleLeftState->getJoint()->getPosition() );
@ -213,6 +215,7 @@ BOOL LLKeyframeStandMotion::onUpdate(F32 time, U8* joint_mask)
mKneeLeftJoint.setScale( mKneeLeftState->getJoint()->getScale() );
mAnkleLeftJoint.setScale( mAnkleLeftState->getJoint()->getScale() );
// SL-315
mHipRightJoint.setPosition( mHipRightState->getJoint()->getPosition() );
mKneeRightJoint.setPosition( mKneeRightState->getJoint()->getPosition() );
mAnkleRightJoint.setPosition( mAnkleRightState->getJoint()->getPosition() );
@ -265,6 +268,7 @@ BOOL LLKeyframeStandMotion::onUpdate(F32 time, U8* joint_mask)
mCharacter->getGround( mAnkleLeftJoint.getWorldPosition(), mPositionLeft, mNormalLeft);
mCharacter->getGround( mAnkleRightJoint.getWorldPosition(), mPositionRight, mNormalRight);
// SL-315
mTargetLeft.setPosition( mPositionLeft );
mTargetRight.setPosition( mPositionRight );
}

View File

@ -55,7 +55,7 @@ LLMotion::LLMotion( const LLUUID &id ) :
mDeactivateCallbackUserData(NULL)
{
for (S32 i=0; i<3; ++i)
memset(&mJointSignature[i][0], 0, sizeof(U8) * LL_CHARACTER_MAX_JOINTS);
memset(&mJointSignature[i][0], 0, sizeof(U8) * LL_CHARACTER_MAX_ANIMATED_JOINTS);
}
//-----------------------------------------------------------------------------
@ -111,9 +111,15 @@ void LLMotion::addJointState(const LLPointer<LLJointState>& jointState)
U32 usage = jointState->getUsage();
// for now, usage is everything
mJointSignature[0][jointState->getJoint()->getJointNum()] = (usage & LLJointState::POS) ? (0xff >> (7 - priority)) : 0;
mJointSignature[1][jointState->getJoint()->getJointNum()] = (usage & LLJointState::ROT) ? (0xff >> (7 - priority)) : 0;
mJointSignature[2][jointState->getJoint()->getJointNum()] = (usage & LLJointState::SCALE) ? (0xff >> (7 - priority)) : 0;
S32 joint_num = jointState->getJoint()->getJointNum();
if ((joint_num >= (S32)LL_CHARACTER_MAX_ANIMATED_JOINTS) || (joint_num < 0))
{
LL_WARNS() << "joint_num " << joint_num << " is outside of legal range [0-" << LL_CHARACTER_MAX_ANIMATED_JOINTS << ") for joint " << jointState->getJoint()->getName() << LL_ENDL;
return;
}
mJointSignature[0][joint_num] = (usage & LLJointState::POS) ? (0xff >> (7 - priority)) : 0;
mJointSignature[1][joint_num] = (usage & LLJointState::ROT) ? (0xff >> (7 - priority)) : 0;
mJointSignature[2][joint_num] = (usage & LLJointState::SCALE) ? (0xff >> (7 - priority)) : 0;
}
void LLMotion::setDeactivateCallback( void (*cb)(void *), void* userdata )

View File

@ -181,7 +181,7 @@ protected:
F32 mSendStopTimestamp; // time when simulator should be told to stop this motion
F32 mResidualWeight; // blend weight at beginning of stop motion phase
F32 mFadeWeight; // for fading in and out based on LOD
U8 mJointSignature[3][LL_CHARACTER_MAX_JOINTS]; // signature of which joints are animated at what priority
U8 mJointSignature[3][LL_CHARACTER_MAX_ANIMATED_JOINTS]; // signature of which joints are animated at what priority
void (*mDeactivateCallback)(void* data);
void* mDeactivateCallbackUserData;
};

View File

@ -37,7 +37,8 @@
#include "llanimationstates.h"
#include "llstl.h"
const S32 NUM_JOINT_SIGNATURE_STRIDES = LL_CHARACTER_MAX_JOINTS / 4;
// This is why LL_CHARACTER_MAX_ANIMATED_JOINTS needs to be a multiple of 4.
const S32 NUM_JOINT_SIGNATURE_STRIDES = LL_CHARACTER_MAX_ANIMATED_JOINTS / 4;
const U32 MAX_MOTION_INSTANCES = 32;
//-----------------------------------------------------------------------------
@ -488,8 +489,8 @@ void LLMotionController::updateAdditiveMotions()
//-----------------------------------------------------------------------------
void LLMotionController::resetJointSignatures()
{
memset(&mJointSignature[0][0], 0, sizeof(U8) * LL_CHARACTER_MAX_JOINTS);
memset(&mJointSignature[1][0], 0, sizeof(U8) * LL_CHARACTER_MAX_JOINTS);
memset(&mJointSignature[0][0], 0, sizeof(U8) * LL_CHARACTER_MAX_ANIMATED_JOINTS);
memset(&mJointSignature[1][0], 0, sizeof(U8) * LL_CHARACTER_MAX_ANIMATED_JOINTS);
}
//-----------------------------------------------------------------------------
@ -553,9 +554,9 @@ static LLTrace::BlockTimerStatHandle FTM_MOTION_ON_UPDATE("Motion onUpdate");
void LLMotionController::updateMotionsByType(LLMotion::LLMotionBlendType anim_type)
{
BOOL update_result = TRUE;
U8 last_joint_signature[LL_CHARACTER_MAX_JOINTS];
U8 last_joint_signature[LL_CHARACTER_MAX_ANIMATED_JOINTS];
memset(&last_joint_signature, 0, sizeof(U8) * LL_CHARACTER_MAX_JOINTS);
memset(&last_joint_signature, 0, sizeof(U8) * LL_CHARACTER_MAX_ANIMATED_JOINTS);
// iterate through active motions in chronological order
for (motion_list_t::iterator iter = mActiveMotions.begin();
@ -576,7 +577,6 @@ void LLMotionController::updateMotionsByType(LLMotion::LLMotionBlendType anim_ty
}
else
{
// NUM_JOINT_SIGNATURE_STRIDES should be multiple of 4
for (S32 i = 0; i < NUM_JOINT_SIGNATURE_STRIDES; i++)
{
U32 *current_signature = (U32*)&(mJointSignature[0][i * 4]);
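A short hedged note (not part of the diff) on the multiple-of-4 requirement: the comparison above walks the U8 joint-signature array one U32 at a time, i.e. four joints per stride, so the stride count only covers the array exactly when the joint count divides evenly by 4.
// Illustrative arithmetic, assuming (hypothetically) 216 animated joints:
//   NUM_JOINT_SIGNATURE_STRIDES = 216 / 4 = 54 strides of 4 bytes = 216 bytes, a perfect fit.
// With, say, 217 joints, the 55th stride would read 3 bytes past the end of mJointSignature.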

View File

@ -223,7 +223,7 @@ protected:
S32 mTimeStepCount;
F32 mLastInterp;
U8 mJointSignature[2][LL_CHARACTER_MAX_JOINTS];
U8 mJointSignature[2][LL_CHARACTER_MAX_ANIMATED_JOINTS];
};
//-----------------------------------------------------------------------------

View File

@ -386,6 +386,7 @@ void LLJointStateBlender::blendJointStates(BOOL apply_now)
}
// apply transforms
// SL-315
target_joint->setPosition(blended_pos + added_pos);
target_joint->setScale(blended_scale + added_scale);
target_joint->setRotation(added_rot * blended_rot);
@ -417,6 +418,7 @@ void LLJointStateBlender::interpolate(F32 u)
return;
}
// SL-315
target_joint->setPosition(lerp(target_joint->getPosition(), mJointCache.getPosition(), u));
target_joint->setScale(lerp(target_joint->getScale(), mJointCache.getScale(), u));
target_joint->setRotation(nlerp(u, target_joint->getRotation(), mJointCache.getRotation()));
@ -444,6 +446,7 @@ void LLJointStateBlender::resetCachedJoint()
return;
}
LLJoint* source_joint = mJointStates[0]->getJoint();
// SL-315
mJointCache.setPosition(source_joint->getPosition());
mJointCache.setScale(source_joint->getScale());
mJointCache.setRotation(source_joint->getRotation());

View File

@ -150,6 +150,7 @@ public:
F32 getWeight() const { return mIsAnimating ? mTargetWeight : mCurWeight; }
F32 getCurrentWeight() const { return mCurWeight; }
F32 getLastWeight() const { return mLastWeight; }
void setLastWeight(F32 val) { mLastWeight = val; }
BOOL isAnimating() const { return mIsAnimating; }
BOOL isTweakable() const { return (getGroup() == VISUAL_PARAM_GROUP_TWEAKABLE) || (getGroup() == VISUAL_PARAM_GROUP_TWEAKABLE_NO_TRANSMIT); }

View File

@ -88,6 +88,7 @@ namespace tut
{
LLJoint lljoint;
LLVector3 vec3(2.3f,30.f,10.f);
// SL-315
lljoint.setPosition(vec3);
LLVector3 pos = lljoint.getPosition();
ensure("setPosition()/getPosition() failed ", (vec3 == pos));
@ -98,6 +99,7 @@ namespace tut
{
LLJoint lljoint;
LLVector3 vec3(2.3f,30.f,10.f);
// SL-315
lljoint.setWorldPosition(vec3);
LLVector3 pos = lljoint.getWorldPosition();
ensure("1:setWorldPosition()/getWorldPosition() failed ", (vec3 == pos));

View File

@ -40,6 +40,7 @@ set(llcommon_SOURCE_FILES
llbase64.cpp
llbitpack.cpp
llcallbacklist.cpp
llcallstack.cpp
llcommon.cpp
llcommonutils.cpp
llcoros.cpp
@ -58,6 +59,7 @@ set(llcommon_SOURCE_FILES
lleventfilter.cpp
llevents.cpp
lleventtimer.cpp
llexception.cpp
llfasttimer.cpp
llfile.cpp
llfindlocale.cpp
@ -114,6 +116,7 @@ set(llcommon_SOURCE_FILES
llworkerthread.cpp
timing.cpp
u64.cpp
StackWalker.cpp
)
set(llcommon_HEADER_FILES
@ -134,6 +137,7 @@ set(llcommon_HEADER_FILES
llbitpack.h
llboost.h
llcallbacklist.h
llcallstack.h
llcommon.h
llcommonutils.h
llcoros.h
@ -236,6 +240,7 @@ set(llcommon_HEADER_FILES
stringize.h
timer.h
u64.h
StackWalker.h
)
set_source_files_properties(${llcommon_HEADER_FILES}
@ -316,7 +321,7 @@ if (LL_TESTS)
LL_ADD_INTEGRATION_TEST(llprocinfo "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(llrand "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(llsdserialize "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(llsingleton "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(llsingleton "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(llstring "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(lltrace "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(lltreeiterators "" "${test_libs}")
@ -329,6 +334,11 @@ if (LL_TESTS)
LL_ADD_INTEGRATION_TEST(llleap "" "${test_libs}")
LL_ADD_INTEGRATION_TEST(llstreamqueue "" "${test_libs}")
## llexception_test.cpp isn't a regression test, and doesn't need to be run
## on every build. It's to help a developer make implementation choices about
## throwing and catching exceptions.
##LL_ADD_INTEGRATION_TEST(llexception "" "${test_libs}")
# *TODO - reenable these once tcmalloc libs no longer break the build.
#ADD_BUILD_TEST(llallocator llcommon)
#ADD_BUILD_TEST(llallocator_heap_profile llcommon)

File diff suppressed because it is too large

View File

@ -0,0 +1,226 @@
/**********************************************************************
*
* StackWalker.h
*
*
*
* $LicenseInfo:firstyear=2016&license=bsd$
*
* Linden notes: Small modifications from the original source at https://stackwalker.codeplex.com/
*
* LICENSE (http://www.opensource.org/licenses/bsd-license.php)
*
* Copyright (c) 2005-2009, Jochen Kalmbach
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Jochen Kalmbach nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* **********************************************************************/
#if LL_WINDOWS
// #pragma once is supported starting with _MSC_VER 1000,
// so we do not need to check the version (because we only support _MSC_VER >= 1100)!
#pragma once
#include <windows.h>
// special defines for VC5/6 (if no actual PSDK is installed):
#if _MSC_VER < 1300
typedef unsigned __int64 DWORD64, *PDWORD64;
#if defined(_WIN64)
typedef unsigned __int64 SIZE_T, *PSIZE_T;
#else
typedef unsigned long SIZE_T, *PSIZE_T;
#endif
#endif // _MSC_VER < 1300
class StackWalkerInternal; // forward
class StackWalker
{
public:
typedef enum StackWalkOptions
{
// No additional info will be retrieved
// (only the address is available)
RetrieveNone = 0,
// Try to get the symbol-name
RetrieveSymbol = 1,
// Try to get the line for this symbol
RetrieveLine = 2,
// Try to retrieve the module-infos
RetrieveModuleInfo = 4,
// Also retrieve the version for the DLL/EXE
RetrieveFileVersion = 8,
// Contains all the above
RetrieveVerbose = 0xF,
// Generate a "good" symbol-search-path
SymBuildPath = 0x10,
// Also use the public Microsoft-Symbol-Server
SymUseSymSrv = 0x20,
// Contains all the above "Sym"-options
SymAll = 0x30,
// Contains all options (default)
OptionsAll = 0x3F
} StackWalkOptions;
StackWalker(
bool verbose = true,
int options = OptionsAll, // 'int' is by design, to combine the enum-flags
LPCSTR szSymPath = NULL,
DWORD dwProcessId = GetCurrentProcessId(),
HANDLE hProcess = GetCurrentProcess()
);
StackWalker(DWORD dwProcessId, HANDLE hProcess);
virtual ~StackWalker();
typedef BOOL (__stdcall *PReadProcessMemoryRoutine)(
HANDLE hProcess,
DWORD64 qwBaseAddress,
PVOID lpBuffer,
DWORD nSize,
LPDWORD lpNumberOfBytesRead,
LPVOID pUserData // optional data, which was passed in "ShowCallstack"
);
BOOL LoadModules();
BOOL ShowCallstack(
bool verbose,
HANDLE hThread = GetCurrentThread(),
const CONTEXT *context = NULL,
PReadProcessMemoryRoutine readMemoryFunction = NULL,
LPVOID pUserData = NULL // optional to identify some data in the 'readMemoryFunction'-callback
);
#if _MSC_VER >= 1300
// For some reason, the "STACKWALK_MAX_NAMELEN" must be declared as "public"
// in older compilers in order to use it... starting with VC7 we can declare it as "protected"
protected:
#endif
enum { STACKWALK_MAX_NAMELEN = 4096 }; // max name length for found symbols
protected:
// Entry for each Callstack-Entry
typedef struct CallstackEntry
{
DWORD64 offset; // if 0, we have no valid entry
CHAR name[STACKWALK_MAX_NAMELEN];
CHAR undName[STACKWALK_MAX_NAMELEN];
CHAR undFullName[STACKWALK_MAX_NAMELEN];
DWORD64 offsetFromSmybol;
DWORD offsetFromLine;
DWORD lineNumber;
CHAR lineFileName[STACKWALK_MAX_NAMELEN];
DWORD symType;
LPCSTR symTypeString;
CHAR moduleName[STACKWALK_MAX_NAMELEN];
DWORD64 baseOfImage;
CHAR loadedImageName[STACKWALK_MAX_NAMELEN];
} CallstackEntry;
typedef enum CallstackEntryType {firstEntry, nextEntry, lastEntry};
virtual void OnSymInit(LPCSTR szSearchPath, DWORD symOptions, LPCSTR szUserName);
virtual void OnLoadModule(LPCSTR img, LPCSTR mod, DWORD64 baseAddr, DWORD size, DWORD result, LPCSTR symType, LPCSTR pdbName, ULONGLONG fileVersion);
virtual void OnCallstackEntry(CallstackEntryType eType, CallstackEntry &entry);
virtual void OnDbgHelpErr(LPCSTR szFuncName, DWORD gle, DWORD64 addr);
virtual void OnOutput(LPCSTR szText);
StackWalkerInternal *m_sw;
HANDLE m_hProcess;
DWORD m_dwProcessId;
BOOL m_modulesLoaded;
LPSTR m_szSymPath;
bool m_verbose;
int m_options;
int m_MaxRecursionCount;
static BOOL __stdcall myReadProcMem(HANDLE hProcess, DWORD64 qwBaseAddress, PVOID lpBuffer, DWORD nSize, LPDWORD lpNumberOfBytesRead);
friend StackWalkerInternal;
}; // class StackWalker
// The "ugly" assembler-implementation is needed for systems before XP
// If you have a new PSDK and you only compile for XP and later, then you can use
// the "RtlCaptureContext"
// Currently there is no define which determines the PSDK-Version...
// So we just use the compiler-version (and assume that the PSDK is
// the one which was installed by the VS-IDE)
// INFO: If you want, you can use the RtlCaptureContext if you only target XP and later...
// But I currently use it in x64/IA64 environments...
//#if defined(_M_IX86) && (_WIN32_WINNT <= 0x0500) && (_MSC_VER < 1400)
#if defined(_M_IX86)
#ifdef CURRENT_THREAD_VIA_EXCEPTION
// TODO: The following is not a "good" implementation,
// because the callstack is only valid in the "__except" block...
#define GET_CURRENT_CONTEXT_STACKWALKER_CODEPLEX(c, contextFlags) \
do { \
memset(&c, 0, sizeof(CONTEXT)); \
EXCEPTION_POINTERS *pExp = NULL; \
__try { \
throw 0; \
} __except( ( (pExp = GetExceptionInformation()) ? EXCEPTION_EXECUTE_HANDLER : EXCEPTION_EXECUTE_HANDLER)) {} \
if (pExp != NULL) \
memcpy(&c, pExp->ContextRecord, sizeof(CONTEXT)); \
c.ContextFlags = contextFlags; \
} while(0);
#else
// The following should be enough for walking the callstack...
#define GET_CURRENT_CONTEXT_STACKWALKER_CODEPLEX(c, contextFlags) \
do { \
memset(&c, 0, sizeof(CONTEXT)); \
c.ContextFlags = contextFlags; \
__asm call x \
__asm x: pop eax \
__asm mov c.Eip, eax \
__asm mov c.Ebp, ebp \
__asm mov c.Esp, esp \
} while(0);
#endif
#else
// The following is defined for x86 (XP and higher), x64 and IA64:
#define GET_CURRENT_CONTEXT_STACKWALKER_CODEPLEX(c, contextFlags) \
do { \
memset(&c, 0, sizeof(CONTEXT)); \
c.ContextFlags = contextFlags; \
RtlCaptureContext(&c); \
} while(0);
#endif
#endif // LL_WINDOWS

View File

@ -294,9 +294,11 @@ void LLScopedLock::unlock()
bool ll_apr_warn_status(apr_status_t status)
{
if(APR_SUCCESS == status) return false;
#if !LL_LINUX
char buf[MAX_STRING]; /* Flawfinder: ignore */
apr_strerror(status, buf, sizeof(buf));
LL_WARNS("APR") << "APR: " << buf << LL_ENDL;
#endif
return true;
}

View File

@ -0,0 +1,190 @@
/**
* @file llcallstack.cpp
* @brief run-time extraction of the current callstack
*
* $LicenseInfo:firstyear=2016&license=viewerlgpl$
* Second Life Viewer Source Code
* Copyright (C) 2016, Linden Research, Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License only.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
* $/LicenseInfo$
*/
#include "linden_common.h"
#include "llcommon.h"
#include "llcallstack.h"
#include "StackWalker.h"
#include "llthreadlocalstorage.h"
#if LL_WINDOWS
class LLCallStackImpl: public StackWalker
{
public:
LLCallStackImpl():
StackWalker(false,0) // non-verbose, options = 0
{
}
~LLCallStackImpl()
{
}
void getStack(std::vector<std::string>& stack, S32 skip_count=0, bool verbose=false)
{
m_stack.clear();
ShowCallstack(verbose);
// Skip the first few lines because they're just bookkeeping for LLCallStack,
// plus any additional lines the caller asked to skip.
S32 first_line = skip_count + 3;
for (S32 i=first_line; i<m_stack.size(); ++i)
{
stack.push_back(m_stack[i]);
}
}
protected:
virtual void OnOutput(LPCSTR szText)
{
m_stack.push_back(szText);
}
std::vector<std::string> m_stack;
};
#else
// Stub - not currently implemented on other platforms.
class LLCallStackImpl
{
public:
LLCallStackImpl() {}
~LLCallStackImpl() {}
void getStack(std::vector<std::string>& stack, S32 skip_count=0, bool verbose=false)
{
stack.clear();
}
};
#endif
LLCallStackImpl *LLCallStack::s_impl = NULL;
LLCallStack::LLCallStack(S32 skip_count, bool verbose):
m_skipCount(skip_count),
m_verbose(verbose)
{
if (!s_impl)
{
s_impl = new LLCallStackImpl;
}
LLTimer t;
s_impl->getStack(m_strings, m_skipCount, m_verbose);
}
bool LLCallStack::contains(const std::string& str)
{
for (std::vector<std::string>::const_iterator it = m_strings.begin();
it != m_strings.end(); ++it)
{
if (it->find(str) != std::string::npos)
{
return true;
}
}
return false;
}
std::ostream& operator<<(std::ostream& s, const LLCallStack& call_stack)
{
#ifndef LL_RELEASE_FOR_DOWNLOAD
std::vector<std::string>::const_iterator it;
for (it=call_stack.m_strings.begin(); it!=call_stack.m_strings.end(); ++it)
{
s << *it;
}
#else
s << "UNAVAILABLE IN RELEASE";
#endif
return s;
}
LLContextStrings::LLContextStrings()
{
}
// static
LLContextStrings* LLContextStrings::getThreadLocalInstance()
{
LLContextStrings *cons = LLThreadLocalSingletonPointer<LLContextStrings>::getInstance();
if (!cons)
{
LLThreadLocalSingletonPointer<LLContextStrings>::setInstance(new LLContextStrings);
}
return LLThreadLocalSingletonPointer<LLContextStrings>::getInstance();
}
// static
void LLContextStrings::addContextString(const std::string& str)
{
LLContextStrings *cons = getThreadLocalInstance();
//LL_INFOS() << "CTX " << (S32)cons << " ADD " << str << " CNT " << cons->m_contextStrings[str] << LL_ENDL;
cons->m_contextStrings[str]++;
}
// static
void LLContextStrings::removeContextString(const std::string& str)
{
LLContextStrings *cons = getThreadLocalInstance();
cons->m_contextStrings[str]--;
//LL_INFOS() << "CTX " << (S32)cons << " REMOVE " << str << " CNT " << cons->m_contextStrings[str] << LL_ENDL;
if (cons->m_contextStrings[str] == 0)
{
cons->m_contextStrings.erase(str);
}
}
// static
bool LLContextStrings::contains(const std::string& str)
{
const std::map<std::string,S32>& strings =
LLThreadLocalSingletonPointer<LLContextStrings>::getInstance()->m_contextStrings;
for (std::map<std::string,S32>::const_iterator it = strings.begin(); it!=strings.end(); ++it)
{
if (it->first.find(str) != std::string::npos)
{
return true;
}
}
return false;
}
// static
void LLContextStrings::output(std::ostream& os)
{
const std::map<std::string,S32>& strings =
LLThreadLocalSingletonPointer<LLContextStrings>::getInstance()->m_contextStrings;
for (std::map<std::string,S32>::const_iterator it = strings.begin(); it!=strings.end(); ++it)
{
os << it->first << "[" << it->second << "]" << "\n";
}
}
// static
std::ostream& operator<<(std::ostream& s, const LLContextStatus& context_status)
{
LLThreadLocalSingletonPointer<LLContextStrings>::getInstance()->output(s);
return s;
}
bool LLContextStatus::contains(const std::string& str)
{
return LLThreadLocalSingletonPointer<LLContextStrings>::getInstance()->contains(str);
}

View File

@ -0,0 +1,80 @@
/**
* @file llcallstack.h
* @brief run-time extraction of the current callstack
*
* $LicenseInfo:firstyear=2016&license=viewerlgpl$
* Second Life Viewer Source Code
* Copyright (C) 2016, Linden Research, Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License only.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
* $/LicenseInfo$
*/
#include <map>
class LLCallStackImpl;
class LLCallStack
{
public:
LLCallStack(S32 skip_count=0, bool verbose=false);
std::vector<std::string> m_strings;
bool m_verbose;
bool contains(const std::string& str);
private:
static LLCallStackImpl *s_impl;
S32 m_skipCount;
};
LL_COMMON_API std::ostream& operator<<(std::ostream& s, const LLCallStack& call_stack);
class LLContextStrings
{
public:
LLContextStrings();
static void addContextString(const std::string& str);
static void removeContextString(const std::string& str);
static void output(std::ostream& os);
static LLContextStrings* getThreadLocalInstance();
static bool contains(const std::string& str);
private:
std::map<std::string,S32> m_contextStrings;
};
class LLScopedContextString
{
public:
LLScopedContextString(const std::string& str):
m_str(str)
{
LLContextStrings::addContextString(m_str);
}
~LLScopedContextString()
{
LLContextStrings::removeContextString(m_str);
}
private:
std::string m_str;
};
// Mostly exists as a class to hook an ostream override to.
struct LLContextStatus
{
bool contains(const std::string& str);
};
LL_COMMON_API std::ostream& operator<<(std::ostream& s, const LLContextStatus& context_status);
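A hedged usage sketch of the new debugging helpers declared above (the function name, log tag and substring are illustrative, not from the diff; assumes llcallstack.h and llerror.h are included):
// Hypothetical example: tag this thread's work and optionally dump the callstack.
void updateSomeSubsystem()
{
    // Every context query on this thread now "contains" this string until the scope ends.
    LLScopedContextString context("updateSomeSubsystem");

    LLCallStack here;                    // captured on Windows; empty stub elsewhere
    if (here.contains("LLAppViewer"))    // cheap substring test over the captured frames
    {
        LL_DEBUGS("Avatar") << "reached from the main loop:\n" << here << LL_ENDL;
    }
    LL_DEBUGS("Avatar") << "active contexts:\n" << LLContextStatus() << LL_ENDL;
}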

View File

@ -38,6 +38,7 @@
#include "llevents.h"
#include "llerror.h"
#include "stringize.h"
#include "llexception.h"
// do nothing, when we need nothing done
void LLCoros::no_cleanup(CoroData*) {}
@ -131,9 +132,9 @@ bool LLCoros::cleanup(const LLSD&)
if ((previousCount < 5) || !(previousCount % 50))
{
if (previousCount < 5)
LL_INFOS("LLCoros") << "LLCoros: cleaning up coroutine " << mi->first << LL_ENDL;
LL_DEBUGS("LLCoros") << "LLCoros: cleaning up coroutine " << mi->first << LL_ENDL;
else
LL_INFOS("LLCoros") << "LLCoros: cleaning up coroutine " << mi->first << "("<< previousCount << ")" << LL_ENDL;
LL_DEBUGS("LLCoros") << "LLCoros: cleaning up coroutine " << mi->first << "("<< previousCount << ")" << LL_ENDL;
}
// The erase() call will invalidate its passed iterator value --
@ -185,9 +186,9 @@ std::string LLCoros::generateDistinctName(const std::string& prefix) const
if ((previousCount < 5) || !(previousCount % 50))
{
if (previousCount < 5)
LL_INFOS("LLCoros") << "LLCoros: launching coroutine " << name << LL_ENDL;
LL_DEBUGS("LLCoros") << "LLCoros: launching coroutine " << name << LL_ENDL;
else
LL_INFOS("LLCoros") << "LLCoros: launching coroutine " << name << "(" << previousCount << ")" << LL_ENDL;
LL_DEBUGS("LLCoros") << "LLCoros: launching coroutine " << name << "(" << previousCount << ")" << LL_ENDL;
}
@ -223,7 +224,7 @@ std::string LLCoros::getName() const
void LLCoros::setStackSize(S32 stacksize)
{
LL_INFOS("LLCoros") << "Setting coroutine stack size to " << stacksize << LL_ENDL;
LL_DEBUGS("LLCoros") << "Setting coroutine stack size to " << stacksize << LL_ENDL;
mStackSize = stacksize;
}
@ -235,7 +236,23 @@ void LLCoros::toplevel(coro::self& self, CoroData* data, const callable_t& calla
// capture the 'self' param in CoroData
data->mSelf = &self;
// run the code the caller actually wants in the coroutine
callable();
try
{
callable();
}
catch (const LLContinueError&)
{
// Any uncaught exception derived from LLContinueError will be caught
// here and logged. This coroutine will terminate but the rest of the
// viewer will carry on.
LOG_UNHANDLED_EXCEPTION(STRINGIZE("coroutine " << data->mName));
}
catch (...)
{
// Any OTHER kind of uncaught exception will cause the viewer to
// crash, hopefully informatively.
CRASH_ON_UNHANDLED_EXCEPTION(STRINGIZE("coroutine " << data->mName));
}
// This cleanup isn't perfectly symmetrical with the way we initially set
// data->mPrev, but this is our last chance to reset mCurrentCoro.
sCurrentCoro.reset(data->mPrev);
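A hedged sketch of what the new try/catch policy means for code running inside a coroutine (the coroutine body and message are made up for illustration):
// Hypothetical coroutine body. Throwing an LLContinueError-derived exception
// that nothing else catches ends only this coroutine: LLCoros::toplevel() logs
// it via LOG_UNHANDLED_EXCEPTION and the rest of the viewer keeps running.
void fetchSomethingCoro()
{
    bool gave_up = true; // illustrative condition
    if (gave_up)
    {
        LLTHROW(LLContinueError("server never responded"));
    }
    // Any other exception type escaping this function instead reaches the
    // catch (...) clause above and crashes via CRASH_ON_UNHANDLED_EXCEPTION().
}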

View File

@ -39,8 +39,8 @@
#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/topological_sort.hpp>
#include <boost/graph/exception.hpp>
#include <boost/throw_exception.hpp>
// other Linden headers
#include "llexception.h"
LLDependenciesBase::VertexList LLDependenciesBase::topo_sort(int vertices, const EdgeList& edges) const
{
@ -77,7 +77,7 @@ LLDependenciesBase::VertexList LLDependenciesBase::topo_sort(int vertices, const
// Omit independent nodes: display only those that might contribute to
// the cycle.
describe(out, false);
BOOST_THROW_EXCEPTION(Cycle(out.str()));
LLTHROW(Cycle(out.str()));
}
// A peculiarity of boost::topological_sort() is that it emits results in
// REVERSE topological order: to get the result you want, you must

View File

@ -921,11 +921,6 @@ namespace
std::ostringstream message_stream;
if (show_location && (r->wantsLocation() || level == LLError::LEVEL_ERROR || s->mPrintLocation))
{
message_stream << site.mLocationString << " ";
}
if (show_time && r->wantsTime() && s->mTimeFunction != NULL)
{
message_stream << s->mTimeFunction() << " ";
@ -933,17 +928,17 @@ namespace
if (show_level && r->wantsLevel())
{
message_stream << site.mLevelString;
message_stream << site.mLevelString << " ";
}
if (show_tags && r->wantsTags())
{
message_stream << site.mTagString;
}
if ((show_level && r->wantsLevel())||
(show_tags && r->wantsTags()))
if (show_location && (r->wantsLocation() || level == LLError::LEVEL_ERROR || s->mPrintLocation))
{
message_stream << " ";
message_stream << site.mLocationString << " ";
}
if (show_function && r->wantsFunctionName())
@ -1493,3 +1488,20 @@ namespace LLError
}
}
bool debugLoggingEnabled(const std::string& tag)
{
const char* tags[] = {tag.c_str()};
::size_t tag_count = 1;
LLError::CallSite _site(LLError::LEVEL_DEBUG, __FILE__, __LINE__,
typeid(_LL_CLASS_TO_LOG), __FUNCTION__, false, tags, tag_count);
if (LL_UNLIKELY(_site.shouldLog()))
{
return true;
}
else
{
return false;
}
}

View File

@ -174,7 +174,8 @@ namespace LLError
// not really a level
// used to indicate that no messages should be logged
};
// If you change ELevel, please update llvlog() macro below.
/* Macro support
The classes CallSite and Log are used by the logging macros below.
They are not intended for general use.
@ -305,24 +306,38 @@ typedef LLError::NoClassInfo _LL_CLASS_TO_LOG;
/////////////////////////////////
// Error Logging Macros
// See top of file for common usage.
// See top of file for common usage.
/////////////////////////////////
// this macro uses a one-shot do statement to avoid parsing errors when writing control flow statements
// without braces:
// if (condition) LL_INFOS() << "True" << LL_ENDL; else LL_INFOS()() << "False" << LL_ENDL
// Instead of using LL_DEBUGS(), LL_INFOS() et al., it may be tempting to
// directly code the lllog() macro so you can pass in the LLError::ELevel as a
// variable. DON'T DO IT! The reason is that the first time control passes
// through lllog(), it initializes a local static LLError::CallSite with that
// *first* ELevel value. All subsequent visits will decide whether or not to
// emit output based on the *first* ELevel value bound into that static
// CallSite instance. Use LL_VLOGS() instead. lllog() assumes its ELevel
// argument never varies.
#define lllog(level, once, ...) \
do { \
const char* tags[] = {"", ##__VA_ARGS__}; \
::size_t tag_count = LL_ARRAY_SIZE(tags) - 1; \
static LLError::CallSite _site( \
level, __FILE__, __LINE__, typeid(_LL_CLASS_TO_LOG), __FUNCTION__, once, &tags[1], tag_count);\
if (LL_UNLIKELY(_site.shouldLog())) \
{ \
std::ostringstream* _out = LLError::Log::out(); \
// this macro uses a one-shot do statement to avoid parsing errors when
// writing control flow statements without braces:
// if (condition) LL_INFOS() << "True" << LL_ENDL; else LL_INFOS()() << "False" << LL_ENDL;
#define lllog(level, once, ...) \
do { \
const char* tags[] = {"", ##__VA_ARGS__}; \
static LLError::CallSite _site(lllog_site_args_(level, once, tags)); \
lllog_test_()
#define lllog_test_() \
if (LL_UNLIKELY(_site.shouldLog())) \
{ \
std::ostringstream* _out = LLError::Log::out(); \
(*_out)
#define lllog_site_args_(level, once, tags) \
level, __FILE__, __LINE__, typeid(_LL_CLASS_TO_LOG), \
__FUNCTION__, once, &tags[1], LL_ARRAY_SIZE(tags)-1
//Use this construct if you need to do computation in the middle of a
//message:
//
@ -363,4 +378,49 @@ typedef LLError::NoClassInfo _LL_CLASS_TO_LOG;
#define LL_INFOS_ONCE(...) lllog(LLError::LEVEL_INFO, true, ##__VA_ARGS__)
#define LL_WARNS_ONCE(...) lllog(LLError::LEVEL_WARN, true, ##__VA_ARGS__)
// Use this if you need to pass LLError::ELevel as a variable.
#define LL_VLOGS(level, ...) llvlog(level, false, ##__VA_ARGS__)
#define LL_VLOGS_ONCE(level, ...) llvlog(level, true, ##__VA_ARGS__)
// The problem with using lllog() with a variable level is that the first time
// through, it initializes a static CallSite instance with whatever level you
// pass. That first level is bound into the CallSite; the level parameter is
// never again examined. One approach to variable level would be to
// dynamically construct a CallSite instance every call -- which could get
// expensive, depending on context. So instead, initialize a static CallSite
// for each level value we support, then dynamically select the CallSite
// instance for the passed level value.
// Compare implementation to lllog() above.
#define llvlog(level, once, ...) \
do { \
const char* tags[] = {"", ##__VA_ARGS__}; \
/* Need a static CallSite instance per expected ELevel value. */ \
/* Since we intend to index this array with the ELevel, */ \
/* _sites[0] should be ELevel(0), and so on -- avoid using */ \
/* ELevel symbolic names when initializing -- except for */ \
/* the last entry, which handles anything beyond the end. */ \
/* (Commented ELevel value names are from 2016-09-01.) */ \
/* Passing an ELevel past the end of this array is itself */ \
/* a fatal error, so ensure the last is LEVEL_ERROR. */ \
static LLError::CallSite _sites[] = \
{ \
/* LEVEL_DEBUG */ \
LLError::CallSite(lllog_site_args_(LLError::ELevel(0), once, tags)), \
/* LEVEL_INFO */ \
LLError::CallSite(lllog_site_args_(LLError::ELevel(1), once, tags)), \
/* LEVEL_WARN */ \
LLError::CallSite(lllog_site_args_(LLError::ELevel(2), once, tags)), \
/* LEVEL_ERROR */ \
LLError::CallSite(lllog_site_args_(LLError::LEVEL_ERROR, once, tags)) \
}; \
/* Clamp the passed 'level' to at most last entry */ \
std::size_t which((std::size_t(level) >= LL_ARRAY_SIZE(_sites)) ? \
(LL_ARRAY_SIZE(_sites) - 1) : std::size_t(level)); \
/* selected CallSite *must* be named _site for LL_ENDL */ \
LLError::CallSite& _site(_sites[which]); \
lllog_test_()
// Check at run-time whether logging is enabled, without generating output
bool debugLoggingEnabled(const std::string& tag);
#endif // LL_LLERROR_H
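A hedged sketch of how calling code might use the two additions above, LL_VLOGS() for a severity chosen at run time and debugLoggingEnabled() to skip expensive work when a tag is disabled (the function, tag and helper names are illustrative):
void reportStatus(LLError::ELevel severity, const std::string& message)
{
    // The severity is a variable, so the static-CallSite macros (LL_INFOS() etc.)
    // cannot be used; LL_VLOGS() picks the per-level CallSite at run time.
    LL_VLOGS(severity, "Status") << message << LL_ENDL;

    // Only assemble the (hypothetically expensive) extra detail if someone has
    // actually enabled the "Status" debug tag.
    if (debugLoggingEnabled("Status"))
    {
        LL_DEBUGS("Status") << buildExpensiveReport() << LL_ENDL; // buildExpensiveReport() is illustrative
    }
}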

View File

@ -34,12 +34,12 @@
#include <map>
// std headers
// external library headers
#include <boost/throw_exception.hpp>
// other Linden headers
#include "llsdserialize.h"
#include "llerror.h"
#include "llcoros.h"
#include "llmake.h"
#include "llexception.h"
#include "lleventfilter.h"
@ -352,7 +352,7 @@ LLSD errorException(const LLEventWithID& result, const std::string& desc)
// returning it, deliver it via exception.
if (result.second)
{
BOOST_THROW_EXCEPTION(LLErrorEvent(desc, result.first));
LLTHROW(LLErrorEvent(desc, result.first));
}
// That way, our caller knows a simple return must be from the reply
// pump (pump 0).

View File

@ -45,7 +45,6 @@
#include <cctype>
// external library headers
#include <boost/range/iterator_range.hpp>
#include <boost/throw_exception.hpp>
#if LL_WINDOWS
#pragma warning (push)
#pragma warning (disable : 4701) // compiler thinks might use uninitialized var, but no
@ -58,6 +57,7 @@
#include "stringize.h"
#include "llerror.h"
#include "llsdutil.h"
#include "llexception.h"
#if LL_MSVC
#pragma warning (disable : 4702)
#endif
@ -175,7 +175,7 @@ std::string LLEventPumps::registerNew(const LLEventPump& pump, const std::string
// Unless we're permitted to tweak it, that's Bad.
if (! tweak)
{
BOOST_THROW_EXCEPTION(LLEventPump::DupPumpName(std::string("Duplicate LLEventPump name '") + name + "'"));
LLTHROW(LLEventPump::DupPumpName("Duplicate LLEventPump name '" + name + "'"));
}
// The passed name isn't unique, but we're permitted to tweak it. Find the
// first decimal-integer suffix not already taken. The insert() attempt
@ -276,6 +276,8 @@ LLEventPumps::~LLEventPumps()
#pragma warning (push)
#pragma warning (disable : 4355) // 'this' used in initializer list: yes, intentionally
#endif
const std::string LLEventPump::ANONYMOUS = std::string();
LLEventPump::LLEventPump(const std::string& name, bool tweak):
// Register every new instance with LLEventPumps
@ -314,147 +316,162 @@ LLBoundListener LLEventPump::listen_impl(const std::string& name, const LLEventL
const NameList& after,
const NameList& before)
{
// Check for duplicate name before connecting listener to mSignal
ConnectionMap::const_iterator found = mConnections.find(name);
// In some cases the user might disconnect a connection explicitly -- or
// might use LLEventTrackable to disconnect implicitly. Either way, we can
// end up retaining in mConnections a zombie connection object that's
// already been disconnected. Such a connection object can't be
// reconnected -- nor, in the case of LLEventTrackable, would we want to
// try, since disconnection happens with the destruction of the listener
// object. That means it's safe to overwrite a disconnected connection
// object with the new one we're attempting. The case we want to prevent
// is only when the existing connection object is still connected.
if (found != mConnections.end() && found->second.connected())
float nodePosition = 1.0;
// if the supplied name is empty we are not interested in the ordering mechanism
// and can bypass attempting to find the optimal location to insert the new
// listener. We'll just tack it on to the end.
if (!name.empty()) // should be the same as testing against ANONYMOUS
{
BOOST_THROW_EXCEPTION(
DupListenerName(std::string("Attempt to register duplicate listener name '") + name +
"' on " + typeid(*this).name() + " '" + getName() + "'"));
}
// Okay, name is unique, try to reconcile its dependencies. Specify a new
// "node" value that we never use for an mSignal placement; we'll fix it
// later.
DependencyMap::node_type& newNode = mDeps.add(name, -1.0, after, before);
// What if this listener has been added, removed and re-added? In that
// case newNode already has a non-negative value because we never remove a
// listener from mDeps. But keep processing uniformly anyway in case the
// listener was added back with different dependencies. Then mDeps.sort()
// would put it in a different position, and the old newNode placement
// value would be wrong, so we'd have to reassign it anyway. Trust that
// re-adding a listener with the same dependencies is the trivial case for
// mDeps.sort(): it can just replay its cache.
DependencyMap::sorted_range sorted_range;
try
{
// Can we pick an order that works including this new entry?
sorted_range = mDeps.sort();
}
catch (const DependencyMap::Cycle& e)
{
// No: the new node's after/before dependencies have made mDeps
// unsortable. If we leave the new node in mDeps, it will continue
// to screw up all future attempts to sort()! Pull it out.
mDeps.remove(name);
BOOST_THROW_EXCEPTION(
Cycle(std::string("New listener '") + name + "' on " + typeid(*this).name() +
" '" + getName() + "' would cause cycle: " + e.what()));
}
// Walk the list to verify that we haven't changed the order.
float previous = 0.0, myprev = 0.0;
DependencyMap::sorted_iterator mydmi = sorted_range.end(); // need this visible after loop
for (DependencyMap::sorted_iterator dmi = sorted_range.begin();
dmi != sorted_range.end(); ++dmi)
{
// Since we've added the new entry with an invalid placement,
// recognize it and skip it.
if (dmi->first == name)
// Check for duplicate name before connecting listener to mSignal
ConnectionMap::const_iterator found = mConnections.find(name);
// In some cases the user might disconnect a connection explicitly -- or
// might use LLEventTrackable to disconnect implicitly. Either way, we can
// end up retaining in mConnections a zombie connection object that's
// already been disconnected. Such a connection object can't be
// reconnected -- nor, in the case of LLEventTrackable, would we want to
// try, since disconnection happens with the destruction of the listener
// object. That means it's safe to overwrite a disconnected connection
// object with the new one we're attempting. The case we want to prevent
// is only when the existing connection object is still connected.
if (found != mConnections.end() && found->second.connected())
{
// Remember the iterator belonging to our new node, and which
// placement value was 'previous' at that point.
mydmi = dmi;
myprev = previous;
continue;
LLTHROW(DupListenerName("Attempt to register duplicate listener name '" + name +
"' on " + typeid(*this).name() + " '" + getName() + "'"));
}
// If the new node has rearranged the existing nodes, we'll find
// that their placement values are no longer in increasing order.
if (dmi->second < previous)
// Okay, name is unique, try to reconcile its dependencies. Specify a new
// "node" value that we never use for an mSignal placement; we'll fix it
// later.
DependencyMap::node_type& newNode = mDeps.add(name, -1.0, after, before);
// What if this listener has been added, removed and re-added? In that
// case newNode already has a non-negative value because we never remove a
// listener from mDeps. But keep processing uniformly anyway in case the
// listener was added back with different dependencies. Then mDeps.sort()
// would put it in a different position, and the old newNode placement
// value would be wrong, so we'd have to reassign it anyway. Trust that
// re-adding a listener with the same dependencies is the trivial case for
// mDeps.sort(): it can just replay its cache.
DependencyMap::sorted_range sorted_range;
try
{
// This is another scenario in which we'd better back out the
// newly-added node from mDeps -- but don't do it yet, we want to
// traverse the existing mDeps to report on it!
// Describe the change to the order of our listeners. Copy
// everything but the newest listener to a vector we can sort to
// obtain the old order.
typedef std::vector< std::pair<float, std::string> > SortNameList;
SortNameList sortnames;
for (DependencyMap::sorted_iterator cdmi(sorted_range.begin()), cdmend(sorted_range.end());
cdmi != cdmend; ++cdmi)
{
if (cdmi->first != name)
{
sortnames.push_back(SortNameList::value_type(cdmi->second, cdmi->first));
}
}
std::sort(sortnames.begin(), sortnames.end());
std::ostringstream out;
out << "New listener '" << name << "' on " << typeid(*this).name() << " '" << getName()
<< "' would move previous listener '" << dmi->first << "'\nwas: ";
SortNameList::const_iterator sni(sortnames.begin()), snend(sortnames.end());
if (sni != snend)
{
out << sni->second;
while (++sni != snend)
{
out << ", " << sni->second;
}
}
out << "\nnow: ";
DependencyMap::sorted_iterator ddmi(sorted_range.begin()), ddmend(sorted_range.end());
if (ddmi != ddmend)
{
out << ddmi->first;
while (++ddmi != ddmend)
{
out << ", " << ddmi->first;
}
}
// NOW remove the offending listener node.
// Can we pick an order that works including this new entry?
sorted_range = mDeps.sort();
}
catch (const DependencyMap::Cycle& e)
{
// No: the new node's after/before dependencies have made mDeps
// unsortable. If we leave the new node in mDeps, it will continue
// to screw up all future attempts to sort()! Pull it out.
mDeps.remove(name);
// Having constructed a description of the order change, inform caller.
BOOST_THROW_EXCEPTION(OrderChange(out.str()));
LLTHROW(Cycle("New listener '" + name + "' on " + typeid(*this).name() +
" '" + getName() + "' would cause cycle: " + e.what()));
}
// This node becomes the previous one.
previous = dmi->second;
}
// We just got done with a successful mDeps.add(name, ...) call. We'd
// better have found 'name' somewhere in that sorted list!
assert(mydmi != sorted_range.end());
// Four cases:
// 0. name is the only entry: placement 1.0
// 1. name is the first of several entries: placement (next placement)/2
// 2. name is between two other entries: placement (myprev + (next placement))/2
// 3. name is the last entry: placement ceil(myprev) + 1.0
// Since we've cleverly arranged for myprev to be 0.0 if name is the
// first entry, this folds down to two cases. Case 1 is subsumed by
// case 2, and case 0 is subsumed by case 3. So we need only handle
// cases 2 and 3, which means we need only detect whether name is the
// last entry. Increment mydmi to see if there's anything beyond.
if (++mydmi != sorted_range.end())
{
// The new node isn't last. Place it between the previous node and
// the successor.
newNode = (myprev + mydmi->second)/2.f;
}
else
{
// The new node is last. Bump myprev up to the next integer, add
// 1.0 and use that.
newNode = std::ceil(myprev) + 1.f;
// Walk the list to verify that we haven't changed the order.
float previous = 0.0, myprev = 0.0;
DependencyMap::sorted_iterator mydmi = sorted_range.end(); // need this visible after loop
for (DependencyMap::sorted_iterator dmi = sorted_range.begin();
dmi != sorted_range.end(); ++dmi)
{
// Since we've added the new entry with an invalid placement,
// recognize it and skip it.
if (dmi->first == name)
{
// Remember the iterator belonging to our new node, and which
// placement value was 'previous' at that point.
mydmi = dmi;
myprev = previous;
continue;
}
// If the new node has rearranged the existing nodes, we'll find
// that their placement values are no longer in increasing order.
if (dmi->second < previous)
{
// This is another scenario in which we'd better back out the
// newly-added node from mDeps -- but don't do it yet, we want to
// traverse the existing mDeps to report on it!
// Describe the change to the order of our listeners. Copy
// everything but the newest listener to a vector we can sort to
// obtain the old order.
typedef std::vector< std::pair<float, std::string> > SortNameList;
SortNameList sortnames;
for (DependencyMap::sorted_iterator cdmi(sorted_range.begin()), cdmend(sorted_range.end());
cdmi != cdmend; ++cdmi)
{
if (cdmi->first != name)
{
sortnames.push_back(SortNameList::value_type(cdmi->second, cdmi->first));
}
}
std::sort(sortnames.begin(), sortnames.end());
std::ostringstream out;
out << "New listener '" << name << "' on " << typeid(*this).name() << " '" << getName()
<< "' would move previous listener '" << dmi->first << "'\nwas: ";
SortNameList::const_iterator sni(sortnames.begin()), snend(sortnames.end());
if (sni != snend)
{
out << sni->second;
while (++sni != snend)
{
out << ", " << sni->second;
}
}
out << "\nnow: ";
DependencyMap::sorted_iterator ddmi(sorted_range.begin()), ddmend(sorted_range.end());
if (ddmi != ddmend)
{
out << ddmi->first;
while (++ddmi != ddmend)
{
out << ", " << ddmi->first;
}
}
// NOW remove the offending listener node.
mDeps.remove(name);
// Having constructed a description of the order change, inform caller.
LLTHROW(OrderChange(out.str()));
}
// This node becomes the previous one.
previous = dmi->second;
}
// We just got done with a successful mDeps.add(name, ...) call. We'd
// better have found 'name' somewhere in that sorted list!
assert(mydmi != sorted_range.end());
// Four cases:
// 0. name is the only entry: placement 1.0
// 1. name is the first of several entries: placement (next placement)/2
// 2. name is between two other entries: placement (myprev + (next placement))/2
// 3. name is the last entry: placement ceil(myprev) + 1.0
// Since we've cleverly arranged for myprev to be 0.0 if name is the
// first entry, this folds down to two cases. Case 1 is subsumed by
// case 2, and case 0 is subsumed by case 3. So we need only handle
// cases 2 and 3, which means we need only detect whether name is the
// last entry. Increment mydmi to see if there's anything beyond.
if (++mydmi != sorted_range.end())
{
// The new node isn't last. Place it between the previous node and
// the successor.
newNode = (myprev + mydmi->second) / 2.f;
}
else
{
// The new node is last. Bump myprev up to the next integer, add
// 1.0 and use that.
newNode = std::ceil(myprev) + 1.f;
}
nodePosition = newNode;
}
// Now that newNode has a value that places it appropriately in mSignal,
// connect it.
LLBoundListener bound = mSignal->connect(newNode, listener);
mConnections[name] = bound;
LLBoundListener bound = mSignal->connect(nodePosition, listener);
if (!name.empty())
{ // note that we are not tracking anonymous listeners here either.
// This means that it is the caller's responsibility to either assign
// to an LLTempBoundListener (scoped_connection) or manually disconnect
// when done.
mConnections[name] = bound;
}
return bound;
}
@ -611,7 +628,7 @@ bool LLListenerOrPumpName::operator()(const LLSD& event) const
{
if (! mListener)
{
BOOST_THROW_EXCEPTION(Empty("attempting to call uninitialized"));
LLTHROW(Empty("attempting to call uninitialized"));
}
return (*mListener)(event);
}

View File

@ -95,12 +95,32 @@ struct LLStopWhenHandled
result_type operator()(InputIterator first, InputIterator last) const
{
for (InputIterator si = first; si != last; ++si)
{
if (*si)
{
return true;
}
}
{
try
{
if (*si)
{
return true;
}
}
catch (const LLContinueError&)
{
// We catch LLContinueError here because an LLContinueError-
// based exception means the viewer as a whole should carry on
// to the best of our ability. Therefore subsequent listeners
// on the same LLEventPump should still receive this event.
// The iterator passed to a boost::signals2 Combiner is very
// clever, but provides no contextual information. We would
// very much like to be able to log the name of the LLEventPump
// plus the name of this particular listener, but alas.
LOG_UNHANDLED_EXCEPTION("LLEventPump");
}
// We do NOT catch (...) here because we might as well let it
// propagate out to the generic handler. If we were able to log
// context information here, that would be great, but we can't, so
// there's no point.
}
return false;
}
};
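In effect (a hedged sketch; the listeners below are made up), one misbehaving listener no longer prevents later listeners on the same pump from seeing the event:
bool listenerA(const LLSD&)
{
    // Logged by the combiner above via LOG_UNHANDLED_EXCEPTION; does not unwind
    // out of LLEventPump::post().
    LLTHROW(LLContinueError("listenerA could not handle the event"));
}

bool listenerB(const LLSD& event)
{
    LL_INFOS() << "listenerB still received: " << event << LL_ENDL;
    return false; // not handled; let others see it too
}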
@ -365,6 +385,8 @@ typedef boost::signals2::trackable LLEventTrackable;
class LL_COMMON_API LLEventPump: public LLEventTrackable
{
public:
static const std::string ANONYMOUS; // constant for anonymous listeners.
/**
* Exception thrown by LLEventPump(). You are trying to instantiate an
* LLEventPump (subclass) using the same name as some other instance, and
@ -476,6 +498,12 @@ public:
* instantiate your listener, then passing the same name on each listen()
* call, allows us to optimize away the second and subsequent dependency
* sorts.
*
* If name is set to LLEventPump::ANONYMOUS, listen() will bypass the entire
* dependency and ordering calculation. In this case, it is critical that
* the result be assigned to an LLTempBoundListener, or that the listener be
* manually disconnected when no longer needed, since there will be no
* way to find and disconnect this listener later.
*
* If (as is typical) you pass a <tt>boost::bind()</tt> expression as @a
* listener, listen() will inspect the components of that expression. If a
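A hedged sketch of the anonymous-listener path described above (the pump name and callback are illustrative; onRegionChange is assumed to be a bool(const LLSD&) function):
// Listening anonymously skips the name/ordering machinery entirely, so the
// returned connection is the ONLY handle to this listener -- keep it in an
// LLTempBoundListener (or disconnect it yourself) or it can never be removed.
LLEventPump& pump = LLEventPumps::instance().obtain("RegionChange"); // illustrative pump name
LLTempBoundListener connection =
    pump.listen(LLEventPump::ANONYMOUS, boost::bind(&onRegionChange, _1));
// 'connection' disconnects automatically when it goes out of scope.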

View File

@ -0,0 +1,55 @@
/**
* @file llexception.cpp
* @author Nat Goodspeed
* @date 2016-08-12
* @brief Implementation for llexception.
*
* $LicenseInfo:firstyear=2016&license=viewerlgpl$
* Copyright (c) 2016, Linden Research, Inc.
* $/LicenseInfo$
*/
// Precompiled header
#include "linden_common.h"
// associated header
#include "llexception.h"
// STL headers
// std headers
#include <typeinfo>
// external library headers
#include <boost/exception/diagnostic_information.hpp>
// other Linden headers
#include "llerror.h"
#include "llerrorcontrol.h"
namespace {
// used by crash_on_unhandled_exception_() and log_unhandled_exception_()
void log_unhandled_exception_(LLError::ELevel level,
const char* file, int line, const char* pretty_function,
const std::string& context)
{
// log same message but allow caller-specified severity level
LL_VLOGS(level, "LLException") << LLError::abbreviateFile(file)
<< "(" << line << "): Unhandled exception caught in " << pretty_function;
if (! context.empty())
{
LL_CONT << ": " << context;
}
LL_CONT << ":\n" << boost::current_exception_diagnostic_information() << LL_ENDL;
}
}
void crash_on_unhandled_exception_(const char* file, int line, const char* pretty_function,
const std::string& context)
{
// LL_ERRS() terminates and propagates message into crash dump.
log_unhandled_exception_(LLError::LEVEL_ERROR, file, line, pretty_function, context);
}
void log_unhandled_exception_(const char* file, int line, const char* pretty_function,
const std::string& context)
{
// Use LL_WARNS() because we seriously do not expect this to happen
// routinely, but we DO expect to return from this function.
log_unhandled_exception_(LLError::LEVEL_WARN, file, line, pretty_function, context);
}

View File

@ -14,22 +14,24 @@
#include <stdexcept>
#include <boost/exception/exception.hpp>
#include <boost/throw_exception.hpp>
#include <boost/current_function.hpp>
// "Found someone who can comfort me
// But there are always exceptions..."
// - Empty Pages, Traffic, from John Barleycorn (1970)
// https://www.youtube.com/watch?v=dRH0CGVK7ic
/**
* LLException is intended as the common base class from which all
* viewer-specific exceptions are derived. It is itself a subclass of
* boost::exception; use catch (const boost::exception& e) clause to log the
* string from boost::diagnostic_information(e).
* viewer-specific exceptions are derived. Rationale for why it's derived from
* both std::exception and boost::exception is explained in
* tests/llexception_test.cpp.
*
* Since it is also derived from std::exception, a generic catch (const
* std::exception&) should also work, though what() is unlikely to be as
* informative as boost::diagnostic_information().
*
* Please use BOOST_THROW_EXCEPTION()
* http://www.boost.org/doc/libs/release/libs/exception/doc/BOOST_THROW_EXCEPTION.html
* to throw viewer exceptions whenever possible. This enriches the exception's
* diagnostic_information() with the source file, line and containing function
* of the BOOST_THROW_EXCEPTION() macro.
* boost::current_exception_diagnostic_information() is quite wonderful: if
* all we need to do with an exception is log it, in most places we should
* catch (...) and log boost::current_exception_diagnostic_information().
* See CRASH_ON_UNHANDLED_EXCEPTION() and LOG_UNHANDLED_EXCEPTION() below.
*
* There may be circumstances in which it would be valuable to distinguish an
* exception explicitly thrown by viewer code from an exception thrown by
@ -60,4 +62,24 @@ struct LLContinueError: public LLException
{}
};
/**
* Please use LLTHROW() to throw viewer exceptions whenever possible. This
* enriches the exception's diagnostic_information() with the source file,
* line and containing function of the LLTHROW() macro.
*/
// Currently we implement that using BOOST_THROW_EXCEPTION(). Wrap it in
// LLTHROW() in case we ever want to revisit that implementation decision.
#define LLTHROW(x) BOOST_THROW_EXCEPTION(x)
/// Call this macro from a catch (...) clause
#define CRASH_ON_UNHANDLED_EXCEPTION(CONTEXT) \
crash_on_unhandled_exception_(__FILE__, __LINE__, BOOST_CURRENT_FUNCTION, CONTEXT)
void crash_on_unhandled_exception_(const char*, int, const char*, const std::string&);
/// Call this from a catch (const LLContinueError&) clause, or from a catch
/// (...) clause in which you do NOT want the viewer to crash.
#define LOG_UNHANDLED_EXCEPTION(CONTEXT) \
log_unhandled_exception_(__FILE__, __LINE__, BOOST_CURRENT_FUNCTION, CONTEXT)
void log_unhandled_exception_(const char*, int, const char*, const std::string&);
#endif /* ! defined(LL_LLEXCEPTION_H) */
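A hedged usage sketch tying the pieces above together (MyRecoverableError and doWork() are illustrative names, not part of the viewer):
struct MyRecoverableError : public LLContinueError
{
    MyRecoverableError(const std::string& what) : LLContinueError(what) {}
};

void doWork()
{
    try
    {
        // LLTHROW() records file, line and function in diagnostic_information().
        LLTHROW(MyRecoverableError("widget went missing"));
    }
    catch (const LLContinueError&)
    {
        // Recoverable: log the full diagnostic information and carry on.
        LOG_UNHANDLED_EXCEPTION("doWork");
    }
    catch (...)
    {
        // Anything unexpected: LL_ERRS() inside terminates with context.
        CRASH_ON_UNHANDLED_EXCEPTION("doWork");
    }
}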

View File

@ -296,7 +296,16 @@ LL_FORCE_INLINE BlockTimer::BlockTimer(BlockTimerStatHandle& timer)
{
#if LL_FAST_TIMER_ON
BlockTimerStackRecord* cur_timer_data = LLThreadLocalSingletonPointer<BlockTimerStackRecord>::getInstance();
if (!cur_timer_data) return;
if (!cur_timer_data)
{
// How likely is it that
// LLThreadLocalSingletonPointer<T>::getInstance() will return NULL?
// Even without researching, what we can say is that if we exit
// without setting mStartTime at all, gcc 4.7 produces (fatal)
// warnings about a possibly-uninitialized data member.
mStartTime = 0;
return;
}
TimeBlockAccumulator& accumulator = timer.getCurrentAccumulator();
accumulator.mActiveCount++;
// keep current parent as long as it is active when we are

View File

@ -21,7 +21,6 @@
#include <boost/bind.hpp>
#include <boost/scoped_ptr.hpp>
#include <boost/tokenizer.hpp>
#include <boost/throw_exception.hpp>
// other Linden headers
#include "llerror.h"
#include "llstring.h"
@ -34,6 +33,7 @@
#include "lltimer.h"
#include "lluuid.h"
#include "llleaplistener.h"
#include "llexception.h"
#if LL_MSVC
#pragma warning (disable : 4355) // 'this' used in initializer list: yes, intentionally
@ -70,7 +70,7 @@ public:
// Rule out empty vector
if (plugin.empty())
{
BOOST_THROW_EXCEPTION(Error("no plugin command"));
LLTHROW(Error("no plugin command"));
}
// Don't leave desc empty either, but in this case, if we weren't
@ -113,7 +113,7 @@ public:
// If that didn't work, no point in keeping this LLLeap object.
if (! mChild)
{
BOOST_THROW_EXCEPTION(Error(STRINGIZE("failed to run " << mDesc)));
LLTHROW(Error(STRINGIZE("failed to run " << mDesc)));
}
// Okay, launch apparently worked. Change our mDonePump listener.

View File

@ -110,11 +110,15 @@ template <typename T> T* LL_NEXT_ALIGNED_ADDRESS_64(T* address)
#if defined(LL_WINDOWS)
return _aligned_malloc(size, align);
#else
char* aligned = NULL;
void* mem = malloc( size + (align - 1) + sizeof(void*) );
char* aligned = ((char*)mem) + sizeof(void*);
aligned += align - ((uintptr_t)aligned & (align - 1));
if (mem)
{
aligned = ((char*)mem) + sizeof(void*);
aligned += align - ((uintptr_t)aligned & (align - 1));
((void**)aligned)[-1] = mem;
((void**)aligned)[-1] = mem;
}
return aligned;
#endif
}
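A brief hedged walk-through of the fallback arithmetic above, with made-up addresses:
// Suppose align == 16, sizeof(void*) == 8 and malloc() happens to return 0x1000:
//   mem      = malloc(size + 15 + 8)              -> 0x1000
//   aligned  = (char*)mem + 8                     -> 0x1008
//   aligned += 16 - (0x1008 & 15) = 16 - 8 = 8    -> 0x1010 (16-byte aligned)
//   ((void**)aligned)[-1] = mem                      stores 0x1000 at 0x1008
// The original pointer parked just below the aligned address is what the matching
// aligned-free path needs in order to call free() on the real allocation.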

View File

@ -34,12 +34,12 @@
#include "llapr.h"
#include "apr_signal.h"
#include "llevents.h"
#include "llexception.h"
#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/asio/streambuf.hpp>
#include <boost/asio/buffers_iterator.hpp>
#include <boost/throw_exception.hpp>
#include <iostream>
#include <stdexcept>
#include <limits>
@ -531,9 +531,8 @@ LLProcess::LLProcess(const LLSDOrParams& params):
if (! params.validateBlock(true))
{
BOOST_THROW_EXCEPTION(
LLProcessError(STRINGIZE("not launched: failed parameter validation\n"
<< LLSDNotationStreamer(params))));
LLTHROW(LLProcessError(STRINGIZE("not launched: failed parameter validation\n"
<< LLSDNotationStreamer(params))));
}
mPostend = params.postend;
@ -598,11 +597,10 @@ LLProcess::LLProcess(const LLSDOrParams& params):
}
else
{
BOOST_THROW_EXCEPTION(
LLProcessError(STRINGIZE("For " << params.executable()
<< ": unsupported FileParam for " << which
<< ": type='" << fparam.type()
<< "', name='" << fparam.name() << "'")));
LLTHROW(LLProcessError(STRINGIZE("For " << params.executable()
<< ": unsupported FileParam for " << which
<< ": type='" << fparam.type()
<< "', name='" << fparam.name() << "'")));
}
}
// By default, pass APR_NO_PIPE for unspecified slots.
@ -681,7 +679,7 @@ LLProcess::LLProcess(const LLSDOrParams& params):
if (ll_apr_warn_status(apr_proc_create(&mProcess, argv[0], &argv[0], NULL, procattr,
gAPRPoolp)))
{
BOOST_THROW_EXCEPTION(LLProcessError(STRINGIZE(params << " failed")));
LLTHROW(LLProcessError(STRINGIZE(params << " failed")));
}
// arrange to call status_callback()
@ -1066,7 +1064,7 @@ PIPETYPE& LLProcess::getPipe(FILESLOT slot)
PIPETYPE* wp = getPipePtr<PIPETYPE>(error, slot);
if (! wp)
{
BOOST_THROW_EXCEPTION(NoPipe(error));
LLTHROW(NoPipe(error));
}
return *wp;
}

View File

@ -242,7 +242,6 @@ inline T* get_ptr_in_map(const std::map<K,T*>& inmap, const K& key)
template <typename K, typename T>
inline bool is_in_map(const std::map<K,T>& inmap, const K& key)
{
typedef typename std::map<K,T>::const_iterator map_iter;
if(inmap.find(key) == inmap.end())
{
return false;

View File

@ -26,8 +26,8 @@
#include "linden_common.h"
#include <apr_pools.h>
#include <apr_queue.h>
#include <boost/throw_exception.hpp>
#include "llthreadsafequeue.h"
#include "llexception.h"
@ -42,13 +42,13 @@ LLThreadSafeQueueImplementation::LLThreadSafeQueueImplementation(apr_pool_t * po
{
if(mOwnsPool) {
apr_status_t status = apr_pool_create(&mPool, 0);
if(status != APR_SUCCESS) BOOST_THROW_EXCEPTION(LLThreadSafeQueueError("failed to allocate pool"));
if(status != APR_SUCCESS) LLTHROW(LLThreadSafeQueueError("failed to allocate pool"));
} else {
; // No op.
}
apr_status_t status = apr_queue_create(&mQueue, capacity, mPool);
if(status != APR_SUCCESS) BOOST_THROW_EXCEPTION(LLThreadSafeQueueError("failed to allocate queue"));
if(status != APR_SUCCESS) LLTHROW(LLThreadSafeQueueError("failed to allocate queue"));
}
@ -69,9 +69,9 @@ void LLThreadSafeQueueImplementation::pushFront(void * element)
apr_status_t status = apr_queue_push(mQueue, element);
if(status == APR_EINTR) {
BOOST_THROW_EXCEPTION(LLThreadSafeQueueInterrupt());
LLTHROW(LLThreadSafeQueueInterrupt());
} else if(status != APR_SUCCESS) {
BOOST_THROW_EXCEPTION(LLThreadSafeQueueError("push failed"));
LLTHROW(LLThreadSafeQueueError("push failed"));
} else {
; // Success.
}
@@ -89,9 +89,9 @@ void * LLThreadSafeQueueImplementation::popBack(void)
apr_status_t status = apr_queue_pop(mQueue, &element);
if(status == APR_EINTR) {
BOOST_THROW_EXCEPTION(LLThreadSafeQueueInterrupt());
LLTHROW(LLThreadSafeQueueInterrupt());
} else if(status != APR_SUCCESS) {
BOOST_THROW_EXCEPTION(LLThreadSafeQueueError("pop failed"));
LLTHROW(LLThreadSafeQueueError("pop failed"));
} else {
return element;
}

View File

@@ -257,7 +257,13 @@ F64Kilobytes Recording::getMean(const StatType<MemAccumulator>& stat)
if (active_accumulator && active_accumulator->mSize.hasValue())
{
return F64Bytes(lerp(accumulator.mSize.getMean(), active_accumulator->mSize.getMean(), active_accumulator->mSize.getSampleCount() / (accumulator.mSize.getSampleCount() + active_accumulator->mSize.getSampleCount())));
F32 t = 0.0f;
S32 div = accumulator.mSize.getSampleCount() + active_accumulator->mSize.getSampleCount();
if (div > 0)
{
t = active_accumulator->mSize.getSampleCount() / div;
}
return F64Bytes(lerp(accumulator.mSize.getMean(), active_accumulator->mSize.getMean(), t));
}
else
{
@@ -426,7 +432,13 @@ F64 Recording::getMean( const StatType<SampleAccumulator>& stat )
const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
if (active_accumulator && active_accumulator->hasValue())
{
return lerp(accumulator.getMean(), active_accumulator->getMean(), active_accumulator->getSampleCount() / (accumulator.getSampleCount() + active_accumulator->getSampleCount()));
F32 t = 0.0f;
S32 div = accumulator.getSampleCount() + active_accumulator->getSampleCount();
if (div > 0)
{
t = active_accumulator->getSampleCount() / div;
}
return lerp(accumulator.getMean(), active_accumulator->getMean(), t);
}
else
{
@@ -506,7 +518,13 @@ F64 Recording::getMean( const StatType<EventAccumulator>& stat )
const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
if (active_accumulator && active_accumulator->hasValue())
{
return lerp(accumulator.getMean(), active_accumulator->getMean(), active_accumulator->getSampleCount() / (accumulator.getSampleCount() + active_accumulator->getSampleCount()));
F32 t = 0.0f;
S32 div = accumulator.getSampleCount() + active_accumulator->getSampleCount();
if (div > 0)
{
t = active_accumulator->getSampleCount() / div;
}
return lerp(accumulator.getMean(), active_accumulator->getMean(), t);
}
else
{
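All three hunks in this file apply the same guard: blend the stored mean with the active accumulator's mean only when the combined sample count is non-zero. A standalone sketch of that pattern (note the explicit cast before dividing, since the counts are integers):

#include <cstdint>
#include <iostream>

// Blend two running means, weighting by the active side's share of samples.
// Falls back to the stored mean when there are no samples at all.
inline double blend_means(double stored_mean, int32_t stored_count,
                          double active_mean, int32_t active_count)
{
    double t = 0.0;
    const int32_t div = stored_count + active_count;
    if (div > 0)
    {
        t = static_cast<double>(active_count) / div;    // avoid integer truncation
    }
    return stored_mean + (active_mean - stored_mean) * t;  // lerp(stored, active, t)
}

int main()
{
    std::cout << blend_means(10.0, 3, 20.0, 1) << '\n';  // 12.5
    std::cout << blend_means(10.0, 0, 20.0, 0) << '\n';  // 10 (guarded path)
    return 0;
}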

View File

@@ -205,9 +205,9 @@ void LLUriParser::glue(std::string& uri) const
uri = first_part + second_part;
}
void LLUriParser::glueFirst(std::string& uri) const
void LLUriParser::glueFirst(std::string& uri, bool use_scheme) const
{
if (mScheme.size())
if (use_scheme && mScheme.size())
{
uri = mScheme;
uri += "://";

View File

@@ -60,7 +60,7 @@ public:
void extractParts();
void glue(std::string& uri) const;
void glueFirst(std::string& uri) const;
void glueFirst(std::string& uri, bool use_scheme = true) const;
void glueSecond(std::string& uri) const;
bool test() const;
S32 normalize();
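A brief usage sketch of the new parameter: use_scheme defaults to true, preserving the old behavior, while false reassembles the first part of the URI without the "scheme://" prefix. The constructor call and exact output below are assumptions for illustration; only the declarations above appear in this diff:

// Hypothetical usage of LLUriParser; constructor signature assumed.
LLUriParser up("https://secondlife.com/my/page");
up.extractParts();

std::string with_scheme, without_scheme;
up.glueFirst(with_scheme);            // default: result starts with "https://"
up.glueFirst(without_scheme, false);  // new: same first part, scheme omitted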

View File

@@ -83,7 +83,7 @@ unsigned int decode( char const * fiveChars ) throw( bad_input_data )
unsigned int ret = 0;
for( int ix = 0; ix < 5; ++ix ) {
char * s = strchr( encodeTable, fiveChars[ ix ] );
if( s == 0 ) BOOST_THROW_EXCEPTION(bad_input_data());
if( s == 0 ) LLTHROW(bad_input_data());
ret = ret * 85 + (s-encodeTable);
}
return ret;
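Exception change aside, the decoder above is plain positional base-85 arithmetic: each of the five characters contributes a digit in 0..84, and 85^5 exceeds 2^32, so five digits always cover a 32-bit value. A tiny sketch of just the arithmetic, with the character-to-digit lookup (the encodeTable/strchr step above) factored out and assumed:

#include <cstdint>
#include <iostream>

// digits[] are the 0..84 values already looked up from the encode table.
inline uint32_t decode85_sketch(const int digits[5])
{
    uint32_t ret = 0;
    for (int ix = 0; ix < 5; ++ix)
    {
        ret = ret * 85 + digits[ix];
    }
    return ret;
}

int main()
{
    const int digits[5] = {1, 2, 3, 4, 5};
    // ((((1*85)+2)*85+3)*85+4)*85+5 = 53450895
    std::cout << decode85_sketch(digits) << '\n';
    return 0;
}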

View File

@@ -237,8 +237,21 @@ namespace tut
void ErrorTestObject::test<4>()
// file abbreviation
{
std::string thisFile = __FILE__;
std::string abbreviateFile = LLError::abbreviateFile(thisFile);
std::string prev, abbreviateFile = __FILE__;
do
{
prev = abbreviateFile;
abbreviateFile = LLError::abbreviateFile(abbreviateFile);
// __FILE__ is assumed to end with
// indra/llcommon/tests/llerror_test.cpp. This test used to call
// abbreviateFile() exactly once, then check below whether it
// still contained the string 'indra'. That fails if the FIRST
// part of the pathname also contains indra! Certain developer
// machine images put local directory trees under
// /ngi-persist/indra, which is where we observe the problem. So
// now, keep calling abbreviateFile() until it returns its
// argument unchanged, THEN check.
} while (abbreviateFile != prev);
ensure_ends_with("file name abbreviation",
abbreviateFile,
@@ -551,9 +564,9 @@ namespace tut
function;
writeReturningLocationAndFunction(location, function);
ensure_equals("order is location time type function message",
ensure_equals("order is time location type function message",
message(0),
location + roswell() + " INFO: " + function + ": apple");
roswell() + " INFO: " + location + function + ": apple");
}
template<> template<>

View File

@@ -0,0 +1,308 @@
/**
* @file llexception_test.cpp
* @author Nat Goodspeed
* @date 2016-08-12
* @brief Tests for throwing exceptions.
*
* This isn't a regression test: it doesn't need to be run every build, which
* is why the corresponding line in llcommon/CMakeLists.txt is commented out.
* Rather it's a head-to-head test of what kind of exception information we
* can collect from various combinations of exception base classes, type of
* throw verb and sequences of catch clauses.
*
* This "test" makes no ensure() calls: its output goes to stdout for human
* examination.
*
* As of 2016-08-12 with Boost 1.57, we come to the following conclusions.
* These should probably be re-examined from time to time as we update Boost.
*
* - It is indisputably beneficial to use BOOST_THROW_EXCEPTION() rather than
* plain throw. The macro annotates the exception object with the filename,
* line number and function name from which the exception was thrown.
*
* - That being the case, deriving only from boost::exception isn't an option.
* Every exception object passed to BOOST_THROW_EXCEPTION() must be derived
* directly or indirectly from std::exception. The only question is whether
* to also derive from boost::exception. We decided to derive LLException
* from both, as it makes message output slightly cleaner, but this is a
* trivial reason: if a strong reason emerges to prefer single inheritance,
* dropping the boost::exception base class shouldn't be a problem.
*
* - (As you will have guessed, ridiculous things like a char* or int or a
* class derived from neither boost::exception nor std::exception can only
* be caught by that specific type or (...), and
* boost::current_exception_diagnostic_information() simply throws up its
* hands and confesses utter ignorance. Stay away from such nonsense.)
*
* - But if you derive from std::exception, to nat's surprise,
* boost::current_exception_diagnostic_information() gives as much
* information about exceptions in a catch (...) clause as you can get from
* a specific catch (const std::exception&) clause, notably the concrete
* exception class and the what() string. So instead of a sequence like
*
* try { ... }
* catch (const boost::exception& e) { ... boost-flavored logging ... }
* catch (const std::exception& e) { ... std::exception logging ... }
* catch (...) { ... generic logging ... }
*
* we should be able to get away with only a catch (...) clause that logs
* boost::current_exception_diagnostic_information().
*
* - Going further: boost::current_exception_diagnostic_information() provides
* just as much information even within a std::set_terminate() handler. So
* it might not even be strictly necessary to include a catch (...) clause
* since the viewer does use std::set_terminate().
*
* - (We might consider adding a catch (int) clause because Kakadu internally
* throws ints, and who knows if one of those might leak out. If it does,
* boost::current_exception_diagnostic_information() can do nothing with it.
* A catch (int) clause could at least log the value and rethrow.)
*
* $LicenseInfo:firstyear=2016&license=viewerlgpl$
* Copyright (c) 2016, Linden Research, Inc.
* $/LicenseInfo$
*/
// Precompiled header
#include "linden_common.h"
// associated header
#include "llexception.h"
// STL headers
// std headers
#include <typeinfo>
// external library headers
#include <boost/throw_exception.hpp>
// other Linden headers
#include "../test/lltut.h"
// helper for display output
// usage: std::cout << center(some string value, fill char, width) << std::endl;
// (assumes it's the only thing on that particular line)
struct center
{
center(const std::string& label, char fill, std::size_t width):
mLabel(label),
mFill(fill),
mWidth(width)
{}
// Use friend declaration not because we need to grant access, but because
// it lets us declare a free operator like a member function.
friend std::ostream& operator<<(std::ostream& out, const center& ctr)
{
std::size_t padded = ctr.mLabel.length() + 2;
std::size_t left = (ctr.mWidth - padded) / 2;
std::size_t right = ctr.mWidth - left - padded;
return out << std::string(left, ctr.mFill) << ' ' << ctr.mLabel << ' '
<< std::string(right, ctr.mFill);
}
std::string mLabel;
char mFill;
std::size_t mWidth;
};
/*****************************************************************************
* Four kinds of exceptions: derived from boost::exception, from
* std::exception, from both, from neither
*****************************************************************************/
// Interestingly, we can't use this variant with BOOST_THROW_EXCEPTION()
// (which we want) -- we reach a failure topped by this comment:
// //All boost exceptions are required to derive from std::exception,
// //to ensure compatibility with BOOST_NO_EXCEPTIONS.
struct FromBoost: public boost::exception
{
FromBoost(const std::string& what): mWhat(what) {}
~FromBoost() throw() {}
std::string what() const { return mWhat; }
std::string mWhat;
};
struct FromStd: public std::runtime_error
{
FromStd(const std::string& what): std::runtime_error(what) {}
};
struct FromBoth: public boost::exception, public std::runtime_error
{
FromBoth(const std::string& what): std::runtime_error(what) {}
};
// Same deal with FromNeither: can't use with BOOST_THROW_EXCEPTION().
struct FromNeither
{
FromNeither(const std::string& what): mWhat(what) {}
std::string what() const { return mWhat; }
std::string mWhat;
};
/*****************************************************************************
* Two kinds of throws: plain throw and BOOST_THROW_EXCEPTION()
*****************************************************************************/
template <typename EXC>
void plain_throw(const std::string& what)
{
throw EXC(what);
}
template <typename EXC>
void boost_throw(const std::string& what)
{
BOOST_THROW_EXCEPTION(EXC(what));
}
// Okay, for completeness, functions that throw non-class values. We wouldn't
// even deign to consider these if we hadn't found examples in our own source
// code! (Note that Kakadu's internal exception support is still based on
// throwing ints.)
void throw_char_ptr(const std::string& what)
{
throw what.c_str(); // umm...
}
void throw_int(const std::string& what)
{
throw int(what.length());
}
/*****************************************************************************
* Three sequences of catch clauses:
* boost::exception then ...,
* std::exception then ...,
* or just ...
*****************************************************************************/
void catch_boost_dotdotdot(void (*thrower)(const std::string&), const std::string& what)
{
try
{
thrower(what);
}
catch (const boost::exception& e)
{
std::cout << "catch (const boost::exception& e)" << std::endl;
std::cout << "e is " << typeid(e).name() << std::endl;
std::cout << "boost::diagnostic_information(e):\n'"
<< boost::diagnostic_information(e) << "'" << std::endl;
// no way to report e.what()
}
catch (...)
{
std::cout << "catch (...)" << std::endl;
std::cout << "boost::current_exception_diagnostic_information():\n'"
<< boost::current_exception_diagnostic_information() << "'"
<< std::endl;
}
}
void catch_std_dotdotdot(void (*thrower)(const std::string&), const std::string& what)
{
try
{
thrower(what);
}
catch (const std::exception& e)
{
std::cout << "catch (const std::exception& e)" << std::endl;
std::cout << "e is " << typeid(e).name() << std::endl;
std::cout << "boost::diagnostic_information(e):\n'"
<< boost::diagnostic_information(e) << "'" << std::endl;
std::cout << "e.what: '"
<< e.what() << "'" << std::endl;
}
catch (...)
{
std::cout << "catch (...)" << std::endl;
std::cout << "boost::current_exception_diagnostic_information():\n'"
<< boost::current_exception_diagnostic_information() << "'"
<< std::endl;
}
}
void catch_dotdotdot(void (*thrower)(const std::string&), const std::string& what)
{
try
{
thrower(what);
}
catch (...)
{
std::cout << "catch (...)" << std::endl;
std::cout << "boost::current_exception_diagnostic_information():\n'"
<< boost::current_exception_diagnostic_information() << "'"
<< std::endl;
}
}
/*****************************************************************************
* Try a particular kind of throw against each of three catch sequences
*****************************************************************************/
void catch_several(void (*thrower)(const std::string&), const std::string& what)
{
std::cout << std::string(20, '-') << "catch_boost_dotdotdot(" << what << ")" << std::endl;
catch_boost_dotdotdot(thrower, "catch_boost_dotdotdot(" + what + ")");
std::cout << std::string(20, '-') << "catch_std_dotdotdot(" << what << ")" << std::endl;
catch_std_dotdotdot(thrower, "catch_std_dotdotdot(" + what + ")");
std::cout << std::string(20, '-') << "catch_dotdotdot(" << what << ")" << std::endl;
catch_dotdotdot(thrower, "catch_dotdotdot(" + what + ")");
}
/*****************************************************************************
* For a particular kind of exception, try both kinds of throw against all
* three catch sequences
*****************************************************************************/
template <typename EXC>
void catch_both_several(const std::string& what)
{
std::cout << std::string(20, '*') << "plain_throw<" << what << ">" << std::endl;
catch_several(plain_throw<EXC>, "plain_throw<" + what + ">");
std::cout << std::string(20, '*') << "boost_throw<" << what << ">" << std::endl;
catch_several(boost_throw<EXC>, "boost_throw<" + what + ">");
}
/*****************************************************************************
* TUT
*****************************************************************************/
namespace tut
{
struct llexception_data
{
};
typedef test_group<llexception_data> llexception_group;
typedef llexception_group::object object;
llexception_group llexceptiongrp("llexception");
template<> template<>
void object::test<1>()
{
set_test_name("throwing exceptions");
// For each kind of exception, try both kinds of throw against all
// three catch sequences
std::size_t margin = 72;
std::cout << center("FromStd", '=', margin) << std::endl;
catch_both_several<FromStd>("FromStd");
std::cout << center("FromBoth", '=', margin) << std::endl;
catch_both_several<FromBoth>("FromBoth");
std::cout << center("FromBoost", '=', margin) << std::endl;
// can't throw with BOOST_THROW_EXCEPTION(), just use catch_several()
catch_several(plain_throw<FromBoost>, "plain_throw<FromBoost>");
std::cout << center("FromNeither", '=', margin) << std::endl;
// can't throw this with BOOST_THROW_EXCEPTION() either
catch_several(plain_throw<FromNeither>, "plain_throw<FromNeither>");
std::cout << center("const char*", '=', margin) << std::endl;
// We don't expect BOOST_THROW_EXCEPTION() to throw anything so daft
// as a const char* or an int, so don't bother with
// catch_both_several() -- just catch_several().
catch_several(throw_char_ptr, "throw_char_ptr");
std::cout << center("int", '=', margin) << std::endl;
catch_several(throw_int, "throw_int");
}
} // namespace tut
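Condensing the conclusions in this new file's header comment into one hedged sketch: derive from std::exception (optionally also boost::exception), throw with BOOST_THROW_EXCEPTION() so the site is annotated, and let a single catch (...) clause report everything via boost::current_exception_diagnostic_information():

#include <stdexcept>
#include <string>
#include <iostream>
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/throw_exception.hpp>

// Same shape as FromBoth above: both bases, what() supplied by std::runtime_error.
struct DemoException: public boost::exception, public std::runtime_error
{
    DemoException(const std::string& what): std::runtime_error(what) {}
};

int main()
{
    try
    {
        BOOST_THROW_EXCEPTION(DemoException("demonstration failure"));
    }
    catch (...)
    {
        // One clause suffices: the diagnostic string includes the throw site,
        // the concrete exception type and the what() text.
        std::cout << boost::current_exception_diagnostic_information() << std::endl;
    }
    return 0;
}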

View File

@@ -110,10 +110,7 @@ namespace tut
// finding indra/lib/python. Use our __FILE__, with
// raw-string syntax to deal with Windows pathnames.
"mydir = os.path.dirname(r'" << __FILE__ << "')\n"
// We expect mydir to be .../indra/llcommon/tests.
"sys.path.insert(0,\n"
" os.path.join(mydir, os.pardir, os.pardir, 'lib', 'python'))\n"
"from indra.base import llsd\n"
"from llbase import llsd\n"
"\n"
"class ProtocolError(Exception):\n"
" def __init__(self, msg, data):\n"

View File

@@ -1518,10 +1518,7 @@ namespace tut
// scanner.
import_llsd("import os.path\n"
"import sys\n"
"sys.path.insert(0,\n"
" os.path.join(os.path.dirname(r'" __FILE__ "'),\n"
" os.pardir, os.pardir, 'lib', 'python'))\n"
"from indra.base import llsd\n")
"from llbase import llsd\n")
{}
~TestPythonCompatible() {}

View File

@@ -40,7 +40,6 @@
#include <boost/bind.hpp>
#include <boost/noncopyable.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/throw_exception.hpp>
#include <list>
#include <string>
@@ -79,7 +78,7 @@ struct WrapLLErrs
error = message;
// Also throw an appropriate exception since calling code is likely to
// assume that control won't continue beyond LL_ERRS.
BOOST_THROW_EXCEPTION(FatalException(message));
LLTHROW(FatalException(message));
}
std::string error;

View File

@@ -42,10 +42,8 @@ except ImportError:
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
mydir = os.path.dirname(__file__) # expected to be .../indra/llcorehttp/tests/
sys.path.insert(0, os.path.join(mydir, os.pardir, os.pardir, "lib", "python"))
from indra.util.fastest_elementtree import parse as xml_parse
from indra.base import llsd
from llbase.fastest_elementtree import parse as xml_parse
from llbase import llsd
from testrunner import freeport, run, debug, VERBOSE
class TestHTTPRequestHandler(BaseHTTPRequestHandler):

View File

@@ -1426,7 +1426,12 @@ void LLImageRaw::copyScaled( LLImageRaw* src )
bool LLImageRaw::scale( S32 new_width, S32 new_height, bool scale_image_data )
{
llassert((1 == getComponents()) || (3 == getComponents()) || (4 == getComponents()) );
S32 components = getComponents();
if (! ((1 == components) || (3 == components) || (4 == components) ))
{
LL_WARNS() << "Invalid getComponents value (" << components << ")" << LL_ENDL;
return false;
}
if (isBufferInvalid())
{
@@ -1446,67 +1451,53 @@ bool LLImageRaw::scale( S32 new_width, S32 new_height, bool scale_image_data )
if (scale_image_data)
{
/*
S32 temp_data_size = old_width * new_height * getComponents();
llassert_always(temp_data_size > 0);
std::vector<U8> temp_buffer(temp_data_size);
S32 new_data_size = new_width * new_height * components;
// Vertical
for( S32 col = 0; col < old_width; col++ )
{
copyLineScaled( getData() + (getComponents() * col), &temp_buffer[0] + (getComponents() * col), old_height, new_height, old_width, old_width );
if (new_data_size > 0)
{
U8 *new_data = (U8*)ALLOCATE_MEM(LLImageBase::getPrivatePool(), new_data_size);
if(NULL == new_data)
{
return false;
}
bilinear_scale(getData(), old_width, old_height, components, old_width*components, new_data, new_width, new_height, components, new_width*components);
setDataAndSize(new_data, new_width, new_height, components);
}
deleteData();
U8* new_buffer = allocateDataSize(new_width, new_height, getComponents());
// Horizontal
for( S32 row = 0; row < new_height; row++ )
{
copyLineScaled( &temp_buffer[0] + (getComponents() * old_width * row), new_buffer + (getComponents() * new_width * row), old_width, new_width, 1, 1 );
}
*/
S32 new_data_size = new_width * new_height * getComponents();
llassert_always(new_data_size > 0);
U8 *new_data = (U8*)ALLOCATE_MEM(LLImageBase::getPrivatePool(), new_data_size);
if(NULL == new_data)
{
return false;
}
bilinear_scale(getData(), old_width, old_height, getComponents(), old_width*getComponents(), new_data, new_width, new_height, getComponents(), new_width*getComponents());
setDataAndSize(new_data, new_width, new_height, getComponents());
}
else
{
// copy out existing image data
S32 temp_data_size = old_width * old_height * getComponents();
S32 temp_data_size = old_width * old_height * components;
std::vector<U8> temp_buffer(temp_data_size);
memcpy(&temp_buffer[0], getData(), temp_data_size);
// allocate new image data, will delete old data
U8* new_buffer = allocateDataSize(new_width, new_height, getComponents());
U8* new_buffer = allocateDataSize(new_width, new_height, components);
for( S32 row = 0; row < new_height; row++ )
{
if (row < old_height)
{
memcpy(new_buffer + (new_width * row * getComponents()), &temp_buffer[0] + (old_width * row * getComponents()), getComponents() * llmin(old_width, new_width));
if (old_width < new_width)
{
// pad out rest of row with black
memset(new_buffer + (getComponents() * ((new_width * row) + old_width)), 0, getComponents() * (new_width - old_width));
}
}
else
{
// pad remaining rows with black
memset(new_buffer + (new_width * row * getComponents()), 0, new_width * getComponents());
}
}
if (!new_buffer)
{
LL_WARNS() << "Failed to allocate new image data buffer" << LL_ENDL;
return false;
}
for( S32 row = 0; row < new_height; row++ )
{
if (row < old_height)
{
memcpy(new_buffer + (new_width * row * components), &temp_buffer[0] + (old_width * row * components), components * llmin(old_width, new_width));
if (old_width < new_width)
{
// pad out rest of row with black
memset(new_buffer + (components * ((new_width * row) + old_width)), 0, components * (new_width - old_width));
}
}
else
{
// pad remaining rows with black
memset(new_buffer + (new_width * row * components), 0, new_width * components);
}
}
}
return true ;

View File

@@ -32,12 +32,12 @@
#include "llpngwrapper.h"
#include "llexception.h"
#include <boost/throw_exception.hpp>
namespace {
struct PngError: public LLException
// Failure to load an image shouldn't crash the whole viewer.
struct PngError: public LLContinueError
{
PngError(png_const_charp msg): LLException(msg) {}
PngError(png_const_charp msg): LLContinueError(msg) {}
};
} // anonymous namespace
@@ -88,7 +88,7 @@ BOOL LLPngWrapper::isValidPng(U8* src)
// occurs. We throw PngError and let our try/catch block clean up.
void LLPngWrapper::errorHandler(png_structp png_ptr, png_const_charp msg)
{
BOOST_THROW_EXCEPTION(PngError(msg));
LLTHROW(PngError(msg));
}
// Called by the libpng library when reading (decoding) the PNG file. We
@@ -138,7 +138,7 @@ BOOL LLPngWrapper::readPng(U8* src, S32 dataSize, LLImageRaw* rawImage, ImageInf
this, &errorHandler, NULL);
if (mReadPngPtr == NULL)
{
BOOST_THROW_EXCEPTION(PngError("Problem creating png read structure"));
LLTHROW(PngError("Problem creating png read structure"));
}
// Allocate/initialize the memory for image information.
@@ -297,14 +297,14 @@ BOOL LLPngWrapper::writePng(const LLImageRaw* rawImage, U8* dest)
if (mColorType == -1)
{
BOOST_THROW_EXCEPTION(PngError("Unsupported image: unexpected number of channels"));
LLTHROW(PngError("Unsupported image: unexpected number of channels"));
}
mWritePngPtr = png_create_write_struct(PNG_LIBPNG_VER_STRING,
NULL, &errorHandler, NULL);
if (!mWritePngPtr)
{
BOOST_THROW_EXCEPTION(PngError("Problem creating png write structure"));
LLTHROW(PngError("Problem creating png write structure"));
}
mWriteInfoPtr = png_create_info_struct(mWritePngPtr);

View File

@@ -1,4 +1,4 @@
/**
/**
* @file llimagej2ckdu.cpp
* @brief This is an implementation of JPEG2000 encode/decode using Kakadu
*
@@ -35,18 +35,51 @@
#include "kdu_block_coding.h"
#include <stdexcept>
#include <iostream>
#include "llexception.h"
#include <boost/throw_exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <sstream>
#include <iomanip>
namespace {
// exception used to keep KDU from terminating entire program -- see comments
// in LLKDUMessageError::flush()
struct KDUError: public LLException
// Failure to load an image shouldn't crash the whole viewer.
struct KDUError: public LLContinueError
{
KDUError(const std::string& msg): LLException(msg) {}
KDUError(const std::string& msg): LLContinueError(msg) {}
};
// KDU defines int error codes as hex values, so we should log them in hex
// so we can grep KDU headers for the hex. However those hex values
// generally "happen" to encode big-endian multibyte character sequences,
// e.g. KDU_ERROR_EXCEPTION is 0x6b647545: 'kduE'
// But beware because KDU_NULL_EXCEPTION is simply 0 -- which doesn't
// preclude somebody from throwing it.
std::string report_kdu_exception(kdu_exception mb)
{
std::ostringstream out;
// always report mb in hex
out << "kdu_exception " << std::hex << mb;
// Also display as many chars as are encoded in the kdu_exception
// value. Make a char array; reserve 1 extra byte for nul terminator.
char bytes[sizeof(kdu_exception) + 1];
// Back up through 'bytes'
char *bptr = bytes + sizeof(bytes);
*(--bptr) = '\0';
while (mb)
{
// store low-order byte of mb in next-left char
*(--bptr) = char(mb & 0xFF);
// then shift mb right by one byte
mb >>= 8;
}
// did that produce any characters?
if (*bptr)
{
out << " (" << bptr << ')';
}
return out.str();
}
} // anonymous namespace
// stream kdu_dims to std::ostream
@@ -198,7 +231,7 @@ struct LLKDUMessageError : public LLKDUMessage
// shutdown will NOT engage the behavior described above.
if (end_of_message)
{
throw "KDU throwing an exception";
LLTHROW(KDUError("LLKDUMessageError::flush()"));
}
}
};
@@ -312,10 +345,10 @@ void LLImageJ2CKDU::setupCodeStream(LLImageJ2C &base, bool keep_codestream, ECod
{
// This method is only called from methods that catch KDUError.
// We want to fail the image load, not crash the viewer.
throw KDUError(STRINGIZE("Component " << idx << " dimensions "
LLTHROW(KDUError(STRINGIZE("Component " << idx << " dimensions "
<< other_dims
<< " do not match component 0 dimensions "
<< dims << "!"));
<< dims << "!")));
}
}
@@ -442,9 +475,19 @@ bool LLImageJ2CKDU::initDecode(LLImageJ2C &base, LLImageRaw &raw_image, F32 deco
base.setLastError(msg.what());
return false;
}
catch (kdu_exception kdu_value)
{
// KDU internally throws kdu_exception. It's possible that such an
// exception might leak out into our code. Catch kdu_exception
// specially because boost::current_exception_diagnostic_information()
// could do nothing with it.
base.setLastError(report_kdu_exception(kdu_value));
return false;
}
catch (...)
{
base.setLastError("Unknown J2C error");
base.setLastError("Unknown J2C error: " +
boost::current_exception_diagnostic_information());
return false;
}
@@ -540,9 +583,21 @@ bool LLImageJ2CKDU::decodeImpl(LLImageJ2C &base, LLImageRaw &raw_image, F32 deco
cleanupCodeStream();
return true; // done
}
catch (kdu_exception kdu_value)
{
// KDU internally throws kdu_exception. It's possible that such an
// exception might leak out into our code. Catch kdu_exception
// specially because boost::current_exception_diagnostic_information()
// could do nothing with it.
base.setLastError(report_kdu_exception(kdu_value));
base.decodeFailed();
cleanupCodeStream();
return true; // done
}
catch (...)
{
base.setLastError( "Unknown J2C error" );
base.setLastError("Unknown J2C error: " +
boost::current_exception_diagnostic_information());
base.decodeFailed();
cleanupCodeStream();
return true; // done
@@ -731,9 +786,19 @@ bool LLImageJ2CKDU::encodeImpl(LLImageJ2C &base, const LLImageRaw &raw_image, co
base.setLastError(msg.what());
return false;
}
catch (kdu_exception kdu_value)
{
// KDU internally throws kdu_exception. It's possible that such an
// exception might leak out into our code. Catch kdu_exception
// specially because boost::current_exception_diagnostic_information()
// could do nothing with it.
base.setLastError(report_kdu_exception(kdu_value));
return false;
}
catch( ... )
{
base.setLastError( "Unknown J2C error" );
base.setLastError("Unknown J2C error: " +
boost::current_exception_diagnostic_information());
return false;
}
@@ -755,9 +820,19 @@ bool LLImageJ2CKDU::getMetadata(LLImageJ2C &base)
base.setLastError(msg.what());
return false;
}
catch (kdu_exception kdu_value)
{
// KDU internally throws kdu_exception. It's possible that such an
// exception might leak out into our code. Catch kdu_exception
// specially because boost::current_exception_diagnostic_information()
// could do nothing with it.
base.setLastError(report_kdu_exception(kdu_value));
return false;
}
catch (...)
{
base.setLastError( "Unknown J2C error" );
base.setLastError("Unknown J2C error: " +
boost::current_exception_diagnostic_information());
return false;
}
}
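To make the hex-to-text trick in report_kdu_exception() concrete, here is a standalone sketch that peels the bytes of a code such as 0x6b647545 exactly as the function above does; a plain uint32_t stands in for kdu_exception, which the comments above describe as an int code:

#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>

// Stand-in for report_kdu_exception(): format the code in hex, then decode
// its bytes (high-order first) into printable characters where possible.
std::string report_code(uint32_t mb)
{
    std::ostringstream out;
    out << "kdu_exception " << std::hex << mb;
    char bytes[sizeof(mb) + 1];
    char* bptr = bytes + sizeof(bytes);
    *(--bptr) = '\0';
    while (mb)
    {
        *(--bptr) = char(mb & 0xFF);  // low-order byte lands right-most
        mb >>= 8;
    }
    if (*bptr)
    {
        out << " (" << bptr << ')';
    }
    return out.str();
}

int main()
{
    std::cout << report_code(0x6b647545) << std::endl; // "kdu_exception 6b647545 (kduE)"
    return 0;
}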

Some files were not shown because too many files have changed in this diff.