master
Brad Payne (Vir Linden) 2016-11-16 08:39:41 -05:00
commit d31596db6a
187 changed files with 2362 additions and 7302 deletions

View File

@ -520,3 +520,4 @@ e9d350764dfbf5a46229e627547ef5c1b1eeef00 4.0.2-release
33981d8130f031597b4c7f4c981b18359afb61a0 4.0.7-release
45eaee56883df7a439ed3300c44d3126f7e3a41e 4.0.8-release
b280a1c797a3891e68dbc237e73de9cf19f426e9 4.1.1-release
bfbba2244320dc2ae47758cd7edd8fa3b67dc756 4.1.2-release

View File

@ -1,6 +1,6 @@
Second Life Viewer
====================
This project manages the source code for the
[Second Life](https://www.secondlife.com) Viewer.

View File

@ -1484,11 +1484,11 @@
<key>archive</key>
<map>
<key>hash</key>
<string>db992d58c46c80df7d4d31f8a4784b98</string>
<string>2845033912eb947a1401847ece1469ce</string>
<key>hash_algorithm</key>
<string>md5</string>
<key>url</key>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/317959/arch/Darwin/installer/llceflib-1.5.3.317959-darwin-317959.tar.bz2</string>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/321153/arch/Darwin/installer/llceflib-1.5.3.321153-darwin-321153.tar.bz2</string>
</map>
<key>name</key>
<string>darwin</string>
@ -1498,11 +1498,11 @@
<key>archive</key>
<map>
<key>hash</key>
<string>bb3818628131a99cd789febfad9dc2c2</string>
<string>1156121b4ccbb4aa29bc01f15c589f98</string>
<key>hash_algorithm</key>
<string>md5</string>
<key>url</key>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/317959/arch/CYGWIN/installer/llceflib-1.5.3.317959-windows-317959.tar.bz2</string>
<string>http://automated-builds-secondlife-com.s3.amazonaws.com/hg/repo/3p-llceflib_3p-llceflib/rev/321153/arch/CYGWIN/installer/llceflib-1.5.3.321153-windows-321153.tar.bz2</string>
</map>
<key>name</key>
<string>windows</string>

View File

@ -191,9 +191,19 @@ Ansariel Hiller
STORM-2094
MAINT-5756
MAINT-4677
MAINT-6300
MAINT-6397
MAINT-6432
MAINT-6513
MAINT-6514
MAINT-6552
STORM-2133
MAINT-6511
MAINT-6612
MAINT-6637
MAINT-6636
MAINT-6744
MAINT-6752
Aralara Rajal
Arare Chantilly
CHUIBUG-191
@ -792,6 +802,7 @@ Kitty Barnett
MAINT-6152
MAINT-6153
MAINT-6154
MAINT-6568
Kolor Fall
Komiko Okamoto
Korvel Noh
@ -1024,6 +1035,7 @@ Nicky Dasmijn
OPEN-187
STORM-2010
STORM-2082
MAINT-6665
Nicky Perian
OPEN-1
STORM-1087

View File

@ -83,8 +83,8 @@ if (WINDOWS)
add_definitions(/WX)
endif (NOT VS_DISABLE_FATAL_WARNINGS)
# configure win32 API for windows XP+ compatibility
set(WINVER "0x0501" CACHE STRING "Win32 API Target version (see http://msdn.microsoft.com/en-us/library/aa383745%28v=VS.85%29.aspx)")
# configure Win32 API for Windows Vista+ compatibility
set(WINVER "0x0600" CACHE STRING "Win32 API Target version (see http://msdn.microsoft.com/en-us/library/aa383745%28v=VS.85%29.aspx)")
add_definitions("/DWINVER=${WINVER}" "/D_WIN32_WINNT=${WINVER}")
endif (WINDOWS)

View File

@ -1,27 +0,0 @@
"""\
@file __init__.py
@brief Initialization file for the indra.base module.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""

View File

@ -1,73 +0,0 @@
#!/usr/bin/python
##
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
from indra.base import llsd, lluuid
from datetime import datetime
import cllsd
import time, sys
class myint(int):
pass
values = (
'&<>',
u'\u81acj',
llsd.uri('http://foo<'),
lluuid.UUID(),
llsd.LLSD(['thing']),
1,
myint(31337),
sys.maxint + 10,
llsd.binary('foo'),
[],
{},
{u'f&\u1212': 3},
3.1,
True,
None,
datetime.fromtimestamp(time.time()),
)
def valuator(values):
for v in values:
yield v
longvalues = () # (values, list(values), iter(values), valuator(values))
for v in values + longvalues:
print '%r => %r' % (v, cllsd.llsd_to_xml(v))
a = [[{'a':3}]] * 1000000
s = time.time()
print hash(cllsd.llsd_to_xml(a))
e = time.time()
t1 = e - s
print t1
s = time.time()
print hash(llsd.LLSDXMLFormatter()._format(a))
e = time.time()
t2 = e - s
print t2
print 'Speedup:', t2 / t1

View File

@ -1,266 +0,0 @@
"""\
@file config.py
@brief Utility module for parsing and accessing the indra.xml config file.
$LicenseInfo:firstyear=2006&license=mit$
Copyright (c) 2006-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import copy
import errno
import os
import traceback
import time
import types
from os.path import dirname, getmtime, join, realpath
from indra.base import llsd
_g_config = None
class IndraConfig(object):
"""
IndraConfig loads a 'indra' xml configuration file
and loads into memory. This representation in memory
can get updated to overwrite values or add new values.
The xml configuration file is considered a live file and changes
to the file are checked and reloaded periodically. If a value had
been overwritten via the update or set method, the loaded values
from the file are ignored (the values from the update/set methods
override)
"""
def __init__(self, indra_config_file):
self._indra_config_file = indra_config_file
self._reload_check_interval = 30 # seconds
self._last_check_time = 0
self._last_mod_time = 0
self._config_overrides = {}
self._config_file_dict = {}
self._combined_dict = {}
self._load()
def _load(self):
# if you initialize the IndraConfig with None, no attempt
# is made to load any files
if self._indra_config_file is None:
return
config_file = open(self._indra_config_file)
self._config_file_dict = llsd.parse(config_file.read())
self._combine_dictionaries()
config_file.close()
self._last_mod_time = self._get_last_modified_time()
self._last_check_time = time.time() # now
def _get_last_modified_time(self):
"""
Returns the mtime (last modified time) of the config file,
if such exists.
"""
if self._indra_config_file is not None:
return os.path.getmtime(self._indra_config_file)
return 0
def _combine_dictionaries(self):
self._combined_dict = {}
self._combined_dict.update(self._config_file_dict)
self._combined_dict.update(self._config_overrides)
def _reload_if_necessary(self):
now = time.time()
if (now - self._last_check_time) > self._reload_check_interval:
self._last_check_time = now
try:
modtime = self._get_last_modified_time()
if modtime > self._last_mod_time:
self._load()
except OSError, e:
if e.errno == errno.ENOENT: # file not found
# someone messed with our internal state
# or removed the file
print 'WARNING: Configuration file has been removed ' + (self._indra_config_file)
print 'Disabling reloading of configuration file.'
traceback.print_exc()
self._indra_config_file = None
self._last_check_time = 0
self._last_mod_time = 0
else:
raise # pass the exception along to the caller
def __getitem__(self, key):
self._reload_if_necessary()
return self._combined_dict[key]
def get(self, key, default = None):
try:
return self.__getitem__(key)
except KeyError:
return default
def __setitem__(self, key, value):
"""
Sets the value of the config setting of key to be newval
Once any key/value pair is changed via the set method,
that key/value pair will remain set with that value until
change via the update or set method
"""
self._config_overrides[key] = value
self._combine_dictionaries()
def set(self, key, newval):
return self.__setitem__(key, newval)
def update(self, new_conf):
"""
Load an XML file and apply its map as overrides or additions
to the existing config. Update can be a file or a dict.
Once any key/value pair is changed via the update method,
that key/value pair will remain set with that value until
change via the update or set method
"""
if isinstance(new_conf, dict):
overrides = new_conf
else:
# assuming that it is a filename
config_file = open(new_conf)
overrides = llsd.parse(config_file.read())
config_file.close()
self._config_overrides.update(overrides)
self._combine_dictionaries()
def as_dict(self):
"""
Returns immutable copy of the IndraConfig as a dictionary
"""
return copy.deepcopy(self._combined_dict)
def load(config_xml_file = None):
global _g_config
load_default_files = config_xml_file is None
if load_default_files:
## going from:
## "/opt/linden/indra/lib/python/indra/base/config.py"
## to:
## "/opt/linden/etc/indra.xml"
config_xml_file = realpath(
dirname(realpath(__file__)) + "../../../../../../etc/indra.xml")
try:
_g_config = IndraConfig(config_xml_file)
except IOError:
# Failure to load passed in file
# or indra.xml default file
if load_default_files:
try:
config_xml_file = realpath(
dirname(realpath(__file__)) + "../../../../../../etc/globals.xml")
_g_config = IndraConfig(config_xml_file)
return
except IOError:
# Failure to load globals.xml
# fall to code below
pass
# Either failed to load passed in file
# or failed to load all default files
_g_config = IndraConfig(None)
def dump(indra_xml_file, indra_cfg = None, update_in_mem=False):
'''
Dump config contents into a file
Kindof reverse of load.
Optionally takes a new config to dump.
Does NOT update global config unless requested.
'''
global _g_config
if not indra_cfg:
if _g_config is None:
return
indra_cfg = _g_config.as_dict()
if not indra_cfg:
return
config_file = open(indra_xml_file, 'w')
_config_xml = llsd.format_xml(indra_cfg)
config_file.write(_config_xml)
config_file.close()
if update_in_mem:
update(indra_cfg)
def update(new_conf):
global _g_config
if _g_config is None:
# To keep with how this function behaved
# previously, a call to update
# before the global is defined
# make a new global config which does not
# load data from a file.
_g_config = IndraConfig(None)
return _g_config.update(new_conf)
def get(key, default = None):
global _g_config
if _g_config is None:
load()
return _g_config.get(key, default)
def set(key, newval):
"""
Sets the value of the config setting of key to be newval
Once any key/value pair is changed via the set method,
that key/value pair will remain set with that value until
change via the update or set method or program termination
"""
global _g_config
if _g_config is None:
_g_config = IndraConfig(None)
_g_config.set(key, newval)
def get_config():
global _g_config
return _g_config

File diff suppressed because it is too large Load Diff

View File

@ -1,319 +0,0 @@
"""\
@file lluuid.py
@brief UUID parser/generator.
$LicenseInfo:firstyear=2004&license=mit$
Copyright (c) 2004-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import random, socket, string, time, re
import uuid
try:
# Python 2.6
from hashlib import md5
except ImportError:
# Python 2.5 and earlier
from md5 import new as md5
def _int2binstr(i,l):
s=''
for a in range(l):
s=chr(i&0xFF)+s
i>>=8
return s
def _binstr2int(s):
i = long(0)
for c in s:
i = (i<<8) + ord(c)
return i
class UUID(object):
"""
A class which represents a 16 byte integer. Stored as a 16 byte 8
bit character string.
The string version is to be of the form:
AAAAAAAA-AAAA-BBBB-BBBB-BBBBBBCCCCCC (a 128-bit number in hex)
where A=network address, B=timestamp, C=random.
"""
NULL_STR = "00000000-0000-0000-0000-000000000000"
# the UUIDREGEX_STRING is helpful for parsing UUID's in text
hex_wildcard = r"[0-9a-fA-F]"
word = hex_wildcard + r"{4,4}-"
long_word = hex_wildcard + r"{8,8}-"
very_long_word = hex_wildcard + r"{12,12}"
UUID_REGEX_STRING = long_word + word + word + word + very_long_word
uuid_regex = re.compile(UUID_REGEX_STRING)
rand = random.Random()
ip = ''
try:
ip = socket.gethostbyname(socket.gethostname())
except(socket.gaierror, socket.error):
# no ip address, so just default to somewhere in 10.x.x.x
ip = '10'
for i in range(3):
ip += '.' + str(rand.randrange(1,254))
hexip = ''.join(["%04x" % long(i) for i in ip.split('.')])
lastid = ''
def __init__(self, possible_uuid=None):
"""
Initialize to first valid UUID in argument (if a string),
or to null UUID if none found or argument is not supplied.
If the argument is a UUID, the constructed object will be a copy of it.
"""
self._bits = "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
if possible_uuid is None:
return
if isinstance(possible_uuid, type(self)):
self.set(possible_uuid)
return
uuid_match = UUID.uuid_regex.search(possible_uuid)
if uuid_match:
uuid_string = uuid_match.group()
s = string.replace(uuid_string, '-', '')
self._bits = _int2binstr(string.atol(s[:8],16),4) + \
_int2binstr(string.atol(s[8:16],16),4) + \
_int2binstr(string.atol(s[16:24],16),4) + \
_int2binstr(string.atol(s[24:],16),4)
def __len__(self):
"""
Used by the len() builtin.
"""
return 36
def __nonzero__(self):
return self._bits != "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
def __str__(self):
uuid_string = self.toString()
return uuid_string
__repr__ = __str__
def __getitem__(self, index):
return str(self)[index]
def __eq__(self, other):
if isinstance(other, (str, unicode)):
return other == str(self)
return self._bits == getattr(other, '_bits', '')
def __ne__(self, other):
return not self.__eq__(other)
def __le__(self, other):
return self._bits <= other._bits
def __ge__(self, other):
return self._bits >= other._bits
def __lt__(self, other):
return self._bits < other._bits
def __gt__(self, other):
return self._bits > other._bits
def __hash__(self):
return hash(self._bits)
def set(self, uuid):
self._bits = uuid._bits
def setFromString(self, uuid_string):
"""
Given a string version of a uuid, set self bits
appropriately. Returns self.
"""
s = string.replace(uuid_string, '-', '')
self._bits = _int2binstr(string.atol(s[:8],16),4) + \
_int2binstr(string.atol(s[8:16],16),4) + \
_int2binstr(string.atol(s[16:24],16),4) + \
_int2binstr(string.atol(s[24:],16),4)
return self
def setFromMemoryDump(self, gdb_string):
"""
We expect to get gdb_string as four hex units. eg:
0x147d54db 0xc34b3f1b 0x714f989b 0x0a892fd2
Which will be translated to:
db547d14-1b3f4bc3-9b984f71-d22f890a
Returns self.
"""
s = string.replace(gdb_string, '0x', '')
s = string.replace(s, ' ', '')
t = ''
for i in range(8,40,8):
for j in range(0,8,2):
t = t + s[i-j-2:i-j]
self.setFromString(t)
def toString(self):
"""
Return as a string matching the LL standard
AAAAAAAA-AAAA-BBBB-BBBB-BBBBBBCCCCCC (a 128-bit number in hex)
where A=network address, B=timestamp, C=random.
"""
return uuid_bits_to_string(self._bits)
def getAsString(self):
"""
Return a different string representation of the form
AAAAAAAA-AAAABBBB-BBBBBBBB-BBCCCCCC (a 128-bit number in hex)
where A=network address, B=timestamp, C=random.
"""
i1 = _binstr2int(self._bits[0:4])
i2 = _binstr2int(self._bits[4:8])
i3 = _binstr2int(self._bits[8:12])
i4 = _binstr2int(self._bits[12:16])
return '%08lx-%08lx-%08lx-%08lx' % (i1,i2,i3,i4)
def generate(self):
"""
Generate a new uuid. This algorithm is slightly different
from c++ implementation for portability reasons.
Returns self.
"""
m = md5()
m.update(uuid.uuid1().bytes)
self._bits = m.digest()
return self
def isNull(self):
"""
Returns 1 if the uuid is null - ie, equal to default uuid.
"""
return (self._bits == "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0")
def xor(self, rhs):
"""
xors self with rhs.
"""
v1 = _binstr2int(self._bits[0:4]) ^ _binstr2int(rhs._bits[0:4])
v2 = _binstr2int(self._bits[4:8]) ^ _binstr2int(rhs._bits[4:8])
v3 = _binstr2int(self._bits[8:12]) ^ _binstr2int(rhs._bits[8:12])
v4 = _binstr2int(self._bits[12:16]) ^ _binstr2int(rhs._bits[12:16])
self._bits = _int2binstr(v1,4) + \
_int2binstr(v2,4) + \
_int2binstr(v3,4) + \
_int2binstr(v4,4)
# module-level null constant
NULL = UUID()
def printTranslatedMemory(four_hex_uints):
"""
We expect to get the string as four hex units. eg:
0x147d54db 0xc34b3f1b 0x714f989b 0x0a892fd2
Which will be translated to:
db547d14-1b3f4bc3-9b984f71-d22f890a
"""
# Debug helper: delegates the per-32-bit-word byte swap to
# UUID.setFromMemoryDump and prints the canonical string form.
# NOTE(review): Python 2 print statement; side-effect only, no return value.
uuid = UUID()
uuid.setFromMemoryDump(four_hex_uints)
print uuid.toString()
def isUUID(id_str):
    """
    Classify id_str as a UUID:
    - 1 if it is a UUID instance or matches the UUID regex
    - 0 if it cannot be one (falsy, shorter than 5, or longer than 36)
    - None when neither check is conclusive
    """
    if not id_str or not (5 <= len(id_str) <= 36):
        return 0
    if isinstance(id_str, UUID) or UUID.uuid_regex.match(id_str):
        return 1
    return None
def isPossiblyID(id_str):
"""
This function returns 1 if the string passed has some uuid-like
characteristics. Otherwise returns 0.
"""
# Fast path: defer to isUUID when it gives a definite 0/1 answer.
is_uuid = isUUID(id_str)
if is_uuid is not None:
return is_uuid
# build a string which matches every character.
# From here on we incrementally build a regex that matches a *prefix*
# of the canonical 8-4-4-4-12 hex layout, consuming `chars` characters
# of id_str group by group (hyphens included).
# NOTE(review): `next` and `block` shadow builtins; left as-is since
# this is a byte-identical documentation pass.
hex_wildcard = r"[0-9a-fA-F]"
chars = len(id_str)
next = min(chars, 8)
matcher = hex_wildcard+"{"+str(next)+","+str(next)+"}"
chars = chars - next
if chars > 0:
matcher = matcher + "-"
chars = chars - 1
# The three middle 4-hex-digit groups, each preceded by a hyphen when
# enough characters remain.
for block in range(3):
next = max(min(chars, 4), 0)
if next:
matcher = matcher + hex_wildcard+"{"+str(next)+","+str(next)+"}"
chars = chars - next
if chars > 0:
matcher = matcher + "-"
chars = chars - 1
# Trailing group: up to 12 hex digits.
if chars > 0:
next = min(chars, 12)
matcher = matcher + hex_wildcard+"{"+str(next)+","+str(next)+"}"
#print matcher
uuid_matcher = re.compile(matcher)
if uuid_matcher.match(id_str):
return 1
return 0
def uuid_bits_to_string(bits):
    """Render a 16-byte bit string in canonical 8-4-4-4-12 hex UUID form."""
    slices = ((0, 4), (4, 6), (6, 8), (8, 10), (10, 12), (12, 16))
    fields = tuple(_binstr2int(bits[lo:hi]) for lo, hi in slices)
    return '%08lx-%04lx-%04lx-%04lx-%04lx%08lx' % fields
def uuid_bits_to_uuid(bits):
# Build a UUID instance from a 16-byte bit string, going through the
# canonical string form so the UUID regex-based constructor is reused.
return UUID(uuid_bits_to_string(bits))
try:
from mulib import stacked
stacked.NoProducer() # just to exercise stacked
except:
#print "Couldn't import mulib.stacked, not registering UUID converter"
pass
else:
def convertUUID(uuid, req):
req.write(str(uuid))
stacked.add_producer(UUID, convertUUID, "*/*")
stacked.add_producer(UUID, convertUUID, "text/html")

View File

@ -1,121 +0,0 @@
"""\
@file metrics.py
@author Phoenix
@date 2007-11-27
@brief simple interface for logging metrics
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys
try:
import syslog
except ImportError:
# Windows
import sys
class syslog(object):
# wrap to a lame syslog for windows
_logfp = sys.stderr
def syslog(msg):
_logfp.write(msg)
if not msg.endswith('\n'):
_logfp.write('\n')
syslog = staticmethod(syslog)
from indra.base.llsd import format_notation
def record_metrics(table, stats):
"Write a standard metrics log"
# Emits one LLMETRICS-tagged line via the module-level _log helper.
_log("LLMETRICS", table, stats)
def record_event(table, data):
"Write a standard logmessage log"
# Same pipeline as record_metrics, but tagged LLLOGMESSAGE.
_log("LLLOGMESSAGE", table, data)
def set_destination(dest):
    """Set the process-wide destination for metrics logs.

    If no destination has been set when a logging function runs, that
    function falls back to sys.stdout.  Passing None is rejected:
    unsetting the destination is not allowed.

    @param dest a file-like object which will be the destination for logs.
    @raises RuntimeError if dest is None.
    """
    if dest is None:
        raise RuntimeError("Attempt to unset metrics destination.")
    global _destination
    _destination = dest
def destination():
    """Return the metrics log destination for this process.

    None means no destination has been set via set_destination().
    """
    return _destination
class SysLogger(object):
"A file-like object which writes to syslog."
def __init__(self, ident='indra', logopt = None, facility = None):
try:
if logopt is None:
logopt = syslog.LOG_CONS | syslog.LOG_PID
if facility is None:
facility = syslog.LOG_LOCAL0
syslog.openlog(ident, logopt, facility)
import atexit
atexit.register(syslog.closelog)
except AttributeError:
# No syslog module on Windows
pass
def write(str):
syslog.syslog(str)
write = staticmethod(write)
def flush():
pass
flush = staticmethod(flush)
#
# internal API
#
_sequence_id = 0
_destination = None
def _next_id():
    """Return the current log sequence number, then advance it."""
    global _sequence_id
    current = _sequence_id
    _sequence_id += 1
    return current
def _dest():
    """Return the active log sink, lazily defaulting to sys.stdout.

    The stdout fallback is the documented default behavior of the
    public logging functions when set_destination() was never called.
    """
    global _destination
    if _destination is None:
        _destination = sys.stdout
    return _destination
def _log(header, table, data):
    """Format one metrics record and write + flush it to the log sink."""
    line = "%s (%d) %s %s" % (header, _next_id(), table, format_notation(data))
    sink = _dest()
    sink.write(line)
    sink.flush()

View File

@ -1,30 +0,0 @@
#!/usr/bin/python
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
import warnings
warnings.warn("indra.ipc.httputil has been deprecated; use eventlet.httpc instead", DeprecationWarning, 2)
from eventlet.httpc import *
makeConnection = make_connection

View File

@ -1,100 +0,0 @@
"""\
@file llsdhttp.py
@brief Functions to ease moving llsd over http
$LicenseInfo:firstyear=2006&license=mit$
Copyright (c) 2006-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import os.path
import os
import urlparse
from indra.base import llsd
from eventlet import httpc
suite = httpc.HttpSuite(llsd.format_xml, llsd.parse, 'application/llsd+xml')
delete = suite.delete
delete_ = suite.delete_
get = suite.get
get_ = suite.get_
head = suite.head
head_ = suite.head_
post = suite.post
post_ = suite.post_
put = suite.put
put_ = suite.put_
request = suite.request
request_ = suite.request_
# import every httpc error exception into our namespace for convenience
for x in httpc.status_to_error_map.itervalues():
globals()[x.__name__] = x
ConnectionError = httpc.ConnectionError
Retriable = httpc.Retriable
for x in (httpc.ConnectionError,):
globals()[x.__name__] = x
def postFile(url, filename):
f = open(filename)
body = f.read()
f.close()
llsd_body = llsd.parse(body)
return post_(url, llsd_body)
# deprecated in favor of get_
def getStatus(url, use_proxy=False):
status, _headers, _body = get_(url, use_proxy=use_proxy)
return status
# deprecated in favor of put_
def putStatus(url, data):
status, _headers, _body = put_(url, data)
return status
# deprecated in favor of delete_
def deleteStatus(url):
status, _headers, _body = delete_(url)
return status
# deprecated in favor of post_
def postStatus(url, data):
status, _headers, _body = post_(url, data)
return status
def postFileStatus(url, filename):
status, _headers, body = postFile(url, filename)
return status, body
def getFromSimulator(path, use_proxy=False):
return get('http://' + simulatorHostAndPort + path, use_proxy=use_proxy)
def postToSimulator(path, data=None):
return post('http://' + simulatorHostAndPort + path, data)

View File

@ -1,81 +0,0 @@
"""\
@file mysql_pool.py
@brief Thin wrapper around eventlet.db_pool that chooses MySQLdb and Tpool.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import MySQLdb
from eventlet import db_pool
class DatabaseConnector(db_pool.DatabaseConnector):
def __init__(self, credentials, *args, **kwargs):
super(DatabaseConnector, self).__init__(MySQLdb, credentials,
conn_pool=db_pool.ConnectionPool,
*args, **kwargs)
# get is extended relative to eventlet.db_pool to accept a port argument
def get(self, host, dbname, port=3306):
key = (host, dbname, port)
if key not in self._databases:
new_kwargs = self._kwargs.copy()
new_kwargs['db'] = dbname
new_kwargs['host'] = host
new_kwargs['port'] = port
new_kwargs.update(self.credentials_for(host))
dbpool = ConnectionPool(*self._args, **new_kwargs)
self._databases[key] = dbpool
return self._databases[key]
class ConnectionPool(db_pool.TpooledConnectionPool):
"""A pool which gives out saranwrapped MySQLdb connections from a pool
"""
def __init__(self, *args, **kwargs):
super(ConnectionPool, self).__init__(MySQLdb, *args, **kwargs)
def get(self):
conn = super(ConnectionPool, self).get()
# annotate the connection object with the details on the
# connection; this is used elsewhere to check that you haven't
# suddenly changed databases in midstream while making a
# series of queries on a connection.
arg_names = ['host','user','passwd','db','port','unix_socket','conv','connect_timeout',
'compress', 'named_pipe', 'init_command', 'read_default_file', 'read_default_group',
'cursorclass', 'use_unicode', 'charset', 'sql_mode', 'client_flag', 'ssl',
'local_infile']
# you could have constructed this connectionpool with a mix of
# keyword and non-keyword arguments, but we want to annotate
# the connection object with a dict so it's easy to check
# against so here we are converting the list of non-keyword
# arguments (in self._args) into a dict of keyword arguments,
# and merging that with the actual keyword arguments
# (self._kwargs). The arg_names variable lists the
# constructor arguments for MySQLdb Connection objects.
converted_kwargs = dict([ (arg_names[i], arg) for i, arg in enumerate(self._args) ])
converted_kwargs.update(self._kwargs)
conn.connection_parameters = converted_kwargs
return conn

View File

@ -1,165 +0,0 @@
"""\
@file russ.py
@brief Recursive URL Substitution Syntax helpers
@author Phoenix
Many details on how this should work are available on the wiki:
https://wiki.secondlife.com/wiki/Recursive_URL_Substitution_Syntax
Adding features to this should be reflected in that page in the
implementations section.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import urllib
from indra.ipc import llsdhttp
class UnbalancedBraces(Exception):
    # Raised by _find_sub_matches() when '{' and '}' counts differ.
    pass

class UnknownDirective(Exception):
    # Raised by format() for a {...} directive it does not recognize.
    pass

class BadDirective(Exception):
    # Declared for callers' use; not raised within this module.
    pass
def format_value_for_path(value):
    """URL-quote a value for use as a path component.

    @param value Any value. Lists/tuples are treated as sequences of
        *unquoted* path components: each element is quoted individually
        and the results are joined with '/'.
    @return Returns the quoted string.
    """
    # Use isinstance rather than `type(value) in [list, tuple]` so that
    # list/tuple subclasses are handled the same way (backward compatible:
    # plain lists/tuples behave identically).
    if isinstance(value, (list, tuple)):
        # *NOTE: treat lists as unquoted path components so that the quoting
        # doesn't get out-of-hand. This is a workaround for the fact that
        # russ always quotes, even if the data it's given is already quoted,
        # and it's not safe to simply unquote a path directly, so if we want
        # russ to substitute urls parts inside other url parts we always
        # have to do so via lists of unquoted path components.
        return '/'.join([urllib.quote(str(item)) for item in value])
    else:
        return urllib.quote(str(value))
def format(format_str, context):
    """@brief Format format string according to rules for RUSS.
    @see https://osiris.lindenlab.com/mediawiki/index.php/Recursive_URL_Substitution_Syntax
    @param format_str The input string to format.
    @param context A map used for string substitutions.
    @return Returns the formatted string. If no match, the braces remain intact.

    Substitution runs to a fixed point: after each round of replacements
    the string is re-scanned, since a substitution may itself contain new
    {...} directives.
    """
    while True:
        #print "format_str:", format_str
        all_matches = _find_sub_matches(format_str)
        if not all_matches:
            break
        substitutions = 0
        # Process deepest-nested directives first (all_matches is grouped
        # by brace depth; pop() takes the deepest group).
        while True:
            matches = all_matches.pop()
            # we work from right to left to make sure we do not
            # invalidate positions earlier in format_str
            matches.reverse()
            for pos in matches:
                # Use index since _find_sub_matches should have raised
                # an exception, and failure to find now is an exception.
                end = format_str.index('}', pos)
                #print "directive:", format_str[pos+1:pos+5]
                if format_str[pos + 1] == '$':
                    # {$name}: path-substitute a value from the context.
                    value = context[format_str[pos + 2:end]]
                    if value is not None:
                        value = format_value_for_path(value)
                elif format_str[pos + 1] == '%':
                    # {%name}: build a query string from a context dict.
                    value = _build_query_string(
                        context.get(format_str[pos + 2:end]))
                elif format_str[pos+1:pos+5] == 'http' or format_str[pos+1:pos+5] == 'file':
                    # {http...|...} / {file...|...}: fetch and optionally walk.
                    value = _fetch_url_directive(format_str[pos + 1:end])
                else:
                    raise UnknownDirective, format_str[pos:end + 1]
                if value is not None:
                    format_str = format_str[:pos]+str(value)+format_str[end+1:]
                    substitutions += 1
            # If there were any substitutions at this depth, re-parse
            # since this may have revealed new things to substitute
            if substitutions:
                break
            if not all_matches:
                break
        # If there were no substitutions at all, and we have exhausted
        # the possible matches, bail.
        if not substitutions:
            break
    return format_str
def _find_sub_matches(format_str):
    """@brief Find all of the substitution matches.
    @param format_str the RUSS conformant format string.
    @return Returns an array of depths of arrays of positional matches in input.
        matches[d] lists the positions of every '{' opening a directive at
        nesting depth d+1.
    @raise UnbalancedBraces if '{' and '}' are not balanced.
    """
    depth = 0
    matches = []
    for pos in range(len(format_str)):
        if format_str[pos] == '{':
            depth += 1
            # grow the per-depth list lazily
            if not len(matches) == depth:
                matches.append([])
            matches[depth - 1].append(pos)
            continue
        if format_str[pos] == '}':
            depth -= 1
            continue
    if not depth == 0:
        raise UnbalancedBraces, format_str
    return matches
def _build_query_string(query_dict):
    """\
    @brief given a dict, return a query string. utility wrapper for urllib.
    @param query_dict input query dict
    @returns Returns an urlencoded query string including leading '?',
        or '' if the dict is empty/None.
    """
    if query_dict:
        # sort keys so the generated query string is deterministic
        keys = query_dict.keys()
        keys.sort()
        def stringize(value):
            # pass str/unicode through unchanged; coerce everything else
            if type(value) in (str,unicode):
                return value
            else:
                return str(value)
        query_list = [urllib.quote(str(key)) + '=' + urllib.quote(stringize(query_dict[key])) for key in keys]
        return '?' + '&'.join(query_list)
    else:
        return ''
def _fetch_url_directive(directive):
    "*FIX: This only supports GET"
    # Directive shape: "url|<unused>|path". Fetch the url via llsdhttp,
    # then, if a third segment is present, walk into the fetched resource
    # with that '/'-separated key path.
    commands = directive.split('|')
    resource = llsdhttp.get(commands[0])
    if len(commands) == 3:
        resource = _walk_resource(resource, commands[2])
    return resource
def _walk_resource(resource, path):
path = path.split('/')
for child in path:
if not child:
continue
resource = resource[child]
return resource

View File

@ -1,134 +0,0 @@
"""\
@file servicebuilder.py
@author Phoenix
@brief Class which will generate service urls.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from indra.base import config
from indra.ipc import llsdhttp
from indra.ipc import russ
# *NOTE: agent presence relies on this variable existing and being current, it is a huge hack
# Fetched once at import time; falls back to an empty dict if the
# services-config endpoint is unreachable (best-effort by design).
services_config = {}
try:
    services_config = llsdhttp.get(config.get('services-config'))
except:
    pass
# Process-wide singleton ServiceBuilder, created lazily by _builder().
_g_builder = None

def _builder():
    """Return the module-level ServiceBuilder, creating it on first use."""
    global _g_builder
    if _g_builder is None:
        _g_builder = ServiceBuilder()
    return _g_builder
def build(name, context={}, **kwargs):
    """ Convenience method for using a global, singleton, service builder. Pass arguments either via a dict or via python keyword arguments, or both!

    Example use:
    > context = {'channel':'Second Life Release', 'version':'1.18.2.0'}
    > servicebuilder.build('version-manager-version', context)
    'http://int.util.vaak.lindenlab.com/channel/Second%20Life%20Release/1.18.2.0'
    > servicebuilder.build('version-manager-version', channel='Second Life Release', version='1.18.2.0')
    'http://int.util.vaak.lindenlab.com/channel/Second%20Life%20Release/1.18.2.0'
    > servicebuilder.build('version-manager-version', context, version='1.18.1.2')
    'http://int.util.vaak.lindenlab.com/channel/Second%20Life%20Release/1.18.1.2'
    """
    # Delegate to _builder() instead of duplicating its lazy-initialization
    # logic here (the previous copy/paste would silently diverge if the
    # singleton handling ever changed).
    return _builder().buildServiceURL(name, context, **kwargs)
def build_path(name, context={}, **kwargs):
    """Like build(), but returns only the service path (no base URL)."""
    context = context.copy() # shouldn't modify the caller's dictionary
    context.update(kwargs)
    return _builder().buildPath(name, context)
class ServiceBuilder(object):
    def __init__(self, services_definition = services_config):
        """\
        @brief Create a ServiceBuilder.
        @param services_definition Complete services definition, services.xml.
        """
        # no need to keep a copy of the services section of the
        # complete services definition, but it doesn't hurt much.
        self.services = services_definition['services']
        self.builders = {}
        for service in self.services:
            service_builder = service.get('service-builder')
            if not service_builder:
                continue
            if isinstance(service_builder, dict):
                # We will be constructing several builders, one per entry,
                # named "<service-name>-<entry-name>".
                for name, builder in service_builder.iteritems():
                    full_builder_name = service['name'] + '-' + name
                    self.builders[full_builder_name] = builder
            else:
                self.builders[service['name']] = service_builder

    def buildPath(self, name, context):
        """\
        @brief given the environment on construction, return a service path.
        @param name The name of the service.
        @param context A dict of name value lookups for the service.
        @returns Returns the russ-formatted service path.
        """
        return russ.format(self.builders[name], context)

    def buildServiceURL(self, name, context={}, **kwargs):
        """\
        @brief given the environment on construction, return a service URL.
        @param name The name of the service.
        @param context A dict of name value lookups for the service.
        @param kwargs Any keyword arguments are treated as members of the
            context, allowing the shorthand:
            servicebuilder.build('name', param=value)
        @returns Returns the full service URL: services-base-url plus the
            russ-formatted path.
        """
        context = context.copy() # shouldn't modify the caller's dictionary
        context.update(kwargs)
        base_url = config.get('services-base-url')
        svc_path = russ.format(self.builders[name], context)
        return base_url + svc_path
def on_in(query_name, host_key, schema_key):
    """\
    @brief Constructs an on/in snippet (for running named queries)
    from a schema name and two keys referencing values stored in
    indra.xml.

    @param query_name Name of the query.
    @param host_key Logical name of destination host.  Will be
      looked up in indra.xml.
    @param schema_key Logical name of destination schema.  Will
      be looked up in indra.xml.
    """
    host = host_key.strip('/')
    schema = schema_key.strip('/')
    query = query_name.lstrip('/')
    return "on/config:%s/in/config:%s/%s" % (host, schema, query)

View File

@ -1,468 +0,0 @@
"""\
@file siesta.py
@brief A tiny llsd based RESTful web services framework
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from indra.base import config
from indra.base import llsd
from webob import exc
import webob
import re, socket
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
import cjson
json_decode = cjson.decode
json_encode = cjson.encode
JsonDecodeError = cjson.DecodeError
JsonEncodeError = cjson.EncodeError
except ImportError:
import simplejson
json_decode = simplejson.loads
json_encode = simplejson.dumps
JsonDecodeError = ValueError
JsonEncodeError = TypeError
# Map of request Content-Type -> parser callable used to decode bodies.
llsd_parsers = {
    'application/json': json_decode,
    llsd.BINARY_MIME_TYPE: llsd.parse_binary,
    'application/llsd+notation': llsd.parse_notation,
    llsd.XML_MIME_TYPE: llsd.parse_xml,
    'application/xml': llsd.parse_xml,
    }
def mime_type(content_type):
    '''Given a Content-Type header, return only the MIME type
    (parameters such as charset are dropped; result is lowercased).'''
    base = content_type.split(';', 1)[0]
    return base.strip().lower()
class BodyLLSD(object):
    '''Give a webob Request or Response an llsd based "content" property.

    Getting the content property parses the body, and caches the result.

    Setting the content property formats a payload, and the body property
    is set.'''

    def _llsd__get(self):
        '''Get, set, or delete the LLSD value stored in this object.'''
        try:
            # return the cached parse if we have one
            return self._llsd
        except AttributeError:
            if not self.body:
                raise AttributeError('No llsd attribute has been set')
            else:
                mtype = mime_type(self.content_type)
                try:
                    parser = llsd_parsers[mtype]
                except KeyError:
                    # unknown Content-Type -> 415
                    raise exc.HTTPUnsupportedMediaType(
                        'Content type %s not supported' % mtype).exception
                try:
                    self._llsd = parser(self.body)
                except (llsd.LLSDParseError, JsonDecodeError, TypeError), err:
                    # unparseable body -> 400
                    raise exc.HTTPBadRequest(
                        'Could not parse body: %r' % err.args).exception
        return self._llsd

    def _llsd__set(self, val):
        req = getattr(self, 'request', None)
        if req is not None:
            # Responses are formatted per the originating request's Accept.
            formatter, ctype = formatter_for_request(req)
            self.content_type = ctype
        else:
            formatter, ctype = formatter_for_mime_type(
                mime_type(self.content_type))
        self.body = formatter(val)

    def _llsd__del(self):
        if hasattr(self, '_llsd'):
            del self._llsd

    content = property(_llsd__get, _llsd__set, _llsd__del)
class Response(webob.Response, BodyLLSD):
    '''Response class with LLSD support.

    A sensible default content type is used.

    Setting the llsd property also sets the body.  Getting the llsd
    property parses the body if necessary.

    If you set the body property directly, the llsd property will be
    deleted.'''

    default_content_type = 'application/llsd+xml'

    def _body__set(self, body):
        # Setting the raw body invalidates any cached parsed value.
        if hasattr(self, '_llsd'):
            del self._llsd
        super(Response, self)._body__set(body)

    def cache_forever(self):
        # One year is "forever" for HTTP cache purposes.
        self.cache_expires(86400 * 365)

    body = property(webob.Response._body__get, _body__set,
                    webob.Response._body__del,
                    webob.Response._body__get.__doc__)
class Request(webob.Request, BodyLLSD):
    '''Request class with LLSD support.

    Sensible content type and accept headers are used by default.

    Setting the content property also sets the body.  Getting the content
    property parses the body if necessary.

    If you set the body property directly, the content property will be
    deleted.'''

    default_content_type = 'application/llsd+xml'
    default_accept = ('application/llsd+xml; q=0.5, '
                      'application/llsd+notation; q=0.3, '
                      'application/llsd+binary; q=0.2, '
                      'application/xml; q=0.1, '
                      'application/json; q=0.0')

    def __init__(self, environ=None, *args, **kwargs):
        # Copy the environ before mutating it; fill in defaults only when
        # the caller did not supply them.
        if environ is None:
            environ = {}
        else:
            environ = environ.copy()
        if 'CONTENT_TYPE' not in environ:
            environ['CONTENT_TYPE'] = self.default_content_type
        if 'HTTP_ACCEPT' not in environ:
            environ['HTTP_ACCEPT'] = self.default_accept
        super(Request, self).__init__(environ, *args, **kwargs)

    def _body__set(self, body):
        # Setting the raw body invalidates any cached parsed value.
        if hasattr(self, '_llsd'):
            del self._llsd
        super(Request, self)._body__set(body)

    def path_urljoin(self, *parts):
        """Join additional path components onto this request's path_url."""
        # BUG FIX: previously read a nonexistent global 'path_url', which
        # raised NameError on every call; it must be self.path_url.
        return '/'.join([self.path_url.rstrip('/')] + list(parts))

    body = property(webob.Request._body__get, _body__set,
                    webob.Request._body__del, webob.Request._body__get.__doc__)

    def create_response(self, content=None, status='200 OK',
                        conditional_response=webob.NoDefault):
        """Build a Response tied to this request, with content formatted
        per this request's Accept preferences."""
        resp = self.ResponseClass(status=status, request=self,
                                  conditional_response=conditional_response)
        resp.content = content
        return resp

    def curl(self):
        '''Create and fill out a pycurl easy object from this request.'''
        import pycurl
        c = pycurl.Curl()
        # NOTE(review): in modern webob, Request.url is a property, not a
        # method -- confirm against the webob version in use.
        c.setopt(pycurl.URL, self.url())
        if self.headers:
            c.setopt(pycurl.HTTPHEADER,
                     ['%s: %s' % (k, self.headers[k]) for k in self.headers])
        c.setopt(pycurl.FOLLOWLOCATION, True)
        c.setopt(pycurl.AUTOREFERER, True)
        c.setopt(pycurl.MAXREDIRS, 16)
        c.setopt(pycurl.NOSIGNAL, True)
        c.setopt(pycurl.READFUNCTION, self.body_file.read)
        c.setopt(pycurl.SSL_VERIFYHOST, 2)

        if self.method == 'POST':
            c.setopt(pycurl.POST, True)
            post301 = getattr(pycurl, 'POST301', None)
            if post301 is not None:
                # Added in libcurl 7.17.1.
                c.setopt(post301, True)
        elif self.method == 'PUT':
            c.setopt(pycurl.PUT, True)
        elif self.method != 'GET':
            c.setopt(pycurl.CUSTOMREQUEST, self.method)
        return c
# Wire the classes together so req.ResponseClass / resp.RequestClass work.
Request.ResponseClass = Response
Response.RequestClass = Request

# Map of MIME type -> serializer used to encode response bodies.
llsd_formatters = {
    'application/json': json_encode,
    'application/llsd+binary': llsd.format_binary,
    'application/llsd+notation': llsd.format_notation,
    'application/llsd+xml': llsd.format_xml,
    'application/xml': llsd.format_xml,
    }

# Server-side preference order offered when matching the client's Accept.
formatter_qualities = (
    ('application/llsd+xml', 1.0),
    ('application/llsd+notation', 0.5),
    ('application/llsd+binary', 0.4),
    ('application/xml', 0.3),
    ('application/json', 0.2),
    )
def formatter_for_mime_type(mime_type):
    '''Return a formatter that encodes to the given MIME type.

    The result is a pair of function and MIME type.  Raises (as a wrapped
    webob exception) HTTP 500 if the MIME type has no known formatter.'''
    try:
        return llsd_formatters[mime_type], mime_type
    except KeyError:
        raise exc.HTTPInternalServerError(
            'Could not use MIME type %r to format response' %
            mime_type).exception
def formatter_for_request(req):
    '''Return a formatter that encodes to the preferred type of the client.

    The result is a pair of function and actual MIME type.  Raises (as a
    wrapped webob exception) HTTP 406 if no acceptable type is supported.'''
    ctype = req.accept.best_match(formatter_qualities)
    try:
        return llsd_formatters[ctype], ctype
    except KeyError:
        raise exc.HTTPNotAcceptable().exception
def wsgi_adapter(func, environ, start_response):
    '''Adapt a Siesta callable to act as a WSGI application.

    func is called with the Request plus any URL variables; if it returns
    something other than a webob Response, the value is serialized per the
    client's Accept header.  HTTP and socket errors become error responses
    rather than propagating.'''
    # Process the request as appropriate.
    try:
        req = Request(environ)
        #print req.urlvars
        resp = func(req, **req.urlvars)
        if not isinstance(resp, webob.Response):
            try:
                formatter, ctype = formatter_for_request(req)
                resp = req.ResponseClass(formatter(resp), content_type=ctype)
                # NOTE(review): this caches the Response object itself as
                # the parsed-llsd value; presumably the *unformatted*
                # payload was intended -- confirm before relying on
                # resp.content after this point.
                resp._llsd = resp
            except (JsonEncodeError, TypeError), err:
                resp = exc.HTTPInternalServerError(
                    detail='Could not format response')
    except exc.HTTPException, e:
        resp = e
    except socket.error, e:
        resp = exc.HTTPInternalServerError(detail=e.args[1])
    return resp(environ, start_response)
def llsd_callable(func):
    '''Turn a callable into a Siesta application (a WSGI app that
    serializes the callable's return value via wsgi_adapter).'''
    def wsgi_app(environ, start_response):
        return wsgi_adapter(func, environ, start_response)
    return wsgi_app
def llsd_method(http_method, func):
    # Like llsd_callable(), but only responds to the single given HTTP
    # method; all other methods get a 405 Method Not Allowed.
    def replacement(environ, start_response):
        if environ['REQUEST_METHOD'] == http_method:
            return wsgi_adapter(func, environ, start_response)
        return exc.HTTPMethodNotAllowed()(environ, start_response)
    return replacement
# All HTTP/1.1 methods, sorted; used to build Allow headers in llsd_class().
http11_methods = 'OPTIONS GET HEAD POST PUT DELETE TRACE CONNECT'.split()
http11_methods.sort()
def llsd_class(cls):
    '''Turn a class into a Siesta application.

    A new instance is created for each request.  A HTTP method FOO is
    turned into a call to the handle_foo method of the instance.'''
    def foo(req, **kwargs):
        instance = cls()
        method = req.method.lower()
        try:
            handler = getattr(instance, 'handle_' + method)
        except AttributeError:
            # Advertise exactly the methods the class does implement.
            allowed = [m for m in http11_methods
                       if hasattr(instance, 'handle_' + m.lower())]
            raise exc.HTTPMethodNotAllowed(
                headers={'Allow': ', '.join(allowed)}).exception
        #print "kwargs: ", kwargs
        return handler(req, **kwargs)
    def replacement(environ, start_response):
        return wsgi_adapter(foo, environ, start_response)
    return replacement
def curl(reqs):
    """Perform several Requests concurrently via pycurl's multi interface.

    @param reqs Sequence of Request objects (each must provide .curl()).
    @return List of Response objects, parallel to reqs.
    """
    import pycurl
    m = pycurl.CurlMulti()
    curls = [r.curl() for r in reqs]
    io = {}
    for c in curls:
        fp = StringIO()
        hdr = StringIO()
        c.setopt(pycurl.WRITEFUNCTION, fp.write)
        c.setopt(pycurl.HEADERFUNCTION, hdr.write)
        io[id(c)] = fp, hdr
    m.handles = curls
    try:
        while True:
            ret, num_handles = m.perform()
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
    finally:
        m.close()

    responses = []
    for req, c in zip(reqs, curls):
        fp, hdr = io[id(c)]
        hdr.seek(0)
        status = hdr.readline().rstrip()
        headers = []
        name, values = None, None
        # XXX We don't currently handle bogus header data.
        for line in hdr.readlines():
            if not line[0].isspace():
                # Start of a new header: flush the previous one first.
                if name:
                    headers.append((name, ' '.join(values)))
                name, value = line.strip().split(':', 1)
                # BUG FIX: was 'value = [value]', which left 'values'
                # stale so continuation joins used the wrong header.
                values = [value]
            else:
                # Continuation (folded) header line.
                values.append(line.strip())
        if name:
            headers.append((name, ' '.join(values)))
        # BUG FIX: was 'c.ResponseClass' (pycurl handles have no such
        # attribute) and the constructed response was silently discarded,
        # making the whole function a no-op returning None.
        responses.append(req.ResponseClass(fp.getvalue(), status, headers,
                                           request=req))
    return responses
# Matches template variables like {name}, {name:type} or {name~regex}
# inside a route string; used by compile_route().
route_re = re.compile(r'''
\{ # exact character "{"
(\w*) # "config" or variable (restricted to a-z, 0-9, _)
(?:([:~])([^}]+))? # optional :type or ~regex part
\} # exact character "}"
''', re.VERBOSE)

# Named patterns usable as {var:type} in routes.
predefined_regexps = {
    'uuid': r'[a-f0-9][a-f0-9-]{31,35}',
    'int': r'\d+',
    'host': r'[a-z0-9][a-z0-9\-\.]*',
    }
def compile_route(route):
    """Translate a route template into an anchored regex source string.

    {var} becomes a named group matching one path segment; {var:type}
    uses a predefined_regexps entry; {var~regex} embeds a literal regex.
    Literal text between variables is re.escape()d.
    """
    fp = StringIO()
    last_pos = 0
    for match in route_re.finditer(route):
        #print "matches: ", match.groups()
        fp.write(re.escape(route[last_pos:match.start()]))
        var_name = match.group(1)
        sep = match.group(2)
        expr = match.group(3)
        if var_name == 'config':
            # NOTE(review): this looks up the literal key 'config'
            # (var_name), not the :type/value part -- confirm intended.
            expr = re.escape(str(config.get(var_name)))
        else:
            if expr:
                if sep == ':':
                    expr = predefined_regexps[expr]
                # otherwise, treat what follows '~' as a regexp
            else:
                # bare {var}: match a single path segment
                expr = '[^/]+'
            if var_name != '':
                expr = '(?P<%s>%s)' % (var_name, expr)
            else:
                expr = '(%s)' % (expr,)
        fp.write(expr)
        last_pos = match.end()
    fp.write(re.escape(route[last_pos:]))
    compiled_route = '^%s$' % fp.getvalue()
    #print route, "->", compiled_route
    return compiled_route
class Router(object):
    '''WSGI routing class.  Parses a URL and hands off a request to
    some other WSGI application.  If no suitable application is found,
    responds with a 404.'''

    def __init__(self):
        self._new_routes = []
        self._routes = []
        self._paths = []

    def add(self, route, app, methods=None):
        """Register app at route template, optionally restricted to the
        given HTTP methods (None means all methods)."""
        self._new_routes.append((route, app, methods))

    def _create_routes(self):
        # Compile any routes registered since the last request.
        for route, app, methods in self._new_routes:
            self._paths.append(route)
            self._routes.append(
                (re.compile(compile_route(route)),
                 app,
                 methods and dict.fromkeys(methods)))
        self._new_routes = []

    def __call__(self, environ, start_response):
        # load up the config from the config file. Only needs to be
        # done once per interpreter. This is the entry point of all
        # siesta applications, so this is where we trap it.
        _conf = config.get_config()
        if _conf is None:
            import os.path
            fname = os.path.join(
                environ.get('ll.config_dir', '/local/linden/etc'),
                'indra.xml')
            config.load(fname)

        # proceed with handling the request
        self._create_routes()
        path_info = environ['PATH_INFO']
        request_method = environ['REQUEST_METHOD']
        allowed = []
        for regex, app, methods in self._routes:
            m = regex.match(path_info)
            if m:
                #print "groupdict:",m.groupdict()
                if not methods or request_method in methods:
                    environ['paste.urlvars'] = m.groupdict()
                    return app(environ, start_response)
                else:
                    # path matched but method is restricted; remember the
                    # allowed methods for the 405 Allow header
                    allowed += methods
        if allowed:
            # BUG FIX: was dict.fromkeys(allows) -- 'allows' is undefined
            # and raised NameError whenever a 405 should be returned.
            allowed = dict.fromkeys(allowed).keys()
            allowed.sort()
            resp = exc.HTTPMethodNotAllowed(
                headers={'Allow': ', '.join(allowed)})
        else:
            resp = exc.HTTPNotFound()
        return resp(environ, start_response)

View File

@ -1,235 +0,0 @@
#!/usr/bin/python
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
from indra.base import llsd, lluuid
from indra.ipc import siesta
import datetime, math, unittest
from webob import exc
class ClassApp(object):
    # Minimal llsd_class() test app: GET returns None, POST echoes the
    # parsed llsd body back.
    def handle_get(self, req):
        pass
    def handle_post(self, req):
        return req.llsd
def callable_app(req):
    # Minimal llsd_callable() test app mirroring ClassApp's behavior:
    # UNDERPANTS -> 405, GET -> None, anything else echoes the llsd body.
    if req.method == 'UNDERPANTS':
        raise exc.HTTPMethodNotAllowed()
    elif req.method == 'GET':
        return None
    return req.llsd
class TestBase:
    # Mixin of shared round-trip tests; concrete subclasses set
    # self.server to a siesta WSGI application in __init__.
    def test_basic_get(self):
        req = siesta.Request.blank('/')
        self.assertEquals(req.get_response(self.server).body,
                          llsd.format_xml(None))

    def test_bad_method(self):
        req = siesta.Request.blank('/')
        req.environ['REQUEST_METHOD'] = 'UNDERPANTS'
        self.assertEquals(req.get_response(self.server).status_int,
                          exc.HTTPMethodNotAllowed.code)

    # Values that serialize losslessly to plain JSON as well as llsd.
    json_safe = {
        'none': None,
        'bool_true': True,
        'bool_false': False,
        'int_zero': 0,
        'int_max': 2147483647,
        'int_min': -2147483648,
        'long_zero': 0,
        'long_max': 2147483647L,
        'long_min': -2147483648L,
        'float_zero': 0,
        'float': math.pi,
        'float_huge': 3.14159265358979323846e299,
        'str_empty': '',
        'str': 'foo',
        u'unic\u1e51de_empty': u'',
        u'unic\u1e51de': u'\u1e4exx\u10480',
        }
    json_safe['array'] = json_safe.values()
    json_safe['tuple'] = tuple(json_safe.values())
    json_safe['dict'] = json_safe.copy()

    # Values needing llsd-specific types (uuid, binary, uri, date) on top
    # of the json-safe set.
    json_unsafe = {
        'uuid_empty': lluuid.UUID(),
        'uuid_full': lluuid.UUID('dc61ab0530200d7554d23510559102c1a98aab1b'),
        'binary_empty': llsd.binary(),
        'binary': llsd.binary('f\0\xff'),
        'uri_empty': llsd.uri(),
        'uri': llsd.uri('http://www.secondlife.com/'),
        'datetime_empty': datetime.datetime(1970,1,1),
        'datetime': datetime.datetime(1999,9,9,9,9,9),
        }
    json_unsafe.update(json_safe)
    json_unsafe['array'] = json_unsafe.values()
    json_unsafe['tuple'] = tuple(json_unsafe.values())
    json_unsafe['dict'] = json_unsafe.copy()
    json_unsafe['iter'] = iter(json_unsafe.values())

    def _test_client_content_type_good(self, content_type, ll):
        # POST the payload with the given content type, ask for the same
        # type back, and require a 200.
        def run(ll):
            req = siesta.Request.blank('/')
            req.environ['REQUEST_METHOD'] = 'POST'
            req.content_type = content_type
            req.llsd = ll
            req.accept = content_type
            resp = req.get_response(self.server)
            self.assertEquals(resp.status_int, 200)
            return req, resp
        # Disabled: per-element comparison of the echoed payload.
        if False and isinstance(ll, dict):
            def fixup(v):
                if isinstance(v, float):
                    return '%.5f' % v
                if isinstance(v, long):
                    return int(v)
                if isinstance(v, (llsd.binary, llsd.uri)):
                    return v
                if isinstance(v, (tuple, list)):
                    return [fixup(i) for i in v]
                if isinstance(v, dict):
                    return dict([(k, fixup(i)) for k, i in v.iteritems()])
                return v
            for k, v in ll.iteritems():
                l = [k, v]
                req, resp = run(l)
                self.assertEquals(fixup(resp.llsd), fixup(l))
        run(ll)

    def test_client_content_type_json_good(self):
        self._test_client_content_type_good('application/json', self.json_safe)

    def test_client_content_type_llsd_xml_good(self):
        self._test_client_content_type_good('application/llsd+xml',
                                            self.json_unsafe)

    def test_client_content_type_llsd_notation_good(self):
        self._test_client_content_type_good('application/llsd+notation',
                                            self.json_unsafe)

    def test_client_content_type_llsd_binary_good(self):
        self._test_client_content_type_good('application/llsd+binary',
                                            self.json_unsafe)

    def test_client_content_type_xml_good(self):
        self._test_client_content_type_good('application/xml',
                                            self.json_unsafe)

    def _test_client_content_type_bad(self, content_type):
        # A body that parses under no encoding must yield 400 Bad Request.
        req = siesta.Request.blank('/')
        req.environ['REQUEST_METHOD'] = 'POST'
        req.body = '\0invalid nonsense under all encodings'
        req.content_type = content_type
        self.assertEquals(req.get_response(self.server).status_int,
                          exc.HTTPBadRequest.code)

    def test_client_content_type_json_bad(self):
        self._test_client_content_type_bad('application/json')

    def test_client_content_type_llsd_xml_bad(self):
        self._test_client_content_type_bad('application/llsd+xml')

    def test_client_content_type_llsd_notation_bad(self):
        self._test_client_content_type_bad('application/llsd+notation')

    def test_client_content_type_llsd_binary_bad(self):
        self._test_client_content_type_bad('application/llsd+binary')

    def test_client_content_type_xml_bad(self):
        self._test_client_content_type_bad('application/xml')

    def test_client_content_type_bad(self):
        # Unknown content type -> 415 Unsupported Media Type.
        req = siesta.Request.blank('/')
        req.environ['REQUEST_METHOD'] = 'POST'
        req.body = 'XXX'
        req.content_type = 'application/nonsense'
        self.assertEquals(req.get_response(self.server).status_int,
                          exc.HTTPUnsupportedMediaType.code)

    def test_request_default_content_type(self):
        req = siesta.Request.blank('/')
        self.assertEquals(req.content_type, req.default_content_type)

    def test_request_default_accept(self):
        req = siesta.Request.blank('/')
        from webob import acceptparse
        self.assertEquals(str(req.accept).replace(' ', ''),
                          req.default_accept.replace(' ', ''))

    def test_request_llsd_auto_body(self):
        req = siesta.Request.blank('/')
        req.llsd = {'a': 2}
        self.assertEquals(req.body, '<?xml version="1.0" ?><llsd><map>'
                          '<key>a</key><integer>2</integer></map></llsd>')

    def test_request_llsd_mod_body_changes_llsd(self):
        req = siesta.Request.blank('/')
        req.llsd = {'a': 2}
        req.body = '<?xml version="1.0" ?><llsd><integer>1337</integer></llsd>'
        self.assertEquals(req.llsd, 1337)

    def test_request_bad_llsd_fails(self):
        # NOTE(review): crashme() never returns boom, so assertRaises is
        # handed None -- this test likely does not exercise what it
        # intends to; confirm.
        def crashme(ctype):
            def boom():
                class foo(object): pass
                req = siesta.Request.blank('/')
                req.content_type = ctype
                req.llsd = foo()
        for mime_type in siesta.llsd_parsers:
            self.assertRaises(TypeError, crashme(mime_type))
class ClassServer(TestBase, unittest.TestCase):
    # Runs the shared TestBase suite against a class-based (llsd_class) app.
    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        self.server = siesta.llsd_class(ClassApp)
class CallableServer(TestBase, unittest.TestCase):
    # Runs the shared TestBase suite against a plain-callable
    # (llsd_callable) app.
    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        self.server = siesta.llsd_callable(callable_app)
class RouterServer(unittest.TestCase):
    def test_router(self):
        # {quux:int} restricts the segment to digits: '/foo/33' routes,
        # '/foo/bar' does not and must 404.
        def foo(req, quux):
            print quux
        r = siesta.Router()
        r.add('/foo/{quux:int}', siesta.llsd_callable(foo), methods=['GET'])
        req = siesta.Request.blank('/foo/33')
        req.get_response(r)
        req = siesta.Request.blank('/foo/bar')
        self.assertEquals(req.get_response(r).status_int,
                          exc.HTTPNotFound.code)

if __name__ == '__main__':
    unittest.main()

View File

@ -1,597 +0,0 @@
"""
@file webdav.py
@brief Classes to make manipulation of a webdav store easier.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys, os, httplib, urlparse
import socket, time
import xml.dom.minidom
import syslog
# import signal
__revision__ = '0'
dav_debug = False
# def urlsafe_b64decode (enc):
# return base64.decodestring (enc.replace ('_', '/').replace ('-', '+'))
# def urlsafe_b64encode (str):
# return base64.encodestring (str).replace ('+', '-').replace ('/', '_')
class DAVError (Exception):
    """ Base class for exceptions in this module. """
    def __init__ (self, status=0, message='', body='', details=''):
        # Keep the pieces individually accessible as well as baked into
        # the Exception message.
        self.status = status
        self.message = message
        self.body = body
        self.details = details
        Exception.__init__ (self, '%d:%s:%s%s' % (self.status, self.message,
                                                  self.body, self.details))

    def print_to_stderr (self):
        """ Write the status/message line and the details to stderr. """
        print >> sys.stderr, str (self.status) + ' ' + self.message
        print >> sys.stderr, str (self.details)
class Timeout (Exception):
    """ Raised by alarm_handler() when a SIGALRM deadline fires. """
    def __init__ (self, arg=''):
        Exception.__init__ (self, arg)

def alarm_handler (signum, frame):
    """ SIGALRM handler: converts the alarm signal into a Timeout. """
    raise Timeout ('caught alarm')
class WebDAV:
    """ Minimal WebDAV/HTTP client built on Python 2 httplib.

    Supports GET/PUT/HEAD/DELETE plus the WebDAV verbs PROPFIND, MKCOL,
    MOVE and COPY, and the squid cache verb PURGE.  Requests that fail
    with transient server errors (500/503/403) or a dropped connection
    are retried up to retries_before_fail times with a linear back-off.
    An optional HTTP proxy is supported via a Host header rewrite.
    """
    def __init__ (self, url, proxy=None, retries_before_fail=6):
        self.init_url = url
        self.init_proxy = proxy
        self.retries_before_fail = retries_before_fail
        url_parsed = urlparse.urlsplit (url)
        # Path component of the base url; all request paths are joined to it.
        self.top_path = url_parsed[ 2 ]
        # make sure top_path has a trailing /
        if self.top_path == None or self.top_path == '':
            self.top_path = '/'
        elif len (self.top_path) > 1 and self.top_path[-1:] != '/':
            self.top_path += '/'
        if dav_debug:
            syslog.syslog ('new WebDAV %s : %s' % (str (url), str (proxy)))
        if proxy:
            # Connect to the proxy host/port, but send the origin server in
            # the Host header and use absolute request URLs.
            proxy_parsed = urlparse.urlsplit (proxy)
            self.host_header = url_parsed[ 1 ]
            host_and_port = proxy_parsed[ 1 ].split (':')
            self.host = host_and_port[ 0 ]
            if len (host_and_port) > 1:
                self.port = int(host_and_port[ 1 ])
            else:
                self.port = 80
        else: # no proxy
            host_and_port = url_parsed[ 1 ].split (':')
            self.host_header = None
            self.host = host_and_port[ 0 ]
            if len (host_and_port) > 1:
                self.port = int(host_and_port[ 1 ])
            else:
                self.port = 80
        self.connection = False
        self.connect ()

    def log (self, msg, depth=0):
        """ Log via syslog when dav_debug is on, abbreviating well-known
        Linden hosts/proxies to keep the log lines short. """
        if dav_debug and depth == 0:
            host = str (self.init_url)
            if host == 'http://int.tuco.lindenlab.com:80/asset/':
                host = 'tuco'
            if host == 'http://harriet.lindenlab.com/asset-keep/':
                host = 'harriet/asset-keep'
            if host == 'http://harriet.lindenlab.com/asset-flag/':
                host = 'harriet/asset-flag'
            if host == 'http://harriet.lindenlab.com/asset/':
                host = 'harriet/asset'
            if host == 'http://ozzy.lindenlab.com/asset/':
                host = 'ozzy/asset'
            if host == 'http://station11.lindenlab.com:12041/:':
                host = 'station11:12041'
            proxy = str (self.init_proxy)
            if proxy == 'None':
                proxy = ''
            if proxy == 'http://int.tuco.lindenlab.com:3128/':
                proxy = 'tuco'
            syslog.syslog ('WebDAV (%s:%s) %s' % (host, proxy, str (msg)))

    def connect (self):
        """ (Re)open the HTTP connection to self.host:self.port. """
        self.log ('connect')
        self.connection = httplib.HTTPConnection (self.host, self.port)

    def __err (self, response, details):
        """ Raise a DAVError describing a failed HTTP response. """
        raise DAVError (response.status, response.reason, response.read (),
                        str (self.init_url) + ':' + \
                        str (self.init_proxy) + ':' + str (details))

    def request (self, method, path, body=None, headers=None,
                 read_all=True, body_hook = None, recurse=0, allow_cache=True):
        """ Issue one HTTP request and return the response object.

        method      -- HTTP verb; 'purge' is special-cased to send the raw path
        path        -- path relative to top_path (leading '/' is stripped)
        body        -- optional request entity
        headers     -- optional dict of extra headers (mutated in place)
        read_all    -- drain the response body before returning
        body_hook   -- callable that streams the request body after headers
        recurse     -- current retry depth (internal)
        allow_cache -- when False, send no-cache headers

        Retries on 500/503/403 responses and on connection drops, up to
        self.retries_before_fail attempts.
        """
        # self.log ('request %s %s' % (method, path))
        if headers == None:
            headers = {}
        if not allow_cache:
            headers['Pragma'] = 'no-cache'
            headers['cache-control'] = 'no-cache'
        try:
            if method.lower () != 'purge':
                if path.startswith ('/'):
                    path = path[1:]
                if self.host_header: # use proxy
                    headers[ 'host' ] = self.host_header
                    fullpath = 'http://%s%s%s' % (self.host_header,
                                                  self.top_path, path)
                else: # no proxy
                    fullpath = self.top_path + path
            else:
                # PURGE is sent to the cache with the url exactly as given.
                fullpath = path
            self.connection.request (method, fullpath, body, headers)
            if body_hook:
                body_hook ()
            # signal.signal (signal.SIGALRM, alarm_handler)
            # try:
            #     signal.alarm (120)
            #     signal.alarm (0)
            # except Timeout, e:
            #     if recurse < 6:
            #         return self.retry_request (method, path, body, headers,
            #                                    read_all, body_hook, recurse)
            #     else:
            #         raise DAVError (0, 'timeout', self.host,
            #                         (method, path, body, headers, recurse))
            response = self.connection.getresponse ()
            if read_all:
                # Drain the body so the keep-alive connection can be reused.
                while len (response.read (1024)) > 0:
                    pass
            if (response.status == 500 or \
                response.status == 503 or \
                response.status == 403) and \
                recurse < self.retries_before_fail:
                return self.retry_request (method, path, body, headers,
                                           read_all, body_hook, recurse)
            return response
        except (httplib.ResponseNotReady,
                httplib.BadStatusLine,
                socket.error):
            # if the server hangs up on us (keepalive off, broken pipe),
            # we need to reconnect and try again.
            if recurse < self.retries_before_fail:
                return self.retry_request (method, path, body, headers,
                                           read_all, body_hook, recurse)
            raise DAVError (0, 'reconnect failed', self.host,
                            (method, path, body, headers, recurse))

    def retry_request (self, method, path, body, headers,
                       read_all, body_hook, recurse):
        """ Back off linearly (10s * depth), reconnect and re-issue. """
        time.sleep (10.0 * recurse)
        self.connect ()
        return self.request (method, path, body, headers,
                             read_all, body_hook, recurse+1)

    def propfind (self, path, body=None, depth=1):
        """ Issue a PROPFIND; returns the 207 Multi-Status response or raises. """
        # self.log ('propfind %s' % path)
        headers = {'Content-Type':'text/xml; charset="utf-8"',
                   'Depth':str(depth)}
        response = self.request ('PROPFIND', path, body, headers, False)
        if response.status == 207:
            return response # Multi-Status
        self.__err (response, ('PROPFIND', path, body, headers, 0))

    def purge (self, path):
        """ issue a squid purge command """
        headers = {'Accept':'*/*'}
        response = self.request ('PURGE', path, None, headers)
        if response.status == 200 or response.status == 404:
            # 200 if it was purge, 404 if it wasn't there.
            return response
        self.__err (response, ('PURGE', path, None, headers))

    def get_file_size (self, path):
        """
        Use propfind to ask a webdav server what the size of
        a file is.  If used on a directory (collection) return 0
        """
        self.log ('get_file_size %s' % path)
        # "getcontentlength" property
        # 8.1.1 Example - Retrieving Named Properties
        # http://docs.python.org/lib/module-xml.dom.html
        nsurl = 'http://apache.org/dav/props/'
        doc = xml.dom.minidom.Document ()
        propfind_element = doc.createElementNS (nsurl, "D:propfind")
        propfind_element.setAttributeNS (nsurl, 'xmlns:D', 'DAV:')
        doc.appendChild (propfind_element)
        prop_element = doc.createElementNS (nsurl, "D:prop")
        propfind_element.appendChild (prop_element)
        con_len_element = doc.createElementNS (nsurl, "D:getcontentlength")
        prop_element.appendChild (con_len_element)
        response = self.propfind (path, doc.toxml ())
        doc.unlink ()
        resp_doc = xml.dom.minidom.parseString (response.read ())
        cln = resp_doc.getElementsByTagNameNS ('DAV:','getcontentlength')[ 0 ]
        try:
            content_length = int (cln.childNodes[ 0 ].nodeValue)
        except IndexError:
            # Collections have an empty getcontentlength element.
            return 0
        resp_doc.unlink ()
        return content_length

    def file_exists (self, path):
        """
        do an http head on the given file.  return True if it succeeds
        """
        self.log ('file_exists %s' % path)
        expect_gzip = path.endswith ('.gz')
        response = self.request ('HEAD', path)
        got_gzip = response.getheader ('Content-Encoding', '').strip ()
        if got_gzip.lower () == 'x-gzip' and expect_gzip == False:
            # the asset server fakes us out if we ask for the non-gzipped
            # version of an asset, but the server has the gzipped version.
            return False
        return response.status == 200

    def mkdir (self, path):
        """ Create a collection (directory); existing directories are ok. """
        self.log ('mkdir %s' % path)
        headers = {}
        response = self.request ('MKCOL', path, None, headers)
        if response.status == 201:
            return # success
        if response.status == 405:
            return # directory already existed?
        self.__err (response, ('MKCOL', path, None, headers, 0))

    def delete (self, path):
        """ Delete a file or (recursively) a collection; missing is ok. """
        self.log ('delete %s' % path)
        headers = {'Depth':'infinity'} # collections require infinity
        response = self.request ('DELETE', path, None, headers)
        if response.status == 204:
            return # no content
        if response.status == 404:
            return # hmm
        self.__err (response, ('DELETE', path, None, headers, 0))

    def list_directory (self, path, dir_filter=None, allow_cache=True,
                        minimum_cache_time=False):
        """
        Request an http directory listing and parse the filenames out of lines
        like: '<LI><A HREF="X"> X</A>'.  If a filter function is provided,
        only return filenames that the filter returns True for.

        This is sort of grody, but it seems faster than other ways of getting
        this information from an isilon.
        """
        self.log ('list_directory %s' % path)

        def try_match (lline, before, after):
            """ Return [name] if lline contains before...after, else [].
            NOTE: slices the outer loop variable `line` (original case),
            using indices found in the lowercased `lline`. """
            try:
                blen = len (before)
                asset_start_index = lline.index (before)
                asset_end_index = lline.index (after, asset_start_index + blen)
                asset = line[ asset_start_index + blen : asset_end_index ]
                if not dir_filter or dir_filter (asset):
                    return [ asset ]
                return []
            except ValueError:
                return []

        if len (path) > 0 and path[-1:] != '/':
            path += '/'
        response = self.request ('GET', path, None, {}, False,
                                 allow_cache=allow_cache)
        if allow_cache and minimum_cache_time: # XXX
            print response.getheader ('Date')
            # s = "2005-12-06T12:13:14"
            # from datetime import datetime
            # from time import strptime
            # datetime(*strptime(s, "%Y-%m-%dT%H:%M:%S")[0:6])
            # datetime.datetime(2005, 12, 6, 12, 13, 14)
        if response.status != 200:
            self.__err (response, ('GET', path, None, {}, 0))
        assets = []
        for line in response.read ().split ('\n'):
            lline = line.lower ()
            if lline.find ("parent directory") == -1:
                # isilon file
                assets += try_match (lline, '<li><a href="', '"> ')
                # apache dir
                assets += try_match (lline, 'alt="[dir]"> <a href="', '/">')
                # apache file
                assets += try_match (lline, 'alt="[ ]"> <a href="', '">')
        return assets

    def __tmp_filename (self, path_and_file):
        """ Build a hidden, pid-suffixed temp name alongside the target path. """
        head, tail = os.path.split (path_and_file)
        if head != '':
            return head + '/.' + tail + '.' + str (os.getpid ())
        else:
            return head + '.' + tail + '.' + str (os.getpid ())

    def __put__ (self, filesize, body_hook, remotefile):
        """ Upload to a temp name, verify the size, then MOVE into place.
        body_hook streams the request entity; filesize must match what it sends. """
        headers = {'Content-Length' : str (filesize)}
        remotefile_tmp = self.__tmp_filename (remotefile)
        response = self.request ('PUT', remotefile_tmp, None,
                                 headers, True, body_hook)
        if not response.status in (201, 204): # created, no content
            self.__err (response, ('PUT', remotefile, None, headers, 0))
        if filesize != self.get_file_size (remotefile_tmp):
            try:
                self.delete (remotefile_tmp)
            except:
                pass
            raise DAVError (0, 'tmp upload error', remotefile_tmp)
        # move the file to its final location
        try:
            self.rename (remotefile_tmp, remotefile)
        except DAVError, exc:
            if exc.status == 403: # try to clean up the tmp file
                try:
                    self.delete (remotefile_tmp)
                except:
                    pass
            raise
        if filesize != self.get_file_size (remotefile):
            raise DAVError (0, 'file upload error', str (remotefile_tmp))

    def put_string (self, strng, remotefile):
        """ Upload the in-memory string strng to remotefile. """
        self.log ('put_string %d -> %s' % (len (strng), remotefile))
        filesize = len (strng)
        def body_hook ():
            """ Stream the whole string as the request body. """
            self.connection.send (strng)
        self.__put__ (filesize, body_hook, remotefile)

    def put_file (self, localfile, remotefile):
        """
        Send a local file to a remote webdav store.  First, upload to
        a temporary filename.  Next make sure the file is the size we
        expected.  Next, move the file to its final location.  Next,
        check the file size at the final location.
        """
        self.log ('put_file %s -> %s' % (localfile, remotefile))
        filesize = os.path.getsize (localfile)
        def body_hook ():
            """ Stream the local file in 1300-byte chunks. """
            handle = open (localfile)
            while True:
                data = handle.read (1300)
                if len (data) == 0:
                    break
                self.connection.send (data)
            handle.close ()
        self.__put__ (filesize, body_hook, remotefile)

    def create_empty_file (self, remotefile):
        """ create an empty file """
        self.log ('touch_file %s' % (remotefile))
        headers = {'Content-Length' : '0'}
        response = self.request ('PUT', remotefile, None, headers)
        if not response.status in (201, 204): # created, no content
            self.__err (response, ('PUT', remotefile, None, headers, 0))
        if self.get_file_size (remotefile) != 0:
            raise DAVError (0, 'file upload error', str (remotefile))

    def __get_file_setup (self, remotefile, check_size=True):
        """ Start a GET; optionally verify Content-Length against PROPFIND size.
        Returns (response, content_length) with the body still unread. """
        if check_size:
            remotesize = self.get_file_size (remotefile)
        response = self.request ('GET', remotefile, None, {}, False)
        if response.status != 200:
            self.__err (response, ('GET', remotefile, None, {}, 0))
        try:
            content_length = int (response.getheader ("Content-Length"))
        except TypeError:
            # Header absent (e.g. chunked transfer); length unknown.
            content_length = None
        if check_size:
            if content_length != remotesize:
                raise DAVError (0, 'file DL size error', remotefile)
        return (response, content_length)

    def __get_file_read (self, writehandle, response, content_length):
        """ Copy the response body to writehandle, enforcing content_length
        when it is known and draining any trailing bytes. """
        if content_length != None:
            so_far_length = 0
            while so_far_length < content_length:
                data = response.read (content_length - so_far_length)
                if len (data) == 0:
                    raise DAVError (0, 'short file download')
                so_far_length += len (data)
                writehandle.write (data)
            while len (response.read ()) > 0:
                pass
        else:
            while True:
                data = response.read ()
                if (len (data) < 1):
                    break
                writehandle.write (data)

    def get_file (self, remotefile, localfile, check_size=True):
        """
        Get a remote file from a webdav server.  Download to a local
        tmp file, then move into place.  Sanity check file sizes as
        we go.
        """
        self.log ('get_file %s -> %s' % (remotefile, localfile))
        (response, content_length) = \
                   self.__get_file_setup (remotefile, check_size)
        localfile_tmp = self.__tmp_filename (localfile)
        handle = open (localfile_tmp, 'w')
        self.__get_file_read (handle, response, content_length)
        handle.close ()
        if check_size:
            if content_length != os.path.getsize (localfile_tmp):
                raise DAVError (0, 'file DL size error',
                                remotefile+','+localfile)
        os.rename (localfile_tmp, localfile)

    def get_file_as_string (self, remotefile, check_size=True):
        """
        download a file from a webdav server and return it as a string.
        """
        self.log ('get_file_as_string %s' % remotefile)
        (response, content_length) = \
                   self.__get_file_setup (remotefile, check_size)
        # (tmp_handle, tmp_filename) = tempfile.mkstemp ()
        tmp_handle = os.tmpfile ()
        self.__get_file_read (tmp_handle, response, content_length)
        tmp_handle.seek (0)
        ret = tmp_handle.read ()
        tmp_handle.close ()
        # os.unlink (tmp_filename)
        return ret

    def get_post_as_string (self, remotefile, body):
        """
        Do an http POST, send body, get response and return it.
        """
        self.log ('get_post_as_string %s' % remotefile)
        # headers = {'Content-Type':'application/x-www-form-urlencoded'}
        headers = {'Content-Type':'text/xml; charset="utf-8"'}
        # b64body = urlsafe_b64encode (asset_url)
        response = self.request ('POST', remotefile, body, headers, False)
        if response.status != 200:
            self.__err (response, ('POST', remotefile, body, headers, 0))
        try:
            content_length = int (response.getheader ('Content-Length'))
        except TypeError:
            content_length = None
        tmp_handle = os.tmpfile ()
        self.__get_file_read (tmp_handle, response, content_length)
        tmp_handle.seek (0)
        ret = tmp_handle.read ()
        tmp_handle.close ()
        return ret

    def __destination_command (self, verb, remotesrc, dstdav, remotedst):
        """
        self and dstdav should point to the same http server.
        Issues MOVE/COPY with a Destination header built from dstdav.
        """
        if len (remotedst) > 0 and remotedst[ 0 ] == '/':
            remotedst = remotedst[1:]
        headers = {'Destination': 'http://%s:%d%s%s' % (dstdav.host,
                                                        dstdav.port,
                                                        dstdav.top_path,
                                                        remotedst)}
        response = self.request (verb, remotesrc, None, headers)
        if response.status == 201:
            return # created
        if response.status == 204:
            return # no content
        self.__err (response, (verb, remotesrc, None, headers, 0))

    def rename (self, remotesrc, remotedst):
        """ rename a file on a webdav server """
        self.log ('rename %s -> %s' % (remotesrc, remotedst))
        self.__destination_command ('MOVE', remotesrc, self, remotedst)

    def xrename (self, remotesrc, dstdav, remotedst):
        """ rename a file on a webdav server, destination resolved via dstdav """
        self.log ('xrename %s -> %s' % (remotesrc, remotedst))
        self.__destination_command ('MOVE', remotesrc, dstdav, remotedst)

    def copy (self, remotesrc, remotedst):
        """ copy a file on a webdav server """
        self.log ('copy %s -> %s' % (remotesrc, remotedst))
        self.__destination_command ('COPY', remotesrc, self, remotedst)

    def xcopy (self, remotesrc, dstdav, remotedst):
        """ copy a file on a webdav server, destination resolved via dstdav """
        self.log ('xcopy %s -> %s' % (remotesrc, remotedst))
        self.__destination_command ('COPY', remotesrc, dstdav, remotedst)
def put_string(data, url):
    """Upload the string `data` to `url` using a throwaway WebDAV client.

    NOTE: this module-level helper shadows nothing but shares its name with
    WebDAV.put_string, which it delegates to.
    """
    scheme, netloc, path = urlparse.urlsplit(url)[:3]
    client = WebDAV('%s://%s/' % (scheme, netloc))
    client.put_string(data, path)
def get_string(url, check_size=True):
    """Download `url` with a throwaway WebDAV client and return the body.

    check_size -- when True, cross-check Content-Length against PROPFIND.
    """
    scheme, netloc, path = urlparse.urlsplit(url)[:3]
    client = WebDAV('%s://%s/' % (scheme, netloc))
    return client.get_file_as_string(path, check_size)

View File

@ -1,273 +0,0 @@
"""\
@file xml_rpc.py
@brief An implementation of a parser/generator for the XML-RPC xml format.
$LicenseInfo:firstyear=2006&license=mit$
Copyright (c) 2006-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from greenlet import greenlet
from mulib import mu
from xml.sax import handler
from xml.sax import parseString
# States
class Expected(object):
    """Parser-state token used by the XML-RPC state machine.

    An instance wraps an element tag name; attribute access mints a fresh
    token of the same class (so START.methodcall is START('methodcall')).
    An empty tag acts as a wildcard in XMLParser.assertState.
    """

    def __init__(self, tag):
        self.tag = tag

    def __getattr__(self, name):
        # Unknown attributes become new tokens; subclasses mint their own type.
        return type(self)(name)

    def __repr__(self):
        cls_name = type(self).__name__
        return '%s(%r)' % (cls_name, self.tag)
# START/END mark element-open and element-close events; STR marks character
# data.  XMLParser.assertState compares states by *type*, so these must be
# distinct classes.
class START(Expected):
    pass
class END(Expected):
    pass
class STR(object):
    # Wildcard tag: matches any character data in assertState.
    tag = ''

# Rebind the class names to wildcard singletons.  Attribute access on these
# instances still mints typed tokens via Expected.__getattr__, so
# START.methodname is a START-typed token for <methodname>.
START = START('')
END = END('')

class Malformed(Exception):
    # Raised when the document's events do not match the expected states.
    pass
class XMLParser(handler.ContentHandler):
    """SAX content handler that drives a greenlet-based state machine.

    Each SAX event (element open/close, character data) is checked against
    the set of currently-expected states and then switched into
    `state_machine` (a greenlet); the greenlet replies with the next
    expected state(s).  Mismatches raise Malformed.
    """
    def __init__(self, state_machine, next_states):
        handler.ContentHandler.__init__(self)
        self.state_machine = state_machine
        # Normalize to a tuple so assertState can always iterate.
        if not isinstance(next_states, tuple):
            next_states = (next_states, )
        self.next_states = next_states
        # Character data is buffered until the enclosing element closes.
        self._character_buffer = ''
    def assertState(self, state, name, *rest):
        """Raise Malformed unless (state, name) matches one expected state."""
        if not isinstance(self.next_states, tuple):
            self.next_states = (self.next_states, )
        for next in self.next_states:
            # States match by type (START/END/STR); an empty tag is a wildcard.
            if type(state) == type(next):
                if next.tag and next.tag != name:
                    raise Malformed(
                        "Expected %s, got %s %s %s" % (
                            next, state, name, rest))
                break
        else:
            raise Malformed(
                "Expected %s, got %s %s %s" % (
                    self.next_states, state, name, rest))
    def startElement(self, name, attrs):
        self.assertState(START, name.lower(), attrs)
        self.next_states = self.state_machine.switch(START, (name.lower(), dict(attrs)))
    def endElement(self, name):
        # Flush buffered character data as a STR event before the END event.
        if self._character_buffer.strip():
            characters = self._character_buffer.strip()
            self._character_buffer = ''
            self.assertState(STR, characters)
            self.next_states = self.state_machine.switch(characters)
        self.assertState(END, name.lower())
        self.next_states = self.state_machine.switch(END, name.lower())
    def error(self, exc):
        # Record the error feedparser-style; bozo flags a malformed document.
        self.bozo = 1
        self.exc = exc
    def fatalError(self, exc):
        self.error(exc)
        raise exc
    def characters(self, characters):
        # May be called multiple times per text node; accumulate.
        self._character_buffer += characters
def parse(what):
    """Parse an XML-RPC methodCall document.

    Spawns the xml_rpc coroutine in a greenlet, feeds it SAX events via
    XMLParser, and returns its (method_name, params) result.  On Malformed
    input the offending document is printed (Python 2 print) and the
    exception re-raised.
    """
    child = greenlet(xml_rpc)
    me = greenlet.getcurrent()
    # First switch hands the parent greenlet over and gets the initial states.
    startup_states = child.switch(me)
    parser = XMLParser(child, startup_states)
    try:
        parseString(what, parser)
    except Malformed:
        print what
        raise
    # Final switch resumes xml_rpc past its last yield to collect the result.
    return child.switch()
def xml_rpc(yielder):
    """Greenlet body: consume SAX events for one <methodCall> document.

    Each yielder.switch(expected...) publishes the states the parser may
    deliver next and suspends until the matching event arrives.  Returns
    (methodName, params) to the final switch in parse().
    """
    yielder.switch(START.methodcall)
    yielder.switch(START.methodname)
    methodName = yielder.switch(STR)
    yielder.switch(END.methodname)
    yielder.switch(START.params)
    root = None
    params = []
    while True:
        # Either another <param> opens or </params> closes the list.
        state, _ = yielder.switch(START.param, END.params)
        if state == END:
            break
        yielder.switch(START.value)
        params.append(
            handle(yielder))
        yielder.switch(END.value)
        yielder.switch(END.param)
    yielder.switch(END.methodcall)
    ## Resume parse
    yielder.switch()
    ## Return result to parse
    return methodName.strip(), params
def handle(yielder):
    """Decode one XML-RPC <value> payload from the event stream.

    Dispatches on the opening tag (int/i4, boolean, string, double,
    datetime.iso8601, base64, struct, array) and returns the corresponding
    Python value.  Recurses for struct members and array elements.

    yielder -- the greenlet driven by XMLParser; switch() yields events.
    """
    # BUG FIX: base64 was used below but never imported anywhere in this
    # module, so any <base64> payload raised NameError.  Import locally.
    import base64
    _, (tag, attrs) = yielder.switch(START)
    if tag in ['int', 'i4']:
        result = int(yielder.switch(STR))
    elif tag == 'boolean':
        result = bool(int(yielder.switch(STR)))
    elif tag == 'string':
        result = yielder.switch(STR)
    elif tag == 'double':
        result = float(yielder.switch(STR))
    elif tag == 'datetime.iso8601':
        # Kept as the raw ISO-8601 string; callers parse if they care.
        result = yielder.switch(STR)
    elif tag == 'base64':
        result = base64.b64decode(yielder.switch(STR))
    elif tag == 'struct':
        result = {}
        while True:
            state, _ = yielder.switch(START.member, END.struct)
            if state == END:
                break
            yielder.switch(START.name)
            key = yielder.switch(STR)
            yielder.switch(END.name)
            yielder.switch(START.value)
            result[key] = handle(yielder)
            yielder.switch(END.value)
            yielder.switch(END.member)
        ## We already handled </struct> above, don't want to handle it below
        return result
    elif tag == 'array':
        result = []
        yielder.switch(START.data)
        while True:
            state, _ = yielder.switch(START.value, END.data)
            if state == END:
                break
            result.append(handle(yielder))
            yielder.switch(END.value)
    # Consume the closing tag matching whatever scalar/array tag we opened.
    yielder.switch(getattr(END, tag))
    return result
# Element constructors for serializing XML-RPC responses via mulib's mu
# templating (usage: VALUE[STRING['text']] nests elements).
VALUE = mu.tag_factory('value')
BOOLEAN = mu.tag_factory('boolean')
INT = mu.tag_factory('int')
STRUCT = mu.tag_factory('struct')
MEMBER = mu.tag_factory('member')
NAME = mu.tag_factory('name')
ARRAY = mu.tag_factory('array')
DATA = mu.tag_factory('data')
STRING = mu.tag_factory('string')
DOUBLE = mu.tag_factory('double')
METHODRESPONSE = mu.tag_factory('methodResponse')
PARAMS = mu.tag_factory('params')
PARAM = mu.tag_factory('param')

# Render these leaf elements without surrounding whitespace so the text
# content round-trips exactly.
mu.inline_elements['string'] = True
mu.inline_elements['boolean'] = True
mu.inline_elements['name'] = True
def _generate(something):
    """Serialize one Python value to a mu XML-RPC <value> element tree.

    Handles dict -> struct, list -> array, str -> string, bool -> boolean,
    int -> int, float -> double.  NOTE: unsupported types fall off the end
    and return None silently.  bool must be tested before int because
    isinstance(True, int) is true.
    """
    if isinstance(something, dict):
        result = STRUCT()
        for key, value in something.items():
            result[
                MEMBER[
                    NAME[key], _generate(value)]]
        return VALUE[result]
    elif isinstance(something, list):
        result = DATA()
        for item in something:
            result[_generate(item)]
        return VALUE[ARRAY[[result]]]
    elif isinstance(something, basestring):
        return VALUE[STRING[something]]
    elif isinstance(something, bool):
        if something:
            return VALUE[BOOLEAN['1']]
        return VALUE[BOOLEAN['0']]
    elif isinstance(something, int):
        return VALUE[INT[something]]
    elif isinstance(something, float):
        return VALUE[DOUBLE[something]]
def generate(*args):
    """Build a complete <methodResponse> element wrapping each arg as a param."""
    params = PARAMS()
    for arg in args:
        params[PARAM[_generate(arg)]]
    return METHODRESPONSE[params]
if __name__ == '__main__':
    # Smoke test: parse a canonical XML-RPC example (Python 2 print statement).
    print parse("""<?xml version="1.0"?> <methodCall> <methodName>examples.getStateName</methodName> <params> <param> <value><i4>41</i4></value> </param> </params> </methodCall>
""")

View File

@ -1,64 +0,0 @@
"""\
@file fastest_elementtree.py
@brief Concealing some gnarly import logic in here. This should export the interface of elementtree.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
# The parsing exception raised by the underlying library depends
# on the ElementTree implementation we're using, so we provide an
# alias here.
#
# Use ElementTreeError as the exception type for catching parsing
# errors.
# Using cElementTree might cause some unforeseen problems, so here's a
# convenient off switch.
# Using cElementTree might cause some unforeseen problems, so here's a
# convenient off switch.  Each fallback re-exports the ElementTree API via
# star-import and binds ElementTreeError to that implementation's parse
# exception type.
use_celementree = True
try:
    if not use_celementree:
        raise ImportError()
    # Python 2.3 and 2.4.
    from cElementTree import *
    ElementTreeError = SyntaxError
except ImportError:
    try:
        if not use_celementree:
            raise ImportError()
        # Python 2.5 and above.
        from xml.etree.cElementTree import *
        ElementTreeError = SyntaxError
    except ImportError:
        # Pure Python code.
        try:
            # Python 2.3 and 2.4.
            from elementtree.ElementTree import *
        except ImportError:
            # Python 2.5 and above.
            from xml.etree.ElementTree import *
        # The pure Python ElementTree module uses Expat for parsing.
        from xml.parsers.expat import ExpatError as ElementTreeError

View File

@ -1,52 +0,0 @@
"""\
@file helpformatter.py
@author Phoenix
@brief Class for formatting optparse descriptions.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import optparse
import textwrap
class Formatter(optparse.IndentedHelpFormatter):
    """optparse help formatter that wraps each description line separately.

    Unlike the default formatter, explicit newlines in the description are
    preserved: every newline-delimited paragraph is wrapped on its own,
    indented two spaces past the current indent.
    """

    def __init__(self, p_indentIncrement=2, p_maxHelpPosition=24,
                 p_width=79, p_shortFirst=1):
        # Forward straight to the grandparent initializer, exactly as the
        # IndentedHelpFormatter constructor would.
        optparse.HelpFormatter.__init__(self, p_indentIncrement,
                                        p_maxHelpPosition, p_width,
                                        p_shortFirst)

    def format_description(self, p_description):
        """Wrap each line of p_description to the available width."""
        wrap_width = self.width - self.current_indent
        pad = " " * (self.current_indent + 2)
        wrapped_lines = []
        for paragraph in p_description.split("\n"):
            wrapped_lines.append(
                textwrap.fill(paragraph, wrap_width,
                              initial_indent=pad,
                              subsequent_indent=pad))
        return "\n".join(wrapped_lines)

View File

@ -1,63 +0,0 @@
"""\
@file iterators.py
@brief Useful general-purpose iterators.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
from __future__ import nested_scopes
def iter_chunks(rows, aggregate_size=100):
    """
    Given an iterable set of items (@p rows), produces lists of up to @p
    aggregate_size items at a time, for example:

    iter_chunks([1,2,3,4,5,6,7,8,9,10], 3)

    Values for @p aggregate_size < 1 will raise ValueError.

    Will return a generator that produces, in the following order:
    - [1, 2, 3]
    - [4, 5, 6]
    - [7, 8, 9]
    - [10]
    """
    # Validate eagerly (here, not inside the generator) so callers get the
    # ValueError at call time, matching the original behavior.
    if aggregate_size < 1:
        raise ValueError()
    def iter_chunks_inner():
        """Inner generator: batch rows into lists of aggregate_size."""
        # BUG FIX: the original called row_iter.next(), a Python-2-only
        # method name; plain iteration is equivalent and version-agnostic.
        agg = []
        for row in rows:
            agg.append(row)
            if len(agg) >= aggregate_size:
                yield agg
                agg = []
        # Emit any leftover partial chunk.
        if agg:
            yield agg
    return iter_chunks_inner()

View File

@ -1,72 +0,0 @@
"""\
@file iterators_test.py
@brief Test cases for iterators module.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import unittest
from indra.util.iterators import iter_chunks
class TestIterChunks(unittest.TestCase):
    """Unittests for iter_chunks"""
    def test_bad_agg_size(self):
        # aggregate_size < 1 must raise ValueError at call time.
        rows = [1,2,3,4]
        self.assertRaises(ValueError, iter_chunks, rows, 0)
        self.assertRaises(ValueError, iter_chunks, rows, -1)
        # NOTE(review): the two try/except blocks below re-test the same
        # condition as the assertRaises calls above; kept for fidelity.
        try:
            for i in iter_chunks(rows, 0):
                pass
        except ValueError:
            pass
        else:
            self.fail()
        try:
            result = list(iter_chunks(rows, 0))
        except ValueError:
            pass
        else:
            self.fail()
    def test_empty(self):
        # Empty input yields no chunks.
        rows = []
        result = list(iter_chunks(rows))
        self.assertEqual(result, [])
    def test_small(self):
        # Fewer rows than aggregate_size yields one short chunk.
        rows = [[1]]
        result = list(iter_chunks(rows, 2))
        self.assertEqual(result, [[[1]]])
    def test_size(self):
        # Row count exactly equal to aggregate_size yields one full chunk.
        rows = [[1],[2]]
        result = list(iter_chunks(rows, 2))
        self.assertEqual(result, [[[1],[2]]])
    def test_multi_agg(self):
        # Leftover rows appear in a final, shorter chunk.
        rows = [[1],[2],[3],[4],[5]]
        result = list(iter_chunks(rows, 2))
        self.assertEqual(result, [[[1],[2]],[[3],[4]],[[5]]])

if __name__ == "__main__":
    unittest.main()

View File

@ -1,182 +0,0 @@
#!/usr/bin/env python
"""\
@file llperformance.py
$LicenseInfo:firstyear=2010&license=viewerlgpl$
Second Life Viewer Source Code
Copyright (C) 2010-2011, Linden Research, Inc.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation;
version 2.1 of the License only.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
$/LicenseInfo$
"""
# ------------------------------------------------
# Sim metrics utility functions.
import glob, os, time, sys, stat, exceptions
from indra.base import llsd
gBlockMap = {} #Map of performance metric data with function hierarchy information.
gCurrentStatPath = "" # '/'-joined key path of the currently-open LLPerfBlocks
gIsLoggingEnabled=False # set True by LLPerformance.init_testing() when the config file is fresh
class LLPerfStat:
    """Accumulates wall-clock time (microseconds) and run count for one metric.

    start()/stop() bracket each timed run; get_map() exports the totals as a
    plain dict suitable for LLSD serialization.
    """

    def __init__(self, key):
        self.mName = key
        self.mTotalTime = 0
        self.mNumRuns = 0
        # Creation time: epoch milliseconds plus an ISO-8601 UTC string.
        self.mTimeStamp = int(time.time() * 1000)
        self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

    def __str__(self):
        return "%f" % self.mTotalTime

    def start(self):
        """Open a timing interval and count one run."""
        self.mNumRuns += 1
        self.mStartTime = int(time.time() * 1000000)

    def stop(self):
        """Close the interval opened by start() and accumulate its duration."""
        elapsed_us = int(time.time() * 1000000) - self.mStartTime
        self.mTotalTime += elapsed_us

    def get_map(self):
        """Return this stat's fields as a serializable dict."""
        return {
            'name': self.mName,
            'utc_time': self.mUTCTime,
            'timestamp': self.mTimeStamp,
            'us': self.mTotalTime,
            'count': self.mNumRuns,
        }
class PerfError(exceptions.Exception):
    """Raised when an LLPerfBlock is destroyed while still running,
    i.e. finish() was never called on it (see the commented-out
    __del__ hook in LLPerfBlock)."""
    def __init__(self):
        return
    def __str__(self):
        # BUG FIX: method was misspelled __Str__ (so Python never called
        # it) and it printed instead of returning the message string.
        return "Unfinished LLPerfBlock"
class LLPerfBlock:
    """Scoped timing block for one node of the metrics hierarchy.

    Construction pushes `key` onto the global stat path and starts the
    associated LLPerfStat; finish() stops the stat and restores the
    previous path.  Both are no-ops when logging is disabled."""
    def __init__( self, key ):
        global gBlockMap
        global gCurrentStatPath
        global gIsLoggingEnabled

        # Check to see if we're running metrics right now.
        if gIsLoggingEnabled:
            self.mRunning = True        # Mark myself as running.
            self.mPreviousStatPath = gCurrentStatPath
            gCurrentStatPath += "/" + key
            if gCurrentStatPath not in gBlockMap:
                gBlockMap[gCurrentStatPath] = LLPerfStat(key)
            self.mStat = gBlockMap[gCurrentStatPath]
            self.mStat.start()

    def finish( self ):
        global gBlockMap
        # BUG FIX: gCurrentStatPath was assigned below without a global
        # declaration, which silently created a dead local variable and
        # left the module-level path un-restored after every block.
        global gCurrentStatPath
        global gIsLoggingEnabled

        if gIsLoggingEnabled:
            self.mStat.stop()
            self.mRunning = False
            gCurrentStatPath = self.mPreviousStatPath

#    def __del__( self ):
#        if self.mRunning:
#            #SPATTERS FIXME
#            raise PerfError
class LLPerformance:
    """Process-level driver for metric collection: decides whether
    logging is enabled and dumps the collected gBlockMap to disk."""
    #--------------------------------------------------
    # Determine whether or not we want to log statistics

    def __init__( self, process_name = "python" ):
        self.process_name = process_name
        self.init_testing()
        self.mTimeStamp = int(time.time()*1000)
        self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

    def init_testing( self ):
        """Enable logging if the collector's config file exists and is
        younger than the 'duration' value it contains."""
        global gIsLoggingEnabled

        host_performance_file = "/dev/shm/simperf/simperf_proc_config.llsd"

        # If file exists, open
        if os.path.exists(host_performance_file):
            file = open (host_performance_file,'r')

            # Read serialized LLSD from file.
            body = llsd.parse(file.read())

            # Calculate time since file last modified.
            stats = os.stat(host_performance_file)
            now = time.time()
            mod = stats[stat.ST_MTIME]
            age = now - mod

            if age < ( body['duration'] ):
                gIsLoggingEnabled = True

    def get( self ):
        """Return True when metric logging is currently enabled."""
        global gIsLoggingEnabled
        return gIsLoggingEnabled

    #def output(self,ptr,path):
    #    if 'stats' in ptr:
    #        stats = ptr['stats']
    #        self.mOutputPtr[path] = stats.get_map()

    #    if 'children' in ptr:
    #        children=ptr['children']

    #        curptr = self.mOutputPtr
    #        curchildren={}
    #        curptr['children'] = curchildren

    #        for key in children:
    #            curchildren[key]={}
    #            self.mOutputPtr = curchildren[key]
    #            self.output(children[key],path + '/' + key)

    def done(self):
        """Serialize process info plus all collected stats to a per-pid
        llsd notation file under /dev/shm/simperf.  No-op when logging
        is disabled.  NOTE(review): this destructively replaces each
        LLPerfStat in gBlockMap with its dict form, so it is one-shot."""
        global gBlockMap

        if not self.get():
            return

        output_name = "/dev/shm/simperf/%s_proc.%d.llsd" % (self.process_name, os.getpid())
        output_file = open(output_name, 'w')
        process_info = {
            "name"  :   self.process_name,
            "pid"   :   os.getpid(),
            "ppid"  :   os.getppid(),
            "timestamp" :   self.mTimeStamp,
            "utc_time"  :   self.mUTCTime,
            }
        output_file.write(llsd.format_notation(process_info))
        output_file.write('\n')

        for key in gBlockMap.keys():
            gBlockMap[key] = gBlockMap[key].get_map()
        output_file.write(llsd.format_notation(gBlockMap))
        output_file.write('\n')
        output_file.close()

View File

@ -1,117 +0,0 @@
"""\
@file llsubprocess.py
@author Phoenix
@date 2008-01-18
@brief The simplest possible wrapper for a common sub-process paradigm.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import os
import popen2
import time
import select
class Timeout(RuntimeError):
    """Raised by run() when the child process does not complete within
    the caller-supplied timeout."""
def run(command, args=None, data=None, timeout=None):
    """\
    @brief Run command with arguments

    This is it. This is the function I want to run all the time when doing
    subprocces, but end up copying the code everywhere. none of the
    standard commands are secure and provide a way to specify input, get
    all the output, and get the result.
    @param command A string specifying a process to launch.
    @param args Arguments to be passed to command. Must be list, tuple or None.
    @param data input to feed to the command.
    @param timeout Maximum number of seconds to run.
    @return Returns (result, stdout, stderr) from process.
    """
    cmd = [command]
    if args:
        cmd.extend([str(arg) for arg in args])
    #print "cmd: ","' '".join(cmd)
    child = popen2.Popen3(cmd, True)
    #print child.pid
    out = []
    err = []
    result = -1
    time_left = timeout
    # Only offer the child's stdin for writing while we still have data.
    tochild = [child.tochild.fileno()]
    while True:
        time_start = time.time()
        #print "time:",time_left
        # Multiplex over child stdout/stderr (read) and stdin (write),
        # bounded by the remaining timeout budget.
        p_in, p_out, p_err = select.select(
            [child.fromchild.fileno(), child.childerr.fileno()],
            tochild,
            [],
            time_left)
        if p_in:
            new_line = os.read(child.fromchild.fileno(), 32 * 1024)
            if new_line:
                #print "line:",new_line
                out.append(new_line)
            new_line = os.read(child.childerr.fileno(), 32 * 1024)
            if new_line:
                #print "error:", new_line
                err.append(new_line)
        if p_out:
            if data:
                #print "p_out"
                # Write as much input as the pipe accepts; when done,
                # close stdin and stop selecting on it.
                bytes = os.write(child.tochild.fileno(), data)
                data = data[bytes:]
                if len(data) == 0:
                    data = None
                    tochild = []
                    child.tochild.close()
        result = child.poll()
        if result != -1:
            # At this point, the child process has exited and result
            # is the return value from the process. Between the time
            # we called select() and poll() the process may have
            # exited so read all the data left on the child process
            # stdout and stderr.
            last = child.fromchild.read()
            if last:
                out.append(last)
            last = child.childerr.read()
            if last:
                err.append(last)
            child.tochild.close()
            child.fromchild.close()
            child.childerr.close()
            break
        if time_left is not None:
            time_left -= (time.time() - time_start)
            if time_left < 0:
                raise Timeout
    #print "result:",result
    out = ''.join(out)
    #print "stdout:", out
    err = ''.join(err)
    #print "stderr:", err
    return result, out, err

View File

@ -1,592 +0,0 @@
"""\
@file named_query.py
@author Ryan Williams, Phoenix
@date 2007-07-31
@brief An API for running named queries.
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import errno
import MySQLdb
import MySQLdb.cursors
import os
import os.path
import re
import time
from indra.base import llsd
from indra.base import config
DEBUG = False  # when True, NamedQuery.run() echoes the rendered SQL
# File extension expected on named-query documents (configurable).
NQ_FILE_SUFFIX = config.get('named-query-file-suffix', '.nq')
NQ_FILE_SUFFIX_LEN = len(NQ_FILE_SUFFIX)

# Module-level singleton; created lazily by _init_g_named_manager().
_g_named_manager = None
def _init_g_named_manager(sql_dir = None):
    """Initializes a global NamedManager object to point at a
    specified named queries hierarchy.

    @param sql_dir directory to load named queries from; when None the
        config value 'named-query-base-dir' (or a source-relative
        fallback) is used.

    This function is intended entirely for testing purposes,
    because it's tricky to control the config from inside a test."""
    # Re-read the suffix settings so a test-supplied config takes effect.
    global NQ_FILE_SUFFIX
    NQ_FILE_SUFFIX = config.get('named-query-file-suffix', '.nq')
    global NQ_FILE_SUFFIX_LEN
    NQ_FILE_SUFFIX_LEN = len(NQ_FILE_SUFFIX)

    if sql_dir is None:
        sql_dir = config.get('named-query-base-dir')

    # extra fallback directory in case config doesn't return what we want
    if sql_dir is None:
        sql_dir = os.path.abspath(
            os.path.join(
            os.path.realpath(os.path.dirname(__file__)), "..", "..", "..", "..", "web", "dataservice", "sql"))

    global _g_named_manager
    _g_named_manager = NamedQueryManager(
        os.path.abspath(os.path.realpath(sql_dir)))
def get(name, schema = None):
    """Return the NamedQuery registered under ``name``.

    Lazily creates the module-global manager on first call, then
    resolves the query for the requested schema (None = default)."""
    if _g_named_manager is None:
        _init_g_named_manager()
    query = _g_named_manager.get(name)
    return query.for_schema(schema)
def sql(connection, name, params):
    """Render the named query's SQL text with ``params`` substituted.

    Uses the module-global NamedQuery object so default substitution
    rules apply.  Debugging aid only (see NamedQuery.sql)."""
    query = get(name)
    return query.sql(connection, params)
def run(connection, name, params, expect_rows = None):
    """\
    @brief Execute the named query on the given connection.

    Note that this function fetches ALL rows of the result set.

    @param connection The DB-API connection to use
    @param name The name of the query to run
    @param params The parameters passed into the query
    @param expect_rows The number of rows expected. Set to 1 if return_as_map
        is true. Raises ExpectationFailed if the number of returned rows
        doesn't exactly match. Kind of a hack.
    @return Returns the result set as a list of dicts.
    """
    query = get(name)
    return query.run(connection, params, expect_rows)
class ExpectationFailed(Exception):
    """Raised when an expectation about an SQL query's result set
    (e.g. expected row count) is not met."""
    def __init__(self, message):
        super(ExpectationFailed, self).__init__(message)
        # Keep the message available as an attribute for callers.
        self.message = message
class NamedQuery(object):
    """One named query: an llsd .nq document parsed into a base query
    plus dynamic-where options, with LIKE/integer parameter rewriting,
    per-schema alternatives, and periodic on-disk refresh."""
    def __init__(self, name, filename):
        """ Construct a NamedQuery object.  The name argument is an
        arbitrary name as a handle for the query, and the filename is
        a path to a file or a file-like object containing an llsd named
        query document."""
        self._stat_interval_seconds = 5  # 5 seconds
        self._name = name
        # Append the configured suffix to string filenames that lack it.
        if (filename is not None and isinstance(filename, (str, unicode))
            and NQ_FILE_SUFFIX != filename[-NQ_FILE_SUFFIX_LEN:]):
            filename = filename + NQ_FILE_SUFFIX
        self._location = filename
        self._alternative = dict()
        self._last_mod_time = 0
        self._last_check_time = 0
        self.deleted = False
        self.load_contents()

    def name(self):
        """ The name of the query. """
        return self._name

    def get_modtime(self):
        """ Returns the mtime (last modified time) of the named query
        filename. For file-like objects, expect a modtime of 0"""
        if self._location and isinstance(self._location, (str, unicode)):
            return os.path.getmtime(self._location)
        return 0

    def load_contents(self):
        """ Loads and parses the named query file into self. Does
        nothing if self.location is nonexistant."""
        if self._location:
            if isinstance(self._location, (str, unicode)):
                contents = llsd.parse(open(self._location).read())
            else:
                # we probably have a file-like object. Godspeed!
                contents = llsd.parse(self._location.read())
            self._reference_contents(contents)
            # Check for alternative implementations
            try:
                for name, alt in self._contents['alternative'].items():
                    nq = NamedQuery(name, None)
                    nq._reference_contents(alt)
                    self._alternative[name] = nq
            except KeyError, e:
                pass
            self._last_mod_time = self.get_modtime()
            self._last_check_time = time.time()

    def _reference_contents(self, contents):
        "Helper method which builds internal structure from parsed contents"
        self._contents = contents
        self._ttl = int(self._contents.get('ttl', 0))
        self._return_as_map = bool(self._contents.get('return_as_map', False))
        self._legacy_dbname = self._contents.get('legacy_dbname', None)

        # reset these before doing the sql conversion because we will
        # read them there. reset these while loading so we pick up
        # changes.
        self._around = set()
        self._append = set()
        self._integer = set()

        # dynamic_where entries may be strings, lists of lines, or maps
        # of sub-options; convert the sql in each form.
        self._options = self._contents.get('dynamic_where', {})
        for key in self._options:
            if isinstance(self._options[key], basestring):
                self._options[key] = self._convert_sql(self._options[key])
            elif isinstance(self._options[key], list):
                lines = []
                for line in self._options[key]:
                    lines.append(self._convert_sql(line))
                self._options[key] = lines
            else:
                moreopt = {}
                for kk in self._options[key]:
                    moreopt[kk] = self._convert_sql(self._options[key][kk])
                self._options[key] = moreopt
        self._base_query = self._convert_sql(self._contents['base_query'])
        self._query_suffix = self._convert_sql(
            self._contents.get('query_suffix', ''))

    def _convert_sql(self, sql):
        """convert the parsed sql into a useful internal structure.

        This function has to turn the named query format into a pyformat
        style. It also has to look for %:name% and :name% and
        ready them for use in LIKE statements"""
        if sql:
            # This first sub is to properly escape any % signs that
            # are meant to be literally passed through to mysql in the
            # query. It leaves any %'s that are used for
            # like-expressions.
            expr = re.compile("(?<=[^a-zA-Z0-9_-])%(?=[^:])")
            sql = expr.sub('%%', sql)

            # This should tackle the rest of the %'s in the query, by
            # converting them to LIKE clauses.
            expr = re.compile("(%?):([a-zA-Z][a-zA-Z0-9_-]*)%")
            sql = expr.sub(self._prepare_like, sql)

            # #:name becomes an integer-coerced parameter.
            expr = re.compile("#:([a-zA-Z][a-zA-Z0-9_-]*)")
            sql = expr.sub(self._prepare_integer, sql)

            # plain :name becomes pyformat %(name)s.
            expr = re.compile(":([a-zA-Z][a-zA-Z0-9_-]*)")
            sql = expr.sub("%(\\1)s", sql)
        return sql

    def _prepare_like(self, match):
        """This function changes LIKE statement replace behavior

        It works by turning %:name% to %(_name_around)s and :name% to
        %(_name_append)s. Since a leading '_' is not a valid keyname
        input (enforced via unit tests), it will never clash with
        existing keys. Then, when building the statement, the query
        runner will generate corrected strings."""
        if match.group(1) == '%':
            # there is a leading % so this is treated as prefix/suffix
            self._around.add(match.group(2))
            return "%(" + self._build_around_key(match.group(2)) + ")s"
        else:
            # there is no leading %, so this is suffix only
            self._append.add(match.group(2))
            return "%(" + self._build_append_key(match.group(2)) + ")s"

    def _build_around_key(self, key):
        # Synthetic param name for %:key% (wildcard both sides).
        return "_" + key + "_around"

    def _build_append_key(self, key):
        # Synthetic param name for :key% (trailing wildcard only).
        return "_" + key + "_append"

    def _prepare_integer(self, match):
        """This function adjusts the sql for #:name replacements

        It works by turning #:name to %(_name_as_integer)s. Since a
        leading '_' is not a valid keyname input (enforced via unit
        tests), it will never clash with existing keys. Then, when
        building the statement, the query runner will generate
        corrected strings."""
        self._integer.add(match.group(1))
        return "%(" + self._build_integer_key(match.group(1)) + ")s"

    def _build_integer_key(self, key):
        # Synthetic param name for #:key (int-coerced).
        return "_" + key + "_as_integer"

    def _strip_wildcards_to_list(self, value):
        """Take string, and strip out the LIKE special characters.

        Technically, this is database dependant, but postgresql and
        mysql use the same wildcards, and I am not aware of a general
        way to handle this. I think you need a sql statement of the
        form:

        LIKE_STRING( [ANY,ONE,str]... )

        which would treat ANY as their any string, and ONE as their
        single glyph, and str as something that needs database
        specific encoding to not allow any % or _ to affect the query.

        As it stands, I believe it's impossible to write a named query
        style interface which uses like to search the entire space of
        text available. Imagine the query:

        % of brain used by average linden

        In order to search for %, it must be escaped, so once you have
        escaped the string to not do wildcard searches, and be escaped
        for the database, and then prepended the wildcard you come
        back with one of:

        1) %\% of brain used by average linden
        2) %%% of brain used by average linden

        Then, when passed to the database to be escaped to be database
        safe, you get back:

        1) %\\% of brain used by average linden
        : which means search for any character sequence, followed by a
        backslash, followed by any sequence, followed by ' of
        brain...'
        2) %%% of brain used by average linden
        : which (I believe) means search for a % followed by any
        character sequence followed by 'of brain...'

        Neither of which is what we want!

        So, we need a vendor (or extention) for LIKE_STRING. Anyone
        want to write it?"""
        if isinstance(value, unicode):
            utf8_value = value
        else:
            utf8_value = unicode(value, "utf-8")
        esc_list = []
        remove_chars = set(u"%_")
        for glyph in utf8_value:
            if glyph in remove_chars:
                continue
            esc_list.append(glyph.encode("utf-8"))
        return esc_list

    def delete(self):
        """ Makes this query unusable by deleting all the members and
        setting the deleted member. This is desired when the on-disk
        query has been deleted but the in-memory copy remains."""
        # blow away all members except _name, _location, and deleted
        name, location = self._name, self._location
        for key in self.__dict__.keys():
            del self.__dict__[key]
        self.deleted = True
        self._name, self._location = name, location

    def ttl(self):
        """ Estimated time to live of this query. Used for web
        services to set the Expires header."""
        return self._ttl

    def legacy_dbname(self):
        # Optional 'legacy_dbname' value from the query document, or None.
        return self._legacy_dbname

    def return_as_map(self):
        """ Returns true if this query is configured to return its
        results as a single map (as opposed to a list of maps, the
        normal behavior)."""
        return self._return_as_map

    def for_schema(self, db_name):
        "Look through the alternates and return the correct query"
        if db_name is None:
            return self
        try:
            return self._alternative[db_name]
        except KeyError, e:
            pass
        return self

    def run(self, connection, params, expect_rows = None, use_dictcursor = True):
        """given a connection, run a named query with the params

        Note that this function will fetch ALL rows. We do this because it
        opens and closes the cursor to generate the values, and this
        isn't a generator so the cursor has no life beyond the method call.

        @param cursor The connection to use (this generates its own cursor for the query)
        @param name The name of the query to run
        @param params The parameters passed into the query
        @param expect_rows The number of rows expected. Set to 1 if return_as_map is true. Raises ExpectationFailed if the number of returned rows doesn't exactly match. Kind of a hack.
        @param use_dictcursor Set to false to use a normal cursor and manually convert the rows to dicts.
        @return Returns the result set as a list of dicts, or, if the named query has return_as_map set to true, returns a single dict.
        """
        if use_dictcursor:
            cursor = connection.cursor(MySQLdb.cursors.DictCursor)
        else:
            cursor = connection.cursor()

        full_query, params = self._construct_sql(params)
        if DEBUG:
            print "SQL:", self.sql(connection, params)
        rows = cursor.execute(full_query, params)

        # *NOTE: the expect_rows argument is a very cheesy way to get some
        # validation on the result set. If you want to add more expectation
        # logic, do something more object-oriented and flexible. Or use an ORM.
        if(self._return_as_map):
            expect_rows = 1
        if expect_rows is not None and rows != expect_rows:
            cursor.close()
            raise ExpectationFailed("Statement expected %s rows, got %s. Sql: '%s' %s" % (
                expect_rows, rows, full_query, params))

        # convert to dicts manually if we're not using a dictcursor
        if use_dictcursor:
            result_set = cursor.fetchall()
        else:
            if cursor.description is None:
                # an insert or something
                x = cursor.fetchall()
                cursor.close()
                return x

            names = [x[0] for x in cursor.description]

            result_set = []
            for row in cursor.fetchall():
                converted_row = {}
                for idx, col_name in enumerate(names):
                    converted_row[col_name] = row[idx]
                result_set.append(converted_row)

        cursor.close()
        if self._return_as_map:
            return result_set[0]
        return result_set

    def _construct_sql(self, params):
        """ Returns a query string and a dictionary of parameters,
        suitable for directly passing to the execute() method."""
        self.refresh()

        # build the query from the options available and the params
        base_query = []
        base_query.append(self._base_query)
        for opt, extra_where in self._options.items():
            if type(extra_where) in (dict, list, tuple):
                # map/list option: the param VALUE selects which clause(s)
                if opt in params:
                    base_query.append(extra_where[params[opt]])
            else:
                # string option: appended when param is present and truthy
                if opt in params and params[opt]:
                    base_query.append(extra_where)
        if self._query_suffix:
            base_query.append(self._query_suffix)
        full_query = '\n'.join(base_query)

        # Go through the query and rewrite all of the ones with the
        # @:name syntax.
        rewrite = _RewriteQueryForArray(params)
        expr = re.compile("@%\(([a-zA-Z][a-zA-Z0-9_-]*)\)s")
        full_query = expr.sub(rewrite.operate, full_query)
        params.update(rewrite.new_params)

        # build out the params for like. We only have to do this for
        # parameters which were detected to have used the like syntax
        # during load.
        #
        # * treat the incoming string as utf-8
        # * strip wildcards
        # * append or prepend % as appropriate
        new_params = {}
        for key in params:
            if key in self._around:
                new_value = ['%']
                new_value.extend(self._strip_wildcards_to_list(params[key]))
                new_value.append('%')
                new_params[self._build_around_key(key)] = ''.join(new_value)
            if key in self._append:
                new_value = self._strip_wildcards_to_list(params[key])
                new_value.append('%')
                new_params[self._build_append_key(key)] = ''.join(new_value)
            if key in self._integer:
                new_params[self._build_integer_key(key)] = int(params[key])
        params.update(new_params)

        return full_query, params

    def sql(self, connection, params):
        """ Generates an SQL statement from the named query document
        and a dictionary of parameters.

        *NOTE: Only use for debugging, because it uses the
        non-standard MySQLdb 'literal' method.
        """
        if not DEBUG:
            import warnings
            warnings.warn("Don't use named_query.sql() when not debugging. Used on %s" % self._location)
        # do substitution using the mysql (non-standard) 'literal'
        # function to do the escaping.
        full_query, params = self._construct_sql(params)
        return full_query % connection.literal(params)

    def refresh(self):
        """ Refresh self from the file on the filesystem.

        This is optimized to be callable as frequently as you wish,
        without adding too much load. It does so by only stat-ing the
        file every N seconds, where N defaults to 5 and is
        configurable through the member _stat_interval_seconds. If the stat
        reveals that the file has changed, refresh will re-parse the
        contents of the file and use them to update the named query
        instance. If the stat reveals that the file has been deleted,
        refresh will call self.delete to make the in-memory
        representation unusable."""
        now = time.time()
        if(now - self._last_check_time > self._stat_interval_seconds):
            self._last_check_time = now
            try:
                modtime = self.get_modtime()
                if(modtime > self._last_mod_time):
                    self.load_contents()
            except OSError, e:
                if e.errno == errno.ENOENT: # file not found
                    self.delete() # clean up self
                raise # pass the exception along to the caller so they know that this query disappeared
class NamedQueryManager(object):
    """ Manages the lifespan of NamedQuery objects, drawing from a
    directory hierarchy of named query documents.

    In practice this amounts to a memory cache of NamedQuery objects."""

    def __init__(self, named_queries_dir):
        """ Initializes a manager to look for named queries in a
        directory."""
        self._dir = os.path.abspath(os.path.realpath(named_queries_dir))
        self._cached_queries = {}

    def sql(self, connection, name, params):
        # Debug aid: render the named query's SQL via its sql() method.
        nq = self.get(name)
        return nq.sql(connection, params)

    def get(self, name):
        """ Returns a NamedQuery instance based on the name, either
        from memory cache, or by parsing from disk.

        The name is simply a relative path to the directory associated
        with the manager object. Before returning the instance, the
        NamedQuery object is cached in memory, so that subsequent
        accesses don't have to read from disk or do any parsing. This
        means that NamedQuery objects returned by this method are
        shared across all users of the manager object.

        NamedQuery.refresh is used to bring the NamedQuery objects in
        sync with the actual files on disk."""
        nq = self._cached_queries.get(name)
        if nq is None:
            nq = NamedQuery(name, os.path.join(self._dir, name))
            self._cached_queries[name] = nq
        else:
            try:
                nq.refresh()
            except OSError, e:
                if e.errno == errno.ENOENT: # file not found
                    # evict the stale cache entry before re-raising
                    del self._cached_queries[name]
                raise # pass exception along to caller so they know that the query disappeared
        return nq
class _RewriteQueryForArray(object):
    "Helper class for rewriting queries with the @:name syntax"
    def __init__(self, params):
        self.params = params
        # new placeholder-name -> value pairs created during operate()
        self.new_params = dict()

    def operate(self, match):
        "Given a match, return the string that should be in use"
        key = match.group(1)
        value = self.params[key]
        if type(value) in (list,tuple):
            rv = []
            for idx in range(len(value)):
                # if the value@idx is array-like, we are
                # probably dealing with a VALUES
                new_key = "_%s_%s"%(key, str(idx))
                val_item = value[idx]
                if type(val_item) in (list, tuple, dict):
                    if type(val_item) is dict:
                        # this is because in Python, the order of
                        # key, value retrieval from the dict is not
                        # guaranteed to match what the input intended
                        # and for VALUES, order is important.
                        # TODO: Implemented ordered dict in LLSD parser?
                        raise ExpectationFailed('Only lists/tuples allowed,\
received dict')
                    values_keys = []
                    for value_idx, item in enumerate(val_item):
                        # we want a key of the format :
                        # key_#replacement_#value_row_#value_col
                        # ugh... so if we are replacing 10 rows in user_note,
                        # the first values clause would read (for @:user_notes) :-
                        # ( :_user_notes_0_1_1, :_user_notes_0_1_2, :_user_notes_0_1_3 )
                        # the input LLSD for VALUES will look like:
                        # <llsd>...
                        # <map>
                        #   <key>user_notes</key>
                        #   <array>
                        #     <array> <!-- row 1 for VALUES -->
                        #       <string>...</string>
                        #       <string>...</string>
                        #       <string>...</string>
                        #     </array>
                        #     ...
                        #   </array>
                        # </map>
                        # ... </llsd>
                        values_key = "%s_%s"%(new_key, value_idx)
                        self.new_params[values_key] = item
                        values_keys.append("%%(%s)s"%values_key)
                    # now collapse all these new place holders enclosed in ()
                    # from [':_key_0_1_1', ':_key_0_1_2', ':_key_0_1_3,...]
                    # rv will have [ '(:_key_0_1_1, :_key_0_1_2, :_key_0_1_3)', ]
                    # which is flattened a few lines below join(rv)
                    rv.append('(%s)' % ','.join(values_keys))
                else:
                    # scalar element: becomes its own pyformat placeholder
                    self.new_params[new_key] = val_item
                    rv.append("%%(%s)s"%new_key)
            return ','.join(rv)
        else:
            # not something that can be expanded, so just drop the
            # leading @ in the front of the match. This will mean that
            # the single value we have, be it a string, int, whatever
            # (other than dict) will correctly show up, eg:
            #
            # where foo in (@:foobar) -- foobar is a string, so we get
            # where foo in (:foobar)
            return match.group(0)[1:]

View File

@ -1,84 +0,0 @@
'''
@file shutil2.py
@brief a better shutil.copytree replacement
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
'''
#
# shutil2.py
# Taken from http://www.scons.org/wiki/AccumulateBuilder
# the stock copytree sucks because it insists that the
# target dir not exist
#
import os.path
import shutil
def copytree(src, dest, symlinks=False):
    """My own copyTree which does not fail if the directory exists.

    Recursively copy a directory tree using copy2().

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied.

    Behavior is meant to be identical to GNU 'cp -R'.
    """
    def copyItems(src, dest, symlinks=False):
        """Function that does all the work.

        It is necessary to handle the two 'cp' cases:
        - destination does exist
        - destination does not exist

        See 'cp -R' documentation for more details
        """
        for item in os.listdir(src):
            srcPath = os.path.join(src, item)
            # Check for symlinks before isdir(): isdir() follows links, so
            # a linked directory would otherwise be deep-copied even when
            # symlinks=True.
            # BUG FIX: original tested os.path.islink(item) — a bare name
            # resolved against the CWD — and created the link at the
            # destination directory path itself instead of inside it.
            if os.path.islink(srcPath) and symlinks:
                linkto = os.readlink(srcPath)
                os.symlink(linkto, os.path.join(dest, item))
            elif os.path.isdir(srcPath):
                srcBasename = os.path.basename(srcPath)
                destDirPath = os.path.join(dest, srcBasename)
                if not os.path.exists(destDirPath):
                    os.makedirs(destDirPath)
                # BUG FIX: propagate the symlinks flag into the recursion
                # (it was silently dropped before).
                copyItems(srcPath, destDirPath, symlinks)
            else:
                shutil.copy2(srcPath, dest)

    # case 'cp -R src/ dest/' where dest/ already exists
    if os.path.exists(dest):
        destPath = os.path.join(dest, os.path.basename(src))
        if not os.path.exists(destPath):
            os.makedirs(destPath)
    # case 'cp -R src/ dest/' where dest/ does not exist
    else:
        os.makedirs(dest)
        destPath = dest
    # actually copy the files (BUG FIX: forward symlinks here too)
    copyItems(src, destPath, symlinks)

View File

@ -1,338 +0,0 @@
#!/usr/bin/env python
"""\
@file simperf_host_xml_parser.py
@brief Digest collector's XML dump and convert to simple dict/list structure
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys, os, getopt, time
import simplejson
from xml import sax
def usage():
    """Print this tool's command-line help text to stdout."""
    print "Usage:"
    print sys.argv[0] + " [options]"
    print " Convert RRD's XML dump to JSON. Script to convert the simperf_host_collector-"
    print " generated RRD dump into JSON. Steps include converting selected named"
    print " fields from GAUGE type to COUNTER type by computing delta with preceding"
    print " values. Top-level named fields are:"
    print
    print " lastupdate Time (javascript timestamp) of last data sample"
    print " step Time in seconds between samples"
    print " ds Data specification (name/type) for each column"
    print " database Table of data samples, one time step per row"
    print
    print "Options:"
    print " -i, --in Input settings filename. (Default: stdin)"
    print " -o, --out Output settings filename. (Default: stdout)"
    print " -h, --help Print this message and exit."
    print
    print "Example: %s -i rrddump.xml -o rrddump.json" % sys.argv[0]
    print
    print "Interfaces:"
    print " class SimPerfHostXMLParser() # SAX content handler"
    print " def simperf_host_xml_fixup(parser) # post-parse value fixup"
class SimPerfHostXMLParser(sax.handler.ContentHandler):
    """SAX content handler that extracts the interesting pieces of an
    'rrdtool dump' XML document.

    After parsing, results are available in the public attributes:
      rrd_last_update  -- integer <lastupdate> value (seconds)
      rrd_step         -- integer <step> value (seconds between samples)
      rrd_ds           -- list of {"name": ..., "type": ...} dicts, one per <ds>
      rrd_records      -- list of rows (lists of value strings) from the
                          <rra><database>; rows containing "NaN" are dropped

    Internally this is a hand-rolled state machine driven by two pieces of
    context: _rrd_parse_state (which element chain we are inside) and
    _rrd_level (current element nesting depth). Both are needed because the
    element name 'ds' appears in two unrelated places in the document.
    """
    def __init__(self):
        # All real initialization happens in startDocument() so one handler
        # instance could be reused across parses.
        pass
    def startDocument(self):
        # Reset public results and private state-machine bookkeeping.
        self.rrd_last_update = 0 # public
        self.rrd_step = 0 # public
        self.rrd_ds = [] # public
        self.rrd_records = [] # public
        self._rrd_level = 0           # current XML nesting depth
        self._rrd_parse_state = 0     # which element chain we are inside
        self._rrd_chars = ""          # accumulated character data
        self._rrd_capture = False     # whether characters() should accumulate
        self._rrd_ds_val = {}         # <ds> name/type being assembled
        self._rrd_data_row = []       # <row> values being assembled
        self._rrd_data_row_has_nan = False  # row poisoned by a "NaN" cell
    def endDocument(self):
        pass
    # Nasty little ad-hoc state machine to extract the elements that are
    # necessary from the 'rrdtool dump' XML output. The same element
    # name '<ds>' is used for two different data sets so we need to pay
    # some attention to the actual structure to get the ones we want
    # and ignore the ones we don't.
    def startElement(self, name, attrs):
        """Advance the state machine on an opening tag; enable character
        capture only for the leaf elements whose text we need."""
        self._rrd_level = self._rrd_level + 1
        self._rrd_capture = False
        if self._rrd_level == 1:
            if name == "rrd" and self._rrd_parse_state == 0:
                self._rrd_parse_state = 1 # In <rrd>
                self._rrd_capture = True
                self._rrd_chars = ""
        elif self._rrd_level == 2:
            if self._rrd_parse_state == 1:
                if name == "lastupdate":
                    self._rrd_parse_state = 2 # In <rrd><lastupdate>
                    self._rrd_capture = True
                    self._rrd_chars = ""
                elif name == "step":
                    self._rrd_parse_state = 3 # In <rrd><step>
                    self._rrd_capture = True
                    self._rrd_chars = ""
                elif name == "ds":
                    self._rrd_parse_state = 4 # In <rrd><ds>
                    self._rrd_ds_val = {}
                    self._rrd_chars = ""
                elif name == "rra":
                    self._rrd_parse_state = 5 # In <rrd><rra>
        elif self._rrd_level == 3:
            if self._rrd_parse_state == 4:
                if name == "name":
                    self._rrd_parse_state = 6 # In <rrd><ds><name>
                    self._rrd_capture = True
                    self._rrd_chars = ""
                elif name == "type":
                    self._rrd_parse_state = 7 # In <rrd><ds><type>
                    self._rrd_capture = True
                    self._rrd_chars = ""
            elif self._rrd_parse_state == 5:
                if name == "database":
                    self._rrd_parse_state = 8 # In <rrd><rra><database>
        elif self._rrd_level == 4:
            if self._rrd_parse_state == 8:
                if name == "row":
                    self._rrd_parse_state = 9 # In <rrd><rra><database><row>
                    self._rrd_data_row = []
                    self._rrd_data_row_has_nan = False
        elif self._rrd_level == 5:
            if self._rrd_parse_state == 9:
                if name == "v":
                    self._rrd_parse_state = 10 # In <rrd><rra><database><row><v>
                    self._rrd_capture = True
                    self._rrd_chars = ""
    def endElement(self, name):
        """Commit captured text on a closing tag and pop the state machine
        back to the enclosing element's state."""
        self._rrd_capture = False
        if self._rrd_parse_state == 10:
            self._rrd_capture = self._rrd_level == 6
            if self._rrd_level == 5:
                # End of a <v> cell: a "NaN" cell poisons the whole row.
                if self._rrd_chars == "NaN":
                    self._rrd_data_row_has_nan = True
                else:
                    self._rrd_data_row.append(self._rrd_chars)
                self._rrd_parse_state = 9 # In <rrd><rra><database><row>
        elif self._rrd_parse_state == 9:
            if self._rrd_level == 4:
                # End of a <row>: keep it only if it was NaN-free.
                if not self._rrd_data_row_has_nan:
                    self.rrd_records.append(self._rrd_data_row)
                self._rrd_parse_state = 8 # In <rrd><rra><database>
        elif self._rrd_parse_state == 8:
            if self._rrd_level == 3:
                self._rrd_parse_state = 5 # In <rrd><rra>
        elif self._rrd_parse_state == 7:
            if self._rrd_level == 3:
                self._rrd_ds_val["type"] = self._rrd_chars
                self._rrd_parse_state = 4 # In <rrd><ds>
        elif self._rrd_parse_state == 6:
            if self._rrd_level == 3:
                self._rrd_ds_val["name"] = self._rrd_chars
                self._rrd_parse_state = 4 # In <rrd><ds>
        elif self._rrd_parse_state == 5:
            if self._rrd_level == 2:
                self._rrd_parse_state = 1 # In <rrd>
        elif self._rrd_parse_state == 4:
            if self._rrd_level == 2:
                self.rrd_ds.append(self._rrd_ds_val)
                self._rrd_parse_state = 1 # In <rrd>
        elif self._rrd_parse_state == 3:
            if self._rrd_level == 2:
                # long() -- this is Python 2 code.
                self.rrd_step = long(self._rrd_chars)
                self._rrd_parse_state = 1 # In <rrd>
        elif self._rrd_parse_state == 2:
            if self._rrd_level == 2:
                self.rrd_last_update = long(self._rrd_chars)
                self._rrd_parse_state = 1 # In <rrd>
        elif self._rrd_parse_state == 1:
            if self._rrd_level == 1:
                self._rrd_parse_state = 0 # At top
        if self._rrd_level:
            self._rrd_level = self._rrd_level - 1
    def characters(self, content):
        """Accumulate (whitespace-stripped) character data while capturing;
        SAX may deliver one text node in several characters() calls."""
        if self._rrd_capture:
            self._rrd_chars = self._rrd_chars + content.strip()
def _make_numeric(value):
try:
value = float(value)
except:
value = ""
return value
def simperf_host_xml_fixup(parser, filter_start_time = None, filter_end_time = None):
    """Post-process a parsed RRD dump in place on `parser`.

    Steps (all mutate parser.rrd_records / parser.rrd_ds):
      1. Convert every cell to float via _make_numeric().
      2. For the counter-like columns in fixup_tags, replace each row's value
         with the delta against the previous *distinct* row; rows identical to
         their successor are treated as filler and blanked to NaN.
      3. Prepend a javascript-style millisecond timestamp column to every row
         and a matching entry to parser.rrd_ds.
      4. Optionally drop rows outside [filter_start_time, filter_end_time]
         (both in milliseconds) when either bound is given.

    @param parser: a SimPerfHostXMLParser that has finished parsing.
    @param filter_start_time: optional lower bound (ms) for kept rows.
    @param filter_end_time: optional upper bound (ms) for kept rows.
    @return: None; results are left on the parser object.
    """
    # Fixup for GAUGE fields that are really COUNTS. They
    # were forced to GAUGE to try to disable rrdtool's
    # data interpolation/extrapolation for non-uniform time
    # samples.
    fixup_tags = [ "cpu_user",
                   "cpu_nice",
                   "cpu_sys",
                   "cpu_idle",
                   "cpu_waitio",
                   "cpu_intr",
                   # "file_active",
                   # "file_free",
                   # "inode_active",
                   # "inode_free",
                   "netif_in_kb",
                   "netif_in_pkts",
                   "netif_in_errs",
                   "netif_in_drop",
                   "netif_out_kb",
                   "netif_out_pkts",
                   "netif_out_errs",
                   "netif_out_drop",
                   "vm_page_in",
                   "vm_page_out",
                   "vm_swap_in",
                   "vm_swap_out",
                   #"vm_mem_total",
                   #"vm_mem_used",
                   #"vm_mem_active",
                   #"vm_mem_inactive",
                   #"vm_mem_free",
                   #"vm_mem_buffer",
                   #"vm_swap_cache",
                   #"vm_swap_total",
                   #"vm_swap_used",
                   #"vm_swap_free",
                   "cpu_interrupts",
                   "cpu_switches",
                   "cpu_forks" ]
    col_count = len(parser.rrd_ds)
    row_count = len(parser.rrd_records)
    # Process the last row separately, just to make all values numeric.
    for j in range(col_count):
        parser.rrd_records[row_count - 1][j] = _make_numeric(parser.rrd_records[row_count - 1][j])
    # Process all other row/columns.
    # Walk backwards so each row can be compared with / subtracted from the
    # most recent distinct row that follows it.
    last_different_row = row_count - 1
    current_row = row_count - 2
    while current_row >= 0:
        # Check for a different value than the previous row. If everything is the same
        # then this is probably just a filler/bogus entry.
        is_different = False
        for j in range(col_count):
            parser.rrd_records[current_row][j] = _make_numeric(parser.rrd_records[current_row][j])
            if parser.rrd_records[current_row][j] != parser.rrd_records[last_different_row][j]:
                # We're good. This is a different row.
                is_different = True
        if not is_different:
            # This is a filler/bogus entry. Just ignore it.
            for j in range(col_count):
                parser.rrd_records[current_row][j] = float('nan')
        else:
            # Some tags need to be converted into deltas.
            for j in range(col_count):
                if parser.rrd_ds[j]["name"] in fixup_tags:
                    parser.rrd_records[last_different_row][j] = \
                        parser.rrd_records[last_different_row][j] - parser.rrd_records[current_row][j]
            last_different_row = current_row
        current_row -= 1
    # Set fixup_tags in the first row to 'nan' since they aren't useful anymore.
    for j in range(col_count):
        if parser.rrd_ds[j]["name"] in fixup_tags:
            parser.rrd_records[0][j] = float('nan')
    # Add a timestamp to each row and to the catalog. Format and name
    # chosen to match other simulator logging (hopefully).
    start_time = parser.rrd_last_update - (parser.rrd_step * (row_count - 1))
    # Build a filtered list of rrd_records if we are limited to a time range.
    filter_records = False
    if filter_start_time is not None or filter_end_time is not None:
        filter_records = True
        filtered_rrd_records = []
        if filter_start_time is None:
            filter_start_time = start_time * 1000
        if filter_end_time is None:
            filter_end_time = parser.rrd_last_update * 1000
    for i in range(row_count):
        # Timestamps are milliseconds (javascript convention).
        record_timestamp = (start_time + (i * parser.rrd_step)) * 1000
        parser.rrd_records[i].insert(0, record_timestamp)
        if filter_records:
            if filter_start_time <= record_timestamp and record_timestamp <= filter_end_time:
                filtered_rrd_records.append(parser.rrd_records[i])
    if filter_records:
        parser.rrd_records = filtered_rrd_records
    parser.rrd_ds.insert(0, {"type": "GAUGE", "name": "javascript_timestamp"})
def main(argv=None):
    """Entry point: read an 'rrdtool dump' XML stream (stdin or -i file),
    fix it up, and write the resulting JSON document (stdout or -o file).

    @param argv: unused; options are read from sys.argv[1:] directly.
    @return: 0 on success (getopt/IO errors propagate as exceptions).
    """
    opts, args = getopt.getopt(sys.argv[1:], "i:o:h", ["in=", "out=", "help"])
    input_file = sys.stdin
    output_file = sys.stdout
    for o, a in opts:
        if o in ("-i", "--in"):
            input_file = open(a, 'r')
        if o in ("-o", "--out"):
            output_file = open(a, 'w')
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)
    # Using the SAX parser as it is at least 4X faster and far, far
    # smaller on this dataset than the DOM-based interface in xml.dom.minidom.
    # With SAX and a 5.4MB xml file, this requires about seven seconds of
    # wall-clock time and 32MB VSZ. With the DOM interface, about 22 seconds
    # and over 270MB VSZ.
    handler = SimPerfHostXMLParser()
    sax.parse(input_file, handler)
    if input_file != sys.stdin:
        input_file.close()
    # Various format fixups: string-to-num, gauge-to-counts, add
    # a time stamp, etc.
    simperf_host_xml_fixup(handler)
    # Create JSONable dict with interesting data and format/print it
    # (Python 2 'print >>' redirection; times exported in milliseconds).
    print >>output_file, simplejson.dumps({ "step" : handler.rrd_step,
        "lastupdate": handler.rrd_last_update * 1000,
        "ds" : handler.rrd_ds,
        "database" : handler.rrd_records })
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@ -1,167 +0,0 @@
#!/usr/bin/env python
"""\
@file simperf_oprof_interface.py
@brief Manage OProfile data collection on a host
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
import sys, os, getopt
import simplejson
def usage():
    """Print command-line help for the OProfile report digester to stdout.

    Python 2 print statements; documents -i/-o/-h and the dictionary keys
    produced per report entry.
    """
    print "Usage:"
    print sys.argv[0] + " [options]"
    print " Digest the OProfile report forms that come out of the"
    print " simperf_oprof_ctl program's -r/--report command. The result"
    print " is an array of dictionaires with the following keys:"
    print
    print " symbol Name of sampled, calling, or called procedure"
    print " file Executable or library where symbol resides"
    print " percentage Percentage contribution to profile, calls or called"
    print " samples Sample count"
    print " calls Methods called by the method in question (full only)"
    print " called_by Methods calling the method (full only)"
    print
    print " For 'full' reports the two keys 'calls' and 'called_by' are"
    print " themselves arrays of dictionaries based on the first four keys."
    print
    print "Return Codes:"
    print " None. Aggressively digests everything. Will likely mung results"
    print " if a program or library has whitespace in its name."
    print
    print "Options:"
    print " -i, --in Input settings filename. (Default: stdin)"
    print " -o, --out Output settings filename. (Default: stdout)"
    print " -h, --help Print this message and exit."
    print
    print "Interfaces:"
    print " class SimPerfOProfileInterface()"
class SimPerfOProfileInterface:
    """Parser for 'opreport'-style text output.

    Feed a line iterator (typically an open file) to parse(); afterwards:
      isValid -- True once a samples section was found and digested
      isBrief -- True for the brief (flat) report form, False for the full
                 (callgraph) form, which is detected by a '------' separator
      result  -- list of {"samples", "percentage", "file", "symbol"} dicts;
                 full-report entries additionally carry "calls" and
                 "called_by" lists of the same dict shape.

    NOTE: parse() and its helpers consume the *same* iterator, so the input
    must be a single-pass iterator (a file object), not a list.
    """
    def __init__(self):
        self.isBrief = True # public
        self.isValid = False # public
        self.result = [] # public
    def parse(self, input):
        """Scan for the 'samples' header line, then dispatch the rest of the
        stream to the brief or full parser based on the next line."""
        in_samples = False
        for line in input:
            if in_samples:
                if line[0:6] == "------":
                    self.isBrief = False
                    self._parseFull(input)
                else:
                    # The current line is the first data row; pass it along
                    # since the iterator has already consumed it.
                    self._parseBrief(input, line)
                self.isValid = True
                return
            try:
                hd1, remain = line.split(None, 1)
                if hd1 == "samples":
                    in_samples = True
            except ValueError:
                # Line with fewer than two tokens -- not the header; skip.
                pass
    def _parseBrief(self, input, line1):
        """Digest the flat report: one 4-column row per line.
        Rows that don't split into four fields are silently skipped."""
        try:
            fld1, fld2, fld3, fld4 = line1.split(None, 3)
            self.result.append({"samples" : fld1,
                                "percentage" : fld2,
                                "file" : fld3,
                                "symbol" : fld4.strip("\n")})
        except ValueError:
            pass
        for line in input:
            try:
                fld1, fld2, fld3, fld4 = line.split(None, 3)
                self.result.append({"samples" : fld1,
                                    "percentage" : fld2,
                                    "file" : fld3,
                                    "symbol" : fld4.strip("\n")})
            except ValueError:
                pass
    def _parseFull(self, input):
        """Digest the callgraph report: '------' separates entries; within an
        entry, unindented lines are the sampled symbol itself, indented lines
        before it are callers ('called_by'), indented lines after are callees
        ('calls')."""
        state = 0 # In 'called_by' section
        calls = []
        called_by = []
        current = {}
        for line in input:
            if line[0:6] == "------":
                if len(current):
                    current["calls"] = calls
                    current["called_by"] = called_by
                    self.result.append(current)
                state = 0
                calls = []
                called_by = []
                current = {}
            else:
                try:
                    fld1, fld2, fld3, fld4 = line.split(None, 3)
                    tmp = {"samples" : fld1,
                           "percentage" : fld2,
                           "file" : fld3,
                           "symbol" : fld4.strip("\n")}
                except ValueError:
                    continue
                if line[0] != " ":
                    current = tmp
                    state = 1 # In 'calls' section
                elif state == 0:
                    called_by.append(tmp)
                else:
                    calls.append(tmp)
        # Flush the trailing entry (no closing '------' at EOF).
        if len(current):
            current["calls"] = calls
            current["called_by"] = called_by
            self.result.append(current)
def main(argv=None):
    """Entry point: parse an OProfile report (stdin or -i file) and write
    the digested entries as a JSON array (stdout or -o file).

    @param argv: unused; options are read from sys.argv[1:] directly.
    @return: 0 on success (getopt/IO errors propagate as exceptions).
    """
    opts, args = getopt.getopt(sys.argv[1:], "i:o:h", ["in=", "out=", "help"])
    input_file = sys.stdin
    output_file = sys.stdout
    for o, a in opts:
        if o in ("-i", "--in"):
            input_file = open(a, 'r')
        if o in ("-o", "--out"):
            output_file = open(a, 'w')
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)
    oprof = SimPerfOProfileInterface()
    oprof.parse(input_file)
    if input_file != sys.stdin:
        input_file.close()
    # Create JSONable dict with interesting data and format/print it
    # (Python 2 'print >>' redirection).
    print >>output_file, simplejson.dumps(oprof.result)
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@ -1,191 +0,0 @@
#!/usr/bin/env python
"""\
@file simperf_proc_interface.py
@brief Utility to extract log messages from *.<pid>.llsd files containing performance statistics.
$LicenseInfo:firstyear=2008&license=mit$
Copyright (c) 2008-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
"""
# ----------------------------------------------------
# Utility to extract log messages from *.<pid>.llsd
# files that contain performance statistics.
# ----------------------------------------------------
import sys, os
# When run from a source checkout, setup-path.py adjusts sys.path so the
# 'indra' package below can be imported (Python 2 execfile).
if os.path.exists("setup-path.py"):
    execfile("setup-path.py")
from indra.base import llsd
# Shared-memory directory where the simperf collector writes its files.
# The trailing slash matters: callers build paths by string concatenation.
DEFAULT_PATH="/dev/shm/simperf/"
# ----------------------------------------------------
# Pull out the stats and return a single document
def parse_logfile(filename, target_column=None, verbose=False):
    """Parse every line of an *.llsd stats log into a list of documents.

    @param filename: path to the log file; one LLSD document per line.
    @param target_column: if given, each returned doc is trimmed to that key
           plus the 'fps', '/total_time' and 'utc_time' context columns.
    @param verbose: print the filename being read (Python 2 print).
    @return: list of parsed (possibly trimmed) documents.
    IO and parse exceptions percolate up to the caller.
    """
    full_doc = []
    # Open source temp log file. Let exceptions percolate up.
    sourcefile = open( filename,'r')
    if verbose:
        print "Reading " + filename
    # Parse and output all lines from the temp file
    # (xreadlines() -- Python 2 file iteration).
    for line in sourcefile.xreadlines():
        partial_doc = llsd.parse(line)
        if partial_doc is not None:
            if target_column is None:
                full_doc.append(partial_doc)
            else:
                trim_doc = { target_column: partial_doc[target_column] }
                if target_column != "fps":
                    trim_doc[ 'fps' ] = partial_doc[ 'fps' ]
                trim_doc[ '/total_time' ] = partial_doc[ '/total_time' ]
                trim_doc[ 'utc_time' ] = partial_doc[ 'utc_time' ]
                full_doc.append(trim_doc)
    sourcefile.close()
    return full_doc
# Extract just the meta info line, and the timestamp of the first/last frame entry.
def parse_logfile_info(filename, verbose=False):
    """Read only the meta-info line of a stats log plus the time range.

    The first line of the file is an LLSD meta-info document; the remaining
    lines are frame documents carrying a 'timestamp' key. Returns the parsed
    meta-info dict augmented with 'start_time' (first frame's timestamp, or
    None) and 'end_time' (last parseable frame's timestamp), or None for an
    empty file.

    NOTE(review): if neither of the last two lines parses, end_time stays
    None and int(end_time) below raises TypeError -- confirm callers expect
    the exception rather than a None end_time.
    """
    # Open source temp log file. Let exceptions percolate up.
    sourcefile = open(filename, 'rU') # U is to open with Universal newline support
    if verbose:
        print "Reading " + filename
    # The first line is the meta info line.
    info_line = sourcefile.readline()
    if not info_line:
        sourcefile.close()
        return None
    # The rest of the lines are frames. Read the first and last to get the time range.
    info = llsd.parse( info_line )
    info['start_time'] = None
    info['end_time'] = None
    first_frame = sourcefile.readline()
    if first_frame:
        try:
            info['start_time'] = int(llsd.parse(first_frame)['timestamp'])
        except:
            pass
    # Read the file backwards to find the last two lines.
    # Seek from EOF in growing 1KB windows until the window holds at least
    # two complete lines (or the whole file has been read).
    sourcefile.seek(0, 2)
    file_size = sourcefile.tell()
    offset = 1024
    num_attempts = 0
    end_time = None
    if file_size < offset:
        offset = file_size
    while 1:
        sourcefile.seek(-1*offset, 2)
        read_str = sourcefile.read(offset)
        # Remove newline at the end
        if read_str[offset - 1] == '\n':
            read_str = read_str[0:-1]
        lines = read_str.split('\n')
        full_line = None
        if len(lines) > 2: # Got two line
            try:
                end_time = llsd.parse(lines[-1])['timestamp']
            except:
                # We couldn't parse this line. Try once more.
                try:
                    end_time = llsd.parse(lines[-2])['timestamp']
                except:
                    # Nope. Just move on.
                    pass
            break
        if len(read_str) == file_size: # Reached the beginning
            break
        offset += 1024
    info['end_time'] = int(end_time)
    sourcefile.close()
    return info
def parse_proc_filename(filename):
    """Split a '<type>_proc.<pid>.llsd' filename into its parts.

    @param filename: basename such as 'cpu_proc.1234.llsd'.
    @return: (pid, stat_type) as strings, e.g. ('1234', 'cpu'), or
             (None, None) when the name lacks the expected dotted structure.
    """
    try:
        name_as_list = filename.split(".")
        cur_stat_type = name_as_list[0].split("_")[0]
        cur_pid = name_as_list[1]
    # BUG FIX: the old Python 2 form 'except IndexError, ValueError:' caught
    # only IndexError and rebound the exception instance to the name
    # ValueError; the tuple form handles both exception types as intended.
    except (IndexError, ValueError):
        return (None, None)
    return (cur_pid, cur_stat_type)
# ----------------------------------------------------
def get_simstats_list(path=None):
    """Collect meta-info records for every <type>_proc.<pid>.llsd file.

    Scans `path` (DEFAULT_PATH when None), ignores non-llsd files and the
    collector's own config file, and returns the list of successful
    parse_logfile_info() results.
    """
    scan_dir = DEFAULT_PATH if path is None else path
    collected = []
    for entry in os.listdir(scan_dir):
        # Skip anything that is not a stats log.
        if not entry.endswith(".llsd") or entry == "simperf_proc_config.llsd":
            continue
        info = parse_logfile_info(scan_dir + entry)
        if info is not None:
            collected.append(info)
    return collected
def get_log_info_list(pid=None, stat_type=None, path=None, target_column=None, verbose=False):
    """Map each matching pid to the parsed contents of its llsd log.

    @param pid: keep only files for this process id (None = all).
    @param stat_type: keep only files of this stat type (None = all).
    @param path: directory to scan (DEFAULT_PATH when None).
    @param target_column: forwarded to parse_logfile() to trim documents.
    @param verbose: forwarded to parse_logfile().
    @return: dict of pid -> list of parsed documents.
    """
    if path is None:
        path = DEFAULT_PATH
    matches = {}
    for entry in os.listdir(path):
        if not entry.endswith(".llsd") or entry == "simperf_proc_config.llsd":
            continue
        (found_pid, found_type) = parse_proc_filename(entry)
        if found_pid is None:
            continue
        pid_ok = pid is None or pid == found_pid
        type_ok = stat_type is None or stat_type == found_type
        if pid_ok and type_ok:
            matches[found_pid] = parse_logfile(path + entry, target_column, verbose)
    return matches
def delete_simstats_files(pid=None, stat_type=None, path=None):
    """Delete <type>_proc.<pid>.llsd stat files, optionally filtered.

    @param pid: only delete files for this process id (None = all).
    @param stat_type: only delete files of this stat type (None = all).
    @param path: directory to clean (DEFAULT_PATH when None).
    @return: list of pids whose files were deleted.
    Delete-related OS exceptions percolate up to the caller.
    """
    if path is None:
        path = DEFAULT_PATH
    del_list = []
    for file_name in os.listdir(path):
        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
            (cur_pid, cur_stat_type) = parse_proc_filename(file_name)
            if cur_pid is None:
                continue
            if pid is not None and pid != cur_pid:
                continue
            if stat_type is not None and stat_type != cur_stat_type:
                continue
            del_list.append(cur_pid)
            # BUG FIX: delete from the directory that was actually listed.
            # The old code joined against DEFAULT_PATH even when a custom
            # 'path' was supplied, deleting from (or failing on) the wrong
            # directory.
            os.unlink(os.path.join(path, file_name))
    return del_list

View File

@ -1,222 +0,0 @@
'''
@file term.py
@brief a better shutil.copytree replacement
$LicenseInfo:firstyear=2007&license=mit$
Copyright (c) 2007-2009, Linden Research, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
$/LicenseInfo$
'''
#http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/475116
import sys, re
class TerminalController:
    """
    A class that can be used to portably generate formatted output to
    a terminal.
    `TerminalController` defines a set of instance variables whose
    values are initialized to the control sequence necessary to
    perform a given action. These can be simply included in normal
    output to the terminal:
    >>> term = TerminalController()
    >>> print 'This is '+term.GREEN+'green'+term.NORMAL
    Alternatively, the `render()` method can used, which replaces
    '${action}' with the string required to perform 'action':
    >>> term = TerminalController()
    >>> print term.render('This is ${GREEN}green${NORMAL}')
    If the terminal doesn't support a given action, then the value of
    the corresponding instance variable will be set to ''. As a
    result, the above code will still work on terminals that do not
    support color, except that their output will not be colored.
    Also, this means that you can test whether the terminal supports a
    given action by simply testing the truth value of the
    corresponding instance variable:
    >>> term = TerminalController()
    >>> if term.CLEAR_SCREEN:
    ... print 'This terminal supports clearing the screen.'
    Finally, if the width and height of the terminal are known, then
    they will be stored in the `COLS` and `LINES` attributes.
    """
    # Class-level defaults are all '' / None so a capability-less (dumb or
    # non-tty) terminal degrades to plain text; __init__ overwrites the
    # instance attributes with real escape sequences when it can.
    # Cursor movement:
    BOL = '' #: Move the cursor to the beginning of the line
    UP = '' #: Move the cursor up one line
    DOWN = '' #: Move the cursor down one line
    LEFT = '' #: Move the cursor left one char
    RIGHT = '' #: Move the cursor right one char
    # Deletion:
    CLEAR_SCREEN = '' #: Clear the screen and move to home position
    CLEAR_EOL = '' #: Clear to the end of the line.
    CLEAR_BOL = '' #: Clear to the beginning of the line.
    CLEAR_EOS = '' #: Clear to the end of the screen
    # Output modes:
    BOLD = '' #: Turn on bold mode
    BLINK = '' #: Turn on blink mode
    DIM = '' #: Turn on half-bright mode
    REVERSE = '' #: Turn on reverse-video mode
    NORMAL = '' #: Turn off all modes
    # Cursor display:
    HIDE_CURSOR = '' #: Make the cursor invisible
    SHOW_CURSOR = '' #: Make the cursor visible
    # Terminal size:
    COLS = None #: Width of the terminal (None for unknown)
    LINES = None #: Height of the terminal (None for unknown)
    # Foreground colors:
    BLACK = BLUE = GREEN = CYAN = RED = MAGENTA = YELLOW = WHITE = ''
    # Background colors:
    BG_BLACK = BG_BLUE = BG_GREEN = BG_CYAN = ''
    BG_RED = BG_MAGENTA = BG_YELLOW = BG_WHITE = ''
    # ATTRIBUTE=terminfo-capability pairs consumed in __init__.
    # NOTE(review): 'cinvis' is likely a typo for the terminfo cap 'civis'
    # (cursor invisible), so HIDE_CURSOR probably always stays '' -- verify.
    # NOTE(review): UNDERLINE=smul is listed here but has no class-level
    # default above, so it only exists on instances whose terminal has it.
    _STRING_CAPABILITIES = """
    BOL=cr UP=cuu1 DOWN=cud1 LEFT=cub1 RIGHT=cuf1
    CLEAR_SCREEN=clear CLEAR_EOL=el CLEAR_BOL=el1 CLEAR_EOS=ed BOLD=bold
    BLINK=blink DIM=dim REVERSE=rev UNDERLINE=smul NORMAL=sgr0
    HIDE_CURSOR=cinvis SHOW_CURSOR=cnorm""".split()
    _COLORS = """BLACK BLUE GREEN CYAN RED MAGENTA YELLOW WHITE""".split()
    _ANSICOLORS = "BLACK RED GREEN YELLOW BLUE MAGENTA CYAN WHITE".split()
    def __init__(self, term_stream=sys.stdout):
        """
        Create a `TerminalController` and initialize its attributes
        with appropriate values for the current terminal.
        `term_stream` is the stream that will be used for terminal
        output; if this stream is not a tty, then the terminal is
        assumed to be a dumb terminal (i.e., have no capabilities).
        """
        # Curses isn't available on all platforms
        try: import curses
        except: return
        # If the stream isn't a tty, then assume it has no capabilities.
        if not term_stream.isatty(): return
        # Check the terminal type. If we fail, then assume that the
        # terminal has no capabilities.
        try: curses.setupterm()
        except: return
        # Look up numeric capabilities.
        self.COLS = curses.tigetnum('cols')
        self.LINES = curses.tigetnum('lines')
        # Look up string capabilities.
        for capability in self._STRING_CAPABILITIES:
            (attrib, cap_name) = capability.split('=')
            setattr(self, attrib, self._tigetstr(cap_name) or '')
        # Colors: try the older setf/setb caps first, then the ANSI
        # setaf/setab variants (which use a different color ordering).
        set_fg = self._tigetstr('setf')
        if set_fg:
            for i,color in zip(range(len(self._COLORS)), self._COLORS):
                setattr(self, color, curses.tparm(set_fg, i) or '')
        set_fg_ansi = self._tigetstr('setaf')
        if set_fg_ansi:
            for i,color in zip(range(len(self._ANSICOLORS)), self._ANSICOLORS):
                setattr(self, color, curses.tparm(set_fg_ansi, i) or '')
        set_bg = self._tigetstr('setb')
        if set_bg:
            for i,color in zip(range(len(self._COLORS)), self._COLORS):
                setattr(self, 'BG_'+color, curses.tparm(set_bg, i) or '')
        set_bg_ansi = self._tigetstr('setab')
        if set_bg_ansi:
            for i,color in zip(range(len(self._ANSICOLORS)), self._ANSICOLORS):
                setattr(self, 'BG_'+color, curses.tparm(set_bg_ansi, i) or '')
    def _tigetstr(self, cap_name):
        # String capabilities can include "delays" of the form "$<2>".
        # For any modern terminal, we should be able to just ignore
        # these, so strip them out.
        import curses
        cap = curses.tigetstr(cap_name) or ''
        return re.sub(r'\$<\d+>[/*]?', '', cap)
    def render(self, template):
        """
        Replace each $-substitutions in the given template string with
        the corresponding terminal control string (if it's defined) or
        '' (if it's not).
        """
        return re.sub(r'\$\$|\${\w+}', self._render_sub, template)
    def _render_sub(self, match):
        # '$$' is a literal dollar; '${NAME}' maps to the NAME attribute.
        s = match.group()
        if s == '$$': return s
        else: return getattr(self, s[2:-1])
#######################################################################
# Example use case: progress bar
#######################################################################
class ProgressBar:
    """
    A 3-line progress bar, which looks like::
                                Header
        20% [===========----------------------------------]
                           progress message
    The progress bar is colored, if the terminal supports color
    output; and adjusts to the width of the terminal.
    """
    # %-templates rendered once through term.render() in __init__; BAR is
    # later filled with (percent, '='*n, '-'*rest) on every update().
    BAR = '%3d%% ${GREEN}[${BOLD}%s%s${NORMAL}${GREEN}]${NORMAL}\n'
    HEADER = '${BOLD}${CYAN}%s${NORMAL}\n\n'
    def __init__(self, term, header):
        """Bind to a TerminalController-like `term` and draw an empty bar.

        @param term: object providing CLEAR_EOL/UP/BOL/COLS and render().
        @param header: title text centered above the bar.
        @raise ValueError: if the terminal lacks the required capabilities.
        """
        self.term = term
        if not (self.term.CLEAR_EOL and self.term.UP and self.term.BOL):
            # BUG FIX: corrected typo "dispaly" -> "display" in the
            # user-facing error message.
            raise ValueError("Terminal isn't capable enough -- you "
                             "should use a simpler progress display.")
        self.width = self.term.COLS or 75
        self.bar = term.render(self.BAR)
        self.header = self.term.render(self.HEADER % header.center(self.width))
        self.cleared = 1 #: true if we haven't drawn the bar yet.
        self.update(0, '')
    def update(self, percent, message):
        """Redraw the bar at `percent` (0.0-1.0) with `message` centered
        beneath it; draws the header first if the bar is not on screen."""
        if self.cleared:
            sys.stdout.write(self.header)
            self.cleared = 0
        n = int((self.width-10)*percent)
        sys.stdout.write(
            self.term.BOL + self.term.UP + self.term.CLEAR_EOL +
            (self.bar % (100*percent, '='*n, '-'*(self.width-10-n))) +
            self.term.CLEAR_EOL + message.center(self.width))
    def clear(self):
        """Erase the bar's three lines if they are currently drawn."""
        if not self.cleared:
            sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL +
                             self.term.UP + self.term.CLEAR_EOL +
                             self.term.UP + self.term.CLEAR_EOL)
            self.cleared = 1

View File

@ -1,508 +0,0 @@
#!/usr/bin/python
## $LicenseInfo:firstyear=2011&license=viewerlgpl$
## Second Life Viewer Source Code
## Copyright (C) 2011, Linden Research, Inc.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation;
## version 2.1 of the License only.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
## $/LicenseInfo$
r"""UUID objects (universally unique identifiers) according to RFC 4122.
This module provides immutable UUID objects (class UUID) and the functions
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
UUIDs as specified in RFC 4122.
If all you want is a unique ID, you should probably call uuid1() or uuid4().
Note that uuid1() may compromise privacy since it creates a UUID containing
the computer's network address. uuid4() creates a random UUID.
Typical usage:
>>> import uuid
# make a UUID based on the host ID and current time
>>> uuid.uuid1()
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
# make a UUID using an MD5 hash of a namespace UUID and a name
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
# make a random UUID
>>> uuid.uuid4()
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
# make a UUID using a SHA-1 hash of a namespace UUID and a name
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
# make a UUID from a string of hex digits (braces and hyphens ignored)
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
# convert a UUID to a string of hex digits in standard form
>>> str(x)
'00010203-0405-0607-0809-0a0b0c0d0e0f'
# get the raw 16 bytes of the UUID
>>> x.bytes
'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
# make a UUID from a 16-byte string
>>> uuid.UUID(bytes=x.bytes)
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
This module works with Python 2.3 or higher."""
# Module metadata: the CVS keyword strings are split at spaces to pull out
# just the date / revision token ('$Date: ... $' -> '2006-06-12').
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
__date__ = '$Date: 2006/06/12 23:15:40 $'.split()[1].replace('/', '-')
__version__ = '$Revision: 1.30 $'.split()[1]
# Human-readable variant descriptions returned by UUID.variant (RFC 4122
# section 4.1.1 variant field).
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
    'reserved for NCS compatibility', 'specified in RFC 4122',
    'reserved for Microsoft compatibility', 'reserved for future definition']
class UUID(object):
"""Instances of the UUID class represent UUIDs as specified in RFC 4122.
UUID objects are immutable, hashable, and usable as dictionary keys.
Converting a UUID to a string with str() yields something in the form
'12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts
four possible forms: a similar string of hexadecimal digits, or a
string of 16 raw bytes as an argument named 'bytes', or a tuple of
six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
48-bit values respectively) as an argument named 'fields', or a single
128-bit integer as an argument named 'int'.
UUIDs have these read-only attributes:
bytes the UUID as a 16-byte string
fields a tuple of the six integer fields of the UUID,
which are also available as six individual attributes
and two derived attributes:
time_low the first 32 bits of the UUID
time_mid the next 16 bits of the UUID
time_hi_version the next 16 bits of the UUID
clock_seq_hi_variant the next 8 bits of the UUID
clock_seq_low the next 8 bits of the UUID
node the last 48 bits of the UUID
time the 60-bit timestamp
clock_seq the 14-bit sequence number
hex the UUID as a 32-character hexadecimal string
int the UUID as a 128-bit integer
urn the UUID as a URN as specified in RFC 4122
variant the UUID variant (one of the constants RESERVED_NCS,
RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
version the UUID version number (1 through 5, meaningful only
when the variant is RFC_4122)
"""
# Normalize whichever single input form was supplied (hex string,
# 16-byte string, 6-tuple of fields, or 128-bit integer) into the one
# canonical 'int' attribute; optionally stamp RFC 4122 variant/version.
def __init__(self, hex=None, bytes=None, fields=None, int=None,
version=None):
r"""Create a UUID from either a string of 32 hexadecimal digits,
a string of 16 bytes as the 'bytes' argument, a tuple of six
integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
the 'fields' argument, or a single 128-bit integer as the 'int'
argument. When a string of hex digits is given, curly braces,
hyphens, and a URN prefix are all optional. For example, these
expressions all yield the same UUID:
UUID('{12345678-1234-5678-1234-567812345678}')
UUID('12345678123456781234567812345678')
UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
UUID(bytes='\x12\x34\x56\x78'*4)
UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
UUID(int=0x12345678123456781234567812345678)
Exactly one of 'hex', 'bytes', 'fields', or 'int' must be given.
The 'version' argument is optional; if given, the resulting UUID
will have its variant and version number set according to RFC 4122,
overriding bits in the given 'hex', 'bytes', 'fields', or 'int'.
"""
# Exactly three of the four inputs must be None, i.e. exactly one given.
if [hex, bytes, fields, int].count(None) != 3:
raise TypeError('need just one of hex, bytes, fields, or int')
if hex is not None:
# Strip the optional URN prefix, braces and hyphens before parsing.
hex = hex.replace('urn:', '').replace('uuid:', '')
hex = hex.strip('{}').replace('-', '')
if len(hex) != 32:
raise ValueError('badly formed hexadecimal UUID string')
int = long(hex, 16)
if bytes is not None:
if len(bytes) != 16:
raise ValueError('bytes is not a 16-char string')
# Big-endian: the first byte of the string is the most significant.
int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
if fields is not None:
if len(fields) != 6:
raise ValueError('fields is not a 6-tuple')
(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node) = fields
# Range-check each field against its RFC 4122 bit width.
if not 0 <= time_low < 1<<32L:
raise ValueError('field 1 out of range (need a 32-bit value)')
if not 0 <= time_mid < 1<<16L:
raise ValueError('field 2 out of range (need a 16-bit value)')
if not 0 <= time_hi_version < 1<<16L:
raise ValueError('field 3 out of range (need a 16-bit value)')
if not 0 <= clock_seq_hi_variant < 1<<8L:
raise ValueError('field 4 out of range (need an 8-bit value)')
if not 0 <= clock_seq_low < 1<<8L:
raise ValueError('field 5 out of range (need an 8-bit value)')
if not 0 <= node < 1<<48L:
raise ValueError('field 6 out of range (need a 48-bit value)')
# Pack the six fields into a single 128-bit integer.
clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low
int = ((time_low << 96L) | (time_mid << 80L) |
(time_hi_version << 64L) | (clock_seq << 48L) | node)
if int is not None:
if not 0 <= int < 1<<128L:
raise ValueError('int is out of range (need a 128-bit value)')
if version is not None:
if not 1 <= version <= 5:
raise ValueError('illegal version number')
# Set the variant to RFC 4122.
int &= ~(0xc000 << 48L)
int |= 0x8000 << 48L
# Set the version number.
int &= ~(0xf000 << 64L)
int |= version << 76L
# Store through __dict__ because __setattr__ raises (UUIDs are immutable).
self.__dict__['int'] = int
# Comparison, hashing and conversion all delegate to the canonical
# 128-bit 'int' attribute set by __init__.
def __cmp__(self, other):
if isinstance(other, UUID):
return cmp(self.int, other.int)
# Not comparable to non-UUIDs; let Python try the other operand.
return NotImplemented
def __hash__(self):
return hash(self.int)
def __int__(self):
return self.int
def __repr__(self):
return 'UUID(%r)' % str(self)
# UUIDs are immutable: any attribute assignment is rejected.
def __setattr__(self, name, value):
raise TypeError('UUID objects are immutable')
# Render in the canonical 8-4-4-4-12 hyphenated lowercase hex form.
def __str__(self):
hex = '%032x' % self.int
return '%s-%s-%s-%s-%s' % (
hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])
# Build the 16-byte big-endian representation by peeling off one byte
# per 8-bit shift, prepending so the most significant byte ends up first.
def get_bytes(self):
bytes = ''
for shift in range(0, 128, 8):
bytes = chr((self.int >> shift) & 0xff) + bytes
return bytes
bytes = property(get_bytes)
# The six RFC 4122 fields, recovered by shifting/masking 'int'.
def get_fields(self):
return (self.time_low, self.time_mid, self.time_hi_version,
self.clock_seq_hi_variant, self.clock_seq_low, self.node)
fields = property(get_fields)
def get_time_low(self):
return self.int >> 96L
time_low = property(get_time_low)
def get_time_mid(self):
return (self.int >> 80L) & 0xffff
time_mid = property(get_time_mid)
def get_time_hi_version(self):
return (self.int >> 64L) & 0xffff
time_hi_version = property(get_time_hi_version)
def get_clock_seq_hi_variant(self):
return (self.int >> 56L) & 0xff
clock_seq_hi_variant = property(get_clock_seq_hi_variant)
def get_clock_seq_low(self):
return (self.int >> 48L) & 0xff
clock_seq_low = property(get_clock_seq_low)
# Reassemble the 60-bit timestamp; the version bits (top nibble of
# time_hi_version) are masked off first.
def get_time(self):
return (((self.time_hi_version & 0x0fffL) << 48L) |
(self.time_mid << 32L) | self.time_low)
time = property(get_time)
# Reassemble the 14-bit clock sequence; the variant bits are masked off.
def get_clock_seq(self):
return (((self.clock_seq_hi_variant & 0x3fL) << 8L) |
self.clock_seq_low)
clock_seq = property(get_clock_seq)
def get_node(self):
return self.int & 0xffffffffffff
node = property(get_node)
def get_hex(self):
return '%032x' % self.int
hex = property(get_hex)
def get_urn(self):
return 'urn:uuid:' + str(self)
urn = property(get_urn)
# The variant is encoded in the top bits of the clock_seq_hi octet;
# test them most-significant-first as RFC 4122 section 4.1.1 lays out.
def get_variant(self):
if not self.int & (0x8000 << 48L):
return RESERVED_NCS
elif not self.int & (0x4000 << 48L):
return RFC_4122
elif not self.int & (0x2000 << 48L):
return RESERVED_MICROSOFT
else:
return RESERVED_FUTURE
variant = property(get_variant)
def get_version(self):
# The version bits are only meaningful for RFC 4122 UUIDs.
# (Implicitly returns None for any other variant.)
if self.variant == RFC_4122:
return int((self.int >> 76L) & 0xf)
version = property(get_version)
def _ifconfig_getnode():
"""Get the hardware address on Unix by running ifconfig."""
import os
# Try a relative 'ifconfig' (current directory) first, then common
# sbin locations; os.path.join tolerates '/usr/sbin' lacking a slash.
for dir in ['', '/sbin/', '/usr/sbin']:
try:
path = os.path.join(dir, 'ifconfig')
if os.path.exists(path):
pipe = os.popen(path)
else:
continue
except IOError:
continue
# Scan output for an 'HWaddr'/'ether' token; the colon-separated
# MAC address follows it. Returns None implicitly if none found.
for line in pipe:
words = line.lower().split()
for i in range(len(words)):
if words[i] in ['hwaddr', 'ether']:
return int(words[i + 1].replace(':', ''), 16)
def _ipconfig_getnode():
"""Get the hardware address on Windows by running ipconfig.exe."""
import os, re
dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
try:
# Prefer the real system directory, queried via the Win32 API,
# over the hard-coded fallback paths above.
import ctypes
buffer = ctypes.create_string_buffer(300)
ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
dirs.insert(0, buffer.value.decode('mbcs'))
except:
# Best effort only -- fall back to the hard-coded directories.
pass
for dir in dirs:
try:
pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
except IOError:
continue
# Look for a value shaped like a MAC address (xx-xx-xx-xx-xx-xx).
# Returns None implicitly if no line matches.
for line in pipe:
value = line.split(':')[-1].strip().lower()
if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
return int(value.replace('-', ''), 16)
def _netbios_getnode():
"""Get the hardware address on Windows using NetBIOS calls.
See http://support.microsoft.com/kb/118623 for details."""
import win32wnet, netbios
# Enumerate the LAN adapters.
ncb = netbios.NCB()
ncb.Command = netbios.NCBENUM
ncb.Buffer = adapters = netbios.LANA_ENUM()
adapters._pack()
if win32wnet.Netbios(ncb) != 0:
# Enumeration failed; implicitly return None.
return
adapters._unpack()
# Query each adapter's status block until one yields an address.
for i in range(adapters.length):
ncb.Reset()
ncb.Command = netbios.NCBRESET
ncb.Lana_num = ord(adapters.lana[i])
if win32wnet.Netbios(ncb) != 0:
continue
ncb.Reset()
ncb.Command = netbios.NCBASTAT
ncb.Lana_num = ord(adapters.lana[i])
ncb.Callname = '*'.ljust(16)
ncb.Buffer = status = netbios.ADAPTER_STATUS()
if win32wnet.Netbios(ncb) != 0:
continue
status._unpack()
# Assemble the 6-byte adapter address into a 48-bit integer,
# most significant byte first.
bytes = map(ord, status.adapter_address)
return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) +
(bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5])
# Thanks to Thomas Heller for ctypes and for his help with its use here.
# If ctypes is available, use it to find system routines for UUID generation.
# Any of these three hooks left as None means the corresponding system
# facility is unavailable and the pure-Python fallbacks are used instead.
_uuid_generate_random = _uuid_generate_time = _UuidCreate = None
try:
import ctypes, ctypes.util
# Shared 16-byte output buffer for the system UUID generators.
_buffer = ctypes.create_string_buffer(16)
# The uuid_generate_* routines are provided by libuuid on at least
# Linux and FreeBSD, and provided by libc on Mac OS X.
for libname in ['uuid', 'c']:
try:
lib = ctypes.CDLL(ctypes.util.find_library(libname))
except:
continue
if hasattr(lib, 'uuid_generate_random'):
_uuid_generate_random = lib.uuid_generate_random
if hasattr(lib, 'uuid_generate_time'):
_uuid_generate_time = lib.uuid_generate_time
# On Windows prior to 2000, UuidCreate gives a UUID containing the
# hardware address. On Windows 2000 and later, UuidCreate makes a
# random UUID and UuidCreateSequential gives a UUID containing the
# hardware address. These routines are provided by the RPC runtime.
try:
lib = ctypes.windll.rpcrt4
except:
lib = None
_UuidCreate = getattr(lib, 'UuidCreateSequential',
getattr(lib, 'UuidCreate', None))
except:
# ctypes missing or probing failed: silently keep the None hooks.
pass
def _unixdll_getnode():
"""Get the hardware address on Unix using ctypes."""
# Ask the system for a time-based UUID and extract its node field.
_uuid_generate_time(_buffer)
return UUID(bytes=_buffer.raw).node
def _windll_getnode():
"""Get the hardware address on Windows using ctypes."""
# Returns None implicitly when UuidCreate reports failure (non-zero).
if _UuidCreate(_buffer) == 0:
return UUID(bytes=_buffer.raw).node
def _random_getnode():
"""Get a random node ID, with eighth bit set as suggested by RFC 4122."""
import random
return random.randrange(0, 1<<48L) | 0x010000000000L
# Cached node value so the (possibly slow) lookup runs at most once.
_node = None
def getnode():
"""Get the hardware address as a 48-bit integer. The first time this
runs, it may launch a separate program, which could be quite slow. If
all attempts to obtain the hardware address fail, we choose a random
48-bit number with its eighth bit set to 1 as recommended in RFC 4122."""
global _node
if _node is not None:
return _node
import sys
if sys.platform == 'win32':
getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
else:
getters = [_unixdll_getnode, _ifconfig_getnode]
# Try each platform getter in turn; _random_getnode is last and
# always produces a value, so the loop is guaranteed to return.
for getter in getters + [_random_getnode]:
try:
_node = getter()
except:
continue
if _node is not None:
return _node
def uuid1(node=None, clock_seq=None):
"""Generate a UUID from a host ID, sequence number, and the current time.
If 'node' is not given, getnode() is used to obtain the hardware
address. If 'clock_seq' is given, it is used as the sequence number;
otherwise a random 14-bit sequence number is chosen."""
# When the system provides a version-1 UUID generator, use it (but don't
# use UuidCreate here because its UUIDs don't conform to RFC 4122).
if _uuid_generate_time and node is clock_seq is None:
_uuid_generate_time(_buffer)
return UUID(bytes=_buffer.raw)
# Otherwise build the version-1 UUID in pure Python.
import time
nanoseconds = int(time.time() * 1e9)
# 0x01b21dd213814000 is the number of 100-ns intervals between the
# UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
timestamp = int(nanoseconds/100) + 0x01b21dd213814000L
if clock_seq is None:
import random
clock_seq = random.randrange(1<<14L) # instead of stable storage
# Split the 60-bit timestamp and 14-bit clock sequence across the
# RFC 4122 field layout.
time_low = timestamp & 0xffffffffL
time_mid = (timestamp >> 32L) & 0xffffL
time_hi_version = (timestamp >> 48L) & 0x0fffL
clock_seq_low = clock_seq & 0xffL
clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL
if node is None:
node = getnode()
return UUID(fields=(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node), version=1)
def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
try:
# Python 2.6
from hashlib import md5
except ImportError:
# Python 2.5 and earlier
from md5 import new as md5
# An MD5 digest is exactly 16 bytes, so hash[:16] is the whole digest;
# UUID(version=3) then stamps the variant/version bits over it.
hash = md5(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=3)
def uuid4():
"""Generate a random UUID."""
# When the system provides a version-4 UUID generator, use it.
if _uuid_generate_random:
_uuid_generate_random(_buffer)
return UUID(bytes=_buffer.raw)
# Otherwise, get randomness from urandom or the 'random' module.
try:
import os
return UUID(bytes=os.urandom(16), version=4)
except:
# os.urandom unavailable: fall back to the (non-cryptographic)
# 'random' module. UUID(bytes=...) accepts this list of 16
# single-character strings since it only uses len() and ord().
import random
bytes = [chr(random.randrange(256)) for i in range(16)]
return UUID(bytes=bytes, version=4)
def uuid5(namespace, name):
"""Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
# 'sha' is the pre-hashlib Python 2 module; a SHA-1 digest is 20 bytes,
# so it is truncated to the 16 bytes a UUID holds.
import sha
hash = sha.sha(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=5)
# The following standard UUIDs are for use with uuid3() or uuid5().
# These are the well-known namespace IDs from RFC 4122 Appendix C.
NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')

View File

@ -2243,7 +2243,7 @@ void LLKeyframeMotion::onLoadComplete(LLVFS *vfs,
LLCharacter* character = *char_iter;
// look for an existing instance of this motion
LLKeyframeMotion* motionp = (LLKeyframeMotion*) character->findMotion(asset_uuid);
LLKeyframeMotion* motionp = dynamic_cast<LLKeyframeMotion*> (character->findMotion(asset_uuid));
if (motionp)
{
if (0 == status)

View File

@ -294,9 +294,11 @@ void LLScopedLock::unlock()
// Report an APR status: returns false for APR_SUCCESS, true for any
// error, logging a human-readable message where that is compiled in.
bool ll_apr_warn_status(apr_status_t status)
{
if(APR_SUCCESS == status) return false;
// NOTE(review): message lookup/logging is compiled out on Linux; the
// reason is not visible here -- confirm against the build history.
#if !LL_LINUX
char buf[MAX_STRING]; /* Flawfinder: ignore */
apr_strerror(status, buf, sizeof(buf));
LL_WARNS("APR") << "APR: " << buf << LL_ENDL;
#endif
return true;
}

View File

@ -921,11 +921,6 @@ namespace
std::ostringstream message_stream;
if (show_location && (r->wantsLocation() || level == LLError::LEVEL_ERROR || s->mPrintLocation))
{
message_stream << site.mLocationString << " ";
}
if (show_time && r->wantsTime() && s->mTimeFunction != NULL)
{
message_stream << s->mTimeFunction() << " ";
@ -933,17 +928,17 @@ namespace
if (show_level && r->wantsLevel())
{
message_stream << site.mLevelString;
message_stream << site.mLevelString << " ";
}
if (show_tags && r->wantsTags())
{
message_stream << site.mTagString;
}
if ((show_level && r->wantsLevel())||
(show_tags && r->wantsTags()))
if (show_location && (r->wantsLocation() || level == LLError::LEVEL_ERROR || s->mPrintLocation))
{
message_stream << " ";
message_stream << site.mLocationString << " ";
}
if (show_function && r->wantsFunctionName())

View File

@ -276,6 +276,8 @@ LLEventPumps::~LLEventPumps()
#pragma warning (push)
#pragma warning (disable : 4355) // 'this' used in initializer list: yes, intentionally
#endif
const std::string LLEventPump::ANONYMOUS = std::string();
LLEventPump::LLEventPump(const std::string& name, bool tweak):
// Register every new instance with LLEventPumps
@ -314,145 +316,162 @@ LLBoundListener LLEventPump::listen_impl(const std::string& name, const LLEventL
const NameList& after,
const NameList& before)
{
// Check for duplicate name before connecting listener to mSignal
ConnectionMap::const_iterator found = mConnections.find(name);
// In some cases the user might disconnect a connection explicitly -- or
// might use LLEventTrackable to disconnect implicitly. Either way, we can
// end up retaining in mConnections a zombie connection object that's
// already been disconnected. Such a connection object can't be
// reconnected -- nor, in the case of LLEventTrackable, would we want to
// try, since disconnection happens with the destruction of the listener
// object. That means it's safe to overwrite a disconnected connection
// object with the new one we're attempting. The case we want to prevent
// is only when the existing connection object is still connected.
if (found != mConnections.end() && found->second.connected())
float nodePosition = 1.0;
// if the supplied name is empty we are not interested in the ordering mechanism
// and can bypass attempting to find the optimal location to insert the new
// listener. We'll just tack it on to the end.
if (!name.empty()) // should be the same as testing against ANONYMOUS
{
// Check for duplicate name before connecting listener to mSignal
ConnectionMap::const_iterator found = mConnections.find(name);
// In some cases the user might disconnect a connection explicitly -- or
// might use LLEventTrackable to disconnect implicitly. Either way, we can
// end up retaining in mConnections a zombie connection object that's
// already been disconnected. Such a connection object can't be
// reconnected -- nor, in the case of LLEventTrackable, would we want to
// try, since disconnection happens with the destruction of the listener
// object. That means it's safe to overwrite a disconnected connection
// object with the new one we're attempting. The case we want to prevent
// is only when the existing connection object is still connected.
if (found != mConnections.end() && found->second.connected())
{
LLTHROW(DupListenerName("Attempt to register duplicate listener name '" + name +
"' on " + typeid(*this).name() + " '" + getName() + "'"));
}
// Okay, name is unique, try to reconcile its dependencies. Specify a new
// "node" value that we never use for an mSignal placement; we'll fix it
// later.
DependencyMap::node_type& newNode = mDeps.add(name, -1.0, after, before);
// What if this listener has been added, removed and re-added? In that
// case newNode already has a non-negative value because we never remove a
// listener from mDeps. But keep processing uniformly anyway in case the
// listener was added back with different dependencies. Then mDeps.sort()
// would put it in a different position, and the old newNode placement
// value would be wrong, so we'd have to reassign it anyway. Trust that
// re-adding a listener with the same dependencies is the trivial case for
// mDeps.sort(): it can just replay its cache.
DependencyMap::sorted_range sorted_range;
try
{
// Can we pick an order that works including this new entry?
sorted_range = mDeps.sort();
}
catch (const DependencyMap::Cycle& e)
{
// No: the new node's after/before dependencies have made mDeps
// unsortable. If we leave the new node in mDeps, it will continue
// to screw up all future attempts to sort()! Pull it out.
mDeps.remove(name);
}
// Okay, name is unique, try to reconcile its dependencies. Specify a new
// "node" value that we never use for an mSignal placement; we'll fix it
// later.
DependencyMap::node_type& newNode = mDeps.add(name, -1.0, after, before);
// What if this listener has been added, removed and re-added? In that
// case newNode already has a non-negative value because we never remove a
// listener from mDeps. But keep processing uniformly anyway in case the
// listener was added back with different dependencies. Then mDeps.sort()
// would put it in a different position, and the old newNode placement
// value would be wrong, so we'd have to reassign it anyway. Trust that
// re-adding a listener with the same dependencies is the trivial case for
// mDeps.sort(): it can just replay its cache.
DependencyMap::sorted_range sorted_range;
try
{
// Can we pick an order that works including this new entry?
sorted_range = mDeps.sort();
}
catch (const DependencyMap::Cycle& e)
{
// No: the new node's after/before dependencies have made mDeps
// unsortable. If we leave the new node in mDeps, it will continue
// to screw up all future attempts to sort()! Pull it out.
mDeps.remove(name);
LLTHROW(Cycle("New listener '" + name + "' on " + typeid(*this).name() +
" '" + getName() + "' would cause cycle: " + e.what()));
}
// Walk the list to verify that we haven't changed the order.
float previous = 0.0, myprev = 0.0;
DependencyMap::sorted_iterator mydmi = sorted_range.end(); // need this visible after loop
for (DependencyMap::sorted_iterator dmi = sorted_range.begin();
dmi != sorted_range.end(); ++dmi)
{
// Since we've added the new entry with an invalid placement,
// recognize it and skip it.
if (dmi->first == name)
{
// Remember the iterator belonging to our new node, and which
// placement value was 'previous' at that point.
mydmi = dmi;
myprev = previous;
continue;
}
// If the new node has rearranged the existing nodes, we'll find
// that their placement values are no longer in increasing order.
if (dmi->second < previous)
// Walk the list to verify that we haven't changed the order.
float previous = 0.0, myprev = 0.0;
DependencyMap::sorted_iterator mydmi = sorted_range.end(); // need this visible after loop
for (DependencyMap::sorted_iterator dmi = sorted_range.begin();
dmi != sorted_range.end(); ++dmi)
{
// This is another scenario in which we'd better back out the
// newly-added node from mDeps -- but don't do it yet, we want to
// traverse the existing mDeps to report on it!
// Describe the change to the order of our listeners. Copy
// everything but the newest listener to a vector we can sort to
// obtain the old order.
typedef std::vector< std::pair<float, std::string> > SortNameList;
SortNameList sortnames;
for (DependencyMap::sorted_iterator cdmi(sorted_range.begin()), cdmend(sorted_range.end());
cdmi != cdmend; ++cdmi)
// Since we've added the new entry with an invalid placement,
// recognize it and skip it.
if (dmi->first == name)
{
if (cdmi->first != name)
{
sortnames.push_back(SortNameList::value_type(cdmi->second, cdmi->first));
}
// Remember the iterator belonging to our new node, and which
// placement value was 'previous' at that point.
mydmi = dmi;
myprev = previous;
continue;
}
std::sort(sortnames.begin(), sortnames.end());
std::ostringstream out;
out << "New listener '" << name << "' on " << typeid(*this).name() << " '" << getName()
<< "' would move previous listener '" << dmi->first << "'\nwas: ";
SortNameList::const_iterator sni(sortnames.begin()), snend(sortnames.end());
if (sni != snend)
// If the new node has rearranged the existing nodes, we'll find
// that their placement values are no longer in increasing order.
if (dmi->second < previous)
{
out << sni->second;
while (++sni != snend)
// This is another scenario in which we'd better back out the
// newly-added node from mDeps -- but don't do it yet, we want to
// traverse the existing mDeps to report on it!
// Describe the change to the order of our listeners. Copy
// everything but the newest listener to a vector we can sort to
// obtain the old order.
typedef std::vector< std::pair<float, std::string> > SortNameList;
SortNameList sortnames;
for (DependencyMap::sorted_iterator cdmi(sorted_range.begin()), cdmend(sorted_range.end());
cdmi != cdmend; ++cdmi)
{
out << ", " << sni->second;
if (cdmi->first != name)
{
sortnames.push_back(SortNameList::value_type(cdmi->second, cdmi->first));
}
}
}
out << "\nnow: ";
DependencyMap::sorted_iterator ddmi(sorted_range.begin()), ddmend(sorted_range.end());
if (ddmi != ddmend)
{
out << ddmi->first;
while (++ddmi != ddmend)
std::sort(sortnames.begin(), sortnames.end());
std::ostringstream out;
out << "New listener '" << name << "' on " << typeid(*this).name() << " '" << getName()
<< "' would move previous listener '" << dmi->first << "'\nwas: ";
SortNameList::const_iterator sni(sortnames.begin()), snend(sortnames.end());
if (sni != snend)
{
out << ", " << ddmi->first;
out << sni->second;
while (++sni != snend)
{
out << ", " << sni->second;
}
}
}
// NOW remove the offending listener node.
mDeps.remove(name);
// Having constructed a description of the order change, inform caller.
out << "\nnow: ";
DependencyMap::sorted_iterator ddmi(sorted_range.begin()), ddmend(sorted_range.end());
if (ddmi != ddmend)
{
out << ddmi->first;
while (++ddmi != ddmend)
{
out << ", " << ddmi->first;
}
}
// NOW remove the offending listener node.
mDeps.remove(name);
// Having constructed a description of the order change, inform caller.
LLTHROW(OrderChange(out.str()));
}
// This node becomes the previous one.
previous = dmi->second;
}
// This node becomes the previous one.
previous = dmi->second;
}
// We just got done with a successful mDeps.add(name, ...) call. We'd
// better have found 'name' somewhere in that sorted list!
assert(mydmi != sorted_range.end());
// Four cases:
// 0. name is the only entry: placement 1.0
// 1. name is the first of several entries: placement (next placement)/2
// 2. name is between two other entries: placement (myprev + (next placement))/2
// 3. name is the last entry: placement ceil(myprev) + 1.0
// Since we've cleverly arranged for myprev to be 0.0 if name is the
// first entry, this folds down to two cases. Case 1 is subsumed by
// case 2, and case 0 is subsumed by case 3. So we need only handle
// cases 2 and 3, which means we need only detect whether name is the
// last entry. Increment mydmi to see if there's anything beyond.
if (++mydmi != sorted_range.end())
{
// The new node isn't last. Place it between the previous node and
// the successor.
newNode = (myprev + mydmi->second)/2.f;
}
else
{
// The new node is last. Bump myprev up to the next integer, add
// 1.0 and use that.
newNode = std::ceil(myprev) + 1.f;
// We just got done with a successful mDeps.add(name, ...) call. We'd
// better have found 'name' somewhere in that sorted list!
assert(mydmi != sorted_range.end());
// Four cases:
// 0. name is the only entry: placement 1.0
// 1. name is the first of several entries: placement (next placement)/2
// 2. name is between two other entries: placement (myprev + (next placement))/2
// 3. name is the last entry: placement ceil(myprev) + 1.0
// Since we've cleverly arranged for myprev to be 0.0 if name is the
// first entry, this folds down to two cases. Case 1 is subsumed by
// case 2, and case 0 is subsumed by case 3. So we need only handle
// cases 2 and 3, which means we need only detect whether name is the
// last entry. Increment mydmi to see if there's anything beyond.
if (++mydmi != sorted_range.end())
{
// The new node isn't last. Place it between the previous node and
// the successor.
newNode = (myprev + mydmi->second) / 2.f;
}
else
{
// The new node is last. Bump myprev up to the next integer, add
// 1.0 and use that.
newNode = std::ceil(myprev) + 1.f;
}
nodePosition = newNode;
}
// Now that newNode has a value that places it appropriately in mSignal,
// connect it.
LLBoundListener bound = mSignal->connect(newNode, listener);
mConnections[name] = bound;
LLBoundListener bound = mSignal->connect(nodePosition, listener);
if (!name.empty())
{ // note that we are not tracking anonymous listeners here either.
// This means that it is the caller's responsibility to either assign
// to a TempBoundListerer (scoped_connection) or manually disconnect
// when done.
mConnections[name] = bound;
}
return bound;
}

View File

@ -385,6 +385,8 @@ typedef boost::signals2::trackable LLEventTrackable;
class LL_COMMON_API LLEventPump: public LLEventTrackable
{
public:
static const std::string ANONYMOUS; // constant for anonymous listeners.
/**
* Exception thrown by LLEventPump(). You are trying to instantiate an
* LLEventPump (subclass) using the same name as some other instance, and
@ -496,6 +498,12 @@ public:
* instantiate your listener, then passing the same name on each listen()
* call, allows us to optimize away the second and subsequent dependency
* sorts.
*
* If name is set to LLEventPump::ANONYMOUS listen will bypass the entire
* dependency and ordering calculation. In this case, it is critical that
* the result be assigned to a LLTempBoundListener or the listener is
* manually disconnected when no longer needed since there will be no
* way to later find and disconnect this listener manually.
*
* If (as is typical) you pass a <tt>boost::bind()</tt> expression as @a
* listener, listen() will inspect the components of that expression. If a

View File

@ -205,9 +205,9 @@ void LLUriParser::glue(std::string& uri) const
uri = first_part + second_part;
}
void LLUriParser::glueFirst(std::string& uri) const
void LLUriParser::glueFirst(std::string& uri, bool use_scheme) const
{
if (mScheme.size())
if (use_scheme && mScheme.size())
{
uri = mScheme;
uri += "://";

View File

@ -60,7 +60,7 @@ public:
void extractParts();
void glue(std::string& uri) const;
void glueFirst(std::string& uri) const;
void glueFirst(std::string& uri, bool use_scheme = true) const;
void glueSecond(std::string& uri) const;
bool test() const;
S32 normalize();

View File

@ -564,9 +564,9 @@ namespace tut
function;
writeReturningLocationAndFunction(location, function);
ensure_equals("order is location time type function message",
ensure_equals("order is time location type function message",
message(0),
location + roswell() + " INFO: " + function + ": apple");
roswell() + " INFO: " + location + function + ": apple");
}
template<> template<>

View File

@ -110,10 +110,7 @@ namespace tut
// finding indra/lib/python. Use our __FILE__, with
// raw-string syntax to deal with Windows pathnames.
"mydir = os.path.dirname(r'" << __FILE__ << "')\n"
// We expect mydir to be .../indra/llcommon/tests.
"sys.path.insert(0,\n"
" os.path.join(mydir, os.pardir, os.pardir, 'lib', 'python'))\n"
"from indra.base import llsd\n"
"from llbase import llsd\n"
"\n"
"class ProtocolError(Exception):\n"
" def __init__(self, msg, data):\n"

View File

@ -1518,10 +1518,7 @@ namespace tut
// scanner.
import_llsd("import os.path\n"
"import sys\n"
"sys.path.insert(0,\n"
" os.path.join(os.path.dirname(r'" __FILE__ "'),\n"
" os.pardir, os.pardir, 'lib', 'python'))\n"
"from indra.base import llsd\n")
"from llbase import llsd\n")
{}
~TestPythonCompatible() {}

View File

@ -42,10 +42,8 @@ except ImportError:
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
mydir = os.path.dirname(__file__) # expected to be .../indra/llcorehttp/tests/
sys.path.insert(0, os.path.join(mydir, os.pardir, os.pardir, "lib", "python"))
from indra.util.fastest_elementtree import parse as xml_parse
from indra.base import llsd
from llbase.fastest_elementtree import parse as xml_parse
from llbase import llsd
from testrunner import freeport, run, debug, VERBOSE
class TestHTTPRequestHandler(BaseHTTPRequestHandler):

View File

@ -2143,19 +2143,22 @@ BOOL LLVolume::generate()
F32 profile_detail = mDetail;
F32 path_detail = mDetail;
U8 path_type = mParams.getPathParams().getCurveType();
U8 profile_type = mParams.getProfileParams().getCurveType();
if (path_type == LL_PCODE_PATH_LINE && profile_type == LL_PCODE_PROFILE_CIRCLE)
{ //cylinders don't care about Z-Axis
mLODScaleBias.setVec(0.6f, 0.6f, 0.0f);
if ((mParams.getSculptType() & LL_SCULPT_TYPE_MASK) != LL_SCULPT_TYPE_MESH)
{
U8 path_type = mParams.getPathParams().getCurveType();
U8 profile_type = mParams.getProfileParams().getCurveType();
if (path_type == LL_PCODE_PATH_LINE && profile_type == LL_PCODE_PROFILE_CIRCLE)
{
//cylinders don't care about Z-Axis
mLODScaleBias.setVec(0.6f, 0.6f, 0.0f);
}
else if (path_type == LL_PCODE_PATH_CIRCLE)
{
mLODScaleBias.setVec(0.6f, 0.6f, 0.6f);
}
}
else if (path_type == LL_PCODE_PATH_CIRCLE)
{
mLODScaleBias.setVec(0.6f, 0.6f, 0.6f);
}
BOOL regenPath = mPathp->generate(mParams.getPathParams(), path_detail, split);
BOOL regenProf = mProfilep->generate(mParams.getProfileParams(), mPathp->isOpen(),profile_detail, split);

View File

@ -642,7 +642,7 @@ HttpRequestPumper::HttpRequestPumper(const LLCore::HttpRequest::ptr_t &request)
mHttpRequest(request)
{
mBoundListener = LLEventPumps::instance().obtain("mainloop").
listen(LLEventPump::inventName(), boost::bind(&HttpRequestPumper::pollRequest, this, _1));
listen(LLEventPump::ANONYMOUS, boost::bind(&HttpRequestPumper::pollRequest, this, _1));
}
HttpRequestPumper::~HttpRequestPumper()

View File

@ -294,7 +294,7 @@ S32 LLXfer::processEOF()
}
else
{
LL_INFOS() << "xfer from " << mRemoteHost << " failed, code "
LL_INFOS() << "xfer from " << mRemoteHost << " failed or aborted, code "
<< mCallbackResult << ": " << getFileName() << LL_ENDL;
}

View File

@ -401,7 +401,7 @@ U64 LLXferManager::registerXfer(const void *datap, const S32 length)
///////////////////////////////////////////////////////////
void LLXferManager::requestFile(const std::string& local_filename,
U64 LLXferManager::requestFile(const std::string& local_filename,
const std::string& remote_filename,
ELLPath remote_path,
const LLHost& remote_host,
@ -424,10 +424,12 @@ void LLXferManager::requestFile(const std::string& local_filename,
{
// cout << "requested a xfer already in progress" << endl;
return;
return xferp->mID;
}
}
U64 xfer_id = 0;
S32 chunk_size = use_big_packets ? LL_XFER_LARGE_PAYLOAD : -1;
xferp = (LLXfer *) new LLXfer_File(chunk_size);
if (xferp)
@ -438,13 +440,15 @@ void LLXferManager::requestFile(const std::string& local_filename,
// around.
// Note: according to AaronB, this is here to deal with locks on files that were
// in transit during a crash,
if(delete_remote_on_completion &&
(remote_filename.substr(remote_filename.length()-4) == ".tmp"))
if( delete_remote_on_completion
&& (remote_filename.substr(remote_filename.length()-4) == ".tmp")
&& gDirUtilp->fileExists(local_filename))
{
LLFile::remove(local_filename);
}
xfer_id = getNextID();
((LLXfer_File *)xferp)->initializeRequest(
getNextID(),
xfer_id,
local_filename,
remote_filename,
remote_path,
@ -457,6 +461,7 @@ void LLXferManager::requestFile(const std::string& local_filename,
{
LL_ERRS() << "Xfer allocation error" << LL_ENDL;
}
return xfer_id;
}
void LLXferManager::requestFile(const std::string& remote_filename,
@ -616,7 +621,7 @@ void LLXferManager::processReceiveData (LLMessageSystem *mesgsys, void ** /*user
if (!xferp)
{
char U64_BUF[MAX_STRING]; /* Flawfinder : ignore */
LL_WARNS() << "received xfer data from " << mesgsys->getSender()
LL_INFOS() << "received xfer data from " << mesgsys->getSender()
<< " for non-existent xfer id: "
<< U64_to_str(id, U64_BUF, sizeof(U64_BUF)) << LL_ENDL;
return;
@ -1103,6 +1108,29 @@ void LLXferManager::retransmitUnackedPackets ()
}
}
///////////////////////////////////////////////////////////
// Abort the receive-side transfer identified by xfer_id, reporting
// result_code to the requester, then let the next queued download start.
// No-op if the id is not found in mReceiveList.
void LLXferManager::abortRequestById(U64 xfer_id, S32 result_code)
{
LLXfer * xferp = findXfer(xfer_id, mReceiveList);
if (xferp)
{
if (xferp->mStatus == e_LL_XFER_IN_PROGRESS)
{
// causes processAbort();
xferp->abort(result_code);
}
else
{
// Not yet in progress: finish it immediately with the failure code.
xferp->mCallbackResult = result_code;
xferp->processEOF(); //should notify requester
removeXfer(xferp, &mReceiveList);
}
// Since already removed or marked as aborted no need
// to wait for processAbort() to start new download
startPendingDownloads();
}
}
///////////////////////////////////////////////////////////

View File

@ -140,7 +140,7 @@ class LLXferManager
// file requesting routines
// .. to file
virtual void requestFile(const std::string& local_filename,
virtual U64 requestFile(const std::string& local_filename,
const std::string& remote_filename,
ELLPath remote_path,
const LLHost& remote_host,
@ -202,6 +202,7 @@ class LLXferManager
virtual void retransmitUnackedPackets ();
// error handling
void abortRequestById(U64 xfer_id, S32 result_code);
virtual void processAbort (LLMessageSystem *mesgsys, void **user_data);
};

View File

@ -4007,7 +4007,7 @@ void LLMessageSystem::sendUntrustedSimulatorMessageCoro(std::string url, std::st
{
LLCore::HttpRequest::policy_t httpPolicy(LLCore::HttpRequest::DEFAULT_POLICY_ID);
LLCoreHttpUtil::HttpCoroutineAdapter::ptr_t
httpAdapter(new LLCoreHttpUtil::HttpCoroutineAdapter("groupMembersRequest", httpPolicy));
httpAdapter(new LLCoreHttpUtil::HttpCoroutineAdapter("untrustedSimulatorMessage", httpPolicy));
LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
LLCore::HttpOptions::ptr_t httpOpts = LLCore::HttpOptions::ptr_t(new LLCore::HttpOptions);

View File

@ -34,10 +34,8 @@ import sys
from threading import Thread
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
mydir = os.path.dirname(__file__) # expected to be .../indra/llmessage/tests/
sys.path.insert(0, os.path.join(mydir, os.pardir, os.pardir, "lib", "python"))
from indra.util.fastest_elementtree import parse as xml_parse
from indra.base import llsd
from llbase.fastest_elementtree import parse as xml_parse
from llbase import llsd
from testrunner import freeport, run, debug, VERBOSE
import time

View File

@ -73,6 +73,7 @@ bool LLPluginClassMedia::init(const std::string &launcher_filename, const std::s
// Queue up the media init message -- it will be sent after all the currently queued messages.
LLPluginMessage message(LLPLUGIN_MESSAGE_CLASS_MEDIA, "init");
message.setValue("target", mTarget);
message.setValueReal("factor", mZoomFactor);
sendMessage(message);
mPlugin->init(launcher_filename, plugin_dir, plugin_filename, debug);
@ -1259,7 +1260,7 @@ void LLPluginClassMedia::focus(bool focused)
sendMessage(message);
}
void LLPluginClassMedia::set_page_zoom_factor( double factor )
void LLPluginClassMedia::set_page_zoom_factor( F64 factor )
{
LLPluginMessage message(LLPLUGIN_MESSAGE_CLASS_MEDIA_BROWSER, "set_page_zoom_factor");

View File

@ -68,6 +68,7 @@ public:
int getTextureHeight() const;
int getFullWidth() const { return mFullMediaWidth; };
int getFullHeight() const { return mFullMediaHeight; };
F64 getZoomFactor() const { return mZoomFactor; };
// This may return NULL. Callers need to check for and handle this case.
unsigned char* getBitsData();
@ -83,7 +84,8 @@ public:
void setSize(int width, int height);
void setAutoScale(bool auto_scale);
void setZoomFactor(F64 zoom_factor) { mZoomFactor = zoom_factor; }
void setBackgroundColor(LLColor4 color) { mBackgroundColor = color; };
void setOwner(LLPluginClassMediaOwner *owner) { mOwner = owner; };
@ -204,7 +206,7 @@ public:
bool pluginSupportsMediaBrowser(void);
void focus(bool focused);
void set_page_zoom_factor( double factor );
void set_page_zoom_factor( F64 factor );
void clear_cache();
void clear_cookies();
void set_cookies(const std::string &cookies);
@ -367,6 +369,8 @@ protected:
int mTextureHeight;
int mMediaWidth;
int mMediaHeight;
F64 mZoomFactor;
float mRequestedVolume;

View File

@ -1075,7 +1075,7 @@ bool LLDAELoader::OpenFile(const std::string& filename)
std::string LLDAELoader::preprocessDAE(std::string filename)
{
// Open a DAE file for some preprocessing (like removing space characters in IDs), see MAINT-5678
std::ifstream inFile;
llifstream inFile;
inFile.open(filename.c_str(), std::ios_base::in);
std::stringstream strStream;
strStream << inFile.rdbuf();

View File

@ -364,12 +364,7 @@ S32 LLFontGL::render(const LLWString &wstr, S32 begin_offset, F32 x, F32 y, cons
if (right_x)
{
F32 cr_x = (cur_x - origin.mV[VX]) / sScaleX;
if (*right_x < cr_x)
{
// rightmost edge of previously drawn text, don't draw over previous text
*right_x = cr_x;
}
*right_x = (cur_x - origin.mV[VX]) / sScaleX;
}
//FIXME: add underline as glyph?

View File

@ -487,14 +487,15 @@ bool LLImageGL::checkSize(S32 width, S32 height)
return check_power_of_two(width) && check_power_of_two(height);
}
void LLImageGL::setSize(S32 width, S32 height, S32 ncomponents, S32 discard_level)
bool LLImageGL::setSize(S32 width, S32 height, S32 ncomponents, S32 discard_level)
{
if (width != mWidth || height != mHeight || ncomponents != mComponents)
{
// Check if dimensions are a power of two!
if (!checkSize(width,height))
{
LL_ERRS() << llformat("Texture has non power of two dimension: %dx%d",width,height) << LL_ENDL;
LL_WARNS() << llformat("Texture has non power of two dimension: %dx%d",width,height) << LL_ENDL;
return false;
}
if (mTexName)
@ -529,6 +530,8 @@ void LLImageGL::setSize(S32 width, S32 height, S32 ncomponents, S32 discard_leve
mMaxDiscardLevel = MAX_DISCARD_LEVEL;
}
}
return true;
}
//----------------------------------------------------------------------------
@ -909,7 +912,11 @@ BOOL LLImageGL::preAddToAtlas(S32 discard_level, const LLImageRaw* raw_image)
S32 h = raw_image->getHeight() << discard_level;
// setSize may call destroyGLTexture if the size does not match
setSize(w, h, raw_image->getComponents(), discard_level);
if (!setSize(w, h, raw_image->getComponents(), discard_level))
{
LL_WARNS() << "Trying to create a texture with incorrect dimensions!" << LL_ENDL;
return FALSE;
}
if( !mHasExplicitFormat )
{
@ -1273,7 +1280,11 @@ BOOL LLImageGL::createGLTexture(S32 discard_level, const LLImageRaw* imageraw, S
S32 h = raw_h << discard_level;
// setSize may call destroyGLTexture if the size does not match
setSize(w, h, imageraw->getComponents(), discard_level);
if (!setSize(w, h, imageraw->getComponents(), discard_level))
{
LL_WARNS() << "Trying to create a texture with incorrect dimensions!" << LL_ENDL;
return FALSE;
}
if( !mHasExplicitFormat )
{

View File

@ -94,7 +94,7 @@ protected:
public:
virtual void dump(); // debugging info to LL_INFOS()
void setSize(S32 width, S32 height, S32 ncomponents, S32 discard_level = -1);
bool setSize(S32 width, S32 height, S32 ncomponents, S32 discard_level = -1);
void setComponents(S32 ncomponents) { mComponents = (S8)ncomponents ;}
void setAllowCompression(bool allow) { mAllowCompression = allow; }

View File

@ -123,7 +123,15 @@ bool LLClipboard::copyToClipboard(const LLWString &src, S32 pos, S32 len, bool u
// Concatenate the input string to the LL and the system clipboard
bool LLClipboard::addToClipboard(const LLWString &src, S32 pos, S32 len, bool use_primary)
{
mString = src.substr(pos, len);
try
{
mString = src.substr(pos, len);
}
catch (const std::exception& e)
{
LL_WARNS() << "Can't add the substring to clipboard: " << e.what() << LL_ENDL;
return false;
}
return (use_primary ? LLView::getWindow()->copyTextToPrimary(mString) : LLView::getWindow()->copyTextToClipboard(mString));
}

View File

@ -684,6 +684,13 @@ void LLFolderView::draw()
}
}
if (mRenameItem && mRenamer && mRenamer->getVisible() && !getVisibleRect().overlaps(mRenamer->getRect()))
{
// renamer is not connected to the item we are renaming in any form so manage it manually
// TODO: consider stopping on any scroll action instead of when out of visible area
finishRenamingItem();
}
// skip over LLFolderViewFolder::draw since we don't want the folder icon, label,
// and arrow for the root folder
LLView::draw();

View File

@ -127,6 +127,8 @@ LLFolderViewItem::LLFolderViewItem(const LLFolderViewItem::Params& p)
mIsSelected( FALSE ),
mIsCurSelection( FALSE ),
mSelectPending(FALSE),
mIsItemCut(false),
mCutGeneration(0),
mLabelStyle( LLFontGL::NORMAL ),
mHasVisibleChildren(FALSE),
mIsFolderComplete(true),
@ -694,6 +696,19 @@ void LLFolderViewItem::drawOpenFolderArrow(const Params& default_params, const L
return mIsCurSelection;
}
/*virtual*/ bool LLFolderViewItem::isFadeItem()
{
LLClipboard& clipboard = LLClipboard::instance();
if (mCutGeneration != clipboard.getGeneration())
{
mCutGeneration = clipboard.getGeneration();
mIsItemCut = clipboard.isCutMode()
&& ((getParentFolder() && getParentFolder()->isFadeItem())
|| getViewModelItem()->isCutToClipboard());
}
return mIsItemCut;
}
void LLFolderViewItem::drawHighlight(const BOOL showContent, const BOOL hasKeyboardFocus, const LLUIColor &selectColor, const LLUIColor &flashColor,
const LLUIColor &focusOutlineColor, const LLUIColor &mouseOverColor)
{
@ -875,6 +890,12 @@ void LLFolderViewItem::draw()
}
LLColor4 color = (mIsSelected && filled) ? mFontHighlightColor : mFontColor;
if (isFadeItem())
{
// Fade out item color to indicate it's being cut
color.mV[VALPHA] *= 0.5f;
}
drawLabel(font, text_left, y, color, right_x);
//--------------------------------------------------------------------------------//
@ -882,7 +903,7 @@ void LLFolderViewItem::draw()
//
if (!mLabelSuffix.empty())
{
font->renderUTF8( mLabelSuffix, 0, right_x, y, sSuffixColor,
font->renderUTF8( mLabelSuffix, 0, right_x, y, isFadeItem() ? color : (LLColor4)sSuffixColor,
LLFontGL::LEFT, LLFontGL::BOTTOM, LLFontGL::NORMAL, LLFontGL::NO_SHADOW,
S32_MAX, S32_MAX, &right_x, FALSE );
}

View File

@ -121,8 +121,11 @@ protected:
mIsMouseOverTitle,
mAllowWear,
mAllowDrop,
mSelectPending;
mSelectPending,
mIsItemCut;
S32 mCutGeneration;
LLUIColor mFontColor;
LLUIColor mFontHighlightColor;
@ -145,6 +148,7 @@ protected:
virtual void addFolder(LLFolderViewFolder*) { }
virtual bool isHighlightAllowed();
virtual bool isHighlightActive();
virtual bool isFadeItem();
virtual bool isFlashing() { return false; }
virtual void setFlashState(bool) { }

View File

@ -173,6 +173,7 @@ public:
virtual BOOL isItemCopyable() const = 0;
virtual BOOL copyToClipboard() const = 0;
virtual BOOL cutToClipboard() = 0;
virtual bool isCutToClipboard() { return false; };
virtual BOOL isClipboardPasteable() const = 0;
virtual void pasteFromClipboard() = 0;

View File

@ -2636,10 +2636,17 @@ void LLLineEditor::showContextMenu(S32 x, S32 y)
void LLLineEditor::setContextMenu(LLContextMenu* new_context_menu)
{
if (new_context_menu)
mContextMenuHandle = new_context_menu->getHandle();
else
mContextMenuHandle.markDead();
LLContextMenu* menu = static_cast<LLContextMenu*>(mContextMenuHandle.get());
if (menu)
{
menu->die();
mContextMenuHandle.markDead();
}
if (new_context_menu)
{
mContextMenuHandle = new_context_menu->getHandle();
}
}
void LLLineEditor::setFont(const LLFontGL* font)

View File

@ -272,7 +272,7 @@ public:
void setReplaceNewlinesWithSpaces(BOOL replace);
void setContextMenu(LLContextMenu* new_context_menu);
void resetContextMenu() { setContextMenu(NULL); };
private:
// private helper methods
@ -308,6 +308,8 @@ private:
virtual S32 getPreeditFontSize() const;
virtual LLWString getPreeditString() const { return getWText(); }
void setContextMenu(LLContextMenu* new_context_menu);
protected:
LLUIString mText; // The string being edited.
std::string mPrevText; // Saved string for 'ESC' revert

View File

@ -181,7 +181,7 @@ LLTextBase::LLTextBase(const LLTextBase::Params &p)
mMaxTextByteLength( p.max_text_length ),
mFont(p.font),
mFontShadow(p.font_shadow),
mPopupMenu(NULL),
mPopupMenuHandle(),
mReadOnly(p.read_only),
mSpellCheck(p.spellcheck),
mSpellCheckStart(-1),
@ -1263,9 +1263,10 @@ void LLTextBase::setReadOnlyColor(const LLColor4 &c)
//virtual
void LLTextBase::onVisibilityChange( BOOL new_visibility )
{
if(!new_visibility && mPopupMenu)
LLContextMenu* menu = static_cast<LLContextMenu*>(mPopupMenuHandle.get());
if(!new_visibility && menu)
{
mPopupMenu->hide();
menu->hide();
}
LLUICtrl::onVisibilityChange(new_visibility);
}
@ -1956,41 +1957,48 @@ void LLTextBase::createUrlContextMenu(S32 x, S32 y, const std::string &in_url)
registrar.add("Url.CopyUrl", boost::bind(&LLUrlAction::copyURLToClipboard, url));
// create and return the context menu from the XUI file
delete mPopupMenu;
LLContextMenu* menu = static_cast<LLContextMenu*>(mPopupMenuHandle.get());
if (menu)
{
menu->die();
mPopupMenuHandle.markDead();
}
llassert(LLMenuGL::sMenuContainer != NULL);
mPopupMenu = LLUICtrlFactory::getInstance()->createFromFile<LLContextMenu>(xui_file, LLMenuGL::sMenuContainer,
LLMenuHolderGL::child_registry_t::instance());
if (mIsFriendSignal)
{
bool isFriend = *(*mIsFriendSignal)(LLUUID(LLUrlAction::getUserID(url)));
LLView* addFriendButton = mPopupMenu->getChild<LLView>("add_friend");
LLView* removeFriendButton = mPopupMenu->getChild<LLView>("remove_friend");
menu = LLUICtrlFactory::getInstance()->createFromFile<LLContextMenu>(xui_file, LLMenuGL::sMenuContainer,
LLMenuHolderGL::child_registry_t::instance());
if (menu)
{
mPopupMenuHandle = menu->getHandle();
if (addFriendButton && removeFriendButton)
{
addFriendButton->setEnabled(!isFriend);
removeFriendButton->setEnabled(isFriend);
}
}
if (mIsFriendSignal)
{
bool isFriend = *(*mIsFriendSignal)(LLUUID(LLUrlAction::getUserID(url)));
LLView* addFriendButton = menu->getChild<LLView>("add_friend");
LLView* removeFriendButton = menu->getChild<LLView>("remove_friend");
if (mIsObjectBlockedSignal)
{
bool is_blocked = *(*mIsObjectBlockedSignal)(LLUUID(LLUrlAction::getObjectId(url)), LLUrlAction::getObjectName(url));
LLView* blockButton = mPopupMenu->getChild<LLView>("block_object");
LLView* unblockButton = mPopupMenu->getChild<LLView>("unblock_object");
if (addFriendButton && removeFriendButton)
{
addFriendButton->setEnabled(!isFriend);
removeFriendButton->setEnabled(isFriend);
}
}
if (blockButton && unblockButton)
{
blockButton->setVisible(!is_blocked);
unblockButton->setVisible(is_blocked);
}
}
if (mPopupMenu)
{
mPopupMenu->show(x, y);
LLMenuGL::showPopup(this, mPopupMenu, x, y);
}
if (mIsObjectBlockedSignal)
{
bool is_blocked = *(*mIsObjectBlockedSignal)(LLUUID(LLUrlAction::getObjectId(url)), LLUrlAction::getObjectName(url));
LLView* blockButton = menu->getChild<LLView>("block_object");
LLView* unblockButton = menu->getChild<LLView>("unblock_object");
if (blockButton && unblockButton)
{
blockButton->setVisible(!is_blocked);
unblockButton->setVisible(is_blocked);
}
}
menu->show(x, y);
LLMenuGL::showPopup(this, menu, x, y);
}
}
void LLTextBase::setText(const LLStringExplicit &utf8str, const LLStyle::Params& input_params)

View File

@ -673,7 +673,7 @@ protected:
S32 mMaxTextByteLength; // Maximum length mText is allowed to be in bytes
// support widgets
LLContextMenu* mPopupMenu;
LLHandle<LLContextMenu> mPopupMenuHandle;
LLView* mDocumentView;
LLScrollContainer* mScroller;

View File

@ -328,6 +328,15 @@ namespace LLTextValidate
return rv;
}
bool validateASCIINoLeadingSpace(const LLWString &str)
{
if (LLStringOps::isSpace(str[0]))
{
return FALSE;
}
return validateASCII(str);
}
// Used for multiline text stored on the server.
// Example is landmark description in Places SP.
bool validateASCIIWithNewLine(const LLWString &str)

View File

@ -52,6 +52,7 @@ namespace LLTextValidate
bool validateASCIIPrintableNoPipe(const LLWString &str);
bool validateASCIIPrintableNoSpace(const LLWString &str);
bool validateASCII(const LLWString &str);
bool validateASCIINoLeadingSpace(const LLWString &str);
bool validateASCIIWithNewLine(const LLWString &str);
}

View File

@ -522,7 +522,7 @@ const LLView* LLUI::resolvePath(const LLView* context, const std::string& path)
else
{
std::string part(ti->begin(), ti->end());
context = context->findChildView(part, recurse);
context = context->findChildView(LLURI::unescape(part), recurse);
recurse = false;
}
}

View File

@ -183,8 +183,9 @@ bool LLUrlEntryBase::isLinkDisabled() const
bool LLUrlEntryBase::isWikiLinkCorrect(std::string url)
{
std::string label = getLabelFromWikiLink(url);
return (LLUrlRegistry::instance().hasUrl(label)) ? false : true;
LLWString label = utf8str_to_wstring(getLabelFromWikiLink(url));
label.erase(std::remove(label.begin(), label.end(), L'\u200B'), label.end());
return (LLUrlRegistry::instance().hasUrl(wstring_to_utf8str(label))) ? false : true;
}
std::string LLUrlEntryBase::urlToLabelWithGreyQuery(const std::string &url) const
@ -205,9 +206,15 @@ std::string LLUrlEntryBase::urlToGreyQuery(const std::string &url) const
std::string label;
up.extractParts();
up.glueFirst(label);
std::string query = url.substr(label.size());
return query;
up.glueFirst(label, false);
size_t pos = url.find(label);
if (pos == std::string::npos)
{
return "";
}
pos += label.size();
return url.substr(pos);
}

View File

@ -391,7 +391,27 @@ static void buildPathname(std::ostream& out, const LLView* view)
buildPathname(out, view->getParent());
// Build pathname into ostream on the way back from recursion.
out << '/' << view->getName();
out << '/';
// substitute all '/' in name with appropriate code
std::string name = view->getName();
std::size_t found = name.find('/');
std::size_t start = 0;
while (found != std::string::npos)
{
std::size_t sub_len = found - start;
if (sub_len > 0)
{
out << name.substr(start, sub_len);
}
out << "%2F";
start = found + 1;
found = name.find('/', start);
}
if (start < name.size())
{
out << name.substr(start, name.size() - start);
}
}
std::string LLView::getPathname() const

View File

@ -531,6 +531,13 @@ std::string LLDir::getExpandedFilename(ELLPath location, const std::string& subd
case LL_PATH_PER_ACCOUNT_CHAT_LOGS:
prefix = getPerAccountChatLogsDir();
if (prefix.empty())
{
// potentially directory was not set yet
// intentionally return a blank string to the caller
LL_DEBUGS("LLDir") << "Conversation log directory is not yet set" << LL_ENDL;
return std::string();
}
break;
case LL_PATH_LOGS:

View File

@ -166,6 +166,8 @@ public:
// Provide native key event data
virtual LLSD getNativeKeyData() { return LLSD::emptyMap(); }
// Get system UI size based on DPI (for 96 DPI UI size should be 1.0)
virtual F32 getSystemUISize() { return 1.0; }
protected:
LLWindow(LLWindowCallbacks* callbacks, BOOL fullscreen, U32 flags);
virtual ~LLWindow();

View File

@ -175,6 +175,11 @@ BOOL LLWindowCallbacks::handleDeviceChange(LLWindow *window)
return FALSE;
}
void LLWindowCallbacks::handleDPIChanged(LLWindow *window, F32 ui_scale_factor, S32 window_width, S32 window_height)
{
}
void LLWindowCallbacks::handlePingWatchdog(LLWindow *window, const char * msg)
{

View File

@ -65,6 +65,7 @@ public:
virtual void handleDataCopy(LLWindow *window, S32 data_type, void *data);
virtual BOOL handleTimerEvent(LLWindow *window);
virtual BOOL handleDeviceChange(LLWindow *window);
virtual void handleDPIChanged(LLWindow *window, F32 ui_scale_factor, S32 window_width, S32 window_height);
enum DragNDropAction {
DNDA_START_TRACKING = 0,// Start tracking an incoming drag

View File

@ -60,24 +60,50 @@
#include <dinput.h>
#include <Dbt.h.>
// culled from winuser.h
#ifndef WM_MOUSEWHEEL /* Added to be compatible with later SDK's */
const S32 WM_MOUSEWHEEL = 0x020A;
#endif
#ifndef WHEEL_DELTA /* Added to be compatible with later SDK's */
const S32 WHEEL_DELTA = 120; /* Value for rolling one detent */
#endif
const S32 MAX_MESSAGE_PER_UPDATE = 20;
const S32 BITS_PER_PIXEL = 32;
const S32 MAX_NUM_RESOLUTIONS = 32;
const F32 ICON_FLASH_TIME = 0.5f;
#ifndef WM_DPICHANGED
#define WM_DPICHANGED 0x02E0
#endif
extern BOOL gDebugWindowProc;
LPWSTR gIconResource = IDI_APPLICATION;
LLW32MsgCallback gAsyncMsgCallback = NULL;
#ifndef DPI_ENUMS_DECLARED
typedef enum PROCESS_DPI_AWARENESS {
PROCESS_DPI_UNAWARE = 0,
PROCESS_SYSTEM_DPI_AWARE = 1,
PROCESS_PER_MONITOR_DPI_AWARE = 2
} PROCESS_DPI_AWARENESS;
typedef enum MONITOR_DPI_TYPE {
MDT_EFFECTIVE_DPI = 0,
MDT_ANGULAR_DPI = 1,
MDT_RAW_DPI = 2,
MDT_DEFAULT = MDT_EFFECTIVE_DPI
} MONITOR_DPI_TYPE;
#endif
typedef HRESULT(STDAPICALLTYPE *SetProcessDpiAwarenessType)(_In_ PROCESS_DPI_AWARENESS value);
typedef HRESULT(STDAPICALLTYPE *GetProcessDpiAwarenessType)(
_In_ HANDLE hprocess,
_Out_ PROCESS_DPI_AWARENESS *value);
typedef HRESULT(STDAPICALLTYPE *GetDpiForMonitorType)(
_In_ HMONITOR hmonitor,
_In_ MONITOR_DPI_TYPE dpiType,
_Out_ UINT *dpiX,
_Out_ UINT *dpiY);
//
// LLWindowWin32
//
@ -2593,6 +2619,24 @@ LRESULT CALLBACK LLWindowWin32::mainWindowProc(HWND h_wnd, UINT u_msg, WPARAM w_
return 0;
}
case WM_DPICHANGED:
{
LPRECT lprc_new_scale;
F32 new_scale = LOWORD(w_param) / USER_DEFAULT_SCREEN_DPI;
lprc_new_scale = (LPRECT)l_param;
S32 new_width = lprc_new_scale->right - lprc_new_scale->left;
S32 new_height = lprc_new_scale->bottom - lprc_new_scale->top;
window_imp->mCallbacks->handleDPIChanged(window_imp, new_scale, new_width, new_height);
SetWindowPos(h_wnd,
HWND_TOP,
lprc_new_scale->left,
lprc_new_scale->top,
new_width,
new_height,
SWP_NOZORDER | SWP_NOACTIVATE);
return 0;
}
case WM_SETFOCUS:
window_imp->mCallbacks->handlePingWatchdog(window_imp, "Main:WM_SETFOCUS");
@ -3878,6 +3922,92 @@ BOOL LLWindowWin32::handleImeRequests(U32 request, U32 param, LRESULT *result)
return FALSE;
}
//static
void LLWindowWin32::setDPIAwareness()
{
HMODULE hShcore = LoadLibrary(L"shcore.dll");
if (hShcore != NULL)
{
SetProcessDpiAwarenessType pSPDA;
pSPDA = (SetProcessDpiAwarenessType)GetProcAddress(hShcore, "SetProcessDpiAwareness");
if (pSPDA)
{
HRESULT hr = pSPDA(PROCESS_PER_MONITOR_DPI_AWARE);
if (hr != S_OK)
{
LL_WARNS() << "SetProcessDpiAwareness() function returned an error. Will use legacy DPI awareness API of Win XP/7" << LL_ENDL;
}
}
FreeLibrary(hShcore);
}
else
{
LL_WARNS() << "Could not load shcore.dll library (included by <ShellScalingAPI.h> from Win 8.1 SDK. Will use legacy DPI awareness API of Win XP/7" << LL_ENDL;
}
}
F32 LLWindowWin32::getSystemUISize()
{
float scale_value = 0;
HWND hWnd = (HWND)getPlatformWindow();
HDC hdc = GetDC(hWnd);
HMONITOR hMonitor;
HANDLE hProcess = GetCurrentProcess();
PROCESS_DPI_AWARENESS dpi_awareness;
HMODULE hShcore = LoadLibrary(L"shcore.dll");
if (hShcore != NULL)
{
GetProcessDpiAwarenessType pGPDA;
pGPDA = (GetProcessDpiAwarenessType)GetProcAddress(hShcore, "GetProcessDpiAwareness");
GetDpiForMonitorType pGDFM;
pGDFM = (GetDpiForMonitorType)GetProcAddress(hShcore, "GetDpiForMonitor");
if (pGPDA != NULL && pGDFM != NULL)
{
pGPDA(hProcess, &dpi_awareness);
if (dpi_awareness == PROCESS_PER_MONITOR_DPI_AWARE)
{
POINT pt;
UINT dpix = 0, dpiy = 0;
HRESULT hr = E_FAIL;
RECT rect;
GetWindowRect(hWnd, &rect);
// Get the DPI for the monitor, on which the center of window is displayed and set the scaling factor
pt.x = (rect.left + rect.right) / 2;
pt.y = (rect.top + rect.bottom) / 2;
hMonitor = MonitorFromPoint(pt, MONITOR_DEFAULTTONEAREST);
hr = pGDFM(hMonitor, MDT_EFFECTIVE_DPI, &dpix, &dpiy);
if (hr == S_OK)
{
scale_value = dpix / USER_DEFAULT_SCREEN_DPI;
}
else
{
LL_WARNS() << "Could not determine DPI for monitor. Setting scale to default 100 %" << LL_ENDL;
scale_value = 1.0f;
}
}
else
{
LL_WARNS() << "Process is not per-monitor DPI-aware. Setting scale to default 100 %" << LL_ENDL;
scale_value = 1.0f;
}
}
FreeLibrary(hShcore);
}
else
{
LL_WARNS() << "Could not load shcore.dll library (included by <ShellScalingAPI.h> from Win 8.1 SDK). Using legacy DPI awareness API of Win XP/7" << LL_ENDL;
scale_value = GetDeviceCaps(hdc, LOGPIXELSX) / USER_DEFAULT_SCREEN_DPI;
}
ReleaseDC(hWnd, hdc);
return scale_value;
}
//static
std::vector<std::string> LLWindowWin32::getDynamicFallbackFontList()
{

View File

@ -110,10 +110,12 @@ public:
/*virtual*/ void interruptLanguageTextInput();
/*virtual*/ void spawnWebBrowser(const std::string& escaped_url, bool async);
/*virtual*/ F32 getSystemUISize();
LLWindowCallbacks::DragNDropResult completeDragNDropRequest( const LLCoordGL gl_coord, const MASK mask, LLWindowCallbacks::DragNDropAction action, const std::string url );
static std::vector<std::string> getDynamicFallbackFontList();
static void setDPIAwareness();
protected:
LLWindowWin32(LLWindowCallbacks* callbacks,
const std::string& title, const std::string& name, int x, int y, int width, int height, U32 flags,

View File

@ -501,6 +501,7 @@ void MediaPluginCEF::receiveMessage(const char* message_string)
LLCEFLib::LLCEFLibSettings settings;
settings.initial_width = 1024;
settings.initial_height = 1024;
settings.page_zoom_factor = message_in.getValueReal("factor");
settings.plugins_enabled = mPluginsEnabled;
settings.media_stream_enabled = false; // MAINT-6060 - WebRTC media removed until we can add granularity/query UI
settings.javascript_enabled = mJavascriptEnabled;

View File

@ -4537,28 +4537,6 @@
<key>Value</key>
<string>http://search.secondlife.com/viewer/[CATEGORY]/?q=[QUERY]&amp;p=[AUTH_TOKEN]&amp;r=[MATURITY]&amp;lang=[LANGUAGE]&amp;g=[GODLIKE]&amp;sid=[SESSION_ID]&amp;rid=[REGION_ID]&amp;pid=[PARCEL_ID]&amp;channel=[CHANNEL]&amp;version=[VERSION]&amp;major=[VERSION_MAJOR]&amp;minor=[VERSION_MINOR]&amp;patch=[VERSION_PATCH]&amp;build=[VERSION_BUILD]</string>
</map>
<key>WebProfileURL</key>
<map>
<key>Comment</key>
<string>URL for Web Profiles</string>
<key>Persist</key>
<integer>0</integer>
<key>Type</key>
<string>String</string>
<key>Value</key>
<string>https://my.secondlife.com/[AGENT_NAME]</string>
</map>
<key>WebProfileNonProductionURL</key>
<map>
<key>Comment</key>
<string>URL for Web Profiles on Non-Production grids</string>
<key>Persist</key>
<integer>0</integer>
<key>Type</key>
<string>String</string>
<key>Value</key>
<string>https://my-demo.secondlife.com/[AGENT_NAME]</string>
</map>
<key>HighResSnapshot</key>
<map>
<key>Comment</key>
@ -4903,7 +4881,7 @@
<key>InventoryTrashMaxCapacity</key>
<map>
<key>Comment</key>
<string>Maximum capacity of the Trash folder. User will ve offered to clean it up when exceeded.</string>
<string>Maximum capacity of the Trash folder. User will be offered to clean it up when exceeded.</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
@ -7973,6 +7951,17 @@
<key>Value</key>
<integer>100000</integer>
</map>
<key>PrimTextMaxDrawDistance</key>
<map>
<key>Comment</key>
<string>Maximum draw distance beyond which PRIM_TEXT won't be rendered</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>F32</string>
<key>Value</key>
<real>64.0</real>
</map>
<key>ProbeHardwareOnStartup</key>
<map>
<key>Comment</key>
@ -10125,6 +10114,17 @@
<key>Value</key>
<integer>10</integer>
</map>
<key>ComplexityChangesPopUpDelay</key>
<map>
<key>Comment</key>
<string>Delay before viewer will show avatar complexity notice again</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>U32</string>
<key>Value</key>
<integer>300</integer>
</map>
<key>RenderAvatarMaxComplexity</key>
<map>
<key>Comment</key>
@ -10137,6 +10137,50 @@
<key>Value</key>
<integer>0</integer>
</map>
<key>RenderHUDObjectsWarning</key>
<map>
<key>Comment</key>
<string>Viewer will warn the user about a HUD containing too many objects if the object count is above this value</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>U32</string>
<key>Value</key>
<integer>1000</integer>
</map>
<key>RenderHUDTexturesWarning</key>
<map>
<key>Comment</key>
<string>Viewer will warn the user about a HUD containing too many textures if the texture count is above this value</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>U32</string>
<key>Value</key>
<integer>200</integer>
</map>
<key>RenderHUDOversizedTexturesWarning</key>
<map>
<key>Comment</key>
<string>How many textures sized 1024 * 1024 or larger a HUD can contain before the user is notified</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>U32</string>
<key>Value</key>
<integer>6</integer>
</map>
<key>RenderHUDTexturesMemoryWarning</key>
<map>
<key>Comment</key>
<string>Viewer will warn the user about HUD textures using more memory than this value (in bytes)</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>U32</string>
<key>Value</key>
<integer>32000000</integer>
</map>
<key>RenderAutoMuteSurfaceAreaLimit</key>
<map>
<key>Comment</key>
@ -12825,6 +12869,17 @@
<key>Value</key>
<real>1.0</real>
</map>
<key>LastSystemUIScaleFactor</key>
<map>
<key>Comment</key>
<string>System UI size during the last run. On Windows, the 100% (96 DPI) system setting corresponds to a UI size of 1.0</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>F32</string>
<key>Value</key>
<real>1.0</real>
</map>
<key>UIScrollbarSize</key>
<map>
<key>Comment</key>
@ -13320,6 +13375,17 @@
<key>Value</key>
<string>1</string>
</map>
<key>UpdaterShowReleaseNotes</key>
<map>
<key>Comment</key>
<string>Enables display of the release notes in a web floater after an update.</string>
<key>Persist</key>
<integer>1</integer>
<key>Type</key>
<string>Boolean</string>
<key>Value</key>
<integer>1</integer>
</map>
<key>UploadBakedTexOld</key>
<map>
<key>Comment</key>

View File

@ -3895,11 +3895,17 @@ void LLAgent::handleTeleportFinished()
mIsMaturityRatingChangingDuringTeleport = false;
}
// Init SLM Marketplace connection so we know which UI should be used for the user as a merchant
// Note: Eventually, all merchant will be migrated to the new SLM system and there will be no reason to show the old UI at all.
// Note: Some regions will not support the SLM cap for a while so we need to do that check for each teleport.
// *TODO : Suppress that line from here once the whole grid migrated to SLM and move it to idle_startup() (llstartup.cpp)
check_merchant_status();
if (mRegionp)
{
if (mRegionp->capabilitiesReceived())
{
onCapabilitiesReceivedAfterTeleport();
}
else
{
mRegionp->setCapabilitiesReceivedCallback(boost::bind(&LLAgent::onCapabilitiesReceivedAfterTeleport));
}
}
}
void LLAgent::handleTeleportFailed()
@ -3931,6 +3937,14 @@ void LLAgent::handleTeleportFailed()
}
}
/*static*/
void LLAgent::onCapabilitiesReceivedAfterTeleport()
{
check_merchant_status();
}
void LLAgent::teleportRequest(
const U64& region_handle,
const LLVector3& pos_local,

View File

@ -676,6 +676,8 @@ private:
void handleTeleportFinished();
void handleTeleportFailed();
static void onCapabilitiesReceivedAfterTeleport();
//--------------------------------------------------------------------
// Teleport State
//--------------------------------------------------------------------

View File

@ -62,23 +62,37 @@ using namespace LLAvatarAppearanceDefines;
///////////////////////////////////////////////////////////////////////////////
void set_default_permissions(LLViewerInventoryItem* item)
{
llassert(item);
LLPermissions perm = item->getPermissions();
if (perm.getMaskNextOwner() != LLFloaterPerms::getNextOwnerPerms("Wearables")
|| perm.getMaskEveryone() != LLFloaterPerms::getEveryonePerms("Wearables")
|| perm.getMaskGroup() != LLFloaterPerms::getGroupPerms("Wearables"))
{
perm.setMaskNext(LLFloaterPerms::getNextOwnerPerms("Wearables"));
perm.setMaskEveryone(LLFloaterPerms::getEveryonePerms("Wearables"));
perm.setMaskGroup(LLFloaterPerms::getGroupPerms("Wearables"));
item->setPermissions(perm);
item->updateServer(FALSE);
}
}
// Callback to wear and start editing an item that has just been created.
void wear_and_edit_cb(const LLUUID& inv_item)
{
if (inv_item.isNull()) return;
LLViewerInventoryItem* item = gInventory.getItem(inv_item);
if (!item) return;
LLViewerInventoryItem* item = gInventory.getItem(inv_item);
if (!item) return;
LLPermissions perm = item->getPermissions();
perm.setMaskNext(LLFloaterPerms::getNextOwnerPerms("Wearables"));
perm.setMaskEveryone(LLFloaterPerms::getEveryonePerms("Wearables"));
perm.setMaskGroup(LLFloaterPerms::getGroupPerms("Wearables"));
item->setPermissions(perm);
set_default_permissions(item);
item->updateServer(FALSE);
gInventory.updateItem(item);
gInventory.notifyObservers();
// item was just created, update even if permissions did not changed
gInventory.updateItem(item);
gInventory.notifyObservers();
// Request editing the item after it gets worn.
gAgentWearables.requestEditingWearable(inv_item);
@ -94,13 +108,8 @@ void wear_cb(const LLUUID& inv_item)
LLViewerInventoryItem* item = gInventory.getItem(inv_item);
if (item)
{
LLPermissions perm = item->getPermissions();
perm.setMaskNext(LLFloaterPerms::getNextOwnerPerms("Wearables"));
perm.setMaskEveryone(LLFloaterPerms::getEveryonePerms("Wearables"));
perm.setMaskGroup(LLFloaterPerms::getGroupPerms("Wearables"));
item->setPermissions(perm);
set_default_permissions(item);
item->updateServer(FALSE);
gInventory.updateItem(item);
gInventory.notifyObservers();
}
@ -253,6 +262,7 @@ void LLAgentWearables::AddWearableToAgentInventoryCallback::fire(const LLUUID& i
{
LLAppearanceMgr::instance().addCOFItemLink(inv_item,
new LLUpdateAppearanceAndEditWearableOnDestroy(inv_item), mDescription);
editWearable(inv_item);
}
}
@ -423,7 +433,7 @@ void LLAgentWearables::saveWearableAs(const LLWearableType::EType type,
// old_wearable may still be referred to by other inventory items. Revert
// unsaved changes so other inventory items aren't affected by the changes
// that were just saved.
old_wearable->revertValues();
old_wearable->revertValuesWithoutUpdate();
}
void LLAgentWearables::revertWearable(const LLWearableType::EType type, const U32 index)
@ -1364,6 +1374,30 @@ void LLAgentWearables::findAttachmentsAddRemoveInfo(LLInventoryModel::item_array
// LL_INFOS() << "remove " << remove_count << " add " << add_count << LL_ENDL;
}
// Collect every object currently attached to the agent's avatar that reports
// itself as a temporary attachment. Returns an empty list when the agent
// avatar is not valid.
std::vector<LLViewerObject*> LLAgentWearables::getTempAttachments()
{
	llvo_vec_t temp_attachs;
	if (!isAgentAvatarValid())
	{
		return temp_attachs;
	}
	LLVOAvatar::attachment_map_t::iterator point_it = gAgentAvatarp->mAttachmentPoints.begin();
	LLVOAvatar::attachment_map_t::iterator point_end = gAgentAvatarp->mAttachmentPoints.end();
	// Walk every attachment point, then every object attached to it.
	for (; point_it != point_end; ++point_it)
	{
		LLViewerJointAttachment* attach_point = point_it->second;
		LLViewerJointAttachment::attachedobjs_vec_t::iterator obj_it = attach_point->mAttachedObjects.begin();
		LLViewerJointAttachment::attachedobjs_vec_t::iterator obj_end = attach_point->mAttachedObjects.end();
		for (; obj_it != obj_end; ++obj_it)
		{
			LLViewerObject* attached_obj = *obj_it;
			if (attached_obj && attached_obj->isTempAttachment())
			{
				temp_attachs.push_back(attached_obj);
			}
		}
	}
	return temp_attachs;
}
void LLAgentWearables::userRemoveMultipleAttachments(llvo_vec_t& objects_to_remove)
{
if (!isAgentAvatarValid()) return;

View File

@ -185,6 +185,8 @@ public:
static void userRemoveMultipleAttachments(llvo_vec_t& llvo_array);
static void userAttachMultipleAttachments(LLInventoryModel::item_array_t& obj_item_array);
static llvo_vec_t getTempAttachments();
//--------------------------------------------------------------------
// Signals
//--------------------------------------------------------------------

View File

@ -1875,15 +1875,15 @@ bool LLAppearanceMgr::getCanReplaceCOF(const LLUUID& outfit_cat_id)
return false;
}
// Check whether the outfit contains any wearables we aren't wearing already (STORM-702).
// Check whether the outfit contains any wearables
LLInventoryModel::cat_array_t cats;
LLInventoryModel::item_array_t items;
LLFindWearablesEx is_worn(/*is_worn=*/ false, /*include_body_parts=*/ true);
LLFindWearables is_wearable;
gInventory.collectDescendentsIf(outfit_cat_id,
cats,
items,
LLInventoryModel::EXCLUDE_TRASH,
is_worn);
is_wearable);
return items.size() > 0;
}
@ -3477,13 +3477,13 @@ void LLAppearanceMgr::serverAppearanceUpdateCoro(LLCoreHttpUtil::HttpCoroutineAd
}
else
{
if (cofVersion < lastRcv)
if (cofVersion <= lastRcv)
{
LL_WARNS("Avatar") << "Have already received update for cof version " << lastRcv
<< " but requesting for " << cofVersion << LL_ENDL;
return;
}
if (lastReq > cofVersion)
if (lastReq >= cofVersion)
{
LL_WARNS("Avatar") << "Request already in flight for cof version " << lastReq
<< " but requesting for " << cofVersion << LL_ENDL;
@ -3503,7 +3503,7 @@ void LLAppearanceMgr::serverAppearanceUpdateCoro(LLCoreHttpUtil::HttpCoroutineAd
LL_WARNS("Avatar") << "Forcing version failure on COF Baking" << LL_ENDL;
}
LL_INFOS() << "Requesting bake for COF version " << cofVersion << LL_ENDL;
LL_INFOS("Avatar") << "Requesting bake for COF version " << cofVersion << LL_ENDL;
LLSD postData;
if (gSavedSettings.getBOOL("DebugAvatarExperimentalServerAppearanceUpdate"))
@ -3969,6 +3969,10 @@ void LLAppearanceMgr::setAttachmentInvLinkEnable(bool val)
LL_DEBUGS("Avatar") << "setAttachmentInvLinkEnable => " << (int) val << LL_ENDL;
mAttachmentInvLinkEnabled = val;
}
boost::signals2::connection LLAppearanceMgr::setAttachmentsChangedCallback(attachments_changed_callback_t cb)
{
return mAttachmentsChangeSignal.connect(cb);
}
void dumpAttachmentSet(const std::set<LLUUID>& atts, const std::string& msg)
{
@ -3995,6 +3999,8 @@ void LLAppearanceMgr::registerAttachment(const LLUUID& item_id)
gInventory.addChangedMask(LLInventoryObserver::LABEL, item_id);
LLAttachmentsMgr::instance().onAttachmentArrived(item_id);
mAttachmentsChangeSignal();
}
void LLAppearanceMgr::unregisterAttachment(const LLUUID& item_id)
@ -4015,6 +4021,8 @@ void LLAppearanceMgr::unregisterAttachment(const LLUUID& item_id)
{
//LL_INFOS() << "no link changes, inv link not enabled" << LL_ENDL;
}
mAttachmentsChangeSignal();
}
BOOL LLAppearanceMgr::getIsInCOF(const LLUUID& obj_id) const

View File

@ -229,6 +229,10 @@ public:
void setAppearanceServiceURL(const std::string& url) { mAppearanceServiceURL = url; }
std::string getAppearanceServiceURL() const;
typedef boost::function<void ()> attachments_changed_callback_t;
typedef boost::signals2::signal<void ()> attachments_changed_signal_t;
boost::signals2::connection setAttachmentsChangedCallback(attachments_changed_callback_t cb);
private:
@ -272,6 +276,8 @@ private:
LLTimer mInFlightTimer;
static bool mActive;
attachments_changed_signal_t mAttachmentsChangeSignal;
LLUUID mCOFImageID;
std::auto_ptr<LLOutfitUnLockTimer> mUnlockOutfitTimer;

View File

@ -702,7 +702,8 @@ LLAppViewer::LLAppViewer()
mPeriodicSlowFrame(LLCachedControl<bool>(gSavedSettings,"Periodic Slow Frame", FALSE)),
mFastTimerLogThread(NULL),
mUpdater(new LLUpdaterService()),
mSettingsLocationList(NULL)
mSettingsLocationList(NULL),
mIsFirstRun(false)
{
if(NULL != sInstance)
{
@ -1128,17 +1129,23 @@ bool LLAppViewer::init()
#if LL_WINDOWS
if (gGLManager.mGLVersion < LLFeatureManager::getInstance()->getExpectedGLVersion())
{
std::string url;
if (gGLManager.mIsIntel)
{
LLNotificationsUtil::add("IntelOldDriver");
url = LLTrans::getString("IntelDriverPage");
}
else if (gGLManager.mIsNVIDIA)
{
LLNotificationsUtil::add("NVIDIAOldDriver");
url = LLTrans::getString("NvidiaDriverPage");
}
else if (gGLManager.mIsATI)
{
LLNotificationsUtil::add("AMDOldDriver");
url = LLTrans::getString("AMDDriverPage");
}
if (!url.empty())
{
LLNotificationsUtil::add("OldGPUDriver", LLSD().with("URL", url));
}
}
#endif
@ -1226,6 +1233,8 @@ bool LLAppViewer::init()
boost::bind(&LLControlGroup::getU32, boost::ref(gSavedSettings), _1),
boost::bind(&LLControlGroup::declareU32, boost::ref(gSavedSettings), _1, _2, _3, LLControlVariable::PERSIST_ALWAYS));
showReleaseNotesIfRequired();
/*----------------------------------------------------------------------*/
// nat 2016-06-29 moved the following here from the former mainLoop().
mMainloopTimeout = new LLWatchdogTimeout();
@ -2477,7 +2486,10 @@ bool LLAppViewer::initConfiguration()
if (gSavedSettings.getBOOL("FirstRunThisInstall"))
{
// Note that the "FirstRunThisInstall" settings is currently unused.
// Set firstrun flag to indicate that some further init actiona should be taken
// like determining screen DPI value and so on
mIsFirstRun = true;
gSavedSettings.setBOOL("FirstRunThisInstall", FALSE);
}
@ -3134,7 +3146,8 @@ bool LLAppViewer::initWindow()
.min_width(gSavedSettings.getU32("MinWindowWidth"))
.min_height(gSavedSettings.getU32("MinWindowHeight"))
.fullscreen(gSavedSettings.getBOOL("FullScreen"))
.ignore_pixel_depth(ignorePixelDepth);
.ignore_pixel_depth(ignorePixelDepth)
.first_run(mIsFirstRun);
gViewerWindow = new LLViewerWindow(window_params);
@ -5805,6 +5818,20 @@ void LLAppViewer::launchUpdater()
// LLAppViewer::instance()->forceQuit();
}
/**
* Check if user is running a new version of the viewer.
* Display the Release Notes if it's not overriden by the "UpdaterShowReleaseNotes" setting.
*/
void LLAppViewer::showReleaseNotesIfRequired()
{
if (LLVersionInfo::getChannelAndVersion() != gLastRunVersion
&& gSavedSettings.getBOOL("UpdaterShowReleaseNotes")
&& !gSavedSettings.getBOOL("FirstLoginThisInstall"))
{
LLSD info(getViewerInfo());
LLWeb::loadURLInternal(info["VIEWER_RELEASE_NOTES_URL"]);
}
}
//virtual
void LLAppViewer::setMasterSystemAudioMute(bool mute)

View File

@ -254,6 +254,8 @@ private:
void sendLogoutRequest();
void disconnectViewer();
void showReleaseNotesIfRequired();
// *FIX: the app viewer class should be some sort of singleton, no?
// Perhaps its child class is the singleton and this should be an abstract base.
@ -315,6 +317,7 @@ private:
// llcorehttp library init/shutdown helper
LLAppCoreHttp mAppCoreHttp;
bool mIsFirstRun;
//---------------------------------------------
//*NOTE: Mani - legacy updater stuff
// Still useable?

View File

@ -231,6 +231,8 @@ int APIENTRY WINMAIN(HINSTANCE hInstance,
DWORD heap_enable_lfh_error[MAX_HEAPS];
S32 num_heaps = 0;
LLWindowWin32::setDPIAwareness();
#if WINDOWS_CRT_MEM_CHECKS && !INCLUDE_VLD
_CrtSetDbgFlag ( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF ); // dump memory leaks on exit
#elif 0
@ -567,7 +569,7 @@ bool LLAppViewerWin32::initHardwareTest()
// Do driver verification and initialization based on DirectX
// hardware polling and driver versions
//
if (FALSE == gSavedSettings.getBOOL("NoHardwareProbe"))
if (TRUE == gSavedSettings.getBOOL("ProbeHardwareOnStartup") && FALSE == gSavedSettings.getBOOL("NoHardwareProbe"))
{
// per DEV-11631 - disable hardware probing for everything
// but vram.

View File

@ -950,15 +950,22 @@ bool LLAvatarActions::canShareSelectedItems(LLInventoryPanel* inv_panel /* = NUL
const std::set<LLFolderViewItem*> inventory_selected = root_folder->getSelectionList();
if (inventory_selected.empty()) return false; // nothing selected
const LLUUID trash_id = gInventory.findCategoryUUIDForType(LLFolderType::FT_TRASH);
bool can_share = true;
std::set<LLFolderViewItem*>::const_iterator it = inventory_selected.begin();
const std::set<LLFolderViewItem*>::const_iterator it_end = inventory_selected.end();
for (; it != it_end; ++it)
{
LLViewerInventoryCategory* inv_cat = gInventory.getCategory(static_cast<LLFolderViewModelItemInventory*>((*it)->getViewModelItem())->getUUID());
// any category can be offered.
LLUUID cat_id = static_cast<LLFolderViewModelItemInventory*>((*it)->getViewModelItem())->getUUID();
LLViewerInventoryCategory* inv_cat = gInventory.getCategory(cat_id);
// any category can be offered if it's not in trash.
if (inv_cat)
{
if ((cat_id == trash_id) || gInventory.isObjectDescendentOf(cat_id, trash_id))
{
can_share = false;
break;
}
continue;
}

View File

@ -38,6 +38,7 @@
#include "llnotifications.h"
#include "llnotificationsutil.h"
#include "llnotificationtemplate.h"
#include "llslurl.h"
#include "lltimer.h"
#include "llvoavatarself.h"
#include "llviewercontrol.h"
@ -51,6 +52,11 @@ static const F32 RENDER_ALLOWED_CHANGE_PCT = 0.1;
// wait seconds before processing over limit updates after last complexity change
static const U32 OVER_LIMIT_UPDATE_DELAY = 70;
// Default thresholds above which a HUD complexity warning may be raised;
// each can be overridden by the matching "RenderHUD*Warning" debug setting
// (see getWarningType()).
static const U32 WARN_HUD_OBJECTS_LIMIT = 1000;
static const U32 WARN_HUD_TEXTURES_LIMIT = 200;
static const U32 WARN_HUD_OVERSIZED_TEXTURES_LIMIT = 6;
static const U32 WARN_HUD_TEXTURE_MEMORY_LIMIT = 32000000; // in bytes
LLAvatarRenderNotifier::LLAvatarRenderNotifier() :
mAgentsCount(0),
@ -264,3 +270,220 @@ void LLAvatarRenderNotifier::updateNotificationAgent(U32 agentComplexity)
}
}
// LLHUDRenderNotifier
// Notification message template names, indexed by EWarnLevel
// (WARN_TEXTURES .. WARN_MEMORY); keep the order in sync with that enum.
static const char* e_hud_messages[] =
{
	"hud_render_textures_warning",
	"hud_render_cramped_warning",
	"hud_render_heavy_textures_warning",
	"hud_render_cost_warning",
	"hud_render_memory_warning",
};
// Start with no warning reported, so the first over-limit HUD update can
// trigger a notification.
LLHUDRenderNotifier::LLHUDRenderNotifier() :
mReportedHUDWarning(WARN_NONE)
{
}
// Default teardown; members release their own resources.
LLHUDRenderNotifier::~LLHUDRenderNotifier()
{
}
// Examine the per-attachment HUD complexity list, warn the user about the
// single most expensive HUD object (or, failing that, about the combined
// total), and log the aggregate totals whenever they change.
// Pop-ups are throttled via mHUDPopUpDelayTimer and de-duplicated through
// mReportedHUDComplexity / mReportedHUDWarning.
void LLHUDRenderNotifier::updateNotificationHUD(hud_complexity_list_t complexity)
{
	if (!isAgentAvatarValid() || !gAgentWearables.areWearablesLoaded())
	{
		// data not ready.
		return;
	}

	// TODO:
	// Find a way to show message with list of issues, but without making it too large
	// and intrusive.

	LLHUDComplexity new_total_complexity;
	LLHUDComplexity report_complexity;

	hud_complexity_list_t::iterator iter = complexity.begin();
	hud_complexity_list_t::iterator end = complexity.end();
	EWarnLevel warning_level = WARN_NONE;
	// Single pass: find the worst individual offender (report_complexity)
	// while accumulating the totals for all HUD objects.
	for (; iter != end; ++iter)
	{
		LLHUDComplexity object_complexity = *iter;
		// Compare against the current worst offender so that, at loop end,
		// report_complexity holds the most severe object in the list.
		EWarnLevel object_level = getWarningType(object_complexity, report_complexity);
		if (object_level >= 0) // i.e. anything other than WARN_NONE (-1)
		{
			warning_level = object_level;
			report_complexity = object_complexity;
		}

		new_total_complexity.objectsCost += object_complexity.objectsCost;
		new_total_complexity.objectsCount += object_complexity.objectsCount;
		new_total_complexity.texturesCost += object_complexity.texturesCost;
		new_total_complexity.texturesCount += object_complexity.texturesCount;
		new_total_complexity.largeTexturesCount += object_complexity.largeTexturesCount;
		new_total_complexity.texturesMemoryTotal += object_complexity.texturesMemoryTotal;
	}

	if (mHUDPopUpDelayTimer.hasExpired() || isNotificationVisible())
	{
		if (warning_level >= 0)
		{
			// Display info about most complex HUD object
			// make sure it shown only once unless object's complexity or object itself changed
			if (mReportedHUDComplexity.objectId != report_complexity.objectId
				|| mReportedHUDWarning != warning_level)
			{
				displayHUDNotification(warning_level, report_complexity.objectId, report_complexity.objectName, report_complexity.jointName);
				mReportedHUDComplexity = report_complexity;
				mReportedHUDWarning = warning_level;
			}
		}
		else
		{
			// Check if total complexity is above threshold and above previous warning
			// Show warning with highest importance (5m delay between warnings by default)
			if (!mReportedHUDComplexity.objectId.isNull())
			{
				// Previous report was about an individual object; clear it so
				// the totals below are compared against a clean baseline.
				mReportedHUDComplexity.reset();
				mReportedHUDWarning = WARN_NONE;
			}

			warning_level = getWarningType(new_total_complexity, mReportedHUDComplexity);
			if (warning_level >= 0 && mReportedHUDWarning != warning_level)
			{
				displayHUDNotification(warning_level);
			}

			mReportedHUDComplexity = new_total_complexity;
			mReportedHUDWarning = warning_level;
		}
	}
	else if (warning_level >= 0)
	{
		// Pop-up delay still active: skip the notification now; state is left
		// untouched so it can fire on a later update.
		LL_DEBUGS("HUDdetail") << "HUD individual warning postponed" << LL_ENDL;
	}

	// Log the aggregate numbers only when something actually changed,
	// to avoid spamming the log every frame.
	if (mLatestHUDComplexity.objectsCost != new_total_complexity.objectsCost
		|| mLatestHUDComplexity.objectsCount != new_total_complexity.objectsCount
		|| mLatestHUDComplexity.texturesCost != new_total_complexity.texturesCost
		|| mLatestHUDComplexity.texturesCount != new_total_complexity.texturesCount
		|| mLatestHUDComplexity.largeTexturesCount != new_total_complexity.largeTexturesCount
		|| mLatestHUDComplexity.texturesMemoryTotal != new_total_complexity.texturesMemoryTotal)
	{
		LL_INFOS("HUDdetail") << "HUD textures count: " << new_total_complexity.texturesCount
			<< " HUD textures cost: " << new_total_complexity.texturesCost
			<< " Large textures: " << new_total_complexity.largeTexturesCount
			<< " HUD objects cost: " << new_total_complexity.objectsCost
			<< " HUD objects count: " << new_total_complexity.objectsCount << LL_ENDL;
		mLatestHUDComplexity = new_total_complexity;
	}
}
// True while a HUD complexity notification is currently shown to the user.
bool LLHUDRenderNotifier::isNotificationVisible()
{
	if (mHUDNotificationPtr == NULL)
	{
		return false;
	}
	return mHUDNotificationPtr->isActive();
}
// private static
// Classify how object_complexity exceeds configured limits, relative to a
// previous baseline (cmp_complexity): a warning is returned only for a metric
// that both grew past the baseline and exceeds its threshold. Checks run from
// most to least important (memory, cost, oversized textures, texture count,
// object count); returns WARN_NONE when nothing is over limit.
// NOTE(review): object-count overflow maps to WARN_TEXTURES while
// texture-count overflow maps to WARN_CRAMPED -- verify this mapping against
// the e_hud_messages template strings.
LLHUDRenderNotifier::EWarnLevel LLHUDRenderNotifier::getWarningType(LLHUDComplexity object_complexity, LLHUDComplexity cmp_complexity)
{
	static LLCachedControl<U32> max_render_cost(gSavedSettings, "RenderAvatarMaxComplexity", 0U); // ties max HUD cost to avatar cost
	static LLCachedControl<U32> max_objects_count(gSavedSettings, "RenderHUDObjectsWarning", WARN_HUD_OBJECTS_LIMIT);
	static LLCachedControl<U32> max_textures_count(gSavedSettings, "RenderHUDTexturesWarning", WARN_HUD_TEXTURES_LIMIT);
	static LLCachedControl<U32> max_oversized_count(gSavedSettings, "RenderHUDOversizedTexturesWarning", WARN_HUD_OVERSIZED_TEXTURES_LIMIT);
	static LLCachedControl<U32> max_texture_memory(gSavedSettings, "RenderHUDTexturesMemoryWarning", WARN_HUD_TEXTURE_MEMORY_LIMIT);

	if (cmp_complexity.texturesMemoryTotal < object_complexity.texturesMemoryTotal
		&& object_complexity.texturesMemoryTotal > (F64Bytes)max_texture_memory)
	{
		// Note: Memory might not be accurate since texture is still loading or discard level changes
		LL_DEBUGS("HUDdetail") << "HUD " << object_complexity.objectName << " memory usage over limit, "
			<< " was " << cmp_complexity.texturesMemoryTotal
			<< " is " << object_complexity.texturesMemoryTotal << LL_ENDL;
		return WARN_MEMORY;
	}
	else if ((cmp_complexity.objectsCost < object_complexity.objectsCost
		|| cmp_complexity.texturesCost < object_complexity.texturesCost)
		&& max_render_cost > 0
		&& object_complexity.objectsCost + object_complexity.texturesCost > max_render_cost)
	{
		LL_DEBUGS("HUDdetail") << "HUD " << object_complexity.objectName << " complexity over limit,"
			<< " HUD textures cost: " << object_complexity.texturesCost
			<< " HUD objects cost: " << object_complexity.objectsCost << LL_ENDL;
		return WARN_COST;
	}
	else if (cmp_complexity.largeTexturesCount < object_complexity.largeTexturesCount
		&& object_complexity.largeTexturesCount > max_oversized_count)
	{
		// Fixed typo in log message: "to many" -> "too many".
		LL_DEBUGS("HUDdetail") << "HUD " << object_complexity.objectName << " contains too many large textures: "
			<< object_complexity.largeTexturesCount << LL_ENDL;
		return WARN_HEAVY;
	}
	else if (cmp_complexity.texturesCount < object_complexity.texturesCount
		&& object_complexity.texturesCount > max_textures_count)
	{
		LL_DEBUGS("HUDdetail") << "HUD " << object_complexity.objectName << " contains too many textures: "
			<< object_complexity.texturesCount << LL_ENDL;
		return WARN_CRAMPED;
	}
	else if (cmp_complexity.objectsCount < object_complexity.objectsCount
		&& object_complexity.objectsCount > max_objects_count)
	{
		LL_DEBUGS("HUDdetail") << "HUD " << object_complexity.objectName << " contains too many objects: "
			<< object_complexity.objectsCount << LL_ENDL;
		return WARN_TEXTURES;
	}
	return WARN_NONE;
}
// Show the "HUDComplexityWarning" notification for the given warning level.
// With a null obj_id the message describes the combined HUD total; otherwise
// it names the offending object (as an inventory SLURL), optionally with the
// joint it is attached to. Re-arms mHUDPopUpDelayTimer to throttle repeats.
// Note: warn_type is used to index e_hud_messages, so it must not be
// WARN_NONE (-1); callers check the level before calling.
void LLHUDRenderNotifier::displayHUDNotification(EWarnLevel warn_type, LLUUID obj_id, std::string obj_name, std::string joint_name)
{
	// Throttle interval and on-screen lifetime (seconds), both user-tunable.
	static LLCachedControl<U32> pop_up_delay(gSavedSettings, "ComplexityChangesPopUpDelay", 300);
	static LLCachedControl<U32> expire_delay(gSavedSettings, "ShowMyComplexityChanges", 20);
	LLDate expire_date(LLDate::now().secondsSinceEpoch() + expire_delay);

	// Since we need working "ignoretext" there is no other way but to
	// use single notification while constructing it from multiple pieces
	LLSD reason_args;
	if (obj_id.isNull())
	{
		// Warning concerns the sum of all HUD attachments.
		reason_args["HUD_DETAILS"] = LLTrans::getString("hud_description_total");
	}
	else
	{
		if (obj_name.empty())
		{
			LL_WARNS("HUDdetail") << "Object name not assigned" << LL_ENDL;
		}
		if (joint_name.empty())
		{
			// Link straight to the object in inventory by name.
			std::string verb = "select?name=" + LLURI::escape(obj_name);
			reason_args["HUD_DETAILS"] = LLSLURL("inventory", obj_id, verb.c_str()).getSLURLString();
		}
		else
		{
			// Compose an "<object> on <joint>" message via the
			// hud_name_with_joint translation template.
			LLSD object_args;
			std::string verb = "select?name=" + LLURI::escape(obj_name);
			object_args["OBJ_NAME"] = LLSLURL("inventory", obj_id, verb.c_str()).getSLURLString();
			object_args["JNT_NAME"] = LLTrans::getString(joint_name);
			reason_args["HUD_DETAILS"] = LLTrans::getString("hud_name_with_joint", object_args);
		}
	}

	LLSD msg_args;
	// Pick the message template matching the warning level (see e_hud_messages).
	msg_args["HUD_REASON"] = LLTrans::getString(e_hud_messages[warn_type], reason_args);

	mHUDNotificationPtr = LLNotifications::instance().add(LLNotification::Params()
		.name("HUDComplexityWarning")
		.expiry(expire_date)
		.substitutions(msg_args));
	mHUDPopUpDelayTimer.resetWithExpiry(pop_up_delay);
}

View File

@ -33,6 +33,36 @@
class LLViewerRegion;
struct LLHUDComplexity
{
LLHUDComplexity()
{
reset();
}
void reset()
{
objectId = LLUUID::null;
objectName = "";
objectsCost = 0;
objectsCount = 0;
texturesCost = 0;
texturesCount = 0;
largeTexturesCount = 0;
texturesMemoryTotal = (F64Bytes)0;
}
LLUUID objectId;
std::string objectName;
std::string jointName;
U32 objectsCost;
U32 objectsCount;
U32 texturesCost;
U32 texturesCount;
U32 largeTexturesCount;
F64Bytes texturesMemoryTotal;
};
typedef std::list<LLHUDComplexity> hud_complexity_list_t;
// Class to notify user about drastic changes in agent's render weights or if other agents
// reported that user's agent is too 'heavy' for their settings
class LLAvatarRenderNotifier : public LLSingleton<LLAvatarRenderNotifier>
@ -81,4 +111,36 @@ private:
S32 mLastOutfitRezStatus;
};
// Class to notify user about heavy set of HUD
class LLHUDRenderNotifier : public LLSingleton<LLHUDRenderNotifier>
{
public:
	LLHUDRenderNotifier();
	~LLHUDRenderNotifier();

	// Process per-object HUD complexity data and raise user notifications
	// when configured limits are exceeded.
	void updateNotificationHUD(hud_complexity_list_t complexity);
	// True while a HUD complexity notification is active on screen.
	bool isNotificationVisible();

private:
	// Warning categories in increasing order of importance; non-negative
	// values index into the notification message table (e_hud_messages).
	enum EWarnLevel
	{
		WARN_NONE = -1,
		WARN_TEXTURES = 0, // least important
		WARN_CRAMPED,
		WARN_HEAVY,
		WARN_COST,
		WARN_MEMORY, //most important
	};

	LLNotificationPtr mHUDNotificationPtr;	// currently displayed notification, if any

	// Decide which warning (if any) object_complexity deserves, compared
	// against cmp_complexity as the previous baseline.
	static EWarnLevel getWarningType(LLHUDComplexity object_complexity, LLHUDComplexity cmp_complexity);
	void displayHUDNotification(EWarnLevel warn_type, LLUUID obj_id = LLUUID::null, std::string object_name = "", std::string joint_name = "");

	LLHUDComplexity mReportedHUDComplexity;	// complexity most recently warned about
	EWarnLevel mReportedHUDWarning;			// warning level most recently shown
	LLHUDComplexity mLatestHUDComplexity;	// totals most recently logged
	LLFrameTimer mHUDPopUpDelayTimer;		// throttles successive pop-ups
};
#endif /* ! defined(LL_llavatarrendernotifier_H) */

View File

@ -311,7 +311,8 @@ LLWString LLChatBar::stripChannelNumber(const LLWString &mesg, S32* channel)
}
else if (mesg[0] == '/'
&& mesg[1]
&& LLStringOps::isDigit(mesg[1]))
&& (LLStringOps::isDigit(mesg[1])
|| (mesg[1] == '-' && mesg[2] && LLStringOps::isDigit(mesg[2]))))
{
// This a special "/20" speak on a channel
S32 pos = 0;
@ -325,7 +326,7 @@ LLWString LLChatBar::stripChannelNumber(const LLWString &mesg, S32* channel)
channel_string.push_back(c);
pos++;
}
while(c && pos < 64 && LLStringOps::isDigit(c));
while(c && pos < 64 && (LLStringOps::isDigit(c) || (pos == 1 && c == '-')));
// Move the pointer forward to the first non-whitespace char
// Check isspace before looping, so we can handle "/33foo"

View File

@ -448,7 +448,12 @@ bool LLConversationLog::moveLog(const std::string &originDirectory, const std::s
std::string LLConversationLog::getFileName()
{
std::string filename = "conversation";
return gDirUtilp->getExpandedFilename(LL_PATH_PER_ACCOUNT_CHAT_LOGS, filename) + ".log";
std::string log_address = gDirUtilp->getExpandedFilename(LL_PATH_PER_ACCOUNT_CHAT_LOGS, filename);
if (!log_address.empty())
{
log_address += ".log";
}
return log_address;
}
bool LLConversationLog::saveToFile(const std::string& filename)

View File

@ -108,6 +108,7 @@ private:
static const std::string sCheckUpdateListenerName;
static void startFetchServerReleaseNotes();
static void fetchServerReleaseNotesCoro(const std::string& cap_url);
static void handleServerReleaseNotes(LLSD results);
};
@ -224,35 +225,62 @@ void LLFloaterAbout::startFetchServerReleaseNotes()
// an URL suitable for external browsers in the "Location:" HTTP header.
std::string cap_url = region->getCapability("ServerReleaseNotes");
LLCoreHttpUtil::HttpCoroutineAdapter::callbackHttpGet(cap_url,
&LLFloaterAbout::handleServerReleaseNotes, &LLFloaterAbout::handleServerReleaseNotes);
LLCoros::instance().launch("fetchServerReleaseNotesCoro", boost::bind(&LLFloaterAbout::fetchServerReleaseNotesCoro, cap_url));
}
/*static*/
// Coroutine: GET the "ServerReleaseNotes" capability and hand the outcome to
// handleServerReleaseNotes(), which extracts the release-notes URL from the
// "Location:" response header. Redirects are deliberately not followed so
// that the Location header itself reaches the handler.
void LLFloaterAbout::fetchServerReleaseNotesCoro(const std::string& cap_url)
{
	LLCoreHttpUtil::HttpCoroutineAdapter::ptr_t
		httpAdapter(new LLCoreHttpUtil::HttpCoroutineAdapter("fetchServerReleaseNotesCoro", LLCore::HttpRequest::DEFAULT_POLICY_ID));
	LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
	LLCore::HttpOptions::ptr_t httpOpts(new LLCore::HttpOptions);

	// Need response headers for "Location:", and must see the redirect
	// response rather than its target.
	httpOpts->setWantHeaders(true);
	httpOpts->setFollowRedirects(false);

	LLSD result = httpAdapter->getAndSuspend(httpRequest, cap_url, httpOpts);

	LLSD httpResults = result[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS];
	LLCore::HttpStatus status = LLCoreHttpUtil::HttpCoroutineAdapter::getStatusFromLLSD(httpResults);
	if (!status)
	{
		// Non-success status: pass only the http-results map.
		// NOTE(review): with redirects disabled the expected reply is
		// presumably a 3xx, which reads as !status here -- confirm.
		// handleServerReleaseNotes accepts either LLSD shape.
		handleServerReleaseNotes(httpResults);
	}
	else
	{
		handleServerReleaseNotes(result);
	}
}
/*static*/
void LLFloaterAbout::handleServerReleaseNotes(LLSD results)
{
// LLFloaterAbout* floater_about = LLFloaterReg::getTypedInstance<LLFloaterAbout>("sl_about");
// if (floater_about)
// {
LLSD http_headers;
if (results.has(LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS))
{
LLSD http_results = results[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS];
http_headers = http_results[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS_HEADERS];
}
else
{
http_headers = results[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS_HEADERS];
}
std::string location = http_headers[HTTP_IN_HEADER_LOCATION].asString();
if (location.empty())
{
location = LLTrans::getString("ErrorFetchingServerReleaseNotesURL");
}
LLAppViewer::instance()->setServerReleaseNotesURL(location);
// }
LLSD http_headers;
if (results.has(LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS))
{
LLSD http_results = results[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS];
http_headers = http_results[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS_HEADERS];
}
else
{
http_headers = results[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS_HEADERS];
}
std::string location = http_headers[HTTP_IN_HEADER_LOCATION].asString();
if (location.empty())
{
location = LLTrans::getString("ErrorFetchingServerReleaseNotesURL");
}
LLAppViewer::instance()->setServerReleaseNotesURL(location);
LLFloaterAbout* floater_about = LLFloaterReg::findTypedInstance<LLFloaterAbout>("sl_about");
if (floater_about)
{
floater_about->setSupportText(location);
}
}
class LLFloaterAboutListener: public LLEventAPI

View File

@ -55,6 +55,8 @@
//#include "llsdserialize.h"
static const U32 AVATAR_PICKER_SEARCH_TIMEOUT = 180U;
//put it back as a member once the legacy path is out?
static std::map<LLUUID, LLAvatarName> sAvatarNameMap;
@ -463,10 +465,13 @@ void LLFloaterAvatarPicker::findCoro(std::string url, LLUUID queryID, std::strin
LLCoreHttpUtil::HttpCoroutineAdapter::ptr_t
httpAdapter(new LLCoreHttpUtil::HttpCoroutineAdapter("genericPostCoro", httpPolicy));
LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
LLCore::HttpOptions::ptr_t httpOpts(new LLCore::HttpOptions);
LL_INFOS("HttpCoroutineAdapter", "genericPostCoro") << "Generic POST for " << url << LL_ENDL;
LLSD result = httpAdapter->getAndSuspend(httpRequest, url);
httpOpts->setTimeout(AVATAR_PICKER_SEARCH_TIMEOUT);
LLSD result = httpAdapter->getAndSuspend(httpRequest, url, httpOpts);
LLSD httpResults = result[LLCoreHttpUtil::HttpCoroutineAdapter::HTTP_RESULTS];
LLCore::HttpStatus status = LLCoreHttpUtil::HttpCoroutineAdapter::getStatusFromLLSD(httpResults);

View File

@ -280,7 +280,7 @@ void LLFloaterBuyCurrencyUI::onClickCancel()
void LLFloaterBuyCurrencyUI::onClickErrorWeb()
{
LLWeb::loadURLExternal(mManager.errorURI());
LLWeb::loadURL(mManager.errorURI());
closeFloater();
// Update L$ balance
LLStatusBar::sendMoneyBalanceRequest();

View File

@ -328,7 +328,7 @@ void LLFloaterGesture::addGesture(const LLUUID& item_id , LLMultiGesture* gestur
element["columns"][0]["font"]["name"] = "SANSSERIF";
element["columns"][0]["font"]["style"] = font_style;
std::string key_string = LLKeyboard::stringFromKey(gesture->mKey);
std::string key_string;
std::string buffer;
if (gesture->mKey == KEY_NONE)
@ -338,6 +338,7 @@ void LLFloaterGesture::addGesture(const LLUUID& item_id , LLMultiGesture* gestur
}
else
{
key_string = LLKeyboard::stringFromKey(gesture->mKey);
buffer = LLKeyboard::stringFromAccelerator(gesture->mMask,
gesture->mKey);
}

View File

@ -798,7 +798,8 @@ LLWString LLFloaterIMNearbyChat::stripChannelNumber(const LLWString &mesg, S32*
}
else if (mesg[0] == '/'
&& mesg[1]
&& LLStringOps::isDigit(mesg[1]))
&& (LLStringOps::isDigit(mesg[1])
|| (mesg[1] == '-' && mesg[2] && LLStringOps::isDigit(mesg[2]))))
{
// This a special "/20" speak on a channel
S32 pos = 0;
@ -812,7 +813,7 @@ LLWString LLFloaterIMNearbyChat::stripChannelNumber(const LLWString &mesg, S32*
channel_string.push_back(c);
pos++;
}
while(c && pos < 64 && LLStringOps::isDigit(c));
while(c && pos < 64 && (LLStringOps::isDigit(c) || (pos==1 && c =='-')));
// Move the pointer forward to the first non-whitespace char
// Check isspace before looping, so we can handle "/33foo"
@ -837,19 +838,36 @@ LLWString LLFloaterIMNearbyChat::stripChannelNumber(const LLWString &mesg, S32*
void send_chat_from_viewer(const std::string& utf8_out_text, EChatType type, S32 channel)
{
LLMessageSystem* msg = gMessageSystem;
msg->newMessageFast(_PREHASH_ChatFromViewer);
msg->nextBlockFast(_PREHASH_AgentData);
msg->addUUIDFast(_PREHASH_AgentID, gAgent.getID());
msg->addUUIDFast(_PREHASH_SessionID, gAgent.getSessionID());
msg->nextBlockFast(_PREHASH_ChatData);
msg->addStringFast(_PREHASH_Message, utf8_out_text);
msg->addU8Fast(_PREHASH_Type, type);
msg->addS32("Channel", channel);
LLMessageSystem* msg = gMessageSystem;
gAgent.sendReliableMessage();
if (channel >= 0)
{
msg->newMessageFast(_PREHASH_ChatFromViewer);
msg->nextBlockFast(_PREHASH_AgentData);
msg->addUUIDFast(_PREHASH_AgentID, gAgent.getID());
msg->addUUIDFast(_PREHASH_SessionID, gAgent.getSessionID());
msg->nextBlockFast(_PREHASH_ChatData);
msg->addStringFast(_PREHASH_Message, utf8_out_text);
msg->addU8Fast(_PREHASH_Type, type);
msg->addS32("Channel", channel);
add(LLStatViewer::CHAT_COUNT, 1);
}
else
{
// Hack: ChatFromViewer doesn't allow negative channels
msg->newMessage("ScriptDialogReply");
msg->nextBlock("AgentData");
msg->addUUID("AgentID", gAgentID);
msg->addUUID("SessionID", gAgentSessionID);
msg->nextBlock("Data");
msg->addUUID("ObjectID", gAgentID);
msg->addS32("ChatChannel", channel);
msg->addS32("ButtonIndex", 0);
msg->addString("ButtonLabel", utf8_out_text);
}
gAgent.sendReliableMessage();
add(LLStatViewer::CHAT_COUNT, 1);
}
class LLChatCommandHandler : public LLCommandHandler

View File

@ -72,6 +72,8 @@ struct LLGiveMoneyInfo
mFloater(floater), mAmount(amount){}
};
typedef boost::shared_ptr<LLGiveMoneyInfo> give_money_ptr;
///----------------------------------------------------------------------------
/// Class LLFloaterPay
///----------------------------------------------------------------------------
@ -94,18 +96,18 @@ public:
bool is_group);
static bool payConfirmationCallback(const LLSD& notification,
const LLSD& response,
LLGiveMoneyInfo* info);
give_money_ptr info);
private:
static void onCancel(void* data);
static void onKeystroke(LLLineEditor* editor, void* data);
static void onGive(void* data);
static void onGive(give_money_ptr info);
void give(S32 amount);
static void processPayPriceReply(LLMessageSystem* msg, void **userdata);
void finishPayUI(const LLUUID& target_id, BOOL is_group);
protected:
std::vector<LLGiveMoneyInfo*> mCallbackData;
std::vector<give_money_ptr> mCallbackData;
money_callback mCallback;
LLTextBox* mObjectNameText;
LLUUID mTargetUUID;
@ -113,7 +115,7 @@ protected:
BOOL mHaveName;
LLButton* mQuickPayButton[MAX_PAY_BUTTONS];
LLGiveMoneyInfo* mQuickPayInfo[MAX_PAY_BUTTONS];
give_money_ptr mQuickPayInfo[MAX_PAY_BUTTONS];
LLSafeHandle<LLObjectSelection> mObjectSelection;
};
@ -136,7 +138,11 @@ LLFloaterPay::LLFloaterPay(const LLSD& key)
// Destroys the object
LLFloaterPay::~LLFloaterPay()
{
std::for_each(mCallbackData.begin(), mCallbackData.end(), DeletePointer());
std::vector<give_money_ptr>::iterator iter;
for (iter = mCallbackData.begin(); iter != mCallbackData.end(); ++iter)
{
(*iter)->mFloater = NULL;
}
mCallbackData.clear();
// Name callbacks will be automatically disconnected since LLFloater is trackable
@ -148,40 +154,40 @@ BOOL LLFloaterPay::postBuild()
{
S32 i = 0;
LLGiveMoneyInfo* info = new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_0);
give_money_ptr info = give_money_ptr(new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_0));
mCallbackData.push_back(info);
childSetAction("fastpay 1",&LLFloaterPay::onGive,info);
childSetAction("fastpay 1", boost::bind(LLFloaterPay::onGive, info));
getChildView("fastpay 1")->setVisible(FALSE);
mQuickPayButton[i] = getChild<LLButton>("fastpay 1");
mQuickPayInfo[i] = info;
++i;
info = new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_1);
info = give_money_ptr(new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_1));
mCallbackData.push_back(info);
childSetAction("fastpay 5",&LLFloaterPay::onGive,info);
childSetAction("fastpay 5", boost::bind(LLFloaterPay::onGive, info));
getChildView("fastpay 5")->setVisible(FALSE);
mQuickPayButton[i] = getChild<LLButton>("fastpay 5");
mQuickPayInfo[i] = info;
++i;
info = new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_2);
info = give_money_ptr(new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_2));
mCallbackData.push_back(info);
childSetAction("fastpay 10",&LLFloaterPay::onGive,info);
childSetAction("fastpay 10", boost::bind(LLFloaterPay::onGive, info));
getChildView("fastpay 10")->setVisible(FALSE);
mQuickPayButton[i] = getChild<LLButton>("fastpay 10");
mQuickPayInfo[i] = info;
++i;
info = new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_3);
info = give_money_ptr(new LLGiveMoneyInfo(this, PAY_BUTTON_DEFAULT_3));
mCallbackData.push_back(info);
childSetAction("fastpay 20",&LLFloaterPay::onGive,info);
childSetAction("fastpay 20", boost::bind(LLFloaterPay::onGive, info));
getChildView("fastpay 20")->setVisible(FALSE);
mQuickPayButton[i] = getChild<LLButton>("fastpay 20");
@ -195,10 +201,10 @@ BOOL LLFloaterPay::postBuild()
getChild<LLLineEditor>("amount")->setKeystrokeCallback(&LLFloaterPay::onKeystroke, this);
getChild<LLLineEditor>("amount")->setPrevalidate(LLTextValidate::validateNonNegativeS32);
info = new LLGiveMoneyInfo(this, 0);
info = give_money_ptr(new LLGiveMoneyInfo(this, 0));
mCallbackData.push_back(info);
childSetAction("pay btn",&LLFloaterPay::onGive,info);
childSetAction("pay btn", boost::bind(LLFloaterPay::onGive, info));
setDefaultBtn("pay btn");
getChildView("pay btn")->setVisible(FALSE);
getChildView("pay btn")->setEnabled(FALSE);
@ -415,9 +421,9 @@ void LLFloaterPay::payDirectly(money_callback callback,
floater->finishPayUI(target_id, is_group);
}
bool LLFloaterPay::payConfirmationCallback(const LLSD& notification, const LLSD& response, LLGiveMoneyInfo* info)
bool LLFloaterPay::payConfirmationCallback(const LLSD& notification, const LLSD& response, give_money_ptr info)
{
if (!info || !info->mFloater)
if (!info.get() || !info->mFloater)
{
return false;
}
@ -479,54 +485,61 @@ void LLFloaterPay::onKeystroke(LLLineEditor*, void* data)
}
// static
void LLFloaterPay::onGive(void* data)
void LLFloaterPay::onGive(give_money_ptr info)
{
LLGiveMoneyInfo* info = reinterpret_cast<LLGiveMoneyInfo*>(data);
LLFloaterPay* floater = info->mFloater;
if(info && floater)
{
S32 amount = info->mAmount;
if(amount == 0)
{
amount = atoi(floater->getChild<LLUICtrl>("amount")->getValue().asString().c_str());
}
if (amount > PAY_AMOUNT_NOTIFICATION && gStatusBar && gStatusBar->getBalance() > amount)
{
LLUUID payee_id = LLUUID::null;
BOOL is_group = false;
if (floater->mObjectSelection.notNull())
{
LLSelectNode* node = floater->mObjectSelection->getFirstRootNode();
if (node)
{
node->mPermissions->getOwnership(payee_id, is_group);
}
else
{
// object no longer exists
LLNotificationsUtil::add("PayObjectFailed");
floater->closeFloater();
return;
}
}
else
{
is_group = floater->mTargetIsGroup;
payee_id = floater->mTargetUUID;
}
if (!info.get() || !info->mFloater)
{
return;
}
LLSD args;
args["TARGET"] = LLSLURL( is_group ? "group" : "agent", payee_id, "completename").getSLURLString();
args["AMOUNT"] = amount;
LLFloaterPay* floater = info->mFloater;
S32 amount = info->mAmount;
if (amount == 0)
{
LLUICtrl* text_field = floater->getChild<LLUICtrl>("amount");
if (!text_field)
{
return;
}
amount = atoi(text_field->getValue().asString().c_str());
}
LLNotificationsUtil::add("PayConfirmation", args, LLSD(), boost::bind(&LLFloaterPay::payConfirmationCallback, _1, _2, info));
}
else
{
floater->give(amount);
floater->closeFloater();
}
}
if (amount > PAY_AMOUNT_NOTIFICATION && gStatusBar && gStatusBar->getBalance() > amount)
{
LLUUID payee_id = LLUUID::null;
BOOL is_group = false;
if (floater->mObjectSelection.notNull())
{
LLSelectNode* node = floater->mObjectSelection->getFirstRootNode();
if (node)
{
node->mPermissions->getOwnership(payee_id, is_group);
}
else
{
// object no longer exists
LLNotificationsUtil::add("PayObjectFailed");
floater->closeFloater();
return;
}
}
else
{
is_group = floater->mTargetIsGroup;
payee_id = floater->mTargetUUID;
}
LLSD args;
args["TARGET"] = LLSLURL(is_group ? "group" : "agent", payee_id, "completename").getSLURLString();
args["AMOUNT"] = amount;
LLNotificationsUtil::add("PayConfirmation", args, LLSD(), boost::bind(&LLFloaterPay::payConfirmationCallback, _1, _2, info));
}
else
{
floater->give(amount);
floater->closeFloater();
}
}
void LLFloaterPay::give(S32 amount)

View File

@ -1232,6 +1232,9 @@ void LLFloaterPreference::refreshEnabledState()
(ctrl_wind_light->get()) ? TRUE : FALSE;
ctrl_deferred->setEnabled(enabled);
// Cannot have floater active until caps have been received
getChild<LLButton>("default_creation_permissions")->setEnabled(LLStartUp::getStartupState() < STATE_STARTED ? false : true);
}
void LLFloaterPreferenceGraphicsAdvanced::refreshEnabledState()
@ -1369,9 +1372,6 @@ void LLFloaterPreferenceGraphicsAdvanced::refreshEnabledState()
disableUnavailableSettings();
getChildView("block_list")->setEnabled(LLLoginInstance::getInstance()->authSuccess());
// Cannot have floater active until caps have been received
getChild<LLButton>("default_creation_permissions")->setEnabled(LLStartUp::getStartupState() < STATE_STARTED ? false : true);
}
// static
@ -1411,7 +1411,7 @@ void LLAvatarComplexityControls::setIndirectMaxArc()
else
{
// This is the inverse of the calculation in updateMaxComplexity
indirect_max_arc = (U32)((log(max_arc) - MIN_ARC_LOG) / ARC_LIMIT_MAP_SCALE) + MIN_INDIRECT_ARC_LIMIT;
indirect_max_arc = (U32)ll_round(((log(F32(max_arc)) - MIN_ARC_LOG) / ARC_LIMIT_MAP_SCALE)) + MIN_INDIRECT_ARC_LIMIT;
}
gSavedSettings.setU32("IndirectMaxComplexity", indirect_max_arc);
}
@ -1930,7 +1930,7 @@ void LLAvatarComplexityControls::updateMax(LLSliderCtrl* slider, LLTextBox* valu
{
// if this is changed, the inverse calculation in setIndirectMaxArc
// must be changed to match
max_arc = (U32)exp(MIN_ARC_LOG + (ARC_LIMIT_MAP_SCALE * (indirect_value - MIN_INDIRECT_ARC_LIMIT)));
max_arc = (U32)ll_round(exp(MIN_ARC_LOG + (ARC_LIMIT_MAP_SCALE * (indirect_value - MIN_INDIRECT_ARC_LIMIT))));
}
gSavedSettings.setU32("RenderAvatarMaxComplexity", (U32)max_arc);

Some files were not shown because too many files have changed in this diff Show More