repo_name (string, 6-100 chars) | path (string, 4-294 chars) | copies (string, 1-5 chars) | size (string, 4-6 chars) | content (string, 606-896k chars) | license (string, 15 classes)
---|---|---|---|---|---|
awkspace/ansible | lib/ansible/modules/identity/ipa/ipa_sudocmdgroup.py | 71 | 6070 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipa_sudocmdgroup
author: Thomas Krahn (@Nosmoht)
short_description: Manage FreeIPA sudo command group
description:
- Add, modify or delete sudo command group within IPA server using IPA API.
options:
cn:
description:
- Sudo Command Group.
aliases: ['name']
required: true
description:
description:
- Group description.
state:
description: State to ensure
default: present
choices: ['present', 'absent', 'enabled', 'disabled']
sudocmd:
description:
- List of sudo commands to assign to the group.
- If an empty list is passed all assigned commands will be removed from the group.
- If option is omitted sudo commands will not be checked or changed.
extends_documentation_fragment: ipa.documentation
version_added: "2.3"
'''
EXAMPLES = '''
- name: Ensure sudo command group exists
ipa_sudocmdgroup:
name: group01
description: Group of important commands
sudocmd:
- su
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
- name: Ensure sudo command group does not exist
ipa_sudocmdgroup:
name: group01
state: absent
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
'''
RETURN = '''
sudocmdgroup:
description: Sudo command group as returned by IPA API
returned: always
type: dict
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ipa import IPAClient, ipa_argument_spec
from ansible.module_utils._text import to_native
class SudoCmdGroupIPAClient(IPAClient):
def __init__(self, module, host, port, protocol):
super(SudoCmdGroupIPAClient, self).__init__(module, host, port, protocol)
def sudocmdgroup_find(self, name):
return self._post_json(method='sudocmdgroup_find', name=None, item={'all': True, 'cn': name})
def sudocmdgroup_add(self, name, item):
return self._post_json(method='sudocmdgroup_add', name=name, item=item)
def sudocmdgroup_mod(self, name, item):
return self._post_json(method='sudocmdgroup_mod', name=name, item=item)
def sudocmdgroup_del(self, name):
return self._post_json(method='sudocmdgroup_del', name=name)
def sudocmdgroup_add_member(self, name, item):
return self._post_json(method='sudocmdgroup_add_member', name=name, item=item)
def sudocmdgroup_add_member_sudocmd(self, name, item):
return self.sudocmdgroup_add_member(name=name, item={'sudocmd': item})
def sudocmdgroup_remove_member(self, name, item):
return self._post_json(method='sudocmdgroup_remove_member', name=name, item=item)
def sudocmdgroup_remove_member_sudocmd(self, name, item):
return self.sudocmdgroup_remove_member(name=name, item={'sudocmd': item})
def get_sudocmdgroup_dict(description=None):
data = {}
if description is not None:
data['description'] = description
return data
def get_sudocmdgroup_diff(client, ipa_sudocmdgroup, module_sudocmdgroup):
return client.get_diff(ipa_data=ipa_sudocmdgroup, module_data=module_sudocmdgroup)
def ensure(module, client):
name = module.params['cn']
state = module.params['state']
sudocmd = module.params['sudocmd']
module_sudocmdgroup = get_sudocmdgroup_dict(description=module.params['description'])
ipa_sudocmdgroup = client.sudocmdgroup_find(name=name)
changed = False
if state == 'present':
if not ipa_sudocmdgroup:
changed = True
if not module.check_mode:
ipa_sudocmdgroup = client.sudocmdgroup_add(name=name, item=module_sudocmdgroup)
else:
diff = get_sudocmdgroup_diff(client, ipa_sudocmdgroup, module_sudocmdgroup)
if len(diff) > 0:
changed = True
if not module.check_mode:
data = {}
for key in diff:
data[key] = module_sudocmdgroup.get(key)
client.sudocmdgroup_mod(name=name, item=data)
if sudocmd is not None:
changed = client.modify_if_diff(name, ipa_sudocmdgroup.get('member_sudocmd', []), sudocmd,
client.sudocmdgroup_add_member_sudocmd,
client.sudocmdgroup_remove_member_sudocmd)
else:
if ipa_sudocmdgroup:
changed = True
if not module.check_mode:
client.sudocmdgroup_del(name=name)
return changed, client.sudocmdgroup_find(name=name)
def main():
argument_spec = ipa_argument_spec()
argument_spec.update(cn=dict(type='str', required=True, aliases=['name']),
description=dict(type='str'),
state=dict(type='str', default='present', choices=['present', 'absent', 'enabled', 'disabled']),
sudocmd=dict(type='list'))
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
client = SudoCmdGroupIPAClient(module=module,
host=module.params['ipa_host'],
port=module.params['ipa_port'],
protocol=module.params['ipa_prot'])
try:
client.login(username=module.params['ipa_user'],
password=module.params['ipa_pass'])
changed, sudocmdgroup = ensure(module, client)
module.exit_json(changed=changed, sudocmdgroup=sudocmdgroup)
except Exception as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 |
harshilasu/LinkurApp | y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/contrib/ymlmessage.py | 20 | 1879 | # Copyright (c) 2006,2007 Chris Moyer
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
This module was contributed by Chris Moyer. It provides a subclass of the
SQS Message class that supports YAML as the body of the message.
This module requires the yaml module.
"""
from boto.sqs.message import Message
import yaml
class YAMLMessage(Message):
"""
The YAMLMessage class provides a YAML compatible message. Encoding and
decoding are handled automatically.
Access this message data like such:
m.data = [ 1, 2, 3]
m.data[0] # Returns 1
This depends on the PyYAML package
"""
def __init__(self, queue=None, body='', xml_attrs=None):
self.data = None
super(YAMLMessage, self).__init__(queue, body)
def set_body(self, body):
self.data = yaml.load(body)
def get_body(self):
return yaml.dump(self.data)
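# --- Editor's usage sketch (hypothetical, not part of the original module) ---
# The YAML round trip can be exercised without an SQS queue:
#
#   m = YAMLMessage()
#   m.set_body('- 1\n- 2\n- 3\n')   # a YAML list as the message body
#   m.data                          # -> [1, 2, 3]
#   yaml.load(m.get_body())         # -> [1, 2, 3]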
| gpl-3.0 |
jaruba/chromium.src | chrome/common/extensions/docs/server2/chroot_file_system.py | 85 | 1890 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import posixpath
from docs_server_utils import StringIdentity
from file_system import FileSystem
from future import Future
class ChrootFileSystem(FileSystem):
'''ChrootFileSystem(fs, path) exposes a FileSystem whose root is |path| inside
|fs|, so ChrootFileSystem(fs, 'hello').Read(['world']) is equivalent to
fs.Read(['hello/world']) with the 'hello' prefix stripped from the result.
'''
def __init__(self, file_system, root):
'''Parameters:
|file_system| The FileSystem instance to transpose paths of.
|root| The path to transpose all Read/Stat calls by.
'''
self._file_system = file_system
self._root = root.strip('/')
def Read(self, paths, skip_not_found=False):
# Maintain reverse mapping so the result can be mapped to the original
# paths given (the result from |file_system| will include |root| in the
# result, which would be wrong).
prefixed_paths = {}
def prefix(path):
prefixed = posixpath.join(self._root, path)
prefixed_paths[prefixed] = path
return prefixed
def next(results):
return dict((prefixed_paths[path], content)
for path, content in results.iteritems())
return self._file_system.Read(tuple(prefix(path) for path in paths),
skip_not_found=skip_not_found).Then(next)
def Refresh(self):
return self._file_system.Refresh()
def Stat(self, path):
return self._file_system.Stat(posixpath.join(self._root, path))
def GetIdentity(self):
return StringIdentity(
'%s/%s' % (self._file_system.GetIdentity(), self._root))
def __repr__(self):
return 'ChrootFileSystem(%s, %s)' % (
self._root, repr(self._file_system))
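# --- Editor's sketch (hypothetical example, not part of the original module) ---
# Given some concrete FileSystem instance |fs|, the transposition behaves as:
#
#   chroot = ChrootFileSystem(fs, 'docs/')
#   chroot.Stat('guide.md')        # delegates to fs.Stat('docs/guide.md')
#   chroot.Read(('a.md', 'b.md'))  # reads 'docs/a.md' and 'docs/b.md', then maps
#                                  # the results back to the keys 'a.md' and 'b.md'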
| bsd-3-clause |
bdh1011/cupeye | venv/lib/python2.7/site-packages/pip/req/req_requirement.py | 118 | 1245 | from pip._vendor.packaging.version import parse as parse_version
class InstallationCandidate(object):
def __init__(self, project, version, location):
self.project = project
self.version = parse_version(version)
self.location = location
self._key = (self.project, self.version, self.location)
def __repr__(self):
return "<InstallationCandidate({0!r}, {1!r}, {2!r})>".format(
self.project, self.version, self.location,
)
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, InstallationCandidate):
return NotImplemented
return method(self._key, other._key)
| bsd-3-clause |
mottosso/mindbender-setup | bin/windows/python36/Lib/http/cookies.py | 6 | 21257 | ####
# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software
# and its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Timothy O'Malley not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
####
#
# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
# by Timothy O'Malley <timo@alum.mit.edu>
#
# Cookie.py is a Python module for the handling of HTTP
# cookies as a Python dictionary. See RFC 2109 for more
# information on cookies.
#
# The original idea to treat Cookies as a dictionary came from
# Dave Mitchell (davem@magnet.com) in 1995, when he released the
# first version of nscookie.py.
#
####
r"""
Here's a sample session to show how to use this module.
At the moment, this is the only documentation.
The Basics
----------
Importing is easy...
>>> from http import cookies
Most of the time you start by creating a cookie.
>>> C = cookies.SimpleCookie()
Once you've created your Cookie, you can add values just as if it were
a dictionary.
>>> C = cookies.SimpleCookie()
>>> C["fig"] = "newton"
>>> C["sugar"] = "wafer"
>>> C.output()
'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
Notice that the printable representation of a Cookie is the
appropriate format for a Set-Cookie: header. This is the
default behavior. You can change the header and printed
attributes by using the .output() function
>>> C = cookies.SimpleCookie()
>>> C["rocky"] = "road"
>>> C["rocky"]["path"] = "/cookie"
>>> print(C.output(header="Cookie:"))
Cookie: rocky=road; Path=/cookie
>>> print(C.output(attrs=[], header="Cookie:"))
Cookie: rocky=road
The load() method of a Cookie extracts cookies from a string. In a
CGI script, you would use this method to extract the cookies from the
HTTP_COOKIE environment variable.
>>> C = cookies.SimpleCookie()
>>> C.load("chips=ahoy; vienna=finger")
>>> C.output()
'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
The load() method is darn-tootin smart about identifying cookies
within a string. Escaped quotation marks, nested semicolons, and other
such trickeries do not confuse it.
>>> C = cookies.SimpleCookie()
>>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
>>> print(C)
Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
Each element of the Cookie also supports all of the RFC 2109
Cookie attributes. Here's an example which sets the Path
attribute.
>>> C = cookies.SimpleCookie()
>>> C["oreo"] = "doublestuff"
>>> C["oreo"]["path"] = "/"
>>> print(C)
Set-Cookie: oreo=doublestuff; Path=/
Each dictionary element has a 'value' attribute, which gives you
back the value associated with the key.
>>> C = cookies.SimpleCookie()
>>> C["twix"] = "none for you"
>>> C["twix"].value
'none for you'
The SimpleCookie expects that all values should be standard strings.
Just to be sure, SimpleCookie invokes the str() builtin to convert
the value to a string, when the values are set dictionary-style.
>>> C = cookies.SimpleCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
'7'
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
Finis.
"""
#
# Import our required modules
#
import re
import string
__all__ = ["CookieError", "BaseCookie", "SimpleCookie"]
_nulljoin = ''.join
_semispacejoin = '; '.join
_spacejoin = ' '.join
def _warn_deprecated_setter(setter):
import warnings
msg = ('The .%s setter is deprecated. The attribute will be read-only in '
'future releases. Please use the set() method instead.' % setter)
warnings.warn(msg, DeprecationWarning, stacklevel=3)
#
# Define an exception visible to External modules
#
class CookieError(Exception):
pass
# These quoting routines conform to the RFC2109 specification, which in
# turn references the character definitions from RFC2068. They provide
# a two-way quoting algorithm. Any non-text character is translated
# into a 4 character sequence: a backslash followed by the
# three-digit octal equivalent of the character. Any '\' or '"' is
# quoted with a preceding '\' slash.
# Because of the way browsers really handle cookies (as opposed to what
# the RFC says) we also encode "," and ";".
#
# These are taken from RFC2068 and RFC2109.
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:"
_UnescapedChars = _LegalChars + ' ()/<=>?@[]{}'
_Translator = {n: '\\%03o' % n
for n in set(range(256)) - set(map(ord, _UnescapedChars))}
_Translator.update({
ord('"'): '\\"',
ord('\\'): '\\\\',
})
_is_legal_key = re.compile('[%s]+' % re.escape(_LegalChars)).fullmatch
def _quote(str):
r"""Quote a string for use in a cookie header.
If the string does not need to be double-quoted, then just return the
string. Otherwise, surround the string in doublequotes and quote
(with a \) special characters.
"""
if str is None or _is_legal_key(str):
return str
else:
return '"' + str.translate(_Translator) + '"'
_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
_QuotePatt = re.compile(r"[\\].")
def _unquote(str):
# If there aren't any doublequotes,
# then there can't be any special characters. See RFC 2109.
if str is None or len(str) < 2:
return str
if str[0] != '"' or str[-1] != '"':
return str
# We have to assume that we must decode this string.
# Down to work.
# Remove the "s
str = str[1:-1]
# Check for special sequences. Examples:
# \012 --> \n
# \" --> "
#
i = 0
n = len(str)
res = []
while 0 <= i < n:
o_match = _OctalPatt.search(str, i)
q_match = _QuotePatt.search(str, i)
if not o_match and not q_match: # Neither matched
res.append(str[i:])
break
# else:
j = k = -1
if o_match:
j = o_match.start(0)
if q_match:
k = q_match.start(0)
if q_match and (not o_match or k < j): # QuotePatt matched
res.append(str[i:k])
res.append(str[k+1])
i = k + 2
else: # OctalPatt matched
res.append(str[i:j])
res.append(chr(int(str[j+1:j+4], 8)))
i = j + 4
return _nulljoin(res)
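# Worked example (editor's illustration, not part of the original module):
#   _quote('a;b')    returns the quoted text  "a\073b"   (';' is octal 073, so it is escaped)
#   _quote('simple') returns 'simple' unchanged, since every character is in _LegalChars
#   _unquote() applied to the quoted form recovers the original 'a;b'.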
# The _getdate() routine is used to set the expiration time in the cookie's HTTP
# header. By default, _getdate() returns the current time in the appropriate
# "expires" format for a Set-Cookie header. The one optional argument is an
# offset from now, in seconds. For example, an offset of -3600 means "one hour
# ago". The offset may be a floating point number.
#
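# For example (editor's illustration; the exact value depends on the current
# time), _getdate(3600) produces a string of the form
# "Wed, 09 Jun 2021 11:18:14 GMT", i.e. one hour from now.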
_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
from time import gmtime, time
now = time()
year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \
(weekdayname[wd], day, monthname[month], year, hh, mm, ss)
class Morsel(dict):
"""A class to hold ONE (key, value) pair.
In a cookie, each such pair may have several attributes, so this class is
used to keep the attributes associated with the appropriate key,value pair.
This class also includes a coded_value attribute, which is used to hold
the network representation of the value. This is most useful when Python
objects are pickled for network transit.
"""
# RFC 2109 lists these attributes as reserved:
# path comment domain
# max-age secure version
#
# For historical reasons, these attributes are also reserved:
# expires
#
# This is an extension from Microsoft:
# httponly
#
# This dictionary provides a mapping from the lowercase
# variant on the left to the appropriate traditional
# formatting on the right.
_reserved = {
"expires" : "expires",
"path" : "Path",
"comment" : "Comment",
"domain" : "Domain",
"max-age" : "Max-Age",
"secure" : "Secure",
"httponly" : "HttpOnly",
"version" : "Version",
}
_flags = {'secure', 'httponly'}
def __init__(self):
# Set defaults
self._key = self._value = self._coded_value = None
# Set default attributes
for key in self._reserved:
dict.__setitem__(self, key, "")
@property
def key(self):
return self._key
@key.setter
def key(self, key):
_warn_deprecated_setter('key')
self._key = key
@property
def value(self):
return self._value
@value.setter
def value(self, value):
_warn_deprecated_setter('value')
self._value = value
@property
def coded_value(self):
return self._coded_value
@coded_value.setter
def coded_value(self, coded_value):
_warn_deprecated_setter('coded_value')
self._coded_value = coded_value
def __setitem__(self, K, V):
K = K.lower()
if not K in self._reserved:
raise CookieError("Invalid attribute %r" % (K,))
dict.__setitem__(self, K, V)
def setdefault(self, key, val=None):
key = key.lower()
if key not in self._reserved:
raise CookieError("Invalid attribute %r" % (key,))
return dict.setdefault(self, key, val)
def __eq__(self, morsel):
if not isinstance(morsel, Morsel):
return NotImplemented
return (dict.__eq__(self, morsel) and
self._value == morsel._value and
self._key == morsel._key and
self._coded_value == morsel._coded_value)
__ne__ = object.__ne__
def copy(self):
morsel = Morsel()
dict.update(morsel, self)
morsel.__dict__.update(self.__dict__)
return morsel
def update(self, values):
data = {}
for key, val in dict(values).items():
key = key.lower()
if key not in self._reserved:
raise CookieError("Invalid attribute %r" % (key,))
data[key] = val
dict.update(self, data)
def isReservedKey(self, K):
return K.lower() in self._reserved
def set(self, key, val, coded_val, LegalChars=_LegalChars):
if LegalChars != _LegalChars:
import warnings
warnings.warn(
'LegalChars parameter is deprecated, ignored and will '
'be removed in future versions.', DeprecationWarning,
stacklevel=2)
if key.lower() in self._reserved:
raise CookieError('Attempt to set a reserved key %r' % (key,))
if not _is_legal_key(key):
raise CookieError('Illegal key %r' % (key,))
# It's a good key, so save it.
self._key = key
self._value = val
self._coded_value = coded_val
def __getstate__(self):
return {
'key': self._key,
'value': self._value,
'coded_value': self._coded_value,
}
def __setstate__(self, state):
self._key = state['key']
self._value = state['value']
self._coded_value = state['coded_value']
def output(self, attrs=None, header="Set-Cookie:"):
return "%s %s" % (header, self.OutputString(attrs))
__str__ = output
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.OutputString())
def js_output(self, attrs=None):
# Print javascript
return """
<script type="text/javascript">
<!-- begin hiding
document.cookie = \"%s\";
// end hiding -->
</script>
""" % (self.OutputString(attrs).replace('"', r'\"'))
def OutputString(self, attrs=None):
# Build up our result
#
result = []
append = result.append
# First, the key=value pair
append("%s=%s" % (self.key, self.coded_value))
# Now add any defined attributes
if attrs is None:
attrs = self._reserved
items = sorted(self.items())
for key, value in items:
if value == "":
continue
if key not in attrs:
continue
if key == "expires" and isinstance(value, int):
append("%s=%s" % (self._reserved[key], _getdate(value)))
elif key == "max-age" and isinstance(value, int):
append("%s=%d" % (self._reserved[key], value))
elif key in self._flags:
if value:
append(str(self._reserved[key]))
else:
append("%s=%s" % (self._reserved[key], value))
# Return the result
return _semispacejoin(result)
#
# Pattern for finding cookie
#
# This used to be strict parsing based on the RFC2109 and RFC2068
# specifications. I have since discovered that MSIE 3.0x doesn't
# follow the character rules outlined in those specs. As a
# result, the parsing rules here are less strict.
#
_LegalKeyChars = r"\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\="
_LegalValueChars = _LegalKeyChars + r'\[\]'
_CookiePattern = re.compile(r"""
\s* # Optional whitespace at start of cookie
(?P<key> # Start of group 'key'
[""" + _LegalKeyChars + r"""]+? # Any word of at least one letter
) # End of group 'key'
( # Optional group: there may not be a value.
\s*=\s* # Equal Sign
(?P<val> # Start of group 'val'
"(?:[^\\"]|\\.)*" # Any doublequoted string
| # or
\w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr
| # or
[""" + _LegalValueChars + r"""]* # Any word or empty string
) # End of group 'val'
)? # End of optional value group
\s* # Any number of spaces.
(\s+|;|$) # Ending either at space, semicolon, or EOS.
""", re.ASCII | re.VERBOSE) # re.ASCII may be removed if safe.
# At long last, here is the cookie class. Using this class is almost just like
# using a dictionary. See this module's docstring for example usage.
#
class BaseCookie(dict):
"""A container class for a set of Morsels."""
def value_decode(self, val):
"""real_value, coded_value = value_decode(STRING)
Called prior to setting a cookie's value from the network
representation. The VALUE is the value read from HTTP
header.
Override this function to modify the behavior of cookies.
"""
return val, val
def value_encode(self, val):
"""real_value, coded_value = value_encode(VALUE)
Called prior to setting a cookie's value from the dictionary
representation. The VALUE is the value being assigned.
Override this function to modify the behavior of cookies.
"""
strval = str(val)
return strval, strval
def __init__(self, input=None):
if input:
self.load(input)
def __set(self, key, real_value, coded_value):
"""Private method for setting a cookie's value"""
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
def __setitem__(self, key, value):
"""Dictionary style assignment."""
if isinstance(value, Morsel):
# allow assignment of constructed Morsels (e.g. for pickling)
dict.__setitem__(self, key, value)
else:
rval, cval = self.value_encode(value)
self.__set(key, rval, cval)
def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
"""Return a string suitable for HTTP."""
result = []
items = sorted(self.items())
for key, value in items:
result.append(value.output(attrs, header))
return sep.join(result)
__str__ = output
def __repr__(self):
l = []
items = sorted(self.items())
for key, value in items:
l.append('%s=%s' % (key, repr(value.value)))
return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l))
def js_output(self, attrs=None):
"""Return a string suitable for JavaScript."""
result = []
items = sorted(self.items())
for key, value in items:
result.append(value.js_output(attrs))
return _nulljoin(result)
def load(self, rawdata):
"""Load cookies from a string (presumably HTTP_COOKIE) or
from a dictionary. Loading cookies from a dictionary 'd'
is equivalent to calling:
map(Cookie.__setitem__, d.keys(), d.values())
"""
if isinstance(rawdata, str):
self.__parse_string(rawdata)
else:
# self.update() wouldn't call our custom __setitem__
for key, value in rawdata.items():
self[key] = value
return
def __parse_string(self, str, patt=_CookiePattern):
i = 0 # Our starting point
n = len(str) # Length of string
parsed_items = [] # Parsed (type, key, value) triples
morsel_seen = False # A key=value pair was previously encountered
TYPE_ATTRIBUTE = 1
TYPE_KEYVALUE = 2
# We first parse the whole cookie string and reject it if it's
# syntactically invalid (this helps avoid some classes of injection
# attacks).
while 0 <= i < n:
# Start looking for a cookie
match = patt.match(str, i)
if not match:
# No more cookies
break
key, value = match.group("key"), match.group("val")
i = match.end(0)
if key[0] == "$":
if not morsel_seen:
# We ignore attributes which pertain to the cookie
# mechanism as a whole, such as "$Version".
# See RFC 2965. (Does anyone care?)
continue
parsed_items.append((TYPE_ATTRIBUTE, key[1:], value))
elif key.lower() in Morsel._reserved:
if not morsel_seen:
# Invalid cookie string
return
if value is None:
if key.lower() in Morsel._flags:
parsed_items.append((TYPE_ATTRIBUTE, key, True))
else:
# Invalid cookie string
return
else:
parsed_items.append((TYPE_ATTRIBUTE, key, _unquote(value)))
elif value is not None:
parsed_items.append((TYPE_KEYVALUE, key, self.value_decode(value)))
morsel_seen = True
else:
# Invalid cookie string
return
# The cookie string is valid, apply it.
M = None # current morsel
for tp, key, value in parsed_items:
if tp == TYPE_ATTRIBUTE:
assert M is not None
M[key] = value
else:
assert tp == TYPE_KEYVALUE
rval, cval = value
self.__set(key, rval, cval)
M = self[key]
class SimpleCookie(BaseCookie):
"""
SimpleCookie supports strings as cookie values. When setting
the value using the dictionary assignment notation, SimpleCookie
calls the builtin str() to convert the value to a string. Values
received from HTTP are kept as strings.
"""
def value_decode(self, val):
return _unquote(val), val
def value_encode(self, val):
strval = str(val)
return strval, _quote(strval)
| mit |
pcubillos/MCcubed | examples/demo02/preamble.py | 1 | 1385 | #! /usr/bin/env python
# This script generates input files used to run MCMC from the shell prompt.
# Preamble
# --------
# To correctly execute this script, one needs to set the correct paths
# to the source code. The paths are given as if the Python session
# runs from a 'run/' folder at the same level than the repo, as in:
# rootdir/
# |-- MCcubed/
# `-- run/
# Alternatively, edit the paths from this script to adjust to your
# working directory.
# Import the necessary modules:
import sys
import numpy as np
# Import the modules from the MCcubed package:
sys.path.append("../MCcubed/")
import MCcubed as mc3
# Import the modeling function:
sys.path.append("../MCcubed/examples/models/")
from quadratic import quad
# Create a synthetic dataset using a quadratic polynomial curve:
x = np.linspace(0, 10, 1000) # Independent model variable
p0 = [3, -2.4, 0.5] # True-underlying model parameters
y = quad(p0, x) # Noiseless model
uncert = np.sqrt(np.abs(y)) # Data points uncertainty
error = np.random.normal(0, uncert) # Noise for the data
data = y + error # Noisy data set
# data.npz contains the data and uncertainty arrays:
mc3.utils.savebin([data, uncert], 'data.npz')
# indp.npz contains the list of additional arguments for the model:
mc3.utils.savebin([x], 'indp.npz')
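# Optional sanity check (editor's addition, not required by the MCMC setup):
# for Gaussian noise, the reduced chi-squared of the noisy data against the
# true model should be close to 1.
chisq = np.sum(((data - y) / uncert)**2)
print("Reduced chi-squared of the synthetic data: {:.3f}".format(chisq / data.size))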
| mit |
bryx-inc/boto | boto/sdb/db/test_db.py | 153 | 5427 | import logging
import time
from datetime import datetime
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty, IntegerProperty, BooleanProperty
from boto.sdb.db.property import DateTimeProperty, FloatProperty, ReferenceProperty
from boto.sdb.db.property import PasswordProperty, ListProperty, MapProperty
from boto.exception import SDBPersistenceError
logging.basicConfig()
log = logging.getLogger('test_db')
log.setLevel(logging.DEBUG)
_objects = {}
#
# This will eventually be moved to the boto.tests module and become a real unit test
# but for now it will live here. It shows examples of each of the Property types in
# use and tests the basic operations.
#
class TestBasic(Model):
name = StringProperty()
size = IntegerProperty()
foo = BooleanProperty()
date = DateTimeProperty()
class TestFloat(Model):
name = StringProperty()
value = FloatProperty()
class TestRequired(Model):
req = StringProperty(required=True, default='foo')
class TestReference(Model):
ref = ReferenceProperty(reference_class=TestBasic, collection_name='refs')
class TestSubClass(TestBasic):
answer = IntegerProperty()
class TestPassword(Model):
password = PasswordProperty()
class TestList(Model):
name = StringProperty()
nums = ListProperty(int)
class TestMap(Model):
name = StringProperty()
map = MapProperty()
class TestListReference(Model):
name = StringProperty()
basics = ListProperty(TestBasic)
class TestAutoNow(Model):
create_date = DateTimeProperty(auto_now_add=True)
modified_date = DateTimeProperty(auto_now=True)
class TestUnique(Model):
name = StringProperty(unique=True)
def test_basic():
global _objects
t = TestBasic()
t.name = 'simple'
t.size = -42
t.foo = True
t.date = datetime.now()
log.debug('saving object')
t.put()
_objects['test_basic_t'] = t
time.sleep(5)
log.debug('now try retrieving it')
tt = TestBasic.get_by_id(t.id)
_objects['test_basic_tt'] = tt
assert tt.id == t.id
l = TestBasic.get_by_id([t.id])
assert len(l) == 1
assert l[0].id == t.id
assert t.size == tt.size
assert t.foo == tt.foo
assert t.name == tt.name
#assert t.date == tt.date
return t
def test_float():
global _objects
t = TestFloat()
t.name = 'float object'
t.value = 98.6
log.debug('saving object')
t.save()
_objects['test_float_t'] = t
time.sleep(5)
log.debug('now try retrieving it')
tt = TestFloat.get_by_id(t.id)
_objects['test_float_tt'] = tt
assert tt.id == t.id
assert tt.name == t.name
assert tt.value == t.value
return t
def test_required():
global _objects
t = TestRequired()
_objects['test_required_t'] = t
t.put()
return t
def test_reference(t=None):
global _objects
if not t:
t = test_basic()
tt = TestReference()
tt.ref = t
tt.put()
time.sleep(10)
tt = TestReference.get_by_id(tt.id)
_objects['test_reference_tt'] = tt
assert tt.ref.id == t.id
for o in t.refs:
log.debug(o)
def test_subclass():
global _objects
t = TestSubClass()
_objects['test_subclass_t'] = t
t.name = 'a subclass'
t.size = -489
t.save()
def test_password():
global _objects
t = TestPassword()
_objects['test_password_t'] = t
t.password = "foo"
t.save()
time.sleep(5)
# Make sure it stored ok
tt = TestPassword.get_by_id(t.id)
_objects['test_password_tt'] = tt
#Testing password equality
assert tt.password == "foo"
#Testing password not stored as string
assert str(tt.password) != "foo"
def test_list():
global _objects
t = TestList()
_objects['test_list_t'] = t
t.name = 'a list of ints'
t.nums = [1, 2, 3, 4, 5]
t.put()
tt = TestList.get_by_id(t.id)
_objects['test_list_tt'] = tt
assert tt.name == t.name
for n in tt.nums:
assert isinstance(n, int)
def test_list_reference():
global _objects
t = TestBasic()
t.put()
_objects['test_list_ref_t'] = t
tt = TestListReference()
tt.name = "foo"
tt.basics = [t]
tt.put()
time.sleep(5)
_objects['test_list_ref_tt'] = tt
ttt = TestListReference.get_by_id(tt.id)
assert ttt.basics[0].id == t.id
def test_unique():
global _objects
t = TestUnique()
name = 'foo' + str(int(time.time()))
t.name = name
t.put()
_objects['test_unique_t'] = t
time.sleep(10)
tt = TestUnique()
_objects['test_unique_tt'] = tt
tt.name = name
try:
tt.put()
assert False
except(SDBPersistenceError):
pass
def test_datetime():
global _objects
t = TestAutoNow()
t.put()
_objects['test_datetime_t'] = t
time.sleep(5)
tt = TestAutoNow.get_by_id(t.id)
assert tt.create_date.timetuple() == t.create_date.timetuple()
def test():
log.info('test_basic')
t1 = test_basic()
log.info('test_required')
test_required()
log.info('test_reference')
test_reference(t1)
log.info('test_subclass')
test_subclass()
log.info('test_password')
test_password()
log.info('test_list')
test_list()
log.info('test_list_reference')
test_list_reference()
log.info("test_datetime")
test_datetime()
log.info('test_unique')
test_unique()
if __name__ == "__main__":
test()
| mit |
jdinuncio/ansible-modules-extras | cloud/ovirt/ovirt_datacenters.py | 9 | 7296 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
check_params,
create_connection,
equal,
ovirt_full_argument_spec,
search_by_name,
)
ANSIBLE_METADATA = {'status': 'preview',
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ovirt_datacenters
short_description: Module to manage data centers in oVirt
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage data centers in oVirt"
options:
name:
description:
- "Name of the the data center to manage."
required: true
state:
description:
- "Should the data center be present or absent"
choices: ['present', 'absent']
default: present
description:
description:
- "Description of the data center."
comment:
description:
- "Comment of the data center."
local:
description:
- "I(True) if the data center should be local, I(False) if should be shared."
- "Default value is set by engine."
compatibility_version:
description:
- "Compatibility version of the data center."
quota_mode:
description:
- "Quota mode of the data center. One of I(disabled), I(audit) or I(enabled)"
choices: ['disabled', 'audit', 'enabled']
mac_pool:
description:
- "MAC pool to be used by this datacenter."
- "IMPORTANT: This option is deprecated in oVirt 4.1. You should
use C(mac_pool) in C(ovirt_clusters) module, as MAC pools are
set per cluster since 4.1."
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Create datacenter
- ovirt_datacenters:
name: mydatacenter
local: True
compatibility_version: 4.0
quota_mode: enabled
# Remove datacenter
- ovirt_datacenters:
state: absent
name: mydatacenter
'''
RETURN = '''
id:
description: "ID of the managed datacenter"
returned: "On success if datacenter is found."
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
data_center:
description: "Dictionary of all the datacenter attributes. Datacenter attributes can be found on your oVirt instance
at the following URL: https://ovirt.example.com/ovirt-engine/api/model#types/datacenter."
returned: "On success if datacenter is found."
'''
class DatacentersModule(BaseModule):
def __get_major(self, full_version):
if full_version is None:
return None
if isinstance(full_version, otypes.Version):
return full_version.major
return int(full_version.split('.')[0])
def __get_minor(self, full_version):
if full_version is None:
return None
if isinstance(full_version, otypes.Version):
return full_version.minor
return int(full_version.split('.')[1])
def _get_mac_pool(self):
mac_pool = None
if self._module.params.get('mac_pool'):
mac_pool = search_by_name(
self._connection.system_service().mac_pools_service(),
self._module.params.get('mac_pool'),
)
return mac_pool
def build_entity(self):
return otypes.DataCenter(
name=self._module.params['name'],
comment=self._module.params['comment'],
description=self._module.params['description'],
mac_pool=otypes.MacPool(
id=getattr(self._get_mac_pool(), 'id', None),
) if self._module.params.get('mac_pool') else None,
quota_mode=otypes.QuotaModeType(
self._module.params['quota_mode']
) if self._module.params['quota_mode'] else None,
local=self._module.params['local'],
version=otypes.Version(
major=self.__get_major(self._module.params['compatibility_version']),
minor=self.__get_minor(self._module.params['compatibility_version']),
) if self._module.params['compatibility_version'] else None,
)
def update_check(self, entity):
minor = self.__get_minor(self._module.params.get('compatibility_version'))
major = self.__get_major(self._module.params.get('compatibility_version'))
return (
equal(getattr(self._get_mac_pool(), 'id', None), getattr(entity.mac_pool, 'id', None)) and
equal(self._module.params.get('comment'), entity.comment) and
equal(self._module.params.get('description'), entity.description) and
equal(self._module.params.get('quota_mode'), str(entity.quota_mode)) and
equal(self._module.params.get('local'), entity.local) and
equal(minor, self.__get_minor(entity.version)) and
equal(major, self.__get_major(entity.version))
)
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
name=dict(default=None, required=True),
description=dict(default=None),
local=dict(type='bool'),
compatibility_version=dict(default=None),
quota_mode=dict(choices=['disabled', 'audit', 'enabled']),
comment=dict(default=None),
mac_pool=dict(default=None),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
check_sdk(module)
check_params(module)
try:
connection = create_connection(module.params.pop('auth'))
data_centers_service = connection.system_service().data_centers_service()
clusters_module = DatacentersModule(
connection=connection,
module=module,
service=data_centers_service,
)
state = module.params['state']
if state == 'present':
ret = clusters_module.create()
elif state == 'absent':
ret = clusters_module.remove()
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=False)
if __name__ == "__main__":
main()
| gpl-3.0 |
KimNorgaard/ansible-modules-extras | packaging/os/pkg5.py | 75 | 4862 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2014 Peter Oliver <ansible@mavit.org.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: pkg5
author: "Peter Oliver (@mavit)"
short_description: Manages packages with the Solaris 11 Image Packaging System
version_added: 1.9
description:
- IPS packages are the native packages in Solaris 11 and higher.
notes:
- The naming of IPS packages is explained at U(http://www.oracle.com/technetwork/articles/servers-storage-admin/ips-package-versioning-2232906.html).
options:
name:
description:
- An FMRI of the package(s) to be installed/removed/updated.
- Multiple packages may be specified, separated by C(,).
required: true
state:
description:
- Whether to install (I(present), I(latest)), or remove (I(absent)) a
package.
required: false
default: present
choices: [ present, latest, absent ]
accept_licenses:
description:
- Accept any licences.
required: false
default: false
choices: [ true, false ]
aliases: [ accept_licences, accept ]
'''
EXAMPLES = '''
# Install Vim:
- pkg5: name=editor/vim
# Remove finger daemon:
- pkg5: name=service/network/finger state=absent
# Install several packages at once:
- pkg5:
name:
- /file/gnu-findutils
- /text/gnu-grep
'''
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='list'),
state=dict(
default='present',
choices=[
'present',
'installed',
'latest',
'absent',
'uninstalled',
'removed',
]
),
accept_licenses=dict(
type='bool',
default=False,
aliases=['accept_licences', 'accept'],
),
)
)
params = module.params
packages = []
# pkg(5) FMRIs include a comma before the release number, but
# AnsibleModule will have split this into multiple items for us.
# Try to spot where this has happened and fix it.
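# For example (editor's illustration): name=editor/vim@7.4,5.11-0.175 arrives
# here as ['editor/vim@7.4', '5.11-0.175'] and is re-joined by the loop below
# into the single FMRI 'editor/vim@7.4,5.11-0.175'.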
for fragment in params['name']:
if (
re.search('^\d+(?:\.\d+)*', fragment)
and packages and re.search('@[^,]*$', packages[-1])
):
packages[-1] += ',' + fragment
else:
packages.append(fragment)
if params['state'] in ['present', 'installed']:
ensure(module, 'present', packages, params)
elif params['state'] in ['latest']:
ensure(module, 'latest', packages, params)
elif params['state'] in ['absent', 'uninstalled', 'removed']:
ensure(module, 'absent', packages, params)
def ensure(module, state, packages, params):
response = {
'results': [],
'msg': '',
}
behaviour = {
'present': {
'filter': lambda p: not is_installed(module, p),
'subcommand': 'install',
},
'latest': {
'filter': lambda p: not is_latest(module, p),
'subcommand': 'install',
},
'absent': {
'filter': lambda p: is_installed(module, p),
'subcommand': 'uninstall',
},
}
if params['accept_licenses']:
accept_licenses = ['--accept']
else:
accept_licenses = []
to_modify = filter(behaviour[state]['filter'], packages)
if to_modify:
rc, out, err = module.run_command(
[
'pkg', behaviour[state]['subcommand']
]
+ accept_licenses
+ [
'-q', '--'
] + to_modify
)
response['rc'] = rc
response['results'].append(out)
response['msg'] += err
response['changed'] = True
if rc != 0:
module.fail_json(**response)
module.exit_json(**response)
def is_installed(module, package):
rc, out, err = module.run_command(['pkg', 'list', '--', package])
return not bool(int(rc))
def is_latest(module, package):
rc, out, err = module.run_command(['pkg', 'list', '-u', '--', package])
return bool(int(rc))
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
andmarios/ansible-modules-core | system/user.py | 7 | 72948 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Stephen Fromm <sfromm@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: user
author: "Stephen Fromm (@sfromm)"
version_added: "0.2"
short_description: Manage user accounts
requirements: [ useradd, userdel, usermod ]
description:
- Manage user accounts and user attributes.
options:
name:
required: true
aliases: [ "user" ]
description:
- Name of the user to create, remove or modify.
comment:
required: false
description:
- Optionally sets the description (aka I(GECOS)) of user account.
uid:
required: false
description:
- Optionally sets the I(UID) of the user.
non_unique:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- Optionally when used with the -u option, this option allows changing
the user ID to a non-unique value.
version_added: "1.1"
seuser:
required: false
description:
- Optionally sets the seuser type (user_u) on selinux enabled systems.
version_added: "2.1"
group:
required: false
description:
- Optionally sets the user's primary group (takes a group name).
groups:
required: false
description:
- Puts the user in this comma-delimited list of groups. When set to
the empty string ('groups='), the user is removed from all groups
except the primary group.
append:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If C(yes), will only add groups, not set them to just the list
in I(groups).
shell:
required: false
description:
- Optionally set the user's shell.
home:
required: false
description:
- Optionally set the user's home directory.
skeleton:
required: false
description:
- Optionally set a home skeleton directory. Requires createhome option!
password:
required: false
description:
- Optionally set the user's password to this crypted value. See
the user example in the github examples directory for what this looks
like in a playbook. See U(http://docs.ansible.com/ansible/faq.html#how-do-i-generate-crypted-passwords-for-the-user-module)
for details on various ways to generate these password values.
Note that on Darwin systems this value has to be cleartext.
Beware of security issues.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the account should exist or not, taking action if the state is different from what is stated.
createhome:
required: false
default: "yes"
choices: [ "yes", "no" ]
description:
- Unless set to C(no), a home directory will be made for the user
when the account is created or if the home directory does not
exist.
move_home:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If set to C(yes) when used with C(home=), attempt to move the
user's home directory to the specified directory if it isn't there
already.
system:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- When creating an account, setting this to C(yes) makes the user a
system account. This setting cannot be changed on existing users.
force:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- When used with C(state=absent), behavior is as with
C(userdel --force).
login_class:
required: false
description:
- Optionally sets the user's login class for FreeBSD, OpenBSD and NetBSD systems.
remove:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- When used with C(state=absent), behavior is as with
C(userdel --remove).
generate_ssh_key:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "0.9"
description:
- Whether to generate a SSH key for the user in question.
This will B(not) overwrite an existing SSH key.
ssh_key_bits:
required: false
default: 2048
version_added: "0.9"
description:
- Optionally specify number of bits in SSH key to create.
ssh_key_type:
required: false
default: rsa
version_added: "0.9"
description:
- Optionally specify the type of SSH key to generate.
Available SSH key types will depend on implementation
present on target host.
ssh_key_file:
required: false
default: .ssh/id_rsa
version_added: "0.9"
description:
- Optionally specify the SSH key filename. If this is a relative
filename then it will be relative to the user's home directory.
ssh_key_comment:
required: false
default: ansible-generated on $HOSTNAME
version_added: "0.9"
description:
- Optionally define the comment for the SSH key.
ssh_key_passphrase:
required: false
version_added: "0.9"
description:
- Set a passphrase for the SSH key. If no
passphrase is provided, the SSH key will default to
having no passphrase.
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "1.3"
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
expires:
version_added: "1.9"
required: false
default: "None"
description:
- An expiry time for the user in epoch; it will be ignored on platforms that do not support this.
Currently supported on Linux and FreeBSD.
'''
EXAMPLES = '''
# Add the user 'johnd' with a specific uid and a primary group of 'admin'
- user: name=johnd comment="John Doe" uid=1040 group=admin
# Add the user 'james' with a bash shell, appending the group 'admins' and 'developers' to the user's groups
- user: name=james shell=/bin/bash groups=admins,developers append=yes
# Remove the user 'johnd'
- user: name=johnd state=absent remove=yes
# Create a 2048-bit SSH key for user jsmith in ~jsmith/.ssh/id_rsa
- user: name=jsmith generate_ssh_key=yes ssh_key_bits=2048 ssh_key_file=.ssh/id_rsa
# added a consultant whose account you want to expire
- user: name=james18 shell=/bin/zsh groups=developers expires=1422403387
'''
import os
import pwd
import grp
import platform
import socket
import time
try:
import spwd
HAVE_SPWD=True
except:
HAVE_SPWD=False
class User(object):
"""
This is a generic User manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- create_user()
- remove_user()
- modify_user()
- ssh_key_gen()
- ssh_key_fingerprint()
- user_exists()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
SHADOWFILE = '/etc/shadow'
DATE_FORMAT = '%Y-%m-%d'
def __new__(cls, *args, **kwargs):
return load_platform_subclass(User, args, kwargs)
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.uid = module.params['uid']
self.non_unique = module.params['non_unique']
self.seuser = module.params['seuser']
self.group = module.params['group']
self.groups = module.params['groups']
self.comment = module.params['comment']
self.shell = module.params['shell']
self.password = module.params['password']
self.force = module.params['force']
self.remove = module.params['remove']
self.createhome = module.params['createhome']
self.move_home = module.params['move_home']
self.skeleton = module.params['skeleton']
self.system = module.params['system']
self.login_class = module.params['login_class']
self.append = module.params['append']
self.sshkeygen = module.params['generate_ssh_key']
self.ssh_bits = module.params['ssh_key_bits']
self.ssh_type = module.params['ssh_key_type']
self.ssh_comment = module.params['ssh_key_comment']
self.ssh_passphrase = module.params['ssh_key_passphrase']
self.update_password = module.params['update_password']
self.home = None
self.expires = None
if module.params['home'] is not None:
self.home = os.path.expanduser(module.params['home'])
if module.params['expires']:
try:
self.expires = time.gmtime(module.params['expires'])
except Exception,e:
module.fail_json("Invalid expires time %s: %s" %(self.expires, str(e)))
if module.params['ssh_key_file'] is not None:
self.ssh_file = module.params['ssh_key_file']
else:
self.ssh_file = os.path.join('.ssh', 'id_%s' % self.ssh_type)
def execute_command(self, cmd, use_unsafe_shell=False, data=None):
return self.module.run_command(cmd, use_unsafe_shell=use_unsafe_shell, data=data)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.force:
cmd.append('-f')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self, command_name='useradd'):
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.seuser is not None:
cmd.append('-Z')
cmd.append(self.seuser)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
elif self.group_exists(self.name):
# use the -N option (no user group) if a group already
# exists with the same name as the user to prevent
# errors from useradd trying to create a group when
# USERGROUPS_ENAB is set in /etc/login.defs.
if os.path.exists('/etc/redhat-release'):
dist = platform.dist()
major_release = int(dist[1].split('.')[0])
if major_release <= 5:
cmd.append('-n')
else:
cmd.append('-N')
else:
cmd.append('-N')
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.expires:
cmd.append('--expiredate')
cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def _check_usermod_append(self):
# check if this version of usermod can append groups
usermod_path = self.module.get_bin_path('usermod', True)
# for some reason, usermod --help cannot be used by non root
# on RH/Fedora, due to lack of execute bit for others
if not os.access(usermod_path, os.X_OK):
return False
cmd = [usermod_path]
cmd.append('--help')
rc, data1, data2 = self.execute_command(cmd)
helpout = data1 + data2
# check if --append exists
lines = helpout.split('\n')
for line in lines:
if line.strip().startswith('-a, --append'):
return True
return False
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
has_append = self._check_usermod_append()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
if has_append:
cmd.append('-a')
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if self.append and not has_append:
cmd.append('-A')
cmd.append(','.join(group_diff))
else:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.expires:
cmd.append('--expiredate')
cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
def group_exists(self,group):
try:
# Try group as a gid first
grp.getgrgid(int(group))
return True
except (ValueError, KeyError):
try:
grp.getgrnam(group)
return True
except KeyError:
return False
def group_info(self, group):
if not self.group_exists(group):
return False
try:
# Try group as a gid first
return list(grp.getgrgid(int(group)))
except (ValueError, KeyError):
return list(grp.getgrnam(group))
def get_groups_set(self, remove_existing=True):
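        # Return the requested supplementary groups as a set. Fails if any of
        # them does not exist; when remove_existing is True the user's current
        # primary group is dropped from the set.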
if self.groups is None:
return None
info = self.user_info()
groups = set(filter(None, self.groups.split(',')))
for g in set(groups):
if not self.group_exists(g):
self.module.fail_json(msg="Group %s does not exist" % (g))
if info and remove_existing and self.group_info(g)[2] == info[3]:
groups.remove(g)
return groups
def user_group_membership(self):
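        # Return the supplementary groups the user currently belongs to,
        # excluding the primary group.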
groups = []
info = self.get_pwd_info()
for group in grp.getgrall():
if self.name in group.gr_mem and not info[3] == group.gr_gid:
groups.append(group[0])
return groups
def user_exists(self):
try:
if pwd.getpwnam(self.name):
return True
except KeyError:
return False
def get_pwd_info(self):
if not self.user_exists():
return False
return list(pwd.getpwnam(self.name))
def user_info(self):
if not self.user_exists():
return False
info = self.get_pwd_info()
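        # A short passwd field ('x', '*' or empty) means the real hash is
        # stored in the shadow database, so look it up there.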
if len(info[1]) == 1 or len(info[1]) == 0:
info[1] = self.user_password()
return info
def user_password(self):
passwd = ''
if HAVE_SPWD:
try:
passwd = spwd.getspnam(self.name)[1]
except KeyError:
return passwd
if not self.user_exists():
return passwd
elif self.SHADOWFILE:
# Read shadow file for user's encrypted password string
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
for line in open(self.SHADOWFILE).readlines():
if line.startswith('%s:' % self.name):
passwd = line.split(':')[1]
return passwd
def get_ssh_key_path(self):
info = self.user_info()
if os.path.isabs(self.ssh_file):
ssh_key_file = self.ssh_file
else:
ssh_key_file = os.path.join(info[5], self.ssh_file)
return ssh_key_file
def ssh_key_gen(self):
info = self.user_info()
if not os.path.exists(info[5]) and not self.module.check_mode:
return (1, '', 'User %s home directory does not exist' % self.name)
ssh_key_file = self.get_ssh_key_path()
ssh_dir = os.path.dirname(ssh_key_file)
if not os.path.exists(ssh_dir):
if self.module.check_mode:
return (0, '', '')
try:
os.mkdir(ssh_dir, 0700)
os.chown(ssh_dir, info[2], info[3])
except OSError, e:
return (1, '', 'Failed to create %s: %s' % (ssh_dir, str(e)))
if os.path.exists(ssh_key_file):
return (None, 'Key already exists', '')
if self.module.check_mode:
return (0, '', '')
cmd = [self.module.get_bin_path('ssh-keygen', True)]
cmd.append('-t')
cmd.append(self.ssh_type)
cmd.append('-b')
cmd.append(self.ssh_bits)
cmd.append('-C')
cmd.append(self.ssh_comment)
cmd.append('-f')
cmd.append(ssh_key_file)
cmd.append('-N')
if self.ssh_passphrase is not None:
cmd.append(self.ssh_passphrase)
else:
cmd.append('')
(rc, out, err) = self.execute_command(cmd)
if rc == 0:
# If the keys were successfully created, we should be able
# to tweak ownership.
os.chown(ssh_key_file, info[2], info[3])
os.chown('%s.pub' % ssh_key_file, info[2], info[3])
return (rc, out, err)
def ssh_key_fingerprint(self):
ssh_key_file = self.get_ssh_key_path()
if not os.path.exists(ssh_key_file):
return (1, 'SSH Key file %s does not exist' % ssh_key_file, '')
cmd = [ self.module.get_bin_path('ssh-keygen', True) ]
cmd.append('-l')
cmd.append('-f')
cmd.append(ssh_key_file)
return self.execute_command(cmd)
def get_ssh_public_key(self):
ssh_public_key_file = '%s.pub' % self.get_ssh_key_path()
try:
f = open(ssh_public_key_file)
ssh_public_key = f.read().strip()
f.close()
except IOError:
return None
return ssh_public_key
def create_user(self):
# by default we use the create_user_useradd method
return self.create_user_useradd()
def remove_user(self):
# by default we use the remove_user_userdel method
return self.remove_user_userdel()
def modify_user(self):
# by default we use the modify_user_usermod method
return self.modify_user_usermod()
def create_homedir(self, path):
if not os.path.exists(path):
if self.skeleton is not None:
skeleton = self.skeleton
else:
skeleton = '/etc/skel'
if os.path.exists(skeleton):
try:
shutil.copytree(skeleton, path, symlinks=True)
except OSError, e:
self.module.exit_json(failed=True, msg="%s" % e)
else:
try:
os.makedirs(path)
except OSError, e:
self.module.exit_json(failed=True, msg="%s" % e)
def chown_homedir(self, uid, gid, path):
try:
os.chown(path, uid, gid)
for root, dirs, files in os.walk(path):
for d in dirs:
                    os.chown(os.path.join(root, d), uid, gid)
for f in files:
os.chown(os.path.join(root, f), uid, gid)
except OSError, e:
self.module.exit_json(failed=True, msg="%s" % e)
# ===========================================
class FreeBsdUser(User):
"""
This is a FreeBSD User manipulation class - it uses the pw command
to manipulate the user database, followed by the chpass command
to change the password.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'FreeBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def remove_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'userdel',
'-n',
self.name
]
if self.remove:
cmd.append('-r')
return self.execute_command(cmd)
def create_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'useradd',
'-n',
self.name,
]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.expires:
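            # convert the expiry timestamp into a number of days from now for pw's -e option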
            days = (time.mktime(self.expires) - time.time()) / 86400
cmd.append('-e')
cmd.append(str(int(days)))
        # system cannot be handled currently - should we error if it's requested?
# create the user
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# we have to set the password in a second command
if self.password is not None:
cmd = [
self.module.get_bin_path('chpass', True),
'-p',
self.password,
self.name
]
return self.execute_command(cmd)
return (rc, out, err)
def modify_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'usermod',
'-n',
self.name
]
cmd_len = len(cmd)
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
# find current login class
user_login_class = None
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
for line in open(self.SHADOWFILE).readlines():
if line.startswith('%s:' % self.name):
user_login_class = line.split(':')[4]
# act only if login_class change
if self.login_class != user_login_class:
cmd.append('-L')
cmd.append(self.login_class)
if self.groups is not None:
current_groups = self.user_group_membership()
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
groups_need_mod = False
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups = groups | set(current_groups)
cmd.append(','.join(new_groups))
if self.expires:
            days = (time.mktime(self.expires) - time.time()) / 86400
cmd.append('-e')
cmd.append(str(int(days)))
# modify the user if cmd will do anything
if cmd_len != len(cmd):
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
else:
(rc, out, err) = (None, '', '')
# we have to set the password in a second command
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = [
self.module.get_bin_path('chpass', True),
'-p',
self.password,
self.name
]
return self.execute_command(cmd)
return (rc, out, err)
# ===========================================
class OpenBSDUser(User):
"""
    This is an OpenBSD User manipulation class.
Main differences are that OpenBSD:-
- has no concept of "system" account.
- has no force delete user
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'OpenBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.password is not None and self.password != '*':
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups_option = '-G'
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_option = '-S'
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append(groups_option)
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
# find current login class
user_login_class = None
userinfo_cmd = [self.module.get_bin_path('userinfo', True), self.name]
(rc, out, err) = self.execute_command(userinfo_cmd)
for line in out.splitlines():
tokens = line.split()
if tokens[0] == 'class' and len(tokens) == 2:
user_login_class = tokens[1]
            # act only if login_class changed
if self.login_class != user_login_class:
cmd.append('-L')
cmd.append(self.login_class)
if self.update_password == 'always' and self.password is not None \
and self.password != '*' and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class NetBSDUser(User):
"""
This is a NetBSD User manipulation class.
Main differences are that NetBSD:-
- has no concept of "system" account.
- has no force delete user
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'NetBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
if len(groups) > 16:
self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups = set(current_groups).union(groups)
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if len(groups) > 16:
self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class SunOS(User):
"""
This is a SunOS User manipulation class - The main difference between
this class and the generic user class is that Solaris-type distros
don't support the concept of a "system" account and we need to
edit the /etc/shadow file manually to set a password. (Ugh)
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'SunOS'
distribution = None
SHADOWFILE = '/etc/shadow'
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
if self.module.check_mode:
return (0, '', '')
else:
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# we have to set the password by editing the /etc/shadow file
if self.password is not None:
try:
lines = []
for line in open(self.SHADOWFILE, 'rb').readlines():
fields = line.strip().split(':')
if not fields[0] == self.name:
lines.append(line)
continue
fields[1] = self.password
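                    # field 3 of shadow(4) (lastchg) is the last password change date in days since the epoch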
fields[2] = str(int(time.time() / 86400))
line = ':'.join(fields)
lines.append('%s\n' % line)
open(self.SHADOWFILE, 'w+').writelines(lines)
except Exception, err:
self.module.fail_json(msg="failed to update users password: %s" % str(err))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
cmd_len = len(cmd)
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
groups_need_mod = False
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups.update(current_groups)
cmd.append(','.join(new_groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
# modify the user if cmd will do anything
if cmd_len != len(cmd):
(rc, out, err) = (0, '', '')
if not self.module.check_mode:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
else:
(rc, out, err) = (None, '', '')
# we have to set the password by editing the /etc/shadow file
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
(rc, out, err) = (0, '', '')
if not self.module.check_mode:
try:
lines = []
for line in open(self.SHADOWFILE, 'rb').readlines():
fields = line.strip().split(':')
if not fields[0] == self.name:
lines.append(line)
continue
fields[1] = self.password
fields[2] = str(int(time.time() / 86400))
line = ':'.join(fields)
lines.append('%s\n' % line)
open(self.SHADOWFILE, 'w+').writelines(lines)
rc = 0
except Exception, err:
self.module.fail_json(msg="failed to update users password: %s" % str(err))
return (rc, out, err)
# ===========================================
class DarwinUser(User):
"""
This is a Darwin Mac OS X User manipulation class.
Main differences are that Darwin:-
- Handles accounts in a database managed by dscl(1)
- Has no useradd/groupadd
- Does not create home directories
- User password must be cleartext
- UID must be given
      - System users must be under 500
This overrides the following methods from the generic class:-
- user_exists()
- create_user()
- remove_user()
- modify_user()
"""
platform = 'Darwin'
distribution = None
SHADOWFILE = None
dscl_directory = '.'
fields = [
('comment', 'RealName'),
('home', 'NFSHomeDirectory'),
('shell', 'UserShell'),
('uid', 'UniqueID'),
('group', 'PrimaryGroupID'),
]
def _get_dscl(self):
return [ self.module.get_bin_path('dscl', True), self.dscl_directory ]
def _list_user_groups(self):
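        '''Return the groups whose GroupMembership contains SELF.NAME,
        as reported by dscl -search.'''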
cmd = self._get_dscl()
cmd += [ '-search', '/Groups', 'GroupMembership', self.name ]
(rc, out, err) = self.execute_command(cmd)
groups = []
for line in out.splitlines():
if line.startswith(' ') or line.startswith(')'):
continue
groups.append(line.split()[0])
return groups
def _get_user_property(self, property):
        '''Return user PROPERTY as given by dscl(1) read, or None if not found.'''
cmd = self._get_dscl()
cmd += [ '-read', '/Users/%s' % self.name, property ]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
return None
# from dscl(1)
# if property contains embedded spaces, the list will instead be
# displayed one entry per line, starting on the line after the key.
lines = out.splitlines()
#sys.stderr.write('*** |%s| %s -> %s\n' % (property, out, lines))
if len(lines) == 1:
return lines[0].split(': ')[1]
else:
if len(lines) > 2:
return '\n'.join([ lines[1].strip() ] + lines[2:])
else:
if len(lines) == 2:
return lines[1].strip()
else:
return None
def _get_next_uid(self):
'''Return the next available uid'''
cmd = self._get_dscl()
cmd += ['-list', '/Users', 'UniqueID']
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg="Unable to get the next available uid",
rc=rc,
out=out,
err=err
)
max_uid = 0
for line in out.splitlines():
if max_uid < int(line.split()[1]):
max_uid = int(line.split()[1])
return max_uid + 1
def _change_user_password(self):
        '''Change the password of SELF.NAME to SELF.PASSWORD.
        Please note that the password must be cleartext.
'''
# some documentation on how is stored passwords on OSX:
# http://blog.lostpassword.com/2012/07/cracking-mac-os-x-lion-accounts-passwords/
# http://null-byte.wonderhowto.com/how-to/hack-mac-os-x-lion-passwords-0130036/
# http://pastebin.com/RYqxi7Ca
# on OSX 10.8+ hash is SALTED-SHA512-PBKDF2
# https://pythonhosted.org/passlib/lib/passlib.hash.pbkdf2_digest.html
# https://gist.github.com/nueh/8252572
cmd = self._get_dscl()
if self.password:
cmd += [ '-passwd', '/Users/%s' % self.name, self.password]
else:
cmd += [ '-create', '/Users/%s' % self.name, 'Password', '*']
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Error when changing password',
err=err, out=out, rc=rc)
return (rc, out, err)
def _make_group_numerical(self):
        '''Convert SELF.GROUP to its numerical gid value, as a string suitable for dscl.'''
if self.group is None:
self.group = 'nogroup'
try:
self.group = grp.getgrnam(self.group).gr_gid
except KeyError:
self.module.fail_json(msg='Group "%s" not found. Try to create it first using "group" module.' % self.group)
# We need to pass a string to dscl
self.group = str(self.group)
def __modify_group(self, group, action):
'''Add or remove SELF.NAME to or from GROUP depending on ACTION.
        ACTION can be 'add' or 'remove'; otherwise 'remove' is assumed. '''
if action == 'add':
option = '-a'
else:
option = '-d'
cmd = [ 'dseditgroup', '-o', 'edit', option, self.name,
'-t', 'user', group ]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot %s user "%s" to group "%s".'
% (action, self.name, group),
err=err, out=out, rc=rc)
return (rc, out, err)
    def _modify_group(self):
        '''Sync SELF.NAME's group membership against SELF.GROUPS: remove the
        user from groups that are no longer listed and add it to missing ones.'''
        rc = 0
        out = ''
        err = ''
        changed = False
        current = set(self._list_user_groups())
        if self.groups is not None:
            target = set(self.groups.split(','))
        else:
            target = set([])
        for remove in current - target:
            (_rc, _out, _err) = self.__modify_group(remove, 'delete')
            rc += _rc
            out += _out
            err += _err
            changed = True
        for add in target - current:
            (_rc, _out, _err) = self.__modify_group(add, 'add')
            rc += _rc
            out += _out
            err += _err
            changed = True
        return (rc, out, err, changed)
def _update_system_user(self):
        '''Hide or show the user on the login window according to SELF.SYSTEM.
        Returns 0 if a change has been made, None otherwise.'''
plist_file = '/Library/Preferences/com.apple.loginwindow.plist'
# http://support.apple.com/kb/HT5017?viewlocale=en_US
cmd = [ 'defaults', 'read', plist_file, 'HiddenUsersList' ]
(rc, out, err) = self.execute_command(cmd)
# returned value is
# (
# "_userA",
# "_UserB",
# userc
# )
hidden_users = []
for x in out.splitlines()[1:-1]:
try:
x = x.split('"')[1]
except IndexError:
x = x.strip()
hidden_users.append(x)
if self.system:
if not self.name in hidden_users:
cmd = [ 'defaults', 'write', plist_file,
'HiddenUsersList', '-array-add', self.name ]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
                        msg='Cannot add user "%s" to hidden user list.'
% self.name, err=err, out=out, rc=rc)
return 0
else:
if self.name in hidden_users:
del(hidden_users[hidden_users.index(self.name)])
cmd = [ 'defaults', 'write', plist_file,
'HiddenUsersList', '-array' ] + hidden_users
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot remove user "%s" from hidden user list.'
% self.name, err=err, out=out, rc=rc)
return 0
def user_exists(self):
        '''Check whether SELF.NAME is a known user on the system.'''
cmd = self._get_dscl()
cmd += [ '-list', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
return rc == 0
def remove_user(self):
'''Delete SELF.NAME. If SELF.FORCE is true, remove its home directory.'''
info = self.user_info()
cmd = self._get_dscl()
cmd += [ '-delete', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot delete user "%s".'
% self.name, err=err, out=out, rc=rc)
if self.force:
if os.path.exists(info[5]):
shutil.rmtree(info[5])
out += "Removed %s" % info[5]
return (rc, out, err)
def create_user(self, command_name='dscl'):
cmd = self._get_dscl()
cmd += [ '-create', '/Users/%s' % self.name]
(rc, err, out) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot create user "%s".'
% self.name, err=err, out=out, rc=rc)
self._make_group_numerical()
if self.uid is None:
self.uid = str(self._get_next_uid())
# Homedir is not created by default
if self.createhome:
if self.home is None:
self.home = '/Users/%s' % self.name
if not os.path.exists(self.home):
os.makedirs(self.home)
self.chown_homedir(int(self.uid), int(self.group), self.home)
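        # Set the remaining account properties (RealName, NFSHomeDirectory,
        # UserShell, UniqueID, PrimaryGroupID) with one dscl -create call each.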
for field in self.fields:
if self.__dict__.has_key(field[0]) and self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += [ '-create', '/Users/%s' % self.name,
field[1], self.__dict__[field[0]]]
(rc, _err, _out) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot add property "%s" to user "%s".'
% (field[0], self.name), err=err, out=out, rc=rc)
out += _out
err += _err
if rc != 0:
return (rc, _err, _out)
(rc, _err, _out) = self._change_user_password()
out += _out
err += _err
self._update_system_user()
# here we don't care about change status since it is a creation,
# thus changed is always true.
if self.groups:
(rc, _out, _err, changed) = self._modify_group()
out += _out
err += _err
return (rc, err, out)
def modify_user(self):
changed = None
out = ''
err = ''
if self.group:
self._make_group_numerical()
for field in self.fields:
if self.__dict__.has_key(field[0]) and self.__dict__[field[0]]:
current = self._get_user_property(field[1])
if current is None or current != self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += [ '-create', '/Users/%s' % self.name,
field[1], self.__dict__[field[0]]]
(rc, _err, _out) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot update property "%s" for user "%s".'
% (field[0], self.name), err=err, out=out, rc=rc)
changed = rc
out += _out
err += _err
if self.update_password == 'always' and self.password is not None:
(rc, _err, _out) = self._change_user_password()
out += _out
err += _err
changed = rc
if self.groups:
(rc, _out, _err, _changed) = self._modify_group()
out += _out
err += _err
if _changed is True:
changed = rc
rc = self._update_system_user()
if rc == 0:
changed = rc
return (changed, out, err)
# ===========================================
class AIX(User):
"""
    This is an AIX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'AIX'
distribution = None
SHADOWFILE = '/etc/security/passwd'
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self, command_name='useradd'):
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
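        # (-e tells chpasswd that the supplied password is already encrypted)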
if self.password is not None:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
self.execute_command(' '.join(cmd), data="%s:%s" % (self.name, self.password))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
# skip if no changes to be made
if len(cmd) == 1:
(rc, out, err) = (None, '', '')
elif self.module.check_mode:
return (True, '', '')
else:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
(rc2, out2, err2) = self.execute_command(' '.join(cmd), data="%s:%s" % (self.name, self.password))
else:
(rc2, out2, err2) = (None, '', '')
        if rc is not None:
return (rc, out+out2, err+err2)
else:
return (rc2, out+out2, err+err2)
# ===========================================
class HPUX(User):
"""
    This is an HP-UX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'HP-UX'
distribution = None
SHADOWFILE = '/etc/shadow'
def create_user(self):
cmd = ['/usr/sam/lbin/useradd.sam']
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user(self):
cmd = ['/usr/sam/lbin/userdel.sam']
if self.force:
cmd.append('-F')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = ['/usr/sam/lbin/usermod.sam']
info = self.user_info()
has_append = self._check_usermod_append()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
if has_append:
cmd.append('-a')
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if self.append and not has_append:
cmd.append('-A')
cmd.append(','.join(group_diff))
else:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
def main():
ssh_defaults = {
'bits': '2048',
'type': 'rsa',
'passphrase': None,
'comment': 'ansible-generated on %s' % socket.gethostname()
}
module = AnsibleModule(
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
name=dict(required=True, aliases=['user'], type='str'),
uid=dict(default=None, type='str'),
non_unique=dict(default='no', type='bool'),
group=dict(default=None, type='str'),
groups=dict(default=None, type='str'),
comment=dict(default=None, type='str'),
home=dict(default=None, type='str'),
shell=dict(default=None, type='str'),
password=dict(default=None, type='str', no_log=True),
login_class=dict(default=None, type='str'),
# following options are specific to selinux
seuser=dict(default=None, type='str'),
# following options are specific to userdel
force=dict(default='no', type='bool'),
remove=dict(default='no', type='bool'),
# following options are specific to useradd
createhome=dict(default='yes', type='bool'),
skeleton=dict(default=None, type='str'),
system=dict(default='no', type='bool'),
# following options are specific to usermod
move_home=dict(default='no', type='bool'),
append=dict(default='no', type='bool'),
# following are specific to ssh key generation
generate_ssh_key=dict(type='bool'),
ssh_key_bits=dict(default=ssh_defaults['bits'], type='str'),
ssh_key_type=dict(default=ssh_defaults['type'], type='str'),
ssh_key_file=dict(default=None, type='str'),
ssh_key_comment=dict(default=ssh_defaults['comment'], type='str'),
ssh_key_passphrase=dict(default=None, type='str', no_log=True),
update_password=dict(default='always',choices=['always','on_create'],type='str'),
expires=dict(default=None, type='float'),
),
supports_check_mode=True
)
user = User(module)
module.debug('User instantiated - platform %s' % user.platform)
if user.distribution:
module.debug('User instantiated - distribution %s' % user.distribution)
rc = None
out = ''
err = ''
result = {}
result['name'] = user.name
result['state'] = user.state
if user.state == 'absent':
if user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.remove_user()
if rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
result['force'] = user.force
result['remove'] = user.remove
elif user.state == 'present':
if not user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.create_user()
result['system'] = user.system
result['createhome'] = user.createhome
else:
# modify user (note: this function is check mode aware)
(rc, out, err) = user.modify_user()
result['append'] = user.append
result['move_home'] = user.move_home
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if user.password is not None:
result['password'] = 'NOT_LOGGING_PASSWORD'
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
if user.user_exists():
info = user.user_info()
        if info is False:
result['msg'] = "failed to look up user name: %s" % user.name
result['failed'] = True
result['uid'] = info[2]
result['group'] = info[3]
result['comment'] = info[4]
result['home'] = info[5]
result['shell'] = info[6]
result['uid'] = info[2]
if user.groups is not None:
result['groups'] = user.groups
# handle missing homedirs
info = user.user_info()
if user.home is None:
user.home = info[5]
if not os.path.exists(user.home) and user.createhome:
if not module.check_mode:
user.create_homedir(user.home)
user.chown_homedir(info[2], info[3], user.home)
result['changed'] = True
# deal with ssh key
if user.sshkeygen:
# generate ssh key (note: this function is check mode aware)
(rc, out, err) = user.ssh_key_gen()
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if rc == 0:
result['changed'] = True
(rc, out, err) = user.ssh_key_fingerprint()
if rc == 0:
result['ssh_fingerprint'] = out.strip()
else:
result['ssh_fingerprint'] = err.strip()
result['ssh_key_file'] = user.get_ssh_key_path()
result['ssh_public_key'] = user.get_ssh_public_key()
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
getredash/redash | redash/cli/queries.py | 3 | 1226 | from click import argument
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")
@manager.command()
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
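    """Add TAG to the tag list of the query with id QUERY_ID."""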
from redash import models
query_id = int(query_id)
try:
q = models.Query.get_by_id(query_id)
except NoResultFound:
print("Query not found.")
exit(1)
tags = q.tags
if tags is None:
tags = []
tags.append(tag)
q.tags = list(set(tags))
models.db.session.add(q)
models.db.session.commit()
print("Tag added.")
@manager.command()
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
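    """Remove TAG from the tag list of the query with id QUERY_ID."""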
from redash import models
query_id = int(query_id)
try:
q = models.Query.get_by_id(query_id)
except NoResultFound:
print("Query not found.")
exit(1)
tags = q.tags
if tags is None:
print("Tag is empty.")
exit(1)
try:
tags.remove(tag)
except ValueError:
print("Tag not found.")
exit(1)
q.tags = list(set(tags))
models.db.session.add(q)
models.db.session.commit()
print("Tag removed.")
| bsd-2-clause |
mtwestra/akvo-wandelenvoorwater | wvw/W4W/models.py | 1 | 6568 | from django.db import models
class school(models.Model):
class Meta:
verbose_name = "School"
verbose_name_plural="Scholen"
BRIN_NUMMER = models.CharField('BRIN code',max_length=15,blank=True, null=True)
NAAM_VOLLEDIG = models.CharField('Naam school',max_length=100,blank=True, null=True)
NAAM_STRAAT_VEST = models.CharField('Straat',max_length=100,blank=True, null=True)
NR_HUIS_VEST = models.CharField('Huisnummer',max_length=15,blank=True, null=True)
POSTCODE_VEST = models.CharField('Postcode',max_length=10,blank=True, null=True)
NAAM_PLAATS_VEST = models.CharField('Plaats',max_length=100,blank=True, null=True)
GEMEENTENAAM = models.CharField('Gemeente',max_length=100,blank=True, null=True)
GEOCODED=models.CharField('Geocoded',max_length=25,blank=True, null=True)
LONGITUDE=models.CharField('Longitude',max_length=20,blank=True, null=True)
LATITUDE=models.CharField('Latitude',max_length=20,blank=True, null=True)
NAAM_STRAAT_CORR = models.CharField('Straat',max_length=100,blank=True, null=True)
NR_HUIS_CORR = models.CharField('Huisnummer',max_length=15,blank=True, null=True)
POSTCODE_CORR = models.CharField('Postcode',max_length=10,blank=True, null=True)
NAAM_PLAATS_CORR = models.CharField('Plaats',max_length=100,blank=True, null=True)
NR_TELEFOON =models.CharField('Telefoon',max_length=15,blank=True, null=True)
NR_FAX = models.CharField('Fax',max_length=15,blank=True, null=True)
PROVINCIE_VEST = models.CharField('Provincie',max_length=100,blank=True, null=True)
NAAM_VOLLEDIG_GEZ = models.CharField('Naam',max_length=100,blank=True, null=True)
NR_ADMINISTRATIE_GEZ = models.CharField('Administratienummer',max_length=100,blank=True, null=True)
NAAM_STRAAT_COR_GEZ =models.CharField('Straat',max_length=100,blank=True, null=True)
NR_HUIS_CORR_GEZ =models.CharField('Huisnummer',max_length=15,blank=True, null=True)
POSTCODE_CORR_GEZ = models.CharField('Postcode',max_length=100,blank=True, null=True)
NAAM_PLAATS_CORR_GEZ =models.CharField('Plaats',max_length=100,blank=True, null=True)
INTERNET =models.CharField('Website',max_length=100,blank=True, null=True)
def __unicode__(self):
return self.NAAM_VOLLEDIG
class project(models.Model):
class Meta:
verbose_name = "Project"
verbose_name_plural="Projecten"
ACTIEF=models.BooleanField('Actief')
AKVO_CODE=models.IntegerField('Code Akvo project',blank=True, null=True)
PROJECT_AANDUIDING=models.CharField('Project aanduiding',max_length=100,blank=True, null=True)
PROJECT_NAAM = models.CharField('Naam contactpersoon',max_length=100,blank=True, null=True)
PROJECT_BESCHRIJVING = models.CharField('Opmerkingen',max_length=250,blank=True, null=True)
INVOER_DATUM = models.DateField('Invoerdatum',blank=True, null=True)
LAATSTE_WIJZIGING = models.DateTimeField('Laatste wijziging',blank=True, null=True)
def __unicode__(self):
return u'%s' % (self.PROJECT_NAAM)
class steunpunt(models.Model):
class Meta:
verbose_name = "Steunpunt"
verbose_name_plural="Steunpunten"
ACTIEF=models.BooleanField('Actief')
NAAM = models.CharField('Naam steunpunt',max_length=100,blank=True, null=True)
LOGO_URL = models.CharField('Logo URL',max_length=200,blank=True, null=True)
WEBSITE = models.CharField('Website',max_length=100,blank=True, null=True)
USERNAME = models.CharField('Username',max_length=100,blank=True, null=True)
PASSWD = models.CharField('Password',max_length=100,blank=True, null=True)
PROJECTEN = models.ManyToManyField(project,blank=True, null=True)
NAAM_CONTACT = models.CharField('Naam contactpersoon',max_length=100,blank=True, null=True)
EMAIL_CONTACT = models.CharField('E-mail',max_length=100,blank=True, null=True)
ADRES = models.CharField('Adres',max_length=100,blank=True, null=True)
POSTCODE = models.CharField('Postcode',max_length=10,blank=True, null=True)
PLAATS = models.CharField('Plaats',max_length=100,blank=True, null=True)
NR_TELEFOON = models.CharField('Telefoon',max_length=15,blank=True, null=True)
INVOER_DATUM = models.DateTimeField('Invoerdatum',blank=True, null=True)
LAATSTE_WIJZIGING = models.DateTimeField('Laatste wijzing',blank=True, null=True)
def __unicode__(self):
return u'%s' % (self.NAAM)
class inschrijving(models.Model):
class Meta:
verbose_name = "Inschrijving"
verbose_name_plural="Inschrijvingen"
STEUNPUNT=models.ForeignKey(steunpunt, verbose_name="Steunpunt")
PROJECT=models.ForeignKey(project, verbose_name="Project")
NUM_GROEP_GR7=models.IntegerField('Aantal groepen 7', blank=True, null=True)
NUM_GROEP_GR8=models.IntegerField('Aantal groepen 8', blank=True, null=True)
NUM_GROEP_GR67=models.IntegerField('Aantal gemengde groepen 6/7', blank=True, null=True)
NUM_GROEP_GR678=models.IntegerField('Aantal gemengde groepen 6/7/8',blank=True, null=True)
NUM_GROEP_GR78=models.IntegerField('Aantal gemengde groepen 7/8', blank=True, null=True)
ACTIEF=models.BooleanField('Actief')
TOTAAL_LEERLINGEN=models.IntegerField('Totaal aantal leerlingen', blank=True, null=True)
DATUM_WANDELING=models.DateField('Datum wandeling',blank=True, null=True)
PLAATS_WANDELING=models.CharField('Plaats wandeling',max_length=100,blank=True,null=True)
EERDER_MEEGEDAAN=models.CharField('Eerder meegedaan',max_length=100,blank=True,null=True)
NAAM_SCHOOL = models.CharField('Naam school',max_length=200,blank=True,null=True)
BRIN_NUMMER = models.CharField('BRIN code',max_length=15,blank=True,null=True)
NAAM_CONTACT = models.CharField('Naam contactpersoon',max_length=100,blank=True,null=True)
EMAIL_CONTACT = models.CharField('E-mail',max_length=100,blank=True,null=True)
ADRES = models.CharField('Adres',max_length=100,blank=True,null=True)
POSTCODE = models.CharField('Postcode',max_length=10,blank=True,null=True)
PLAATS = models.CharField('Plaats',max_length=100,blank=True,null=True)
NR_TELEFOON = models.CharField('Telefoon',max_length=15,blank=True,null=True)
AKKOORD_VOORW = models.BooleanField('Akkoord met de voorwaarden?')
OPMERKINGEN = models.CharField('Opmerkingen',max_length=1000,blank=True,null=True)
INVOER_DATUM = models.DateTimeField('Invoerdatum',blank=True, null=True)
LAATSTE_WIJZIGING=models.DateTimeField('Laatste wijziging',blank=True, null=True)
GEOCODED = models.CharField('Geocode resultaat',max_length=25,blank=True,null=True,default='NONE')
LONGITUDE = models.CharField('Longitude',max_length=20,blank=True,null=True)
LATITUDE = models.CharField('Latitude',max_length=20,blank=True,null=True)
def __unicode__(self):
return u'%s' %(self.NAAM_SCHOOL)
| agpl-3.0 |
google/compare_gan | compare_gan/architectures/resnet_init_test.py | 2 | 4302 | # coding=utf-8
# Copyright 2018 Google LLC & Hwalsuk Lee.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests weight initialization ops using ResNet5 architecture."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.architectures import resnet5
from compare_gan.gans import consts
import gin
import tensorflow as tf
class ResNetInitTest(tf.test.TestCase):
def setUp(self):
super(ResNetInitTest, self).setUp()
gin.clear_config()
def testInitializersOldDefault(self):
valid_initalizer = [
"kernel/Initializer/random_normal",
"bias/Initializer/Const",
# truncated_normal is the old default for conv2d.
"kernel/Initializer/truncated_normal",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
def testInitializersRandomNormal(self):
gin.bind_parameter("weights.initializer", consts.NORMAL_INIT)
valid_initalizer = [
"kernel/Initializer/random_normal",
"bias/Initializer/Const",
"kernel/Initializer/random_normal",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
def testInitializersTruncatedNormal(self):
gin.bind_parameter("weights.initializer", consts.TRUNCATED_INIT)
valid_initalizer = [
"kernel/Initializer/truncated_normal",
"bias/Initializer/Const",
"kernel/Initializer/truncated_normal",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
def testGeneratorInitializersOrthogonal(self):
gin.bind_parameter("weights.initializer", consts.ORTHOGONAL_INIT)
valid_initalizer = [
"kernel/Initializer/mul_1",
"bias/Initializer/Const",
"kernel/Initializer/mul_1",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
Noviat/account-financial-reporting-V3-intrastat | account_chart_report/__init__.py | 34 | 1038 | # -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Savoir-faire Linux (<www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from . import report
from . import wizard
| agpl-3.0 |
kaday/cylc | lib/cylc/run.py | 1 | 3790 | #!/usr/bin/env python
# THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2015 NIWA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Provide the main function for "cylc run" and "cylc restart"."""
import re
import sys
from daemonize import daemonize
from version import CYLC_VERSION
from cylc.cfgspec.globalcfg import GLOBAL_CFG
import flags
from exceptions import SchedulerStop, SchedulerError
def print_blurb():
logo = (
" ,_, \n"
" | | \n"
",_____,_, ,_| |_____, \n"
"| ,___| | | | | ,___| \n"
"| |___| |_| | | |___, \n"
"\_____\___, |_\_____| \n"
" ,___| | \n"
" \_____| \n"
)
license = """
The Cylc Suite Engine [%s]
Copyright (C) 2008-2015 NIWA
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
This program comes with ABSOLUTELY NO WARRANTY;
see `cylc warranty`. It is free software, you
are welcome to redistribute it under certain
conditions; see `cylc conditions`.
""" % CYLC_VERSION
logo_lines = logo.splitlines()
license_lines = license.splitlines()
lmax = max(len(line) for line in license_lines)
for i in range(len(logo_lines)):
print logo_lines[i], ('{0: ^%s}' % lmax).format(license_lines[i])
print
def main(name, start):
# Parse the command line:
server = start()
# Print copyright and license information
print_blurb()
# Create run directory tree and get port.
try:
GLOBAL_CFG.create_cylc_run_tree(server.suite)
server.configure_pyro()
except Exception as exc:
if flags.debug:
raise
else:
sys.exit(exc)
# Daemonize the suite
if not server.options.no_detach and not flags.debug:
daemonize(server)
try:
server.configure()
server.run()
# For profiling (see Python docs for how to display the stats).
# import cProfile
# cProfile.runctx('server.run()', globals(), locals(), 'stats')
except SchedulerStop, x:
# deliberate stop
print str(x)
server.shutdown()
except SchedulerError, x:
print >> sys.stderr, str(x)
server.shutdown()
sys.exit(1)
except KeyboardInterrupt as x:
import traceback
try:
server.shutdown(str(x))
except Exception as y:
# In case of exceptions in the shutdown method itself.
traceback.print_exc(y)
sys.exit(1)
except (KeyboardInterrupt, Exception) as x:
import traceback
traceback.print_exc(x)
print >> sys.stderr, "ERROR CAUGHT: cleaning up before exit"
try:
server.shutdown('ERROR: ' + str(x))
except Exception, y:
# In case of exceptions in the shutdown method itself
traceback.print_exc(y)
if flags.debug:
raise
else:
print >> sys.stderr, "THE ERROR WAS:"
print >> sys.stderr, x
print >> sys.stderr, "use --debug to turn on exception tracebacks)"
sys.exit(1)
else:
# main loop ends (not used?)
server.shutdown()
| gpl-3.0 |
dchud/sentinel | canary/cmdline.py | 1 | 2175 | # $Id$
import optparse
import os
class CommandLine:
"""A helper class for canary command line tools. When you use
CommandLine you will get a --config option for free, and a handy
method for instantiating a Context() object.
cmdline = CommandLine()
cmdline.parse_args()
con = cmdline.context()
"""
def __init__(self):
self.parser = optparse.OptionParser('usage: %prog [options]')
self.parser.add_option('-c', '--config',
dest='config', default='conf/canary_config.py',
help='path to configuration file')
self._ran = False
    def __getattr__(self, name):
        """Delegate unknown attribute lookups to the parsed options.
        """
        if 'options' in self.__dict__ and hasattr(self.options, name):
            return getattr(self.options, name)
        raise AttributeError(name)
def add_option(self,*args,**kwargs):
"""Similar to OptionParser.add_option
cmdline.add_option('-f', '--foo', help='foo bar')
"""
self.parser.add_option(*args,**kwargs)
def parse_args(self):
"""Similar to OptionParser.parse_args
options,args = cmdline.parse_args()
"""
if not self._ran:
self.options, self.args = self.parser.parse_args()
self._ran = True
return self.options, self.args
def context(self):
"""After you've called parse_args() you should be able
to fetch a canary.context.Context object.
context = cmdline.context()
"""
if not self.options.config:
self.guess_canary_config()
from canary.context import CanaryConfig, Context
config = CanaryConfig()
config.read_file(self.options.config)
return Context(config)
def guess_canary_config(self):
def find(arg,dname,fnames):
if 'canary_config.py' in fnames:
fnames = []
self.options.config = dname + '/canary_config.py'
os.path.walk(os.environ['HOME'],find,None)
print "using canary config at %s" % self.options.config
return self.options.config
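# Illustrative sketch (not part of canary): a typical tool built on the class
# above.  Option values are reachable either from parse_args() or directly as
# attributes, thanks to the __getattr__ delegation.
#
#     cmdline = CommandLine()
#     cmdline.add_option('-v', '--verbose', action='store_true')
#     options, args = cmdline.parse_args()
#     context = cmdline.context()   # reads conf/canary_config.py by default
#     print cmdline.verbose         # delegated to options.verbose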
| mit |
pierreg/tensorflow | tensorflow/python/training/training.py | 3 | 11365 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""This library provides a set of classes and functions that helps train models.
## Optimizers
The Optimizer base class provides methods to compute gradients for a loss and
apply gradients to variables. A collection of subclasses implement classic
optimization algorithms such as GradientDescent and Adagrad.
You never instantiate the Optimizer class itself, but instead instantiate one
of the subclasses.
@@Optimizer
@@GradientDescentOptimizer
@@AdadeltaOptimizer
@@AdagradOptimizer
@@AdagradDAOptimizer
@@MomentumOptimizer
@@AdamOptimizer
@@FtrlOptimizer
@@ProximalGradientDescentOptimizer
@@ProximalAdagradOptimizer
@@RMSPropOptimizer
## Gradient Computation
TensorFlow provides functions to compute the derivatives for a given
TensorFlow computation graph, adding operations to the graph. The
optimizer classes automatically compute derivatives on your graph, but
creators of new Optimizers or expert users can call the lower-level
functions below.
@@gradients
@@AggregationMethod
@@stop_gradient
## Gradient Clipping
TensorFlow provides several operations that you can use to add clipping
functions to your graph. You can use these functions to perform general data
clipping, but they're particularly useful for handling exploding or vanishing
gradients.
@@clip_by_value
@@clip_by_norm
@@clip_by_average_norm
@@clip_by_global_norm
@@global_norm
## Decaying the learning rate
@@exponential_decay
## Moving Averages
Some training algorithms, such as GradientDescent and Momentum, often benefit
from maintaining a moving average of variables during optimization.  Using the
moving averages for evaluations often improves results significantly.
@@ExponentialMovingAverage
## Coordinator and QueueRunner
See [Threading and Queues](../../how_tos/threading_and_queues/index.md)
for how to use threads and queues. For documentation on the Queue API,
see [Queues](../../api_docs/python/io_ops.md#queues).
@@Coordinator
@@QueueRunner
@@add_queue_runner
@@start_queue_runners
## Distributed execution
See [Distributed TensorFlow](../../how_tos/distributed/index.md) for
more information about how to configure a distributed TensorFlow program.
@@Server
@@Supervisor
@@SessionManager
@@ClusterSpec
@@replica_device_setter
@@Scaffold
@@MonitoredTrainingSession
@@SessionCreator
@@ChiefSessionCreator
@@WorkerSessionCreator
@@MonitoredSession
## Summary Operations
The following ops output
[`Summary`](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto)
protocol buffers as serialized string tensors.
You can fetch the output of a summary op in a session, and pass it to
a [SummaryWriter](../../api_docs/python/train.md#SummaryWriter) to append it
to an event file. Event files contain
[`Event`](https://www.tensorflow.org/code/tensorflow/core/util/event.proto)
protos that can contain `Summary` protos along with the timestamp and
step. You can then use TensorBoard to visualize the contents of the
event files. See [TensorBoard and
Summaries](../../how_tos/summaries_and_tensorboard/index.md) for more
details.
@@scalar_summary
@@image_summary
@@audio_summary
@@histogram_summary
@@zero_fraction
@@merge_summary
@@merge_all_summaries
## Adding Summaries to Event Files
See [Summaries and
TensorBoard](../../how_tos/summaries_and_tensorboard/index.md) for an
overview of summaries, event files, and visualization in TensorBoard.
@@SummaryWriter
@@SummaryWriterCache
@@summary_iterator
## Training utilities
@@global_step
@@basic_train_loop
@@get_global_step
@@assert_global_step
@@write_graph
@@SessionRunHook
@@LoggingTensorHook
@@StopAtStepHook
@@CheckpointSaverHook
@@NewCheckpointReader
@@StepCounterHook
@@NanLossDuringTrainingError
@@NanTensorHook
@@SummarySaverHook
@@SessionRunArgs
@@SessionRunContext
@@SessionRunValues
@@LooperThread
"""
# pylint: enable=line-too-long
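# Illustrative sketch (not part of this module): the compute/clip/apply pattern
# described in the docstring above, assuming a scalar `loss` tensor already
# exists in the default graph.
#
#     opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)
#     grads_and_vars = opt.compute_gradients(loss)
#     grads, variables = zip(*grads_and_vars)
#     clipped, _ = tf.clip_by_global_norm(grads, clip_norm=5.0)
#     train_op = opt.apply_gradients(zip(clipped, variables))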
# Optimizers.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys as _sys
from tensorflow.python.ops import io_ops as _io_ops
from tensorflow.python.ops import state_ops as _state_ops
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=g-bad-import-order,unused-import
from tensorflow.python.training.adadelta import AdadeltaOptimizer
from tensorflow.python.training.adagrad import AdagradOptimizer
from tensorflow.python.training.adagrad_da import AdagradDAOptimizer
from tensorflow.python.training.proximal_adagrad import ProximalAdagradOptimizer
from tensorflow.python.training.adam import AdamOptimizer
from tensorflow.python.training.ftrl import FtrlOptimizer
from tensorflow.python.training.momentum import MomentumOptimizer
from tensorflow.python.training.moving_averages import ExponentialMovingAverage
from tensorflow.python.training.optimizer import Optimizer
from tensorflow.python.training.rmsprop import RMSPropOptimizer
from tensorflow.python.training.gradient_descent import GradientDescentOptimizer
from tensorflow.python.training.proximal_gradient_descent import ProximalGradientDescentOptimizer
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizer
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizerV2
# Utility classes for training.
from tensorflow.python.training.coordinator import Coordinator
from tensorflow.python.training.coordinator import LooperThread
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.training.queue_runner import *
# For the module level doc.
from tensorflow.python.training import input as _input
from tensorflow.python.training.input import *
from tensorflow.python.training.basic_session_run_hooks import LoggingTensorHook
from tensorflow.python.training.basic_session_run_hooks import StopAtStepHook
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverHook
from tensorflow.python.training.basic_session_run_hooks import StepCounterHook
from tensorflow.python.training.basic_session_run_hooks import NanLossDuringTrainingError
from tensorflow.python.training.basic_session_run_hooks import NanTensorHook
from tensorflow.python.training.basic_session_run_hooks import SummarySaverHook
from tensorflow.python.training.basic_loops import basic_train_loop
from tensorflow.python.training.device_setter import replica_device_setter
from tensorflow.python.training.monitored_session import Scaffold
from tensorflow.python.training.monitored_session import MonitoredTrainingSession
from tensorflow.python.training.monitored_session import SessionCreator
from tensorflow.python.training.monitored_session import ChiefSessionCreator
from tensorflow.python.training.monitored_session import WorkerSessionCreator
from tensorflow.python.training.monitored_session import MonitoredSession
from tensorflow.python.training.saver import Saver
from tensorflow.python.training.saver import checkpoint_exists
from tensorflow.python.training.saver import generate_checkpoint_state_proto
from tensorflow.python.training.saver import get_checkpoint_mtimes
from tensorflow.python.training.saver import get_checkpoint_state
from tensorflow.python.training.saver import latest_checkpoint
from tensorflow.python.training.saver import update_checkpoint_state
from tensorflow.python.training.saver import export_meta_graph
from tensorflow.python.training.saver import import_meta_graph
from tensorflow.python.training.session_run_hook import SessionRunHook
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.training.session_run_hook import SessionRunContext
from tensorflow.python.training.session_run_hook import SessionRunValues
from tensorflow.python.training.session_manager import SessionManager
from tensorflow.python.training.summary_io import summary_iterator
from tensorflow.python.training.summary_io import SummaryWriter
from tensorflow.python.training.summary_io import SummaryWriterCache
from tensorflow.python.training.supervisor import Supervisor
from tensorflow.python.training.training_util import write_graph
from tensorflow.python.training.training_util import global_step
from tensorflow.python.training.training_util import get_global_step
from tensorflow.python.training.training_util import assert_global_step
from tensorflow.python.pywrap_tensorflow import do_quantize_training_on_graphdef
from tensorflow.python.pywrap_tensorflow import NewCheckpointReader
# Training data protos.
from tensorflow.core.example.example_pb2 import *
from tensorflow.core.example.feature_pb2 import *
from tensorflow.core.protobuf.saver_pb2 import *
# Utility op. Open Source. TODO(touts): move to nn?
from tensorflow.python.training.learning_rate_decay import *
# Distributed computing support.
from tensorflow.core.protobuf.tensorflow_server_pb2 import ClusterDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import JobDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import ServerDef
from tensorflow.python.training.server_lib import ClusterSpec
from tensorflow.python.training.server_lib import Server
# Symbols whitelisted for export without documentation.
_allowed_symbols = [
# TODO(cwhipkey): review these and move to contrib or expose through
# documentation.
"generate_checkpoint_state_proto", # Used internally by saver.
"checkpoint_exists", # Only used in test?
"get_checkpoint_mtimes", # Only used in test?
# Legacy: remove.
"do_quantize_training_on_graphdef", # At least use grah_def, not graphdef.
# No uses within tensorflow.
"queue_runner", # Use tf.train.start_queue_runner etc directly.
# This is also imported internally.
# TODO(drpng): document these. The reference in howtos/distributed does
# not link.
"SyncReplicasOptimizer",
"SyncReplicasOptimizerV2",
# Protobufs:
"BytesList", # from example_pb2.
"ClusterDef",
"Example", # from example_pb2
"Feature", # from example_pb2
"Features", # from example_pb2
"FeatureList", # from example_pb2
"FeatureLists", # from example_pb2
"FloatList", # from example_pb2.
"Int64List", # from example_pb2.
"JobDef",
"SaverDef", # From saver_pb2.
"SequenceExample", # from example_pb2.
"ServerDef",
]
# Include extra modules for docstrings because:
# * Input methods in tf.train are documented in io_ops.
# * Saver methods in tf.train are documented in state_ops.
remove_undocumented(__name__, _allowed_symbols,
[_sys.modules[__name__], _io_ops, _state_ops])
| apache-2.0 |
joachimmetz/plaso | plaso/formatters/file_system.py | 4 | 1917 | # -*- coding: utf-8 -*-
"""File system custom event formatter helpers."""
from plaso.formatters import interface
from plaso.formatters import manager
class NTFSFileReferenceFormatterHelper(interface.CustomEventFormatterHelper):
"""NTFS file reference formatter helper."""
IDENTIFIER = 'ntfs_file_reference'
def FormatEventValues(self, event_values):
"""Formats event values using the helper.
Args:
event_values (dict[str, object]): event values.
"""
file_reference = event_values.get('file_reference', None)
if file_reference:
event_values['file_reference'] = '{0:d}-{1:d}'.format(
file_reference & 0xffffffffffff, file_reference >> 48)
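# Illustrative note (not part of plaso): an NTFS file reference packs the MFT
# sequence number in the upper 16 bits and the MFT entry number in the lower
# 48 bits, so the helpers in this module render it as '<entry>-<sequence>'.
# For example, entry 5 with sequence 2:
#
#     reference = (2 << 48) | 5
#     '{0:d}-{1:d}'.format(reference & 0xffffffffffff, reference >> 48)  # '5-2'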
class NTFSParentFileReferenceFormatterHelper(
interface.CustomEventFormatterHelper):
"""NTFS parent file reference formatter helper."""
IDENTIFIER = 'ntfs_parent_file_reference'
def FormatEventValues(self, event_values):
"""Formats event values using the helper.
Args:
event_values (dict[str, object]): event values.
"""
parent_file_reference = event_values.get('parent_file_reference', None)
if parent_file_reference:
event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(
parent_file_reference & 0xffffffffffff, parent_file_reference >> 48)
class NTFSPathHintsFormatterHelper(interface.CustomEventFormatterHelper):
"""NTFS path hints formatter helper."""
IDENTIFIER = 'ntfs_path_hints'
def FormatEventValues(self, event_values):
"""Formats event values using the helper.
Args:
event_values (dict[str, object]): event values.
"""
path_hints = event_values.get('path_hints', None)
if path_hints:
event_values['path_hints'] = ';'.join(path_hints)
manager.FormattersManager.RegisterEventFormatterHelpers([
NTFSFileReferenceFormatterHelper, NTFSParentFileReferenceFormatterHelper,
NTFSPathHintsFormatterHelper])
| apache-2.0 |
Hellowlol/PyTunes | libs/enzyme/flv.py | 180 | 6375 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
from exceptions import ParseError
import core
import logging
import struct
__all__ = ['Parser']
# get logging object
log = logging.getLogger(__name__)
FLV_TAG_TYPE_AUDIO = 0x08
FLV_TAG_TYPE_VIDEO = 0x09
FLV_TAG_TYPE_META = 0x12
# audio flags
FLV_AUDIO_CHANNEL_MASK = 0x01
FLV_AUDIO_SAMPLERATE_MASK = 0x0c
FLV_AUDIO_CODECID_MASK = 0xf0
FLV_AUDIO_SAMPLERATE_OFFSET = 2
FLV_AUDIO_CODECID_OFFSET = 4
FLV_AUDIO_CODECID = (0x0001, 0x0002, 0x0055, 0x0001)
# video flags
FLV_VIDEO_CODECID_MASK = 0x0f
FLV_VIDEO_CODECID = ('FLV1', 'MSS1', 'VP60') # wild guess
FLV_DATA_TYPE_NUMBER = 0x00
FLV_DATA_TYPE_BOOL = 0x01
FLV_DATA_TYPE_STRING = 0x02
FLV_DATA_TYPE_OBJECT = 0x03
FLC_DATA_TYPE_CLIP = 0x04
FLV_DATA_TYPE_REFERENCE = 0x07
FLV_DATA_TYPE_ECMARRAY = 0x08
FLV_DATA_TYPE_ENDOBJECT = 0x09
FLV_DATA_TYPE_ARRAY = 0x0a
FLV_DATA_TYPE_DATE = 0x0b
FLV_DATA_TYPE_LONGSTRING = 0x0c
FLVINFO = {
'creator': 'copyright',
}
class FlashVideo(core.AVContainer):
"""
Experimental parser for Flash videos. It requires certain flags to
be set to report video resolutions and in most cases it does not
provide that information.
"""
table_mapping = { 'FLVINFO' : FLVINFO }
def __init__(self, file):
core.AVContainer.__init__(self)
self.mime = 'video/flv'
self.type = 'Flash Video'
data = file.read(13)
if len(data) < 13 or struct.unpack('>3sBBII', data)[0] != 'FLV':
raise ParseError()
for _ in range(10):
if self.audio and self.video:
break
data = file.read(11)
if len(data) < 11:
break
chunk = struct.unpack('>BH4BI', data)
size = (chunk[1] << 8) + chunk[2]
if chunk[0] == FLV_TAG_TYPE_AUDIO:
flags = ord(file.read(1))
if not self.audio:
a = core.AudioStream()
a.channels = (flags & FLV_AUDIO_CHANNEL_MASK) + 1
srate = (flags & FLV_AUDIO_SAMPLERATE_MASK)
a.samplerate = (44100 << (srate >> FLV_AUDIO_SAMPLERATE_OFFSET) >> 3)
codec = (flags & FLV_AUDIO_CODECID_MASK) >> FLV_AUDIO_CODECID_OFFSET
if codec < len(FLV_AUDIO_CODECID):
a.codec = FLV_AUDIO_CODECID[codec]
self.audio.append(a)
file.seek(size - 1, 1)
elif chunk[0] == FLV_TAG_TYPE_VIDEO:
flags = ord(file.read(1))
if not self.video:
v = core.VideoStream()
codec = (flags & FLV_VIDEO_CODECID_MASK) - 2
if codec < len(FLV_VIDEO_CODECID):
v.codec = FLV_VIDEO_CODECID[codec]
# width and height are in the meta packet, but I have
# no file with such a packet inside. So maybe we have
# to decode some parts of the video.
self.video.append(v)
file.seek(size - 1, 1)
elif chunk[0] == FLV_TAG_TYPE_META:
log.info(u'metadata %r', str(chunk))
metadata = file.read(size)
try:
while metadata:
length, value = self._parse_value(metadata)
if isinstance(value, dict):
log.info(u'metadata: %r', value)
if value.get('creator'):
self.copyright = value.get('creator')
if value.get('width'):
self.width = value.get('width')
if value.get('height'):
self.height = value.get('height')
if value.get('duration'):
self.length = value.get('duration')
self._appendtable('FLVINFO', value)
if not length:
# parse error
break
metadata = metadata[length:]
except (IndexError, struct.error, TypeError):
pass
else:
                log.info(u'unknown %r', str(chunk))
file.seek(size, 1)
file.seek(4, 1)
def _parse_value(self, data):
"""
Parse the next metadata value.
"""
if ord(data[0]) == FLV_DATA_TYPE_NUMBER:
value = struct.unpack('>d', data[1:9])[0]
return 9, value
if ord(data[0]) == FLV_DATA_TYPE_BOOL:
return 2, bool(data[1])
if ord(data[0]) == FLV_DATA_TYPE_STRING:
length = (ord(data[1]) << 8) + ord(data[2])
return length + 3, data[3:length + 3]
if ord(data[0]) == FLV_DATA_TYPE_ECMARRAY:
init_length = len(data)
num = struct.unpack('>I', data[1:5])[0]
data = data[5:]
result = {}
for _ in range(num):
length = (ord(data[0]) << 8) + ord(data[1])
key = data[2:length + 2]
data = data[length + 2:]
length, value = self._parse_value(data)
if not length:
return 0, result
result[key] = value
data = data[length:]
return init_length - len(data), result
log.info(u'unknown code: %x. Stop metadata parser', ord(data[0]))
return 0, None
Parser = FlashVideo
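# Illustrative note (not part of enzyme): in the audio-tag branch above the
# flag byte is decoded with plain bit arithmetic.  The two sample-rate bits
# select ``44100 << n >> 3`` for n = 0..3, i.e. 5512, 11025, 22050 and
# 44100 Hz, which are the four fixed sample rates allowed by the FLV format.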
| gpl-3.0 |
rexyeah/jira-cli | tests/common_bridge_cases.py | 1 | 3778 | """
"""
import os
import vcr
jiravcr = vcr.VCR(
record_mode = 'once',
match_on = ['uri', 'method'],
)
class BridgeTests:
def test_get_issue(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "issue.yaml")):
self.assertIsNotNone(self.bridge.get_issue("TP-9"))
def test_get_statuses(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "status.yaml")):
self.assertIsNotNone(self.bridge.get_statuses())
def test_get_projects(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "project.yaml")):
self.assertIsNotNone(self.bridge.get_projects())
def test_get_priorities(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "priorities.yaml")):
self.assertIsNotNone(self.bridge.get_priorities())
def test_get_transitions(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "transitions.yaml")):
self.assertIsNotNone(self.bridge.get_available_transitions("TP-9"))
def test_get_resolutions(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "resolutions.yaml")):
self.assertIsNotNone(self.bridge.get_resolutions())
def test_get_project_components(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "components.yaml")):
self.assertIsNotNone(self.bridge.get_components("TP"))
def test_get_issue_types(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "types.yaml")):
self.assertIsNotNone(self.bridge.get_issue_types())
def test_get_sub_task_issue_types(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "subtypes.yaml")):
self.assertIsNotNone(self.bridge.get_issue_types())
def test_get_filters(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "filters.yaml")):
self.assertIsNotNone(self.bridge.get_filters())
def test_search_free_text(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "search.yaml")):
self.assertTrue(
len(
self.bridge.search_issues("test jira-cli")
) == 1)
def test_search_jql(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "search-jql.yaml")):
self.assertTrue(
len(
self.bridge.search_issues_jql("summary~jira-cli")
) == 1)
def test_filter_fail(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "filter-search-fail.yaml")):
self.assertIsNotNone(
self.bridge.get_issues_by_filter("test-filter")
)
    def test_filter(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "filter-search.yaml")):
self.assertIsNotNone(
self.bridge.get_issues_by_filter("test filter", "blah")
)
def test_create_issue(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "create.yaml")):
self.assertIsNotNone(
self.bridge.create_issue("TP", summary='test-create-issue')
)
def test_create_child_issue(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "childcreate.yaml")):
self.assertIsNotNone(
self.bridge.create_issue("TP", type='sub-task',
summary='test-create-issue',
parent='TP-10')
) | mit |
mindw/pip | pip/_vendor/distlib/version.py | 132 | 23711 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""
import logging
import re
from .compat import string_types
__all__ = ['NormalizedVersion', 'NormalizedMatcher',
'LegacyVersion', 'LegacyMatcher',
'SemanticVersion', 'SemanticMatcher',
'UnsupportedVersionError', 'get_scheme']
logger = logging.getLogger(__name__)
class UnsupportedVersionError(ValueError):
"""This is an unsupported version."""
pass
class Version(object):
def __init__(self, s):
self._string = s = s.strip()
self._parts = parts = self.parse(s)
assert isinstance(parts, tuple)
assert len(parts) > 0
def parse(self, s):
raise NotImplementedError('please implement in a subclass')
def _check_compatible(self, other):
if type(self) != type(other):
raise TypeError('cannot compare %r and %r' % (self, other))
def __eq__(self, other):
self._check_compatible(other)
return self._parts == other._parts
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
self._check_compatible(other)
return self._parts < other._parts
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__lt__(other) or self.__eq__(other)
def __ge__(self, other):
return self.__gt__(other) or self.__eq__(other)
# See http://docs.python.org/reference/datamodel#object.__hash__
def __hash__(self):
return hash(self._parts)
def __repr__(self):
return "%s('%s')" % (self.__class__.__name__, self._string)
def __str__(self):
return self._string
@property
def is_prerelease(self):
raise NotImplementedError('Please implement in subclasses.')
class Matcher(object):
version_class = None
dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$')
num_re = re.compile(r'^\d+(\.\d+)*$')
# value is either a callable or the name of a method
_operators = {
'<': lambda v, c, p: v < c,
'>': lambda v, c, p: v > c,
'<=': lambda v, c, p: v == c or v < c,
'>=': lambda v, c, p: v == c or v > c,
'==': lambda v, c, p: v == c,
'===': lambda v, c, p: v == c,
# by default, compatible => >=.
'~=': lambda v, c, p: v == c or v > c,
'!=': lambda v, c, p: v != c,
}
def __init__(self, s):
if self.version_class is None:
raise ValueError('Please specify a version class')
self._string = s = s.strip()
m = self.dist_re.match(s)
if not m:
raise ValueError('Not valid: %r' % s)
groups = m.groups('')
self.name = groups[0].strip()
self.key = self.name.lower() # for case-insensitive comparisons
clist = []
if groups[2]:
constraints = [c.strip() for c in groups[2].split(',')]
for c in constraints:
m = self.comp_re.match(c)
if not m:
raise ValueError('Invalid %r in %r' % (c, s))
groups = m.groups()
op = groups[0] or '~='
s = groups[1]
if s.endswith('.*'):
if op not in ('==', '!='):
raise ValueError('\'.*\' not allowed for '
'%r constraints' % op)
# Could be a partial version (e.g. for '2.*') which
# won't parse as a version, so keep it as a string
vn, prefix = s[:-2], True
if not self.num_re.match(vn):
# Just to check that vn is a valid version
self.version_class(vn)
else:
# Should parse as a version, so we can create an
# instance for the comparison
vn, prefix = self.version_class(s), False
clist.append((op, vn, prefix))
self._parts = tuple(clist)
def match(self, version):
"""
Check if the provided version matches the constraints.
:param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
"""
if isinstance(version, string_types):
version = self.version_class(version)
for operator, constraint, prefix in self._parts:
f = self._operators.get(operator)
if isinstance(f, string_types):
f = getattr(self, f)
if not f:
msg = ('%r not implemented '
'for %s' % (operator, self.__class__.__name__))
raise NotImplementedError(msg)
if not f(version, constraint, prefix):
return False
return True
@property
def exact_version(self):
result = None
if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
result = self._parts[0][1]
return result
def _check_compatible(self, other):
if type(self) != type(other) or self.name != other.name:
raise TypeError('cannot compare %s and %s' % (self, other))
def __eq__(self, other):
self._check_compatible(other)
return self.key == other.key and self._parts == other._parts
def __ne__(self, other):
return not self.__eq__(other)
# See http://docs.python.org/reference/datamodel#object.__hash__
def __hash__(self):
return hash(self.key) + hash(self._parts)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self._string)
def __str__(self):
return self._string
PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
r'(\.(post)(\d+))?(\.(dev)(\d+))?'
r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
def _pep_440_key(s):
s = s.strip()
m = PEP440_VERSION_RE.match(s)
if not m:
raise UnsupportedVersionError('Not a valid version: %s' % s)
groups = m.groups()
nums = tuple(int(v) for v in groups[1].split('.'))
while len(nums) > 1 and nums[-1] == 0:
nums = nums[:-1]
if not groups[0]:
epoch = 0
else:
epoch = int(groups[0])
pre = groups[4:6]
post = groups[7:9]
dev = groups[10:12]
local = groups[13]
if pre == (None, None):
pre = ()
else:
pre = pre[0], int(pre[1])
if post == (None, None):
post = ()
else:
post = post[0], int(post[1])
if dev == (None, None):
dev = ()
else:
dev = dev[0], int(dev[1])
if local is None:
local = ()
else:
parts = []
for part in local.split('.'):
# to ensure that numeric compares as > lexicographic, avoid
# comparing them directly, but encode a tuple which ensures
# correct sorting
if part.isdigit():
part = (1, int(part))
else:
part = (0, part)
parts.append(part)
local = tuple(parts)
if not pre:
# either before pre-release, or final release and after
if not post and dev:
# before pre-release
pre = ('a', -1) # to sort before a0
else:
pre = ('z',) # to sort after all pre-releases
# now look at the state of post and dev.
if not post:
post = ('_',) # sort before 'a'
if not dev:
dev = ('final',)
#print('%s -> %s' % (s, m.groups()))
return epoch, nums, pre, post, dev, local
_normalized_key = _pep_440_key
class NormalizedVersion(Version):
"""A rational version.
Good:
1.2 # equivalent to "1.2.0"
1.2.0
1.2a1
1.2.3a2
1.2.3b1
1.2.3c1
1.2.3.4
TODO: fill this out
Bad:
        1            # minimum two numbers
1.2a # release level must have a release serial
1.2.3b
"""
def parse(self, s):
result = _normalized_key(s)
# _normalized_key loses trailing zeroes in the release
# clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
# However, PEP 440 prefix matching needs it: for example,
# (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
m = PEP440_VERSION_RE.match(s) # must succeed
groups = m.groups()
self._release_clause = tuple(int(v) for v in groups[1].split('.'))
return result
PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
@property
def is_prerelease(self):
return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
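# Illustrative sketch (not part of distlib): a few properties of the PEP 440
# ordering implemented above -- trailing zeroes are insignificant and a
# pre-release sorts before the corresponding final release.
assert NormalizedVersion('1.2') == NormalizedVersion('1.2.0')
assert NormalizedVersion('1.2a1') < NormalizedVersion('1.2')
assert NormalizedVersion('1.2a1').is_prerelease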
def _match_prefix(x, y):
x = str(x)
y = str(y)
if x == y:
return True
if not x.startswith(y):
return False
n = len(y)
return x[n] == '.'
class NormalizedMatcher(Matcher):
version_class = NormalizedVersion
# value is either a callable or the name of a method
_operators = {
'~=': '_match_compatible',
'<': '_match_lt',
'>': '_match_gt',
'<=': '_match_le',
'>=': '_match_ge',
'==': '_match_eq',
'===': '_match_arbitrary',
'!=': '_match_ne',
}
def _adjust_local(self, version, constraint, prefix):
if prefix:
strip_local = '+' not in constraint and version._parts[-1]
else:
# both constraint and version are
# NormalizedVersion instances.
# If constraint does not have a local component,
# ensure the version doesn't, either.
strip_local = not constraint._parts[-1] and version._parts[-1]
if strip_local:
s = version._string.split('+', 1)[0]
version = self.version_class(s)
return version, constraint
def _match_lt(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if version >= constraint:
return False
release_clause = constraint._release_clause
pfx = '.'.join([str(i) for i in release_clause])
return not _match_prefix(version, pfx)
def _match_gt(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if version <= constraint:
return False
release_clause = constraint._release_clause
pfx = '.'.join([str(i) for i in release_clause])
return not _match_prefix(version, pfx)
def _match_le(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
return version <= constraint
def _match_ge(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
return version >= constraint
def _match_eq(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if not prefix:
result = (version == constraint)
else:
result = _match_prefix(version, constraint)
return result
def _match_arbitrary(self, version, constraint, prefix):
return str(version) == str(constraint)
def _match_ne(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if not prefix:
result = (version != constraint)
else:
result = not _match_prefix(version, constraint)
return result
def _match_compatible(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if version == constraint:
return True
if version < constraint:
return False
# if not prefix:
# return True
release_clause = constraint._release_clause
if len(release_clause) > 1:
release_clause = release_clause[:-1]
pfx = '.'.join([str(i) for i in release_clause])
return _match_prefix(version, pfx)
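# Illustrative sketch (not part of distlib): matching a candidate version
# against a requirement string with the matcher defined above.  The
# distribution name ("example") is arbitrary.
_EXAMPLE_MATCHER = NormalizedMatcher('example (>= 1.0, < 2.0)')
assert _EXAMPLE_MATCHER.match('1.4')
assert not _EXAMPLE_MATCHER.match('2.1')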
_REPLACEMENTS = (
(re.compile('[.+-]$'), ''), # remove trailing puncts
(re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start
(re.compile('^[.-]'), ''), # remove leading puncts
(re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses
(re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
(re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
(re.compile('[.]{2,}'), '.'), # multiple runs of '.'
(re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha
(re.compile(r'\b(pre-alpha|prealpha)\b'),
'pre.alpha'), # standardise
(re.compile(r'\(beta\)$'), 'beta'), # remove parentheses
)
_SUFFIX_REPLACEMENTS = (
(re.compile('^[:~._+-]+'), ''), # remove leading puncts
(re.compile('[,*")([\]]'), ''), # remove unwanted chars
(re.compile('[~:+_ -]'), '.'), # replace illegal chars
(re.compile('[.]{2,}'), '.'), # multiple runs of '.'
(re.compile(r'\.$'), ''), # trailing '.'
)
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
def _suggest_semantic_version(s):
"""
Try to suggest a semantic form for a version for which
_suggest_normalized_version couldn't come up with anything.
"""
result = s.strip().lower()
for pat, repl in _REPLACEMENTS:
result = pat.sub(repl, result)
if not result:
result = '0.0.0'
# Now look for numeric prefix, and separate it out from
# the rest.
#import pdb; pdb.set_trace()
m = _NUMERIC_PREFIX.match(result)
if not m:
prefix = '0.0.0'
suffix = result
else:
prefix = m.groups()[0].split('.')
prefix = [int(i) for i in prefix]
while len(prefix) < 3:
prefix.append(0)
if len(prefix) == 3:
suffix = result[m.end():]
else:
suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
prefix = prefix[:3]
prefix = '.'.join([str(i) for i in prefix])
suffix = suffix.strip()
if suffix:
#import pdb; pdb.set_trace()
# massage the suffix.
for pat, repl in _SUFFIX_REPLACEMENTS:
suffix = pat.sub(repl, suffix)
if not suffix:
result = prefix
else:
sep = '-' if 'dev' in suffix else '+'
result = prefix + sep + suffix
if not is_semver(result):
result = None
return result
def _suggest_normalized_version(s):
"""Suggest a normalized version close to the given version string.
If you have a version string that isn't rational (i.e. NormalizedVersion
doesn't like it) then you might be able to get an equivalent (or close)
rational version from this function.
This does a number of simple normalizations to the given string, based
on observation of versions currently in use on PyPI. Given a dump of
    those versions during PyCon 2009, 4287 of them:
- 2312 (53.93%) match NormalizedVersion without change
with the automatic suggestion
- 3474 (81.04%) match when using this suggestion method
@param s {str} An irrational version string.
@returns A rational version string, or None, if couldn't determine one.
"""
try:
_normalized_key(s)
return s # already rational
except UnsupportedVersionError:
pass
rs = s.lower()
# part of this could use maketrans
for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
('beta', 'b'), ('rc', 'c'), ('-final', ''),
('-pre', 'c'),
('-release', ''), ('.release', ''), ('-stable', ''),
('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
('final', '')):
rs = rs.replace(orig, repl)
# if something ends with dev or pre, we add a 0
rs = re.sub(r"pre$", r"pre0", rs)
rs = re.sub(r"dev$", r"dev0", rs)
# if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
# let's remove the dash or dot
rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
# 1.0-dev-r371 -> 1.0.dev371
# 0.1-dev-r79 -> 0.1.dev79
rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
# Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
# Clean: v0.3, v1.0
if rs.startswith('v'):
rs = rs[1:]
# Clean leading '0's on numbers.
#TODO: unintended side-effect on, e.g., "2003.05.09"
# PyPI stats: 77 (~2%) better
rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
# Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
# zero.
# PyPI stats: 245 (7.56%) better
rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
# the 'dev-rNNN' tag is a dev tag
rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
# clean the - when used as a pre delimiter
rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
# a terminal "dev" or "devel" can be changed into ".dev0"
rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
# a terminal "dev" can be changed into ".dev0"
rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
# a terminal "final" or "stable" can be removed
rs = re.sub(r"(final|stable)$", "", rs)
# The 'r' and the '-' tags are post release tags
# 0.4a1.r10 -> 0.4a1.post10
# 0.9.33-17222 -> 0.9.33.post17222
# 0.9.33-r17222 -> 0.9.33.post17222
rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
# Clean 'r' instead of 'dev' usage:
# 0.9.33+r17222 -> 0.9.33.dev17222
# 1.0dev123 -> 1.0.dev123
# 1.0.git123 -> 1.0.dev123
# 1.0.bzr123 -> 1.0.dev123
# 0.1a0dev.123 -> 0.1a0.dev123
# PyPI stats: ~150 (~4%) better
rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
# Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
# 0.2.pre1 -> 0.2c1
# 0.2-c1 -> 0.2c1
# 1.0preview123 -> 1.0c123
# PyPI stats: ~21 (0.62%) better
rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
# Tcl/Tk uses "px" for their post release markers
rs = re.sub(r"p(\d+)$", r".post\1", rs)
try:
_normalized_key(rs)
except UnsupportedVersionError:
rs = None
return rs
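# Illustrative sketch (not part of distlib): the normalisation above turns a
# common "irrational" spelling into a PEP 386/440 compatible one.
assert _suggest_normalized_version('1.0-alpha-2') == '1.0a2'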
#
# Legacy version processing (distribute-compatible)
#
_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
'pre': 'c',
'preview': 'c',
'-': 'final-',
'rc': 'c',
'dev': '@',
'': None,
'.': None,
}
def _legacy_key(s):
def get_parts(s):
result = []
for p in _VERSION_PART.split(s.lower()):
p = _VERSION_REPLACE.get(p, p)
if p:
if '0' <= p[:1] <= '9':
p = p.zfill(8)
else:
p = '*' + p
result.append(p)
result.append('*final')
return result
result = []
for p in get_parts(s):
if p.startswith('*'):
if p < '*final':
while result and result[-1] == '*final-':
result.pop()
while result and result[-1] == '00000000':
result.pop()
result.append(p)
return tuple(result)
class LegacyVersion(Version):
def parse(self, s):
return _legacy_key(s)
@property
def is_prerelease(self):
result = False
for x in self._parts:
if (isinstance(x, string_types) and x.startswith('*') and
x < '*final'):
result = True
break
return result
class LegacyMatcher(Matcher):
version_class = LegacyVersion
_operators = dict(Matcher._operators)
_operators['~='] = '_match_compatible'
numeric_re = re.compile('^(\d+(\.\d+)*)')
def _match_compatible(self, version, constraint, prefix):
if version < constraint:
return False
m = self.numeric_re.match(str(constraint))
if not m:
logger.warning('Cannot compute compatible match for version %s '
' and constraint %s', version, constraint)
return True
s = m.groups()[0]
if '.' in s:
s = s.rsplit('.', 1)[0]
return _match_prefix(version, s)
#
# Semantic versioning
#
_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
def is_semver(s):
return _SEMVER_RE.match(s)
def _semantic_key(s):
def make_tuple(s, absent):
if s is None:
result = (absent,)
else:
parts = s[1:].split('.')
# We can't compare ints and strings on Python 3, so fudge it
# by zero-filling numeric values so simulate a numeric comparison
result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
return result
m = is_semver(s)
if not m:
raise UnsupportedVersionError(s)
groups = m.groups()
major, minor, patch = [int(i) for i in groups[:3]]
# choose the '|' and '*' so that versions sort correctly
pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
return (major, minor, patch), pre, build
class SemanticVersion(Version):
def parse(self, s):
return _semantic_key(s)
@property
def is_prerelease(self):
return self._parts[1][0] != '|'
class SemanticMatcher(Matcher):
version_class = SemanticVersion
class VersionScheme(object):
def __init__(self, key, matcher, suggester=None):
self.key = key
self.matcher = matcher
self.suggester = suggester
def is_valid_version(self, s):
try:
self.matcher.version_class(s)
result = True
except UnsupportedVersionError:
result = False
return result
def is_valid_matcher(self, s):
try:
self.matcher(s)
result = True
except UnsupportedVersionError:
result = False
return result
def is_valid_constraint_list(self, s):
"""
Used for processing some metadata fields
"""
return self.is_valid_matcher('dummy_name (%s)' % s)
def suggest(self, s):
if self.suggester is None:
result = None
else:
result = self.suggester(s)
return result
_SCHEMES = {
'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
_suggest_normalized_version),
'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
'semantic': VersionScheme(_semantic_key, SemanticMatcher,
_suggest_semantic_version),
}
_SCHEMES['default'] = _SCHEMES['normalized']
def get_scheme(name):
if name not in _SCHEMES:
raise ValueError('unknown scheme name: %r' % name)
return _SCHEMES[name]
| mit |
ThiagoGarciaAlves/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/gdal/__init__.py | 397 | 2173 | """
This module houses ctypes interfaces for GDAL objects. The following GDAL
objects are supported:
CoordTransform: Used for coordinate transformations from one spatial
reference system to another.
Driver: Wraps an OGR data source driver.
DataSource: Wrapper for the OGR data source object, supports
OGR-supported data sources.
Envelope: A ctypes structure for bounding boxes (GDAL library
not required).
OGRGeometry: Object for accessing OGR Geometry functionality.
OGRGeomType: A class for representing the different OGR Geometry
types (GDAL library not required).
SpatialReference: Represents OSR Spatial Reference objects.
The GDAL library will be imported from the system path using the default
library name for the current OS. The default library path may be overridden
by setting `GDAL_LIBRARY_PATH` in your settings with the path to the GDAL C
library on your system.
GDAL links to a large number of external libraries that consume RAM when
loaded. Thus, it may be desirable to disable GDAL on systems with limited
RAM resources -- this may be accomplished by setting `GDAL_LIBRARY_PATH`
to a non-existent file location (e.g., `GDAL_LIBRARY_PATH='/null/path'`;
setting to None/False/'' will not work as a string must be given).
"""
# Attempting to import objects that depend on the GDAL library. The
# HAS_GDAL flag will be set to True if the library is present on
# the system.
try:
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.datasource import DataSource
from django.contrib.gis.gdal.libgdal import gdal_version, gdal_full_version, gdal_release_date, GEOJSON, GDAL_VERSION
from django.contrib.gis.gdal.srs import SpatialReference, CoordTransform
from django.contrib.gis.gdal.geometries import OGRGeometry
HAS_GDAL = True
except:
HAS_GDAL, GEOJSON = False, False
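# Illustrative sketch (not part of Django): downstream GeoDjango code typically
# guards on the HAS_GDAL flag set above before touching the ctypes-backed
# classes, e.g.:
#
#     from django.contrib.gis import gdal
#     if gdal.HAS_GDAL:
#         version = gdal.gdal_version()
#         srs = gdal.SpatialReference('WGS84')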
try:
from django.contrib.gis.gdal.envelope import Envelope
except ImportError:
# No ctypes, but don't raise an exception.
pass
from django.contrib.gis.gdal.error import check_err, OGRException, OGRIndexError, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType
| apache-2.0 |
fedorpatlin/ansible | lib/ansible/modules/cloud/vmware/vmware_vswitch.py | 56 | 6915 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_vswitch
short_description: Add a VMware Standard Switch to an ESXi host
description:
- Add a VMware Standard Switch to an ESXi host
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
switch_name:
description:
- vSwitch name to add
required: True
nic_name:
description:
- vmnic name to attach to vswitch
required: True
number_of_ports:
description:
        - Number of ports to configure on vswitch
default: 128
required: False
mtu:
description:
- MTU to configure on vswitch
required: False
state:
description:
- Add or remove the switch
default: 'present'
choices:
- 'present'
- 'absent'
required: False
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example from Ansible playbook
- name: Add a VMware vSwitch
local_action:
module: vmware_vswitch
hostname: esxi_hostname
username: esxi_username
password: esxi_password
switch_name: vswitch_name
nic_name: vmnic_name
mtu: 9000
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
def find_vswitch_by_name(host, vswitch_name):
for vss in host.config.network.vswitch:
if vss.name == vswitch_name:
return vss
return None
class VMwareHostVirtualSwitch(object):
def __init__(self, module):
self.host_system = None
self.content = None
self.vss = None
self.module = module
self.switch_name = module.params['switch_name']
self.number_of_ports = module.params['number_of_ports']
self.nic_name = module.params['nic_name']
self.mtu = module.params['mtu']
self.state = module.params['state']
self.content = connect_to_api(self.module)
def process_state(self):
try:
vswitch_states = {
'absent': {
'present': self.state_destroy_vswitch,
'absent': self.state_exit_unchanged,
},
'present': {
'update': self.state_update_vswitch,
'present': self.state_exit_unchanged,
'absent': self.state_create_vswitch,
}
}
vswitch_states[self.state][self.check_vswitch_configuration()]()
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
except Exception as e:
self.module.fail_json(msg=str(e))
# Source from
# https://github.com/rreubenur/pyvmomi-community-samples/blob/patch-1/samples/create_vswitch.py
def state_create_vswitch(self):
vss_spec = vim.host.VirtualSwitch.Specification()
vss_spec.numPorts = self.number_of_ports
vss_spec.mtu = self.mtu
vss_spec.bridge = vim.host.VirtualSwitch.BondBridge(nicDevice=[self.nic_name])
self.host_system.configManager.networkSystem.AddVirtualSwitch(vswitchName=self.switch_name, spec=vss_spec)
self.module.exit_json(changed=True)
def state_exit_unchanged(self):
self.module.exit_json(changed=False)
def state_destroy_vswitch(self):
config = vim.host.NetworkConfig()
for portgroup in self.host_system.configManager.networkSystem.networkInfo.portgroup:
if portgroup.spec.vswitchName == self.vss.name:
portgroup_config = vim.host.PortGroup.Config()
portgroup_config.changeOperation = "remove"
portgroup_config.spec = vim.host.PortGroup.Specification()
                portgroup_config.spec.name = portgroup.spec.name
portgroup_config.spec.vlanId = portgroup.spec.vlanId
portgroup_config.spec.vswitchName = portgroup.spec.vswitchName
portgroup_config.spec.policy = vim.host.NetworkPolicy()
config.portgroup.append(portgroup_config)
self.host_system.configManager.networkSystem.UpdateNetworkConfig(config, "modify")
self.host_system.configManager.networkSystem.RemoveVirtualSwitch(self.vss.name)
self.module.exit_json(changed=True)
def state_update_vswitch(self):
self.module.exit_json(changed=False, msg="Currently not implemented.")
def check_vswitch_configuration(self):
host = get_all_objs(self.content, [vim.HostSystem])
if not host:
self.module.fail_json(msg="Unable to find host")
self.host_system = host.keys()[0]
self.vss = find_vswitch_by_name(self.host_system, self.switch_name)
if self.vss is None:
return 'absent'
else:
return 'present'
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(switch_name=dict(required=True, type='str'),
nic_name=dict(required=True, type='str'),
number_of_ports=dict(required=False, type='int', default=128),
mtu=dict(required=False, type='int', default=1500),
state=dict(default='present', choices=['present', 'absent'], type='str')))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
host_virtual_switch = VMwareHostVirtualSwitch(module)
host_virtual_switch.process_state()
from ansible.module_utils.vmware import *
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
RichardLitt/wyrd-django-dev | django/db/models/fields/related.py | 2 | 62048 | from operator import attrgetter
from django.db import connection, router
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.db.models.deletion import CASCADE
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.translation import ugettext_lazy as _, string_concat
from django.utils.functional import curry, cached_property
from django.core import exceptions
from django import forms
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
"""
Adds a lookup on ``cls`` when a related field is defined using a string,
i.e.::
class MyModel(Model):
fk = ForeignKey("AnotherModel")
This string can be:
* RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
relation.
* The name of a model (i.e "AnotherModel") to indicate another model in
the same app.
* An app-label and model name (i.e. "someapp.AnotherModel") to indicate
another model in a different app.
If the other model hasn't yet been loaded -- almost a given if you're using
lazy relationships -- then the relation won't be set up until the
class_prepared signal fires at the end of model initialization.
operation is the work that must be performed once the relation can be resolved.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
app_label = cls._meta.app_label
model_name = cls.__name__
else:
# Look for an "app.Model" relation
try:
app_label, model_name = relation.split(".")
except ValueError:
# If we can't split, assume a model in current app
app_label = cls._meta.app_label
model_name = relation
except AttributeError:
# If it doesn't have a split it's actually a model class
app_label = relation._meta.app_label
model_name = relation._meta.object_name
# Try to look up the related model, and if it's already loaded resolve the
# string right away. If get_model returns None, it means that the related
# model isn't loaded yet, so we need to pend the relation until the class
# is prepared.
model = get_model(app_label, model_name,
seed_cache=False, only_installed=False)
if model:
operation(field, model, cls)
else:
key = (app_label, model_name)
value = (cls, field, operation)
pending_lookups.setdefault(key, []).append(value)
def do_pending_lookups(sender, **kwargs):
"""
Handle any pending relations to the sending model. Sent from class_prepared.
"""
key = (sender._meta.app_label, sender.__name__)
for cls, field, operation in pending_lookups.pop(key, []):
operation(field, sender, cls)
signals.class_prepared.connect(do_pending_lookups)
#HACK
class RelatedField(object):
def contribute_to_class(self, cls, name):
sup = super(RelatedField, self)
# Store the opts for related_query_name()
self.opts = cls._meta
if hasattr(sup, 'contribute_to_class'):
sup.contribute_to_class(cls, name)
if not cls._meta.abstract and self.rel.related_name:
self.rel.related_name = self.rel.related_name % {
'class': cls.__name__.lower(),
'app_label': cls._meta.app_label.lower(),
}
other = self.rel.to
if isinstance(other, six.string_types) or other._meta.pk is None:
def resolve_related_class(field, model, cls):
field.rel.to = model
field.do_related_class(model, cls)
add_lazy_relation(cls, self, other, resolve_related_class)
else:
self.do_related_class(other, cls)
def set_attributes_from_rel(self):
self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
if self.verbose_name is None:
self.verbose_name = self.rel.to._meta.verbose_name
self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.related = RelatedObject(other, cls, self)
if not cls._meta.abstract:
self.contribute_to_related_class(other, self.related)
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return self._pk_trace(value, 'get_prep_lookup', lookup_type)
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabel_aliases method, it will need to
# be invoked before the final SQL is evaluated
if hasattr(value, 'relabel_aliases'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)]
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)
for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
# Value may be a primary key, or an object held in a relation.
# If it is an object, then we need to get the primary key value for
# that object. In certain conditions (especially one-to-one relations),
# the primary key may itself be an object - so we need to keep drilling
# down until we hit a value that can be used for a comparison.
v = value
# In the case of an FK to 'self', this check allows to_field to be used
# for both forwards and reverse lookups across the FK. (For normal FKs,
# it's only relevant for forward lookups).
if isinstance(v, self.rel.to):
field_name = getattr(self.rel, "field_name", None)
else:
field_name = None
try:
while True:
if field_name is None:
field_name = v._meta.pk.name
v = getattr(v, field_name)
field_name = None
except AttributeError:
pass
except exceptions.ObjectDoesNotExist:
v = None
field = self
while field.rel:
if hasattr(field.rel, 'field_name'):
field = field.rel.to._meta.get_field(field.rel.field_name)
else:
field = field.rel.to._meta.pk
if lookup_type in ('range', 'in'):
v = [v]
v = getattr(field, prep_func)(lookup_type, v, **kwargs)
if isinstance(v, list):
v = v[0]
return v
def related_query_name(self):
# This method defines the name that can be used to identify this
# related object in a table-spanning query. It uses the lower-cased
# object_name by default, but this can be overridden with the
# "related_name" option.
return self.rel.related_name or self.opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class pointed to by a related field.
# In the example "place.restaurant", the restaurant attribute is a
# SingleRelatedObjectDescriptor instance.
def __init__(self, related):
self.related = related
self.cache_name = related.get_cache_name()
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_query_set(self, **db_hints):
db = router.db_for_read(self.related.model, **db_hints)
return self.related.model._base_manager.using(db)
def get_prefetch_query_set(self, instances):
rel_obj_attr = attrgetter(self.related.field.attname)
instance_attr = lambda obj: obj._get_pk_val()
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
params = {'%s__pk__in' % self.related.field.name: list(instances_dict)}
qs = self.get_query_set(instance=instances[0]).filter(**params)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
rel_obj_cache_name = self.related.field.get_cache_name()
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return qs, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
related_pk = instance._get_pk_val()
if related_pk is None:
rel_obj = None
else:
params = {'%s__pk' % self.related.field.name: related_pk}
try:
rel_obj = self.get_query_set(instance=instance).get(**params)
except self.related.model.DoesNotExist:
rel_obj = None
else:
setattr(rel_obj, self.related.field.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None:
raise self.related.model.DoesNotExist
else:
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
# The similarity of the code below to the code in
# ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
        if value is None and not self.related.field.null:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.related.get_accessor_name()))
elif value is not None and not isinstance(value, self.related.model):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.related.get_accessor_name(), self.related.opts.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
related_pk = getattr(instance, self.related.field.rel.get_related_field().attname)
if related_pk is None:
raise ValueError('Cannot assign "%r": "%s" instance isn\'t saved in the database.' %
(value, instance._meta.object_name))
# Set the value of the related field to the value of the related object's related field
setattr(value, self.related.field.attname, related_pk)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class that defines the related field.
# In the example "choice.poll", the poll attribute is a
# ReverseSingleRelatedObjectDescriptor instance.
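    # A rough usage sketch (hypothetical Poll/Choice models, assuming Choice
    # declares poll = ForeignKey(Poll)):
    #
    #     choice = Choice.objects.get(pk=1)
    #     choice.poll               # fetched via __get__ and cached on the instance
    #     choice.poll = other_poll  # __set__ updates choice.poll_id and the caches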
def __init__(self, field_with_rel):
self.field = field_with_rel
self.cache_name = self.field.get_cache_name()
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_query_set(self, **db_hints):
db = router.db_for_read(self.field.rel.to, **db_hints)
rel_mgr = self.field.rel.to._default_manager
# If the related manager indicates that it should be used for
# related fields, respect that.
if getattr(rel_mgr, 'use_for_related_fields', False):
return rel_mgr.using(db)
else:
return QuerySet(self.field.rel.to).using(db)
def get_prefetch_query_set(self, instances):
other_field = self.field.rel.get_related_field()
rel_obj_attr = attrgetter(other_field.attname)
instance_attr = attrgetter(self.field.attname)
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
if other_field.rel:
params = {'%s__pk__in' % self.field.rel.field_name: list(instances_dict)}
else:
params = {'%s__in' % self.field.rel.field_name: list(instances_dict)}
qs = self.get_query_set(instance=instances[0]).filter(**params)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
if not self.field.rel.multiple:
rel_obj_cache_name = self.field.related.get_cache_name()
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return qs, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
val = getattr(instance, self.field.attname)
if val is None:
rel_obj = None
else:
other_field = self.field.rel.get_related_field()
if other_field.rel:
params = {'%s__%s' % (self.field.rel.field_name, other_field.rel.field_name): val}
else:
params = {'%s__exact' % self.field.rel.field_name: val}
qs = self.get_query_set(instance=instance)
# Assuming the database enforces foreign keys, this won't fail.
rel_obj = qs.get(**params)
if not self.field.rel.multiple:
setattr(rel_obj, self.field.related.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None and not self.field.null:
raise self.field.rel.to.DoesNotExist
else:
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.field.name)
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
        if value is None and not self.field.null:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.field.name))
elif value is not None and not isinstance(value, self.field.rel.to):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.field.name, self.field.rel.to._meta.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# If we're setting the value of a OneToOneField to None, we need to clear
# out the cache on any old related object. Otherwise, deleting the
# previously-related object will also cause this object to be deleted,
# which is wrong.
if value is None:
# Look up the previously-related object, which may still be available
# since we've not yet cleared out the related field.
# Use the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
# the object to invalidate the accessor cache, so there's no
# need to populate the cache just to expire it again.
related = getattr(instance, self.cache_name, None)
# If we've got an old related object, we need to clear out its
# cache. This cache also might not exist if the related object
# hasn't been accessed yet.
if related is not None:
setattr(related, self.field.related.get_cache_name(), None)
# Set the value of the related field
try:
val = getattr(value, self.field.rel.get_related_field().attname)
except AttributeError:
val = None
setattr(instance, self.field.attname, val)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
if value is not None and not self.field.rel.multiple:
setattr(value, self.field.related.get_cache_name(), instance)
class ForeignRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ForeignKey pointed at them by
# some other model. In the example "poll.choice_set", the choice_set
# attribute is a ForeignRelatedObjectsDescriptor instance.
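    # A rough usage sketch (hypothetical Poll/Choice models):
    #
    #     poll.choice_set.all()       # RelatedManager queryset filtered to this poll
    #     poll.choice_set.create(...) # saves a new Choice with its FK set to poll
    #     poll.choice_set.clear()     # only available when the FK allows null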
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
return self.related_manager_cls(instance)
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
manager = self.__get__(instance)
# If the foreign key can support nulls, then completely clear the related set.
# Otherwise, just move the named objects into the set.
if self.related.field.null:
manager.clear()
manager.add(*value)
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related model's default
# manager.
superclass = self.related.model._default_manager.__class__
rel_field = self.related.field
rel_model = self.related.model
attname = rel_field.rel.get_related_field().attname
class RelatedManager(superclass):
def __init__(self, instance):
super(RelatedManager, self).__init__()
self.instance = instance
self.core_filters = {
'%s__%s' % (rel_field.name, attname): getattr(instance, attname)
}
self.model = rel_model
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[rel_field.related_query_name()]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.model, instance=self.instance)
qs = super(RelatedManager, self).get_query_set().using(db).filter(**self.core_filters)
qs._known_related_object = (rel_field.name, self.instance)
return qs
def get_prefetch_query_set(self, instances):
rel_obj_attr = attrgetter(rel_field.attname)
instance_attr = attrgetter(attname)
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
db = self._db or router.db_for_read(self.model, instance=instances[0])
query = {'%s__%s__in' % (rel_field.name, attname): list(instances_dict)}
qs = super(RelatedManager, self).get_query_set().using(db).filter(**query)
# Since we just bypassed this class' get_query_set(), we must manage
# the reverse relation manually.
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_field.name, instance)
cache_name = rel_field.related_query_name()
return qs, rel_obj_attr, instance_attr, False, cache_name
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
setattr(obj, rel_field.name, self.instance)
obj.save()
add.alters_data = True
def create(self, **kwargs):
kwargs[rel_field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs[rel_field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel_field.null:
def remove(self, *objs):
val = getattr(self.instance, attname)
for obj in objs:
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
else:
raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, self.instance))
remove.alters_data = True
def clear(self):
self.update(**{rel_field.name: None})
clear.alters_data = True
return RelatedManager
def create_many_related_manager(superclass, rel):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
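    # Illustrative note: both sides of an m2m relation build their manager from
    # this factory (hypothetical Article/Publication models):
    #
    #     article.publications.add(pub)     # forward side, reverse=False
    #     publication.article_set.add(art)  # reverse side, reverse=True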
class ManyRelatedManager(superclass):
def __init__(self, model=None, query_field_name=None, instance=None, symmetrical=None,
source_field_name=None, target_field_name=None, reverse=False,
through=None, prefetch_cache_name=None):
super(ManyRelatedManager, self).__init__()
self.model = model
self.query_field_name = query_field_name
self.core_filters = {'%s__pk' % query_field_name: instance._get_pk_val()}
self.instance = instance
self.symmetrical = symmetrical
self.source_field_name = source_field_name
self.target_field_name = target_field_name
self.reverse = reverse
self.through = through
self.prefetch_cache_name = prefetch_cache_name
self._pk_val = self.instance.pk
if self._pk_val is None:
raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
return super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**self.core_filters)
def get_prefetch_query_set(self, instances):
instance = instances[0]
from django.db import connections
db = self._db or router.db_for_read(instance.__class__, instance=instance)
query = {'%s__pk__in' % self.query_field_name:
set(obj._get_pk_val() for obj in instances)}
qs = super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**query)
# M2M: need to annotate the query in order to get the primary model
# that the secondary model was actually related to. We know that
# there will already be a join on the join table, so we can just add
# the select.
            # For non-autocreated 'through' models, we can't assume we are
# dealing with PK values.
fk = self.through._meta.get_field(self.source_field_name)
source_col = fk.column
join_table = self.through._meta.db_table
connection = connections[db]
qn = connection.ops.quote_name
qs = qs.extra(select={'_prefetch_related_val':
'%s.%s' % (qn(join_table), qn(source_col))})
select_attname = fk.rel.get_related_field().get_attname()
return (qs,
attrgetter('_prefetch_related_val'),
attrgetter(select_attname),
False,
self.prefetch_cache_name)
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if rel.through._meta.auto_created:
def add(self, *objs):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_field_name, self.source_field_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_field_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_field_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not self.through._meta.auto_created:
opts = self.through._meta
raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = \
super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK fieldname in join table for the source object
# target_field_name: the PK fieldname in join table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
# If there aren't any objects, there is nothing to do.
from django.db.models import Model
if objs:
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
if not router.allow_relation(obj, self.instance):
raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
(obj, self.instance._state.db, obj._state.db))
new_ids.add(obj.pk)
elif isinstance(obj, Model):
raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
else:
new_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
vals = vals.filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: new_ids,
})
new_ids = new_ids - set(vals)
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='pre_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
# Add the ones that aren't there already
self.through._default_manager.using(db).bulk_create([
self.through(**{
'%s_id' % source_field_name: self._pk_val,
'%s_id' % target_field_name: obj_id,
})
for obj_id in new_ids
])
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='post_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
def _remove_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK colname in join table for the source object
# target_field_name: the PK colname in join table for the target object
# *objs - objects to remove
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):
old_ids.add(obj.pk)
else:
old_ids.add(obj)
# Work out what DB we're operating on
db = router.db_for_write(self.through, instance=self.instance)
# Send a signal to the other end if need be.
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="pre_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
# Remove the specified objects from the join table
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: old_ids
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="post_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
def _clear_items(self, source_field_name):
db = router.db_for_write(self.through, instance=self.instance)
# source_field_name: the PK colname in join table for the source object
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="pre_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="post_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField pointed at them by
# some other model (rather than having a ManyToManyField themselves).
# In the example "publication.article_set", the article_set attribute is a
# ManyRelatedObjectsDescriptor instance.
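    # A rough usage sketch (hypothetical models where Article declares
    # publications = ManyToManyField(Publication), with the default
    # auto-created through table):
    #
    #     publication.article_set.all()   # articles joined through the m2m table
    #     publication.article_set.add(a)  # inserts a row into the join table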
def __init__(self, related):
self.related = related # RelatedObject instance
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related
# model's default manager.
return create_many_related_manager(
self.related.model._default_manager.__class__,
self.related.field.rel
)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
rel_model = self.related.model
manager = self.related_manager_cls(
model=rel_model,
query_field_name=self.related.field.name,
prefetch_cache_name=self.related.field.related_query_name(),
instance=instance,
symmetrical=False,
source_field_name=self.related.field.m2m_reverse_field_name(),
target_field_name=self.related.field.m2m_field_name(),
reverse=True,
through=self.related.field.rel.through,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.related.field.rel.through._meta.auto_created:
opts = self.related.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField defined in their
# model (rather than having another model pointed *at* them).
# In the example "article.publications", the publications attribute is a
# ReverseManyRelatedObjectsDescriptor instance.
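    # A rough usage sketch (same hypothetical Article/Publication models):
    #
    #     article.publications.all()
    #     article.publications = [p1, p2]   # __set__ clears, then add()s the list
    #     Article.publications.through      # the (possibly auto-created) intermediary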
def __init__(self, m2m_field):
self.field = m2m_field
@property
def through(self):
# through is provided so that you have easy access to the through
# model (Book.authors.through) for inlines, etc. This is done as
# a property to ensure that the fully resolved value is returned.
return self.field.rel.through
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related model's
# default manager.
return create_many_related_manager(
self.field.rel.to._default_manager.__class__,
self.field.rel
)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
manager = self.related_manager_cls(
model=self.field.rel.to,
query_field_name=self.field.related_query_name(),
prefetch_cache_name=self.field.name,
instance=instance,
symmetrical=self.field.rel.symmetrical,
source_field_name=self.field.m2m_field_name(),
target_field_name=self.field.m2m_reverse_field_name(),
reverse=False,
through=self.field.rel.through,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.field.rel.through._meta.auto_created:
opts = self.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ManyToOneRel(object):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
self.to, self.field_name = to, field_name
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.multiple = True
self.parent_link = parent_link
self.on_delete = on_delete
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
Returns the Field in the 'to' object to which this relationship is
tied.
"""
data = self.to._meta.get_field_by_name(self.field_name)
if not data[2]:
raise FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return data[0]
class OneToOneRel(ManyToOneRel):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(to, field_name,
related_name=related_name, limit_choices_to=limit_choices_to,
parent_link=parent_link, on_delete=on_delete
)
self.multiple = False
class ManyToManyRel(object):
def __init__(self, to, related_name=None, limit_choices_to=None,
symmetrical=True, through=None):
self.to = to
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.symmetrical = symmetrical
self.multiple = True
self.through = through
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
        Returns the field in the 'to' object to which this relationship is tied
(this is always the primary key on the target model). Provided for
symmetry with ManyToOneRel.
"""
return self.to._meta.pk
class ForeignKey(RelatedField, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
}
description = _("Foreign Key (type determined by related field)")
def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
try:
to_name = to._meta.object_name.lower()
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
else:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
if 'db_index' not in kwargs:
kwargs['db_index'] = True
kwargs['rel'] = rel_class(to, to_field,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
parent_link=kwargs.pop('parent_link', False),
on_delete=kwargs.pop('on_delete', CASCADE),
)
Field.__init__(self, **kwargs)
def validate(self, value, model_instance):
if self.rel.parent_link:
return
super(ForeignKey, self).validate(value, model_instance)
if value is None:
return
using = router.db_for_read(model_instance.__class__, instance=model_instance)
qs = self.rel.to._default_manager.using(using).filter(
**{self.rel.field_name: value}
)
qs = qs.complex_filter(self.rel.limit_choices_to)
if not qs.exists():
raise exceptions.ValidationError(self.error_messages['invalid'] % {
'model': self.rel.to._meta.verbose_name, 'pk': value})
def get_attname(self):
return '%s_id' % self.name
def get_validator_unique_lookup_type(self):
return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
if isinstance(field_default, self.rel.to):
return getattr(field_default, self.rel.get_related_field().attname)
return field_default
def get_db_prep_save(self, value, connection):
        if value == '' or value is None:
return None
else:
return self.rel.get_related_field().get_db_prep_save(value,
connection=connection)
def value_to_string(self, obj):
if not obj:
# In required many-to-one fields with only one available choice,
# select that one available choice. Note: For SelectFields
# we have to check that the length of choices is *2*, not 1,
# because SelectFields always have an initial "blank" value.
if not self.blank and self.choices:
choice_list = self.get_choices_default()
if len(choice_list) == 2:
return smart_text(choice_list[1][0])
return Field.value_to_string(self, obj)
def contribute_to_class(self, cls, name):
super(ForeignKey, self).contribute_to_class(cls, name)
setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
if isinstance(self.rel.to, six.string_types):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "o2m")
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# and swapped models don't get a related descriptor.
if not self.rel.is_hidden() and not related.model._meta.swapped:
setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
if self.rel.limit_choices_to:
cls._meta.related_fkey_lookups.append(self.rel.limit_choices_to)
if self.rel.field_name is None:
self.rel.field_name = cls._meta.pk.name
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
if isinstance(self.rel.to, six.string_types):
raise ValueError("Cannot create form field for %r yet, because "
"its related model %r has not been loaded yet" %
(self.name, self.rel.to))
defaults = {
'form_class': forms.ModelChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
'to_field_name': self.rel.field_name,
}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
def db_type(self, connection):
# The database column type of a ForeignKey is the column type
# of the field to which it points. An exception is if the ForeignKey
# points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
# in which case the column type is simply that of an IntegerField.
# If the database needs similar types for key fields however, the only
# thing we can do is making AutoField an IntegerField.
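        # For instance (illustrative), a ForeignKey to a model with a default
        # AutoField primary key is stored in an integer column, i.e. the same
        # db_type an IntegerField would report for this connection.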
rel_field = self.rel.get_related_field()
if (isinstance(rel_field, AutoField) or
(not connection.features.related_fields_match_type and
isinstance(rel_field, (PositiveIntegerField,
PositiveSmallIntegerField)))):
return IntegerField().db_type(connection=connection)
return rel_field.db_type(connection=connection)
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
    that it always carries a "unique" constraint with it and the reverse relation
always returns the object pointed to (since there will only ever be one),
rather than returning a list.
"""
description = _("One-to-one relationship")
def __init__(self, to, to_field=None, **kwargs):
kwargs['unique'] = True
super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
SingleRelatedObjectDescriptor(related))
def formfield(self, **kwargs):
if self.rel.parent_link:
return None
return super(OneToOneField, self).formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.rel.to):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
def create_many_to_many_intermediary_model(field, klass):
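    # Descriptive sketch of what this builds: an implicit "through" model with
    # two ForeignKeys (one back to the declaring model, one to the target) and
    # a unique_together constraint on that pair; e.g. for a hypothetical
    # Article.publications field the generated model is named
    # "Article_publications".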
from django.db import models
managed = True
if isinstance(field.rel.to, six.string_types) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
to_model = field.rel.to
to = to_model.split('.')[-1]
def set_managed(field, model, cls):
field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
add_lazy_relation(klass, field, to_model, set_managed)
elif isinstance(field.rel.to, six.string_types):
to = klass._meta.object_name
to_model = klass
managed = klass._meta.managed
else:
to = field.rel.to._meta.object_name
to_model = field.rel.to
managed = klass._meta.managed or to_model._meta.managed
name = '%s_%s' % (klass._meta.object_name, field.name)
if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
from_ = 'from_%s' % to.lower()
to = 'to_%s' % to.lower()
else:
from_ = klass._meta.object_name.lower()
to = to.lower()
meta = type('Meta', (object,), {
'db_table': field._get_m2m_db_table(klass._meta),
'managed': managed,
'auto_created': klass,
'app_label': klass._meta.app_label,
'db_tablespace': klass._meta.db_tablespace,
'unique_together': (from_, to),
'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
})
# Construct and return the new class.
return type(name, (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(klass, related_name='%s+' % name, db_tablespace=field.db_tablespace),
to: models.ForeignKey(to_model, related_name='%s+' % name, db_tablespace=field.db_tablespace)
})
class ManyToManyField(RelatedField, Field):
description = _("Many-to-many relationship")
def __init__(self, to, **kwargs):
try:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
# Python 2.6 and earlier require dictionary keys to be of str type,
        # not unicode, and class names must be ASCII (in Python 2.x), so we
# forcibly coerce it here (breaks early if there's a problem).
to = str(to)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = ManyToManyRel(to,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', to == RECURSIVE_RELATIONSHIP_CONSTANT),
through=kwargs.pop('through', None))
self.db_table = kwargs.pop('db_table', None)
if kwargs['rel'].through is not None:
assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
Field.__init__(self, **kwargs)
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
self.help_text = string_concat(self.help_text, ' ', msg)
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def _get_m2m_db_table(self, opts):
"Function that can be curried to provide the m2m table name for this relation"
if self.rel.through is not None:
return self.rel.through._meta.db_table
elif self.db_table:
return self.db_table
else:
return util.truncate_name('%s_%s' % (opts.db_table, self.name),
connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"Function that can be curried to provide the source accessor or DB column name for the m2m table"
cache_attr = '_m2m_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
for f in self.rel.through._meta.fields:
if hasattr(f, 'rel') and f.rel and f.rel.to == related.model:
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"Function that can be curried to provide the related accessor or DB column name for the m2m table"
cache_attr = '_m2m_reverse_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
for f in self.rel.through._meta.fields:
if hasattr(f, 'rel') and f.rel and f.rel.to == related.parent_model:
if related.model == related.parent_model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
else:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def value_to_string(self, obj):
data = ''
if obj:
qs = getattr(obj, self.name).all()
data = [instance._get_pk_val() for instance in qs]
else:
# In required many-to-many fields with only one available choice,
# select that one available choice.
if not self.blank:
choices_list = self.get_choices_default()
if len(choices_list) == 1:
data = [choices_list[0][0]]
return smart_text(data)
def contribute_to_class(self, cls, name):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
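        # For instance (illustrative), a ManyToManyField('self') named
        # "friends" ends up with rel.related_name == "friends_rel_+", which
        # ManyToManyRel.is_hidden() then treats as a hidden reverse relation.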
if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
self.rel.related_name = "%s_rel_+" % name
super(ManyToManyField, self).contribute_to_class(cls, name)
# The intermediate m2m model is not auto created if:
        #  1) There is a manually specified intermediate,
        #  2) The class owning the m2m field is abstract, or
        #  3) The class owning the m2m field has been swapped out.
if not self.rel.through and not cls._meta.abstract and not cls._meta.swapped:
self.rel.through = create_many_to_many_intermediary_model(self, cls)
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
# Set up the accessor for the m2m table name for the relation
self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
# Populate some necessary rel arguments so that cross-app relations
# work correctly.
if isinstance(self.rel.through, six.string_types):
def resolve_through_model(field, model, cls):
field.rel.through = model
add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
if isinstance(self.rel.to, six.string_types):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "m2m")
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# and swapped models don't get a related descriptor.
if not self.rel.is_hidden() and not related.model._meta.swapped:
setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
# Set up the accessors for the column names on the m2m table
self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')
self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')
get_m2m_rel = curry(self._get_m2m_attr, related, 'rel')
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = curry(self._get_m2m_reverse_attr, related, 'rel')
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
"Returns the value of this field in the given model instance."
return getattr(obj, self.attname).all()
def save_form_data(self, instance, data):
setattr(instance, self.attname, data)
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelMultipleChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
}
defaults.update(kwargs)
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
initial = defaults['initial']
if callable(initial):
initial = initial()
defaults['initial'] = [i._get_pk_val() for i in initial]
return super(ManyToManyField, self).formfield(**defaults)
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
| bsd-3-clause |
SuriyaaKudoIsc/olympia | apps/api/tests/test_oauth.py | 5 | 34999 | """
Verifies basic OAuth functionality in AMO.
Sample request_token query:
/en-US/firefox/oauth/request_token/?
oauth_consumer_key=GYKEp7m5fJpj9j8Vjz&
oauth_nonce=A7A79B47-B571-4D70-AA6C-592A0555E94B&
oauth_signature_method=HMAC-SHA1&
oauth_timestamp=1282950712&
oauth_version=1.0
With headers:
Authorization: OAuth realm="",
oauth_consumer_key="GYKEp7m5fJpj9j8Vjz",
oauth_signature_method="HMAC-SHA1",
oauth_signature="JBCA4ah%2FOQC0lLWV8aChGAC+15s%3D",
oauth_timestamp="1282950995",
oauth_nonce="1008F707-37E6-4ABF-8322-C6B658771D88",
oauth_version="1.0"
"""
import json
import os
import time
import urllib
import urlparse
from django import forms
from django.conf import settings
from django.core import mail
from django.test.client import (encode_multipart, Client, FakePayload,
BOUNDARY, MULTIPART_CONTENT, RequestFactory)
import oauth2 as oauth
from mock import Mock, patch
from nose.tools import eq_
from piston.models import Consumer
import amo
from amo.helpers import absolutify
from amo.tests import TestCase
from amo.urlresolvers import reverse
from api.authentication import AMOOAuthAuthentication
from addons.models import Addon, AddonUser, BlacklistedGuid
from devhub.models import ActivityLog, SubmitStep
from files.models import File
from perf.models import (Performance, PerformanceAppVersions,
PerformanceOSVersion)
from translations.models import Translation
from users.models import UserProfile
from versions.models import AppVersion, Version
def _get_args(consumer, token=None, callback=False, verifier=None):
d = dict(oauth_consumer_key=consumer.key,
oauth_nonce=oauth.generate_nonce(),
oauth_signature_method='HMAC-SHA1',
oauth_timestamp=int(time.time()),
oauth_version='1.0')
if callback:
d['oauth_callback'] = 'http://testserver/foo'
if verifier:
d['oauth_verifier'] = verifier
return d
def get_absolute_url(url):
if isinstance(url, tuple):
url = reverse(url[0], args=url[1:])
else:
url = reverse(url)
return 'http://%s%s' % ('api', url)
def data_keys(d):
# Form keys and values MUST be part of the signature.
# File keys MUST be part of the signature.
# But file values MUST NOT be included as part of the signature.
return dict([k, '' if isinstance(v, file) else v] for k, v in d.items())
class OAuthClient(Client):
"""OauthClient can make magically signed requests."""
signature_method = oauth.SignatureMethod_HMAC_SHA1()
def get(self, url, consumer=None, token=None, callback=False,
verifier=None, params=None):
url = get_absolute_url(url)
if params:
url = '%s?%s' % (url, urllib.urlencode(params))
req = oauth.Request(method='GET', url=url,
parameters=_get_args(consumer, callback=callback,
verifier=verifier))
req.sign_request(self.signature_method, consumer, token)
return super(OAuthClient, self).get(
req.to_url(), HTTP_HOST='api', HTTP_AUTHORIZATION='OAuth realm=""',
**req)
def delete(self, url, consumer=None, token=None, callback=False,
verifier=None):
url = get_absolute_url(url)
req = oauth.Request(method='DELETE', url=url,
parameters=_get_args(consumer, callback=callback,
verifier=verifier))
req.sign_request(self.signature_method, consumer, token)
return super(OAuthClient, self).delete(
req.to_url(), HTTP_HOST='api', HTTP_AUTHORIZATION='OAuth realm=""',
**req)
def post(self, url, consumer=None, token=None, callback=False,
verifier=None, data={}):
url = get_absolute_url(url)
params = _get_args(consumer, callback=callback, verifier=verifier)
params.update(data_keys(data))
req = oauth.Request(method='POST', url=url, parameters=params)
req.sign_request(self.signature_method, consumer, token)
return super(OAuthClient, self).post(
req.to_url(), HTTP_HOST='api', HTTP_AUTHORIZATION='OAuth realm=""',
data=data, headers=req.to_header())
def put(self, url, consumer=None, token=None, callback=False,
verifier=None, data={}, content_type=MULTIPART_CONTENT, **kwargs):
"""
Send a resource to the server using PUT.
"""
# If data has come from JSON remove unicode keys.
data = dict([(str(k), v) for k, v in data.items()])
url = get_absolute_url(url)
params = _get_args(consumer, callback=callback, verifier=verifier)
params.update(data_keys(data))
req = oauth.Request(method='PUT', url=url, parameters=params)
req.sign_request(self.signature_method, consumer, token)
post_data = encode_multipart(BOUNDARY, data)
parsed = urlparse.urlparse(url)
query_string = urllib.urlencode(req, doseq=True)
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': urllib.unquote(parsed[2]),
'QUERY_STRING': query_string,
'REQUEST_METHOD': 'PUT',
'wsgi.input': FakePayload(post_data),
'HTTP_HOST': 'api',
'HTTP_AUTHORIZATION': 'OAuth realm=""',
}
r.update(req)
response = self.request(**r)
return response
oclient = OAuthClient()
token_keys = ('oauth_token_secret', 'oauth_token',)
def get_token_from_response(response):
data = urlparse.parse_qs(response.content)
for key in token_keys:
assert key in data.keys(), '%s not in %s' % (key, data.keys())
return oauth.Token(key=data['oauth_token'][0],
secret=data['oauth_token_secret'][0])
def get_request_token(consumer, callback=False):
r = oclient.get('oauth.request_token', consumer, callback=callback)
return get_token_from_response(r)
def get_access_token(consumer, token, authorize=True, verifier=None):
r = oclient.get('oauth.access_token', consumer, token, verifier=verifier)
if authorize:
return get_token_from_response(r)
else:
eq_(r.status_code, 401)
class BaseOAuth(TestCase):
fixtures = ['base/users', 'base/appversion', 'base/licenses']
def setUp(self):
super(BaseOAuth, self).setUp()
self.editor = UserProfile.objects.get(email='editor@mozilla.com')
self.admin = UserProfile.objects.get(email='admin@mozilla.com')
consumers = []
for status in ('accepted', 'pending', 'canceled', ):
c = Consumer(name='a', status=status, user=self.editor)
c.generate_random_codes()
c.save()
consumers.append(c)
self.accepted_consumer = consumers[0]
self.pending_consumer = consumers[1]
self.canceled_consumer = consumers[2]
self.token = None
class TestBaseOAuth(BaseOAuth):
def test_accepted(self):
self.assertRaises(AssertionError, get_request_token,
self.accepted_consumer)
def test_accepted_callback(self):
get_request_token(self.accepted_consumer, callback=True)
def test_request_token_pending(self):
get_request_token(self.pending_consumer, callback=True)
def test_request_token_cancelled(self):
get_request_token(self.canceled_consumer, callback=True)
def test_request_token_fake(self):
"""Try with a phony consumer key"""
c = Mock()
c.key = 'yer'
c.secret = 'mom'
r = oclient.get('oauth.request_token', c, callback=True)
eq_(r.content, 'Invalid Consumer.')
@patch('piston.authentication.oauth.OAuthAuthentication.is_authenticated')
def _test_auth(self, pk, is_authenticated, two_legged=True):
request = RequestFactory().get('/en-US/firefox/2/api/2/user/',
data={'authenticate_as': pk})
request.user = None
def alter_request(*args, **kw):
request.user = self.admin
return True
is_authenticated.return_value = True
is_authenticated.side_effect = alter_request
auth = AMOOAuthAuthentication()
auth.two_legged = two_legged
auth.is_authenticated(request)
return request
def test_login_nonexistant(self):
eq_(self.admin, self._test_auth(9999).user)
def test_login_deleted(self):
# If _test_auth returns self.admin, that means the user was
# not altered to the user set in authenticate_as.
self.editor.update(deleted=True)
pk = self.editor.pk
eq_(self.admin, self._test_auth(pk).user)
def test_login_unconfirmed(self):
self.editor.update(confirmationcode='something')
pk = self.editor.pk
eq_(self.admin, self._test_auth(pk).user)
def test_login_works(self):
pk = self.editor.pk
eq_(self.editor, self._test_auth(pk).user)
def test_login_three_legged(self):
pk = self.editor.pk
eq_(self.admin, self._test_auth(pk, two_legged=False).user)
class TestUser(BaseOAuth):
def test_user(self):
r = oclient.get('api.user', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['email'], 'editor@mozilla.com')
def test_user_lookup(self):
partner = UserProfile.objects.get(email='partner@mozilla.com')
c = Consumer(name='p', status='accepted', user=partner)
c.generate_random_codes()
c.save()
r = oclient.get('api.user', c, None,
params={'email': 'admin@mozilla.com'})
eq_(r.status_code, 200)
eq_(json.loads(r.content)['email'], 'admin@mozilla.com')
def test_failed_user_lookup(self):
partner = UserProfile.objects.get(email='partner@mozilla.com')
c = Consumer(name='p', status='accepted', user=partner)
c.generate_random_codes()
c.save()
r = oclient.get('api.user', c, None,
params={'email': 'not_a_user@mozilla.com'})
eq_(r.status_code, 404)
def test_forbidden_user_lookup(self, response_code=401):
r = oclient.get('api.user', self.accepted_consumer, self.token,
params={'email': 'admin@mozilla.com'})
eq_(r.status_code, response_code)
class TestDRFUser(TestUser):
def setUp(self):
super(TestDRFUser, self).setUp()
self.create_switch('drf', db=True)
def test_forbidden_user_lookup(self):
super(TestDRFUser, self).test_forbidden_user_lookup(response_code=403)
def activitylog_count(type=None):
qs = ActivityLog.objects
if type:
qs = qs.filter(action=type.id)
return qs.count()
class TestAddon(BaseOAuth):
created_http_status = 200
permission_denied_http_status = 401
def setUp(self):
super(TestAddon, self).setUp()
path = 'apps/files/fixtures/files/extension.xpi'
xpi = os.path.join(settings.ROOT, path)
f = open(xpi)
self.create_data = dict(builtin=0,
name='FREEDOM',
text='This is FREE!',
platform='mac',
xpi=f)
path = 'apps/files/fixtures/files/extension-0.2.xpi'
self.version_data = dict(builtin=2, platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
self.update_data = dict(name='fu',
default_locale='fr',
homepage='mozilla.com',
support_email='go@away.com',
support_url='http://google.com/',
description='awesome',
summary='sucks',
developer_comments='i made it for you',
eula='love it',
privacy_policy='aybabtu',
the_reason='for shits',
the_future='is gone',
view_source=1,
prerelease=1,
binary=False,
site_specific=1)
def make_create_request(self, data):
return oclient.post('api.addons', self.accepted_consumer, self.token,
data=data)
def create_addon(self):
current_count = activitylog_count(amo.LOG.CREATE_ADDON)
r = self.make_create_request(self.create_data)
eq_(r.status_code, self.created_http_status, r.content)
# 1 new add-on
eq_(activitylog_count(amo.LOG.CREATE_ADDON), current_count + 1)
return json.loads(r.content)
def test_create_no_user(self):
# The user in TwoLeggedAuth is set to the consumer user.
# If there isn't one, we should get a challenge back.
self.accepted_consumer.user = None
self.accepted_consumer.save()
r = self.make_create_request(self.create_data)
eq_(r.status_code, 401)
def test_create_user_altered(self):
data = self.create_data
data['authenticate_as'] = self.editor.pk
r = self.make_create_request(data)
eq_(r.status_code, self.created_http_status, r.content)
id = json.loads(r.content)['id']
ad = Addon.objects.get(pk=id)
eq_(len(ad.authors.all()), 1)
eq_(ad.authors.all()[0].pk, self.editor.pk)
def test_create(self):
        # License (req'd): MIT, GPLv2, GPLv3, LGPLv2.1, LGPLv3, BSD, Other
# Custom License (if other, req'd)
# XPI file... (req'd)
# Platform (All by default): 'mac', 'all', 'bsd', 'linux', 'solaris',
# 'windows'
data = self.create_addon()
id = data['id']
name = data['name']
eq_(name, 'xpi name')
assert Addon.objects.get(pk=id)
def create_no_license(self):
data = self.create_data.copy()
del data['builtin']
return self.make_create_request(data)
def test_create_no_license(self):
r = self.create_no_license()
eq_(r.status_code, self.created_http_status, r.content)
eq_(Addon.objects.count(), 1)
def test_create_no_license_step(self):
r = self.create_no_license()
eq_(r.status_code, self.created_http_status, r.content)
id = json.loads(r.content)['id']
eq_(SubmitStep.objects.get(addon=id).step, 5)
def test_create_no_license_url(self):
self.create_no_license()
self.client.login(username='editor@mozilla.com', password='password')
res = self.client.get(reverse('devhub.submit.resume',
args=['xpi-name']))
self.assertRedirects(res, reverse('devhub.submit.5',
args=['xpi-name']))
def test_create_no_license_status(self):
self.create_no_license()
eq_(Addon.objects.get(slug='xpi-name').status, 0)
def test_create_status(self):
r = self.make_create_request(self.create_data)
eq_(r.status_code, self.created_http_status, r.content)
eq_(Addon.objects.get(slug='xpi-name').status, 0)
eq_(Addon.objects.count(), 1)
def test_create_slug(self):
r = self.make_create_request(self.create_data)
content = json.loads(r.content)
eq_(content['slug'], 'xpi-name')
eq_(content['resource_uri'],
absolutify(reverse('addons.detail', args=['xpi-name'])))
def test_delete(self):
data = self.create_addon()
id = data['id']
guid = data['guid']
# Force it to be public so its guid gets blacklisted.
Addon.objects.filter(id=id).update(highest_status=amo.STATUS_PUBLIC)
r = oclient.delete(('api.addon', id), self.accepted_consumer,
self.token)
eq_(r.status_code, 204, r.content)
eq_(Addon.objects.filter(pk=id).count(), 0, "Didn't delete.")
assert BlacklistedGuid.objects.filter(guid=guid)
eq_(len(mail.outbox), 1)
def test_update(self):
# create an addon
data = self.create_addon()
id = data['id']
current_count = activitylog_count()
r = oclient.put(('api.addon', id), self.accepted_consumer, self.token,
data=self.update_data)
eq_(r.status_code, 200, r.content)
# EDIT_PROPERTIES
eq_(activitylog_count(), current_count + 1)
a = Addon.objects.get(pk=id)
for field, expected in self.update_data.iteritems():
value = getattr(a, field)
if isinstance(value, Translation):
value = unicode(value)
eq_(value, expected,
"'%s' didn't match: got '%s' instead of '%s'"
% (field, getattr(a, field), expected))
@patch('api.handlers.AddonForm.is_valid')
def test_update_fail(self, is_valid):
data = self.create_addon()
id = data['id']
is_valid.return_value = False
r = oclient.put(('api.addon', id), self.accepted_consumer, self.token,
data=self.update_data)
eq_(r.status_code, 400, r.content)
def test_update_nonexistant(self):
r = oclient.put(('api.addon', 0), self.accepted_consumer, self.token,
data={})
eq_(r.status_code, 410, r.content)
@patch('api.handlers.XPIForm.clean_xpi')
def test_xpi_failure(self, f):
f.side_effect = forms.ValidationError('F')
r = self.make_create_request(self.create_data)
eq_(r.status_code, 400)
def test_fake_license(self):
data = self.create_data.copy()
data['builtin'] = 'fff'
r = self.make_create_request(data)
eq_(r.status_code, 400, r.content)
eq_(r.content, 'Bad Request: Invalid data provided: '
'Select a valid choice. fff is not one of the available choices. '
'(builtin)')
@patch('zipfile.ZipFile.infolist')
def test_bad_zip(self, infolist):
fake = Mock()
fake.filename = '..'
infolist.return_value = [fake]
r = self.make_create_request(self.create_data)
eq_(r.status_code, 400, r.content)
@patch('versions.models.AppVersion.objects.get')
def test_bad_appversion(self, get):
get.side_effect = AppVersion.DoesNotExist()
data = self.create_addon()
assert data, "We didn't get data."
def test_wrong_guid(self):
data = self.create_addon()
id = data['id']
addon = Addon.objects.get(pk=id)
addon.guid = 'XXX'
addon.save()
# Upload new version of file
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=self.version_data)
eq_(r.status_code, 400)
eq_(r.content, 'Bad Request: Add-on did not validate: '
"UUID doesn't match add-on.")
def test_duplicate_guid(self):
self.create_addon()
data = self.create_data.copy()
data['xpi'] = self.version_data['xpi']
r = self.make_create_request(data)
eq_(r.status_code, 400)
eq_(r.content, 'Bad Request: Add-on did not validate: '
'Duplicate UUID found.')
def test_create_version(self):
# Create an addon and let's use this for the new version.
data = self.create_addon()
id = data['id']
log_count = activitylog_count()
# Upload new version of file
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=self.version_data)
eq_(r.status_code, 200, r.content)
# verify we've logged a new version and a new app version
eq_(log_count + 2, activitylog_count())
# validate that the addon has 2 versions
a = Addon.objects.get(pk=id)
eq_(a.versions.all().count(), 2)
# validate the version number
v = a.versions.get(version='0.2')
eq_(v.version, '0.2')
# validate any new version data
eq_(amo.PLATFORMS[v.files.get().platform].shortname, 'windows')
def test_create_version_bad_license(self):
data = self.create_addon()
id = data['id']
data = self.version_data.copy()
data['builtin'] = 'fu'
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=data)
eq_(r.status_code, 400, r.content)
def test_create_version_no_license(self):
data = self.create_addon()
id = data['id']
data = self.version_data.copy()
del data['builtin']
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=data)
eq_(r.status_code, 200, r.content)
data = json.loads(r.content)
id = data['id']
v = Version.objects.get(pk=id)
assert not v.license
def create_for_update(self):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
eq_(v.version, '0.1')
return a, v, 'apps/files/fixtures/files/extension-0.2.xpi'
def test_update_version_no_license(self):
a, v, path = self.create_for_update()
data = dict(release_notes='fukyeah', platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
r = oclient.put(('api.version', a.id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 200, r.content)
v = a.versions.get()
eq_(v.version, '0.2')
eq_(v.license, None)
def test_update_version_bad_license(self):
a, v, path = self.create_for_update()
data = dict(release_notes='fukyeah', builtin=3, platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
r = oclient.put(('api.version', a.id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 400, r.content)
def test_update_version(self):
a, v, path = self.create_for_update()
data = dict(release_notes='fukyeah', builtin=2, platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
log_count = activitylog_count()
# upload new version
r = oclient.put(('api.version', a.id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 200, r.content[:1000])
# verify we've logged a version update and a new app version
eq_(activitylog_count(), log_count + 2)
# verify data
v = a.versions.get()
eq_(v.version, '0.2')
eq_(str(v.releasenotes), 'fukyeah')
eq_(str(v.license.builtin), '2')
def test_update_version_bad_xpi(self):
data = self.create_addon()
id = data['id']
# verify version
a = Addon.objects.get(pk=id)
v = a.versions.get()
eq_(v.version, '0.1')
data = dict(release_notes='fukyeah', platform='windows')
# upload new version
r = oclient.put(('api.version', id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 400)
def test_update_version_bad_id(self):
r = oclient.put(('api.version', 0, 0), self.accepted_consumer,
self.token, data={}, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 410, r.content)
def test_get_version(self):
data = self.create_addon()
a = Addon.objects.get(pk=data['id'])
r = oclient.get(('api.version', data['id'], a.versions.get().id),
self.accepted_consumer, self.token)
eq_(r.status_code, 200)
def test_get_version_statuses(self):
data = self.create_addon()
a = Addon.objects.get(pk=data['id'])
r = oclient.get(('api.version', data['id'], a.versions.get().id),
self.accepted_consumer, self.token)
eq_(json.loads(r.content)['statuses'],
[[File.objects.all()[0].pk, 1]])
@patch('api.authorization.AllowRelatedAppOwner.has_object_permission')
@patch('api.authorization.AllowAppOwner.has_object_permission')
@patch('access.acl.action_allowed')
@patch('access.acl.check_addon_ownership')
def test_not_my_addon(self, addon_ownership, action_allowed,
app_owner, related_app_owner):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
        # The first one is for piston, the next 3 are for DRF.
addon_ownership.return_value = False
action_allowed.return_value = False
app_owner.return_value = False
related_app_owner.return_value = False
r = oclient.put(('api.version', id, v.id), self.accepted_consumer,
self.token, data={}, content_type=MULTIPART_CONTENT)
eq_(r.status_code, self.permission_denied_http_status, r.content)
r = oclient.put(('api.addon', id), self.accepted_consumer, self.token,
data=self.update_data)
eq_(r.status_code, self.permission_denied_http_status, r.content)
def test_delete_version(self):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
log_count = activitylog_count()
r = oclient.delete(('api.version', id, v.id), self.accepted_consumer,
self.token)
eq_(activitylog_count(), log_count + 1)
eq_(r.status_code, 204, r.content)
eq_(a.versions.count(), 0)
def test_retrieve_versions(self):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
r = oclient.get(('api.versions', id), self.accepted_consumer,
self.token)
eq_(r.status_code, 200, r.content)
data = json.loads(r.content)
for attr in ('id', 'version',):
expect = getattr(v, attr)
val = data[0].get(attr)
eq_(expect, val,
'Got "%s" was expecting "%s" for "%s".' % (val, expect, attr,))
def test_no_addons(self):
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_no_user(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.admin,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_my_addons_only(self):
for num in range(0, 2):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token,
params={'authenticate_as': self.editor.pk})
j = json.loads(r.content)
eq_(j['count'], 1)
eq_(j['objects'][0]['id'], addon.id)
def test_one_addon(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get(('api.addon', addon.pk), self.accepted_consumer,
self.token, params={'authenticate_as': self.editor.pk})
eq_(json.loads(r.content)['id'], addon.pk)
def test_my_addons_role(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_VIEWER)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_my_addons_disabled(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
status=amo.STATUS_DISABLED)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_my_addons_deleted(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
status=amo.STATUS_DELETED)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
class TestDRFAddon(TestAddon):
created_http_status = 201
permission_denied_http_status = 403
def setUp(self):
super(TestDRFAddon, self).setUp()
self.create_switch('drf', db=True)
def _compare_dicts(self, drf_data, piston_data):
"""
Given 2 dicts of data from DRF and Piston, compare keys then values.
"""
eq_(sorted(drf_data.keys()), sorted(piston_data.keys()),
('Keys inexistent from Piston: {0}\n'
'Keys inexistent from DRF: {1}').format(
set(piston_data) - set(drf_data),
set(drf_data) - set(piston_data)))
for drf_item, piston_item in zip(sorted(drf_data.items()),
sorted(piston_data.items())):
eq_(drf_item[0], piston_item[0])
eq_(drf_item[1], piston_item[1],
('Different representations for key "{0}": DRF={1}, Piston={2}'
.format(drf_item[0], drf_item[1], piston_item[1])))
def compare_output(self, url, listed=False):
"""
Load responses from DRF and Piston given the `url` parameter and
compare returned data dicts, key by key. Useful to make sure
that both responses are similar.
Set `listed` to True for comparing responses as lists.
"""
r = oclient.get(url, self.accepted_consumer, self.token)
eq_(r.status_code, 200, r.content)
drf_data = json.loads(r.content)
self.create_switch('drf', **{'active': False})
r = oclient.get(url, self.accepted_consumer, self.token)
eq_(r.status_code, 200, r.content)
piston_data = json.loads(r.content)
if listed:
eq_(len(drf_data), len(piston_data))
for items in zip(drf_data, piston_data):
self._compare_dicts(items[0], items[1])
else:
self._compare_dicts(drf_data, piston_data)
def test_diff_versions(self):
data = self.create_addon()
self.compare_output(('api.versions', data['id']), listed=True)
def test_diff_version(self):
data = self.create_addon()
addon = Addon.objects.get(pk=data['id'])
version = addon.versions.get()
self.compare_output(('api.version', addon.id, version.id))
def test_diff_addons(self):
self.create_addon()
self.compare_output(('api.addons'))
def test_diff_addon(self):
data = self.create_addon()
self.compare_output(('api.addon', data['id']))
class TestPerformanceAPI(BaseOAuth):
fixtures = ['base/users']
def get_data(self):
return {
'os': 'WINNT',
'version': '123',
'platform': 'x86',
'product': 'firefox',
'product_version': 'x.y.z',
'average': '1.25',
'test': 'ts'
}
def make_create_request(self, data):
return oclient.post('api.performance.add', self.accepted_consumer,
self.token, data=data)
def test_form_fails(self):
res = self.make_create_request({})
eq_(res.status_code, 400)
def test_not_allowed(self):
res = self.client.post(reverse('api.performance.add'), {})
eq_(res.status_code, 401)
def test_form_incomplete(self):
data = self.get_data()
del data['test']
res = self.make_create_request(data)
eq_(res.status_code, 400)
assert 'This field is required. (test)' in res.content
def test_form_validate(self):
data = self.get_data()
data['os'] = 'WebOS hotness'
res = self.make_create_request(data)
eq_(res.status_code, 400)
assert 'WebOS hotness' in res.content
def test_no_addon(self):
data = self.get_data()
data['addon_id'] = '123'
res = self.make_create_request(data)
eq_(res.status_code, 400)
assert 'Add-on not found' in res.content
def test_addon(self):
data = self.get_data()
data['addon_id'] = Addon.objects.create(type=amo.ADDON_EXTENSION).pk
res = self.make_create_request(data)
eq_(res.status_code, 200)
perfs = Performance.objects.all()
eq_(perfs[0].addon_id, data['addon_id'])
def test_form_data(self):
res = self.make_create_request(self.get_data())
eq_(res.status_code, 200)
perfs = Performance.objects.all()
eq_(perfs.count(), 1)
eq_(perfs[0].average, 1.25)
def test_form_updates(self):
self.test_form_data()
data = self.get_data()
data['average'] = 1.3
self.make_create_request(data)
perfs = Performance.objects.all()
eq_(len(perfs), 1)
eq_(perfs[0].average, 1.3)
def test_creates_app_version(self):
self.test_form_data()
apps = PerformanceAppVersions.objects.all()
eq_(len(apps), 1)
eq_(apps[0].app, 'firefox')
eq_(apps[0].version, 'x.y.z')
def test_gets_app_version(self):
self.test_form_data()
eq_(PerformanceAppVersions.objects.all().count(), 1)
self.test_form_data()
eq_(PerformanceAppVersions.objects.all().count(), 1)
def test_creates_os_version(self):
self.test_form_data()
apps = PerformanceOSVersion.objects.all()
eq_(apps.count(), 1)
eq_(apps[0].os, 'WINNT')
def test_gets_os_version(self):
self.test_form_data()
eq_(PerformanceOSVersion.objects.all().count(), 1)
self.test_form_data()
eq_(PerformanceOSVersion.objects.all().count(), 1)
| bsd-3-clause |
jcoady9/python-for-android | python3-alpha/python3-src/Lib/test/test_profile.py | 91 | 7006 | """Test suite for the profile module."""
import sys
import pstats
import unittest
from difflib import unified_diff
from io import StringIO
from test.support import run_unittest
import profile
from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
profilerclass = profile.Profile
methodnames = ['print_stats', 'print_callers', 'print_callees']
expected_max_output = ':0(max)'
def get_expected_output(self):
return _ProfileOutput
@classmethod
def do_profiling(cls):
results = []
prof = cls.profilerclass(timer, 0.001)
start_timer = timer()
prof.runctx("testfunc()", globals(), locals())
results.append(timer() - start_timer)
for methodname in cls.methodnames:
s = StringIO()
stats = pstats.Stats(prof, stream=s)
stats.strip_dirs().sort_stats("stdname")
getattr(stats, methodname)()
output = s.getvalue().splitlines()
mod_name = testfunc.__module__.rsplit('.', 1)[1]
# Only compare against stats originating from the test file.
# Prevents outside code (e.g., the io module) from causing
# unexpected output.
output = [line.rstrip() for line in output if mod_name in line]
results.append('\n'.join(output))
return results
def test_cprofile(self):
results = self.do_profiling()
expected = self.get_expected_output()
self.assertEqual(results[0], 1000)
for i, method in enumerate(self.methodnames):
if results[i+1] != expected[method]:
print("Stats.%s output for %s doesn't fit expectation!" %
(method, self.profilerclass.__name__))
print('\n'.join(unified_diff(
results[i+1].split('\n'),
expected[method].split('\n'))))
def test_calling_conventions(self):
# Issue #5330: profile and cProfile wouldn't report C functions called
# with keyword arguments. We test all calling conventions.
stmts = [
"max([0])",
"max([0], key=int)",
"max([0], **dict(key=int))",
"max(*([0],))",
"max(*([0],), key=int)",
"max(*([0],), **dict(key=int))",
]
for stmt in stmts:
s = StringIO()
prof = self.profilerclass(timer, 0.001)
prof.runctx(stmt, globals(), locals())
stats = pstats.Stats(prof, stream=s)
stats.print_stats()
res = s.getvalue()
self.assertIn(self.expected_max_output, res,
"Profiling {0!r} didn't report max:\n{1}".format(stmt, res))
def regenerate_expected_output(filename, cls):
filename = filename.rstrip('co')
print('Regenerating %s...' % filename)
results = cls.do_profiling()
newfile = []
with open(filename, 'r') as f:
for line in f:
newfile.append(line)
if line.startswith('#--cut'):
break
with open(filename, 'w') as f:
f.writelines(newfile)
f.write("_ProfileOutput = {}\n")
for i, method in enumerate(cls.methodnames):
f.write('_ProfileOutput[%r] = """\\\n%s"""\n' % (
method, results[i+1]))
        f.write('\nif __name__ == "__main__":\n    main()\n')
def test_main():
run_unittest(ProfileTest)
def main():
if '-r' not in sys.argv:
test_main()
else:
regenerate_expected_output(__file__, ProfileTest)
# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
_ProfileOutput = {}
_ProfileOutput['print_stats'] = """\
28 27.972 0.999 27.972 0.999 profilee.py:110(__getattr__)
1 269.996 269.996 999.769 999.769 profilee.py:25(testfunc)
23/3 149.937 6.519 169.917 56.639 profilee.py:35(factorial)
20 19.980 0.999 19.980 0.999 profilee.py:48(mul)
2 39.986 19.993 599.830 299.915 profilee.py:55(helper)
4 115.984 28.996 119.964 29.991 profilee.py:73(helper1)
2 -0.006 -0.003 139.946 69.973 profilee.py:84(helper2_indirect)
8 311.976 38.997 399.912 49.989 profilee.py:88(helper2)
8 63.976 7.997 79.960 9.995 profilee.py:98(subhelper)"""
_ProfileOutput['print_callers'] = """\
:0(append) <- profilee.py:73(helper1)(4) 119.964
:0(exc_info) <- profilee.py:73(helper1)(4) 119.964
:0(hasattr) <- profilee.py:73(helper1)(4) 119.964
profilee.py:88(helper2)(8) 399.912
profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964
profilee.py:98(subhelper)(16) 79.960
profilee.py:25(testfunc) <- <string>:1(<module>)(1) 999.767
profilee.py:35(factorial) <- profilee.py:25(testfunc)(1) 999.769
profilee.py:35(factorial)(20) 169.917
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:48(mul) <- profilee.py:35(factorial)(20) 169.917
profilee.py:55(helper) <- profilee.py:25(testfunc)(2) 999.769
profilee.py:73(helper1) <- profilee.py:55(helper)(4) 599.830
profilee.py:84(helper2_indirect) <- profilee.py:55(helper)(2) 599.830
profilee.py:88(helper2) <- profilee.py:55(helper)(6) 599.830
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:98(subhelper) <- profilee.py:88(helper2)(8) 399.912"""
_ProfileOutput['print_callees'] = """\
:0(hasattr) -> profilee.py:110(__getattr__)(12) 27.972
<string>:1(<module>) -> profilee.py:25(testfunc)(1) 999.769
profilee.py:110(__getattr__) ->
profilee.py:25(testfunc) -> profilee.py:35(factorial)(1) 169.917
profilee.py:55(helper)(2) 599.830
profilee.py:35(factorial) -> profilee.py:35(factorial)(20) 169.917
profilee.py:48(mul)(20) 19.980
profilee.py:48(mul) ->
profilee.py:55(helper) -> profilee.py:73(helper1)(4) 119.964
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:88(helper2)(6) 399.912
profilee.py:73(helper1) -> :0(append)(4) -0.004
profilee.py:84(helper2_indirect) -> profilee.py:35(factorial)(2) 169.917
profilee.py:88(helper2)(2) 399.912
profilee.py:88(helper2) -> :0(hasattr)(8) 11.964
profilee.py:98(subhelper)(8) 79.960
profilee.py:98(subhelper) -> profilee.py:110(__getattr__)(16) 27.972"""
if __name__ == "__main__":
main()
| apache-2.0 |
ShinyROM/android_external_chromium_org | third_party/android_testrunner/errors.py | 171 | 1340 | #!/usr/bin/python2.4
#
#
# Copyright 2008, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines common exception classes for this package."""
class MsgException(Exception):
"""Generic exception with an optional string msg."""
def __init__(self, msg=""):
self.msg = msg
class WaitForResponseTimedOutError(Exception):
"""We sent a command and had to wait too long for response."""
class DeviceUnresponsiveError(Exception):
"""Device is unresponsive to command."""
class InstrumentationError(Exception):
"""Failed to run instrumentation."""
class AbortError(MsgException):
"""Generic exception that indicates a fatal error has occurred and program
execution should be aborted."""
class ParseError(MsgException):
"""Raised when xml data to parse has unrecognized format."""
| bsd-3-clause |
40223243/2015cd_midterm2 | 2015cd_midterm-master/static/Brython3.1.1-20150328-091302/Lib/csv.py | 637 | 16166 |
"""
csv.py - read/write/investigate CSV files
"""
import re
from _csv import Error, __version__, writer, reader, register_dialect, \
unregister_dialect, get_dialect, list_dialects, \
field_size_limit, \
QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \
__doc__
from _csv import Dialect as _Dialect
from io import StringIO
__all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
"Error", "Dialect", "__doc__", "excel", "excel_tab",
"field_size_limit", "reader", "writer",
"register_dialect", "get_dialect", "list_dialects", "Sniffer",
"unregister_dialect", "__version__", "DictReader", "DictWriter" ]
class Dialect:
"""Describe a CSV dialect.
This must be subclassed (see csv.excel). Valid attributes are:
delimiter, quotechar, escapechar, doublequote, skipinitialspace,
lineterminator, quoting.
"""
_name = ""
_valid = False
# placeholders
delimiter = None
quotechar = None
escapechar = None
doublequote = None
skipinitialspace = None
lineterminator = None
quoting = None
def __init__(self):
if self.__class__ != Dialect:
self._valid = True
self._validate()
def _validate(self):
try:
_Dialect(self)
except TypeError as e:
# We do this for compatibility with py2.3
raise Error(str(e))
class excel(Dialect):
"""Describe the usual properties of Excel-generated CSV files."""
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\r\n'
quoting = QUOTE_MINIMAL
register_dialect("excel", excel)
class excel_tab(excel):
"""Describe the usual properties of Excel-generated TAB-delimited files."""
delimiter = '\t'
register_dialect("excel-tab", excel_tab)
class unix_dialect(Dialect):
"""Describe the usual properties of Unix-generated CSV files."""
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\n'
quoting = QUOTE_ALL
register_dialect("unix", unix_dialect)
class DictReader:
def __init__(self, f, fieldnames=None, restkey=None, restval=None,
dialect="excel", *args, **kwds):
self._fieldnames = fieldnames # list of keys for the dict
self.restkey = restkey # key to catch long rows
self.restval = restval # default value for short rows
self.reader = reader(f, dialect, *args, **kwds)
self.dialect = dialect
self.line_num = 0
def __iter__(self):
return self
@property
def fieldnames(self):
if self._fieldnames is None:
try:
self._fieldnames = next(self.reader)
except StopIteration:
pass
self.line_num = self.reader.line_num
return self._fieldnames
@fieldnames.setter
def fieldnames(self, value):
self._fieldnames = value
def __next__(self):
if self.line_num == 0:
# Used only for its side effect.
self.fieldnames
row = next(self.reader)
self.line_num = self.reader.line_num
# unlike the basic reader, we prefer not to return blanks,
# because we will typically wind up with a dict full of None
# values
while row == []:
row = next(self.reader)
d = dict(zip(self.fieldnames, row))
lf = len(self.fieldnames)
lr = len(row)
if lf < lr:
d[self.restkey] = row[lf:]
elif lf > lr:
for key in self.fieldnames[lr:]:
d[key] = self.restval
return d
class DictWriter:
def __init__(self, f, fieldnames, restval="", extrasaction="raise",
dialect="excel", *args, **kwds):
self.fieldnames = fieldnames # list of keys for the dict
self.restval = restval # for writing short dicts
if extrasaction.lower() not in ("raise", "ignore"):
raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
% extrasaction)
self.extrasaction = extrasaction
self.writer = writer(f, dialect, *args, **kwds)
def writeheader(self):
header = dict(zip(self.fieldnames, self.fieldnames))
self.writerow(header)
def _dict_to_list(self, rowdict):
if self.extrasaction == "raise":
wrong_fields = [k for k in rowdict if k not in self.fieldnames]
if wrong_fields:
raise ValueError("dict contains fields not in fieldnames: "
+ ", ".join(wrong_fields))
return [rowdict.get(key, self.restval) for key in self.fieldnames]
def writerow(self, rowdict):
return self.writer.writerow(self._dict_to_list(rowdict))
def writerows(self, rowdicts):
rows = []
for rowdict in rowdicts:
rows.append(self._dict_to_list(rowdict))
return self.writer.writerows(rows)
# Guard Sniffer's type checking against builds that exclude complex()
try:
complex
except NameError:
complex = float
class Sniffer:
'''
"Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
Returns a Dialect object.
'''
def __init__(self):
# in case there is more than one possible delimiter
self.preferred = [',', '\t', ';', ' ', ':']
def sniff(self, sample, delimiters=None):
"""
Returns a dialect (or None) corresponding to the sample
"""
quotechar, doublequote, delimiter, skipinitialspace = \
self._guess_quote_and_delimiter(sample, delimiters)
if not delimiter:
delimiter, skipinitialspace = self._guess_delimiter(sample,
delimiters)
if not delimiter:
raise Error("Could not determine delimiter")
class dialect(Dialect):
_name = "sniffed"
lineterminator = '\r\n'
quoting = QUOTE_MINIMAL
# escapechar = ''
dialect.doublequote = doublequote
dialect.delimiter = delimiter
# _csv.reader won't accept a quotechar of ''
dialect.quotechar = quotechar or '"'
dialect.skipinitialspace = skipinitialspace
return dialect
def _guess_quote_and_delimiter(self, data, delimiters):
"""
Looks for text enclosed between two identical quotes
(the probable quotechar) which are preceded and followed
by the same character (the probable delimiter).
For example:
,'some text',
The quote with the most wins, same with the delimiter.
If there is no quotechar the delimiter can't be determined
this way.
"""
matches = []
for restr in ('(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)', # ".*?",
                      '(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)',   # ,".*?"
'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'): # ".*?" (no delim, no space)
regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
matches = regexp.findall(data)
if matches:
break
if not matches:
# (quotechar, doublequote, delimiter, skipinitialspace)
return ('', False, None, 0)
quotes = {}
delims = {}
spaces = 0
for m in matches:
n = regexp.groupindex['quote'] - 1
key = m[n]
if key:
quotes[key] = quotes.get(key, 0) + 1
try:
n = regexp.groupindex['delim'] - 1
key = m[n]
except KeyError:
continue
if key and (delimiters is None or key in delimiters):
delims[key] = delims.get(key, 0) + 1
try:
n = regexp.groupindex['space'] - 1
except KeyError:
continue
if m[n]:
spaces += 1
quotechar = max(quotes, key=quotes.get)
if delims:
delim = max(delims, key=delims.get)
skipinitialspace = delims[delim] == spaces
if delim == '\n': # most likely a file with a single column
delim = ''
else:
# there is *no* delimiter, it's a single column of quoted data
delim = ''
skipinitialspace = 0
# if we see an extra quote between delimiters, we've got a
# double quoted format
dq_regexp = re.compile(
r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \
{'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE)
if dq_regexp.search(data):
doublequote = True
else:
doublequote = False
return (quotechar, doublequote, delim, skipinitialspace)
def _guess_delimiter(self, data, delimiters):
"""
The delimiter /should/ occur the same number of times on
each row. However, due to malformed data, it may not. We don't want
an all or nothing approach, so we allow for small variations in this
number.
1) build a table of the frequency of each character on every line.
2) build a table of frequencies of this frequency (meta-frequency?),
e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows,
7 times in 2 rows'
3) use the mode of the meta-frequency to determine the /expected/
frequency for that character
4) find out how often the character actually meets that goal
5) the character that best meets its goal is the delimiter
For performance reasons, the data is evaluated in chunks, so it can
try and evaluate the smallest portion of the data possible, evaluating
additional chunks as necessary.
"""
data = list(filter(None, data.split('\n')))
ascii = [chr(c) for c in range(127)] # 7-bit ASCII
# build frequency tables
chunkLength = min(10, len(data))
iteration = 0
charFrequency = {}
modes = {}
delims = {}
start, end = 0, min(chunkLength, len(data))
while start < len(data):
iteration += 1
for line in data[start:end]:
for char in ascii:
metaFrequency = charFrequency.get(char, {})
# must count even if frequency is 0
freq = line.count(char)
# value is the mode
metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
charFrequency[char] = metaFrequency
for char in charFrequency.keys():
items = list(charFrequency[char].items())
if len(items) == 1 and items[0][0] == 0:
continue
# get the mode of the frequencies
if len(items) > 1:
modes[char] = max(items, key=lambda x: x[1])
# adjust the mode - subtract the sum of all
# other frequencies
items.remove(modes[char])
modes[char] = (modes[char][0], modes[char][1]
- sum(item[1] for item in items))
else:
modes[char] = items[0]
# build a list of possible delimiters
modeList = modes.items()
total = float(chunkLength * iteration)
# (rows of consistent data) / (number of rows) = 100%
consistency = 1.0
# minimum consistency threshold
threshold = 0.9
while len(delims) == 0 and consistency >= threshold:
for k, v in modeList:
if v[0] > 0 and v[1] > 0:
if ((v[1]/total) >= consistency and
(delimiters is None or k in delimiters)):
delims[k] = v
consistency -= 0.01
if len(delims) == 1:
delim = list(delims.keys())[0]
skipinitialspace = (data[0].count(delim) ==
data[0].count("%c " % delim))
return (delim, skipinitialspace)
# analyze another chunkLength lines
start = end
end += chunkLength
if not delims:
return ('', 0)
# if there's more than one, fall back to a 'preferred' list
if len(delims) > 1:
for d in self.preferred:
if d in delims.keys():
skipinitialspace = (data[0].count(d) ==
data[0].count("%c " % d))
return (d, skipinitialspace)
# nothing else indicates a preference, pick the character that
# dominates(?)
items = [(v,k) for (k,v) in delims.items()]
items.sort()
delim = items[-1][1]
skipinitialspace = (data[0].count(delim) ==
data[0].count("%c " % delim))
return (delim, skipinitialspace)
def has_header(self, sample):
# Creates a dictionary of types of data in each column. If any
# column is of a single type (say, integers), *except* for the first
# row, then the first row is presumed to be labels. If the type
# can't be determined, it is assumed to be a string in which case
# the length of the string is the determining factor: if all of the
# rows except for the first are the same length, it's a header.
# Finally, a 'vote' is taken at the end for each column, adding or
# subtracting from the likelihood of the first row being a header.
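        # Editor's note (illustrative addition, not in the original source): for
        # "name,age\nalice,30\nbob,25", the second column parses as int on every
        # data row but int('age') fails, which counts as one vote that row one
        # is a header.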
rdr = reader(StringIO(sample), self.sniff(sample))
header = next(rdr) # assume first row is header
columns = len(header)
columnTypes = {}
for i in range(columns): columnTypes[i] = None
checked = 0
for row in rdr:
# arbitrary number of rows to check, to keep it sane
if checked > 20:
break
checked += 1
if len(row) != columns:
continue # skip rows that have irregular number of columns
for col in list(columnTypes.keys()):
for thisType in [int, float, complex]:
try:
thisType(row[col])
break
except (ValueError, OverflowError):
pass
else:
# fallback to length of string
thisType = len(row[col])
if thisType != columnTypes[col]:
if columnTypes[col] is None: # add new column type
columnTypes[col] = thisType
else:
# type is inconsistent, remove column from
# consideration
del columnTypes[col]
# finally, compare results against first row and "vote"
# on whether it's a header
hasHeader = 0
for col, colType in columnTypes.items():
if type(colType) == type(0): # it's a length
if len(header[col]) != colType:
hasHeader += 1
else:
hasHeader -= 1
else: # attempt typecast
try:
colType(header[col])
except (ValueError, TypeError):
hasHeader += 1
else:
hasHeader -= 1
return hasHeader > 0
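# Editor's sketch (not part of the stdlib module): typical Sniffer use, pairing the
# sniffed dialect and has_header() with reader(); the sample text is hypothetical.
def _example_sniffer_usage():
    sample = "name;age\nalice;30\nbob;25\n"
    sniffer = Sniffer()
    dialect = sniffer.sniff(sample, delimiters=";,")
    rows = list(reader(StringIO(sample), dialect))
    return rows, sniffer.has_header(sample)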
| agpl-3.0 |
lukemarsden/compose | tests/unit/config_test.py | 19 | 23623 | import mock
import os
import shutil
import tempfile
from .. import unittest
from compose import config
def make_service_dict(name, service_dict, working_dir):
"""
Test helper function to contruct a ServiceLoader
"""
return config.ServiceLoader(working_dir=working_dir).make_service_dict(name, service_dict)
class ConfigTest(unittest.TestCase):
def test_load(self):
service_dicts = config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox'},
'bar': {'environment': ['FOO=1']},
},
'working_dir',
'filename.yml'
)
)
self.assertEqual(
sorted(service_dicts, key=lambda d: d['name']),
sorted([
{
'name': 'bar',
'environment': {'FOO': '1'},
},
{
'name': 'foo',
'image': 'busybox',
}
])
)
def test_load_throws_error_when_not_dict(self):
with self.assertRaises(config.ConfigurationError):
config.load(
config.ConfigDetails(
{'web': 'busybox:latest'},
'working_dir',
'filename.yml'
)
)
def test_config_validation(self):
self.assertRaises(
config.ConfigurationError,
lambda: make_service_dict('foo', {'port': ['8000']}, 'tests/')
)
make_service_dict('foo', {'ports': ['8000']}, 'tests/')
class VolumePathTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_volume_binding_with_environ(self):
os.environ['VOLUME_PATH'] = '/host/path'
d = make_service_dict('foo', {'volumes': ['${VOLUME_PATH}:/container/path']}, working_dir='.')
self.assertEqual(d['volumes'], ['/host/path:/container/path'])
@mock.patch.dict(os.environ)
def test_volume_binding_with_home(self):
os.environ['HOME'] = '/home/user'
d = make_service_dict('foo', {'volumes': ['~:/container/path']}, working_dir='.')
self.assertEqual(d['volumes'], ['/home/user:/container/path'])
class MergePathMappingTest(object):
def config_name(self):
return ""
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn(self.config_name(), service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/foo:/code', '/data']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name(): ['/bar:/code']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code']))
def test_override_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{self.config_name(): ['/bar:/code']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/data']))
def test_add_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{self.config_name(): ['/bar:/code', '/quux:/data']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/quux:/data']))
def test_remove_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/quux:/data']},
{self.config_name(): ['/bar:/code', '/data']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/data']))
class MergeVolumesTest(unittest.TestCase, MergePathMappingTest):
def config_name(self):
return 'volumes'
class MergeDevicesTest(unittest.TestCase, MergePathMappingTest):
def config_name(self):
return 'devices'
class BuildOrImageMergeTest(unittest.TestCase):
def test_merge_build_or_image_no_override(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {}),
{'build': '.'},
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {}),
{'image': 'redis'},
)
def test_merge_build_or_image_override_with_same(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {'build': './web'}),
{'build': './web'},
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {'image': 'postgres'}),
{'image': 'postgres'},
)
def test_merge_build_or_image_override_with_other(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {'image': 'redis'}),
{'image': 'redis'}
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {'build': '.'}),
{'build': '.'}
)
class MergeListsTest(unittest.TestCase):
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn('ports', service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'ports': ['10:8000', '9000']},
{},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'ports': ['10:8000', '9000']},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000']))
def test_add_item(self):
service_dict = config.merge_service_dicts(
{'ports': ['10:8000', '9000']},
{'ports': ['20:8000']},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000', '20:8000']))
class MergeStringsOrListsTest(unittest.TestCase):
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'dns': '8.8.8.8'},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8']))
def test_add_string(self):
service_dict = config.merge_service_dicts(
{'dns': ['8.8.8.8']},
{'dns': '9.9.9.9'},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8', '9.9.9.9']))
def test_add_list(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{'dns': ['9.9.9.9']},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8', '9.9.9.9']))
class MergeLabelsTest(unittest.TestCase):
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn('labels', service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
def test_no_base(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {}, 'tests/'),
make_service_dict('foo', {'labels': ['foo=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '2'})
def test_override_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'labels': ['foo=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '2', 'bar': ''})
def test_add_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'labels': ['bar=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': '2'})
def test_remove_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar=2']}, 'tests/'),
make_service_dict('foo', {'labels': ['bar']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
class MemoryOptionsTest(unittest.TestCase):
def test_validation_fails_with_just_memswap_limit(self):
"""
When you set a 'memswap_limit' it is invalid config unless you also set
a mem_limit
"""
with self.assertRaises(config.ConfigurationError):
make_service_dict(
'foo', {
'memswap_limit': 2000000,
},
'tests/'
)
def test_validation_with_correct_memswap_values(self):
service_dict = make_service_dict(
'foo', {
'mem_limit': 1000000,
'memswap_limit': 2000000,
},
'tests/'
)
self.assertEqual(service_dict['memswap_limit'], 2000000)
class EnvTest(unittest.TestCase):
def test_parse_environment_as_list(self):
environment = [
'NORMAL=F1',
'CONTAINS_EQUALS=F=2',
'TRAILING_EQUALS=',
]
self.assertEqual(
config.parse_environment(environment),
{'NORMAL': 'F1', 'CONTAINS_EQUALS': 'F=2', 'TRAILING_EQUALS': ''},
)
def test_parse_environment_as_dict(self):
environment = {
'NORMAL': 'F1',
'CONTAINS_EQUALS': 'F=2',
'TRAILING_EQUALS': None,
}
self.assertEqual(config.parse_environment(environment), environment)
def test_parse_environment_invalid(self):
with self.assertRaises(config.ConfigurationError):
config.parse_environment('a=b')
def test_parse_environment_empty(self):
self.assertEqual(config.parse_environment(None), {})
@mock.patch.dict(os.environ)
def test_resolve_environment(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = make_service_dict(
'foo', {
'environment': {
'FILE_DEF': 'F1',
'FILE_DEF_EMPTY': '',
'ENV_DEF': None,
'NO_DEF': None
},
},
'tests/'
)
self.assertEqual(
service_dict['environment'],
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': ''},
)
def test_env_from_file(self):
service_dict = make_service_dict(
'foo',
{'env_file': 'one.env'},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'bar'},
)
def test_env_from_multiple_files(self):
service_dict = make_service_dict(
'foo',
{'env_file': ['one.env', 'two.env']},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz', 'DOO': 'dah'},
)
def test_env_nonexistent_file(self):
options = {'env_file': 'nonexistent.env'}
self.assertRaises(
config.ConfigurationError,
lambda: make_service_dict('foo', options, 'tests/fixtures/env'),
)
@mock.patch.dict(os.environ)
def test_resolve_environment_from_file(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = make_service_dict(
'foo',
{'env_file': 'resolve.env'},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': ''},
)
@mock.patch.dict(os.environ)
def test_resolve_path(self):
os.environ['HOSTENV'] = '/tmp'
os.environ['CONTAINERENV'] = '/host/tmp'
service_dict = make_service_dict(
'foo',
{'volumes': ['$HOSTENV:$CONTAINERENV']},
working_dir="tests/fixtures/env"
)
self.assertEqual(set(service_dict['volumes']), set(['/tmp:/host/tmp']))
service_dict = make_service_dict(
'foo',
{'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']},
working_dir="tests/fixtures/env"
)
self.assertEqual(set(service_dict['volumes']), set(['/opt/tmp:/opt/host/tmp']))
def load_from_filename(filename):
return config.load(config.find('.', filename))
class ExtendsTest(unittest.TestCase):
def test_extends(self):
service_dicts = load_from_filename('tests/fixtures/extends/docker-compose.yml')
service_dicts = sorted(
service_dicts,
key=lambda sd: sd['name'],
)
self.assertEqual(service_dicts, [
{
'name': 'mydb',
'image': 'busybox',
'command': 'top',
},
{
'name': 'myweb',
'image': 'busybox',
'command': 'top',
'links': ['mydb:db'],
'environment': {
"FOO": "1",
"BAR": "2",
"BAZ": "2",
},
}
])
def test_nested(self):
service_dicts = load_from_filename('tests/fixtures/extends/nested.yml')
self.assertEqual(service_dicts, [
{
'name': 'myweb',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "2",
"BAR": "2",
},
},
])
def test_self_referencing_file(self):
"""
We specify a 'file' key that is the filename we're already in.
"""
service_dicts = load_from_filename('tests/fixtures/extends/specify-file-as-self.yml')
self.assertEqual(service_dicts, [
{
'environment':
{
'YEP': '1', 'BAR': '1', 'BAZ': '3'
},
'image': 'busybox',
'name': 'myweb'
},
{
'environment':
{'YEP': '1'},
'name': 'otherweb'
},
{
'environment':
{'YEP': '1', 'BAZ': '3'},
'image': 'busybox',
'name': 'web'
}
])
def test_circular(self):
try:
load_from_filename('tests/fixtures/extends/circle-1.yml')
raise Exception("Expected config.CircularReference to be raised")
except config.CircularReference as e:
self.assertEqual(
[(os.path.basename(filename), service_name) for (filename, service_name) in e.trail],
[
('circle-1.yml', 'web'),
('circle-2.yml', 'web'),
('circle-1.yml', 'web'),
],
)
def test_extends_validation_empty_dictionary(self):
dictionary = {'extends': None}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'dictionary', load_config)
dictionary['extends'] = {}
self.assertRaises(config.ConfigurationError, load_config)
def test_extends_validation_missing_service_key(self):
dictionary = {'extends': {'file': 'common.yml'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'service', load_config)
def test_extends_validation_invalid_key(self):
dictionary = {
'extends':
{
'service': 'web', 'file': 'common.yml', 'what': 'is this'
}
}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'what', load_config)
def test_extends_validation_no_file_key_no_filename_set(self):
dictionary = {'extends': {'service': 'web'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'file', load_config)
def test_extends_validation_valid_config(self):
dictionary = {'extends': {'service': 'web', 'file': 'common.yml'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertIsInstance(load_config(), dict)
def test_extends_file_defaults_to_self(self):
"""
        Test that not specifying a file in the extends options still yields a
        valid config that correctly extends from itself.
"""
service_dicts = load_from_filename('tests/fixtures/extends/no-file-specified.yml')
self.assertEqual(service_dicts, [
{
'name': 'myweb',
'image': 'busybox',
'environment': {
"BAR": "1",
"BAZ": "3",
}
},
{
'name': 'web',
'image': 'busybox',
'environment': {
"BAZ": "3",
}
}
])
def test_blacklisted_options(self):
def load_config():
return make_service_dict('myweb', {
'extends': {
'file': 'whatever',
'service': 'web',
}
}, '.')
with self.assertRaisesRegexp(config.ConfigurationError, 'links'):
other_config = {'web': {'links': ['db']}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
                print(load_config())
with self.assertRaisesRegexp(config.ConfigurationError, 'volumes_from'):
other_config = {'web': {'volumes_from': ['db']}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
                print(load_config())
with self.assertRaisesRegexp(config.ConfigurationError, 'net'):
other_config = {'web': {'net': 'container:db'}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
                print(load_config())
other_config = {'web': {'net': 'host'}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
            print(load_config())
def test_volume_path(self):
dicts = load_from_filename('tests/fixtures/volume-path/docker-compose.yml')
paths = [
'%s:/foo' % os.path.abspath('tests/fixtures/volume-path/common/foo'),
'%s:/bar' % os.path.abspath('tests/fixtures/volume-path/bar'),
]
self.assertEqual(set(dicts[0]['volumes']), set(paths))
def test_parent_build_path_dne(self):
child = load_from_filename('tests/fixtures/extends/nonexistent-path-child.yml')
self.assertEqual(child, [
{
'name': 'dnechild',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "1",
"BAR": "2",
},
},
])
class BuildPathTest(unittest.TestCase):
def setUp(self):
self.abs_context_path = os.path.join(os.getcwd(), 'tests/fixtures/build-ctx')
def test_nonexistent_path(self):
with self.assertRaises(config.ConfigurationError):
config.load(
config.ConfigDetails(
{
'foo': {'build': 'nonexistent.path'},
},
'working_dir',
'filename.yml'
)
)
def test_relative_path(self):
relative_build_path = '../build-ctx/'
service_dict = make_service_dict(
'relpath',
{'build': relative_build_path},
working_dir='tests/fixtures/build-path'
)
self.assertEquals(service_dict['build'], self.abs_context_path)
def test_absolute_path(self):
service_dict = make_service_dict(
'abspath',
{'build': self.abs_context_path},
working_dir='tests/fixtures/build-path'
)
self.assertEquals(service_dict['build'], self.abs_context_path)
def test_from_file(self):
service_dict = load_from_filename('tests/fixtures/build-path/docker-compose.yml')
self.assertEquals(service_dict, [{'name': 'foo', 'build': self.abs_context_path}])
class GetConfigPathTestCase(unittest.TestCase):
files = [
'docker-compose.yml',
'docker-compose.yaml',
'fig.yml',
'fig.yaml',
]
def test_get_config_path_default_file_in_basedir(self):
files = self.files
self.assertEqual('docker-compose.yml', get_config_filename_for_files(files[0:]))
self.assertEqual('docker-compose.yaml', get_config_filename_for_files(files[1:]))
self.assertEqual('fig.yml', get_config_filename_for_files(files[2:]))
self.assertEqual('fig.yaml', get_config_filename_for_files(files[3:]))
with self.assertRaises(config.ComposeFileNotFound):
get_config_filename_for_files([])
def test_get_config_path_default_file_in_parent_dir(self):
"""Test with files placed in the subdir"""
files = self.files
def get_config_in_subdir(files):
return get_config_filename_for_files(files, subdir=True)
self.assertEqual('docker-compose.yml', get_config_in_subdir(files[0:]))
self.assertEqual('docker-compose.yaml', get_config_in_subdir(files[1:]))
self.assertEqual('fig.yml', get_config_in_subdir(files[2:]))
self.assertEqual('fig.yaml', get_config_in_subdir(files[3:]))
with self.assertRaises(config.ComposeFileNotFound):
get_config_in_subdir([])
def get_config_filename_for_files(filenames, subdir=None):
def make_files(dirname, filenames):
for fname in filenames:
with open(os.path.join(dirname, fname), 'w') as f:
f.write('')
project_dir = tempfile.mkdtemp()
try:
make_files(project_dir, filenames)
if subdir:
base_dir = tempfile.mkdtemp(dir=project_dir)
else:
base_dir = project_dir
return os.path.basename(config.get_config_path(base_dir))
finally:
shutil.rmtree(project_dir)
| apache-2.0 |
wkoathp/glance | glance/db/sqlalchemy/metadef_api/property.py | 6 | 6161 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
from sqlalchemy import func
import sqlalchemy.orm as sa_orm
from glance.common import exception as exc
from glance.db.sqlalchemy.metadef_api import namespace as namespace_api
from glance.db.sqlalchemy.metadef_api import utils as metadef_utils
from glance.db.sqlalchemy import models_metadef as models
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LW = i18n._LW
def _get(context, property_id, session):
try:
query = session.query(models.MetadefProperty)\
.filter_by(id=property_id)
property_rec = query.one()
except sa_orm.exc.NoResultFound:
msg = (_("Metadata definition property not found for id=%s")
% property_id)
LOG.warn(msg)
raise exc.MetadefPropertyNotFound(msg)
return property_rec
def _get_by_name(context, namespace_name, name, session):
"""get a property; raise if ns not found/visible or property not found"""
namespace = namespace_api.get(context, namespace_name, session)
try:
query = session.query(models.MetadefProperty)\
.filter_by(name=name, namespace_id=namespace['id'])
property_rec = query.one()
except sa_orm.exc.NoResultFound:
msg = ("The metadata definition property with name=%(name)s"
" was not found in namespace=%(namespace_name)s."
% {'name': name, 'namespace_name': namespace_name})
LOG.debug(msg)
raise exc.MetadefPropertyNotFound(property_name=name,
namespace_name=namespace_name)
return property_rec
def get(context, namespace_name, name, session):
"""get a property; raise if ns not found/visible or property not found"""
property_rec = _get_by_name(context, namespace_name, name, session)
return property_rec.to_dict()
def get_all(context, namespace_name, session):
namespace = namespace_api.get(context, namespace_name, session)
query = session.query(models.MetadefProperty)\
.filter_by(namespace_id=namespace['id'])
properties = query.all()
properties_list = []
for prop in properties:
properties_list.append(prop.to_dict())
return properties_list
def create(context, namespace_name, values, session):
namespace = namespace_api.get(context, namespace_name, session)
values.update({'namespace_id': namespace['id']})
property_rec = models.MetadefProperty()
metadef_utils.drop_protected_attrs(models.MetadefProperty, values)
property_rec.update(values.copy())
try:
property_rec.save(session=session)
except db_exc.DBDuplicateEntry:
msg = ("Can not create metadata definition property. A property"
" with name=%(name)s already exists in"
" namespace=%(namespace_name)s."
% {'name': property_rec.name,
'namespace_name': namespace_name})
LOG.debug(msg)
raise exc.MetadefDuplicateProperty(
property_name=property_rec.name,
namespace_name=namespace_name)
return property_rec.to_dict()
def update(context, namespace_name, property_id, values, session):
"""Update a property, raise if ns not found/visible or duplicate result"""
namespace_api.get(context, namespace_name, session)
property_rec = _get(context, property_id, session)
metadef_utils.drop_protected_attrs(models.MetadefProperty, values)
# values['updated_at'] = timeutils.utcnow() - done by TS mixin
try:
property_rec.update(values.copy())
property_rec.save(session=session)
except db_exc.DBDuplicateEntry:
msg = ("Invalid update. It would result in a duplicate"
" metadata definition property with the same name=%(name)s"
" in namespace=%(namespace_name)s."
% {'name': property_rec.name,
'namespace_name': namespace_name})
LOG.debug(msg)
emsg = (_("Invalid update. It would result in a duplicate"
" metadata definition property with the same name=%(name)s"
" in namespace=%(namespace_name)s.")
% {'name': property_rec.name,
'namespace_name': namespace_name})
raise exc.MetadefDuplicateProperty(emsg)
return property_rec.to_dict()
def delete(context, namespace_name, property_name, session):
property_rec = _get_by_name(
context, namespace_name, property_name, session)
if property_rec:
session.delete(property_rec)
session.flush()
return property_rec.to_dict()
def delete_namespace_content(context, namespace_id, session):
"""Use this def only if the ns for the id has been verified as visible"""
count = 0
query = session.query(models.MetadefProperty)\
.filter_by(namespace_id=namespace_id)
count = query.delete(synchronize_session='fetch')
return count
def delete_by_namespace_name(context, namespace_name, session):
namespace = namespace_api.get(context, namespace_name, session)
return delete_namespace_content(context, namespace['id'], session)
def count(context, namespace_name, session):
"""Get the count of properties for a namespace, raise if ns not found"""
namespace = namespace_api.get(context, namespace_name, session)
query = session.query(func.count(models.MetadefProperty.id))\
.filter_by(namespace_id=namespace['id'])
return query.scalar()
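# Editor's sketch (not part of the original module): how the helpers above chain
# inside a single session. The namespace name and property payload below are
# hypothetical, and the exact payload keys depend on the MetadefProperty model.
def _example_property_roundtrip(context, session, values):
    created = create(context, 'OS::Example::Namespace', values, session)
    updated = update(context, 'OS::Example::Namespace', created['id'], values, session)
    return delete(context, 'OS::Example::Namespace', updated['name'], session)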
| apache-2.0 |
aemerick/galaxy_analysis | method_paper_plots/star_abundances.py | 1 | 26128 | from galaxy_analysis.plot.plot_styles import *
import matplotlib.pyplot as plt
import glob
import deepdish as dd
import yt
from galaxy_analysis.utilities import utilities
import numpy as np
from matplotlib.ticker import NullFormatter
from galaxy_analysis.particle_analysis.abundances import single_MDF
#
from galaxy_analysis.analysis import Galaxy
from mpl_toolkits.axes_grid1 import make_axes_locatable
import h5py
# grab the most recent file
workdir = '/mnt/ceph/users/emerick/enzo_runs/pleiades/starIC/run11_30km/final_sndriving/'
#workdir = '/home/emerick/work/enzo_runs/pleiades/starIC/run11_30km/final_sndriving/'
data_files = np.sort(glob.glob(workdir + 'DD????'))
name = data_files[-1].split('final_sndriving/')[1]
gal = Galaxy(name, wdir = workdir)
#
#
#
def plot_alpha_vs_fe():
fig,ax = plt.subplots()
fig.set_size_inches(8,7)
ptype = gal.df['particle_type']
fe_over_h = gal.df[('io','particle_Fe_over_H')]
alpha = gal.df[('io','particle_alpha_over_Fe')]
age = (gal.ds.current_time - gal.df[('io','creation_time')]).convert_to_units('Myr')
age = age - np.min(age)
p = ax.scatter(fe_over_h[ptype==11], alpha[ptype==11],
s = point_size, lw = 2, c = age[ptype==11], cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
cb = fig.colorbar(p)
cb.set_label(r'Stellar Age (Myr)')
ax.set_xlim(-9,-1)
ax.set_ylim(-1.75,1.75)
ax.set_xlabel(r'[Fe/H]')
ax.set_ylabel(r'[$\rm \alpha$/Fe]')
plt.minorticks_on()
plt.tight_layout()
fig.savefig('alpha_over_fe.png')
plt.close()
return
def plot_alpha_vs_fe_movie():
times = np.arange(0, 245, 1)
for i, t in enumerate(times):
plot_alpha_vs_fe_with_histograms(t_f = t, image_num = i)
def plot_alpha_vs_fe_with_histograms(t_f = None, image_num = 0):
sep = 0.02
left, width = 0.125, 0.65
bottom, height = 0.1, 0.65
left_h = left + width + sep
bottom_h = bottom + height + sep
rect_scatter = [left,bottom,width,height]
# rect_colorbar =
# rect_histx = [left, bottom_h, width, 0.95 - bottom_h - (left-bottom)]
# rect_histy = [left_h, bottom, 0.95 - left_h, height]
# fig,ax = plt.subplots()
fig = plt.figure(1, figsize=(8,8))
# fig.set_size_inches(8,8)
ax_scatter = plt.axes(rect_scatter)
# ax_hist_x = plt.axes(rect_histx)
# ax_hist_y = plt.axes(rect_histy)
# ax_color = plt.axes(rect_colorbar)
ptype = gal.df['particle_type']
fe_over_h = gal.df[('io','particle_Fe_over_H')]
alpha = gal.df[('io','particle_alpha_over_Fe')]
creation_time = gal.df[('io','creation_time')].convert_to_units('Myr')
age = (gal.ds.current_time - creation_time)
if t_f is None: # plot normally all MS stars
age = age - np.min(age)
# scatter plot
p = ax_scatter.scatter(fe_over_h[ptype==11], alpha[ptype==11],
s = point_size, lw = 2, c = age[ptype==11], cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
else:
min_clim = 0.0
max_clim = np.max( age - np.min(age))
particle_lifetimes = gal.df[('io','particle_model_lifetime')].convert_to_units('Myr')
selection = (t_f >= creation_time) * ( t_f < creation_time + particle_lifetimes)
age = t_f - creation_time
if np.size(fe_over_h[selection]) < 1:
            plot_fe_over_h = np.ones(np.size(fe_over_h))*(-10000) # make dummy values so plot still displays, but is empty
plot_alpha = np.ones(np.size(alpha))*(-10000)
plot_age = np.ones(np.size(age))*(-10000)
else:
plot_fe_over_h = fe_over_h[selection]
plot_alpha = alpha[selection]
plot_age = age[selection]
p = ax_scatter.scatter(plot_fe_over_h, plot_alpha, s = point_size, lw = 2,
c = plot_age, cmap = 'plasma_r', alpha = 0.75)
p.set_clim([min_clim,max_clim])
cb = fig.colorbar(p, ax = ax_scatter, orientation = 'horizontal', pad = 0.125, fraction = 0.046,
aspect = 40)
cb.set_label(r'Stellar Age (Myr)')
#
#
ax_scatter.set_xlim(-9,-1)
ax_scatter.set_ylim(-1.75,1.75)
ax_scatter.tick_params(axis='x',which='minor',bottom='on')
ax_scatter.tick_params(axis='y',which='minor',bottom='on')
ax_scatter.set_xlabel(r'[Fe/H]')
ax_scatter.set_ylabel(r'[$\rm \alpha$/Fe]')
plt.minorticks_on()
ax_scatter.plot( ax_scatter.get_xlim(), [0.0,0.0], lw = line_width, color = 'black', ls = '--')
#
# find main plot and construct histograms
#
divider = make_axes_locatable(ax_scatter)
left, bottom, width, height = divider.get_position()
# width, height = divider.get_horizontal(), divider.get_vertical()
sep = 0.01
thickness = np.min( np.array([0.95 - left - width - sep, 0.95 - bottom - height - sep]))
rect_histx = [left, bottom + height + sep, width, thickness]
rect_histy = [left + width + sep, bottom, thickness, height]
ax_hist_x = plt.axes(rect_histx)
ax_hist_y = plt.axes(rect_histy)
nbins = 100
hist,bins = np.histogram(fe_over_h, bins = nbins)
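    # weighting each sample by 1/(tallest bin count) peak-normalizes the histogram,
    # so the largest bin is drawn at a height of 1.0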
weights = np.ones(np.size(fe_over_h)) * (1.0 / (1.0*np.max(hist)))
ax_hist_x.hist(fe_over_h, color = 'C0', bins = nbins, weights = weights)
if not (t_f is None):
if np.max(plot_fe_over_h) > -1000:
hist,bins = np.histogram(plot_fe_over_h, bins = nbins)
weights = np.ones(np.size(plot_fe_over_h)) * (1.0 / (1.0*np.max(hist)))
ax_hist_x.hist(plot_fe_over_h, color = 'black', bins = nbins, weights = weights,
histtype = 'step', lw = 2.0)
# plot_histogram(ax_hist_x, bins, hist / (1.0*np.max(hist)), color = 'black')
plt.minorticks_on()
# hist,bins = np.histogram(alpha, bins = 24)
# plot_histogram(ax_hist_y, bins, hist / (1.0*np.max(hist)), color = 'black', orientation = 'horizontal')
nbins = 50
hist,bins = np.histogram(alpha, bins = nbins)
    weights = np.ones(np.size(alpha)) * (1.0 / (1.0*np.max(hist)))
ax_hist_y.hist(alpha, orientation='horizontal', color = 'C0', bins = nbins, weights = weights)
if not (t_f is None):
if np.max(plot_alpha) > -1000:
hist,bins = np.histogram(plot_alpha, bins = nbins)
weights = np.ones(np.size(plot_alpha)) * (1.0 / (1.0*np.max(hist)))
ax_hist_y.hist(plot_alpha, orientation = 'horizontal', color = 'black', bins = nbins,
weights = weights, histtype='step', lw = 2.0)
ax_hist_x.xaxis.set_major_formatter(NullFormatter())
ax_hist_y.yaxis.set_major_formatter(NullFormatter())
ax_hist_x.set_xlim(ax_scatter.get_xlim())
ax_hist_y.set_ylim(ax_scatter.get_ylim())
ticks = [0.0,0.25,0.5,0.75,1.0]
ax_hist_x.set_yticks(ticks)
ax_hist_y.set_xticks(ticks)
ax_hist_y.set_xticklabels(ticks, rotation = 270)
plt.minorticks_on()
# plt.tight_layout()
if t_f is None:
fig.savefig('alpha_over_fe_hist.png')
else:
fig.savefig('alpha_movie/alpha_over_fe_hist_%0004i.png'%(image_num))
plt.close()
return
def plot_panel(A = 'Fe', B = 'Fe', C = 'H', color = True):
"""
Make panel plots of X/A vs. B/C where "X" is a loop through all elements available,
    and A, B, C are fixed for all plots, chosen by the user. Default will plot
[X/Fe] vs. [Fe/H]. Default behavior is to color points by age.
"""
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
    dfiles = list(hdf5_data.keys())
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
age = data['Time'] - data['creation_time'] # age of all particles in this data set
for base in ['H','Fe']:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
if base == 'Fe':
bins = np.arange(-3,3.1,0.1)
else:
bins = np.arange(-9,0,0.1)
i,j = 0,0
for e in elements:
if (A == e): # skip
continue
index = (i,j)
y = np.array(data['abundances'][e][A])
x = np.array(data['abundances'][B][C])
p = ax[index].scatter(x, y, s = point_size*0.5,
lw = 2, c = age, cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
xy = (0.8,0.8)
ax[index].annotate(e, xy=xy, xytext=xy, xycoords = 'axes fraction',
textcoords = 'axes fraction')
# cb = fig.colorbar(p)
# cb.set_label(r'Stellar Age (Myr)')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'log([' + B + '/' + C + '])')
ax[(i,0)].set_ylabel(r'log([X/' + A + '])')
if C == 'H':
ax[(i,0)].set_xlim(-10.25, 0.125)
else:
ax[(i,0)].set_xlim(-3.25, 3.25)
if A == 'H':
ax[(0,i)].set_ylim(-10.25, 0.125)
else:
ax[(0,i)].set_ylim(-3.25, 3.25)
for j in np.arange(4):
ax[(j,i)].plot([-10,10], [0.0,0.0], lw = 0.5 * line_width, ls = ':', color = 'black')
plt.minorticks_on()
fig.savefig('X_over_' + A +'_vs_' + B + '_over_' + C + '_panel.png')
plt.close()
return
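# For reference, calling plot_panel() with its defaults (A='Fe', B='Fe', C='H') gives
# panels of [X/Fe] vs. [Fe/H] for every available element X, colored by stellar age;
# see also the commented-out example calls under __main__ at the bottom of this file.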
def plot_spatial_profiles(field = 'metallicity', abundance = False,
bins = None, spatial_type = 'cylindrical_radius'):
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
    dfiles = list(hdf5_data.keys())
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
if spatial_type == 'cylindrical_radius':
bin_field = np.sqrt(data['kinematics']['x']**2 + data['kinematics']['y']**2)
xlabel = r'Radius (pc)'
elif spatial_type == 'z':
bin_field = np.abs( data['kinematics']['z'] )
xlabel = r'Z (pc)'
if bins is None:
bins = np.linspace(np.floor(np.min(bin_field)), np.ceil(np.max(bin_field)), 100)
centers = 0.5 * (bins[1:] + bins[:-1])
nbins = np.size(bins)
hist_index = np.digitize(bin_field, bins = bins)
median, q1, q3 = np.zeros(nbins-1), np.zeros(nbins-1), np.zeros(nbins-1)
if field == 'metallicity':
# make a single plot
# bin the data
for i in np.arange(nbins-1):
x = data['metallicity'][hist_index == i + 1]
median[i] = np.median(x)
if np.size(x) > 1:
q1[i] = np.percentile(x, 25.0)
q3[i] = np.percentile(x, 75.0)
elif np.size(x) == 1:
q1[i] = median[i]
q3[i] = median[i]
# now plot
fig, ax = plt.subplots()
fig.set_size_inches(8,8)
plot_histogram(ax, bins, median, lw = line_width, color = 'black', ls = '-')
ax.fill_between(centers, q1, q3, lw = 1.5, color = 'grey')
ax.set_ylabel(r'Metallicity Fraction')
ax.set_xlabel(xlabel)
ax.set_xlim( np.min(bins), np.max(bins))
plt.tight_layout()
plt.minorticks_on()
fig.savefig('metallicity_' + spatial_type + '_profile.png')
plt.close()
elif abundance:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(16,16)
fig.subplots_adjust(hspace = 0.0, wspace = 0.0)
axi, axj = 0,0
for e in elements:
if field == e:
continue
index = (axi,axj)
for i in np.arange(nbins-1):
x = np.array(data['abundances'][e][field])
x = x[ hist_index == (i + 1)]
if np.size(x) > 0:
median[i] = np.median(x)
q1[i] = np.percentile(x, 25)
q3[i] = np.percentile(x, 75)
else:
                    median[i] = np.nan; q1[i] = np.nan; q3[i] = np.nan
ax[index].annotate(e, xy=(0.8,0.8),xytext=(0.8,0.8),
xycoords='axes fraction',textcoords = 'axes fraction')
plot_histogram(ax[index], bins, median, lw = line_width, color = 'black', ls = '-')
ax[index].fill_between(centers,q1,q3,lw=1.5,color='grey')
axj = axj+1
if axj>=4:
axj = 0
axi = axi + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(xlabel)
ax[(i,0)].set_ylabel(r'log[X/' + field +'])')
if field == 'H':
ax[(0,i)].set_ylim(-10.25,0.125)
else:
ax[(0,i)].set_ylim(-3.25,3.25)
for j in np.arange(4):
ax[(j,i)].plot([bins[0],bins[-1]], [0.0,0.0], lw = 0.5 * line_width, ls = '--',color ='black')
ax[(i,0)].set_xlim(np.min(bins), np.max(bins))
plt.minorticks_on()
fig.savefig(field + '_' + spatial_type + '_profile_panel.png')
plt.close()
return
def plot_MDF(plot_base = ['H','Fe']):
"""
    Make a panel plot of the metallicity distribution functions (MDFs) of all
    elemental abundance ratios with respect to each base element in plot_base
    (H and Fe by default), as separate plots.
"""
if (not (type(plot_base) is list)):
plot_base = [plot_base]
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
    dfiles = list(hdf5_data.keys())
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
for base in plot_base:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
if base == 'Fe':
bins = np.arange(-3,3.1,0.1)
else:
bins = np.arange(-9,0,0.1)
i,j = 0,0
for e in elements:
if (base == e):
continue
index = (i,j)
points = np.array(data['abundances'][e][base])
single_MDF(points, bins = bins, norm = 'peak', ax = ax[index],
label = False, lw = line_width)
x = np.max(bins) - (0.25/6.0 * (bins[-1] - bins[0]))
y = 0.9
ax[index].annotate(e, xy = (x,y), xytext =(x,y))
ax[index].plot([0,0], [0.0,1.0], ls = ':', lw = 0.5 * line_width, color = 'black')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'log([X/' + base + '])')
ax[(i,0)].set_ylabel(r'N/N$_{\rm peak}$')
if base == 'H':
ax[(i,0)].set_xlim(-10.25, 0.125)
elif base == 'Fe':
ax[(i,0)].set_xlim(-3.25, 3.25)
plt.minorticks_on()
fig.savefig(base + '_MDF.png')
plt.close()
return
def plot_time_evolution():
"""
Make a panel plot of the time evolution of all elemental abundance ratios
with respect to both H and Fe (as separate plots)
"""
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
    dfiles = list(hdf5_data.keys())
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
for time_type in ['cumulative','10Myr']:
for base in ['H','Fe']:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
i,j = 0,0
for e in elements:
if (base == e):
continue
print("plotting " + e + "/" + base + " time evolution")
index = (i,j)
t = data['statistics'][time_type]['bins']
y = data['statistics'][time_type][e][base]['median']
Q1 = data['statistics'][time_type][e][base]['Q1']
Q3 = data['statistics'][time_type][e][base]['Q3']
select = (y*0 == 0) # remove nan values
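                # (NaN * 0 is NaN and NaN != NaN, so this mask keeps only finite values)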
t = t[select]
t = t - t[0]
ax[index].plot( t, y[select], lw = line_width, ls = '-', color = 'black', label = r' ' + e +' ')
ax[index].fill_between(t, Q1[select], Q3[select], color = 'black', alpha = 0.5, lw = 0.5 * line_width)
ax[index].set_xlim(0.0, np.max(t))
ax[index].plot( [0.0,1000.0], [0.0,0.0], ls = ':', color = 'black', lw = line_width)
ax[index].legend(loc = 'upper right')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'Time (Myr)')
ax[(i,0)].set_ylabel(r'[X/' + base +']')
if base == 'H':
ax[(i,0)].set_ylim(-12.25, 0.125)
elif base == 'Fe':
ax[(i,0)].set_ylim(-3.25, 3.25)
# for j in np.arange(3):
# ax[(j,i)].set_xticklabels([])
# ax[(i,j+1)].set_yticklabels([])
# ax[(3,i)].set_xticklabels(np.arange(0,np.max(t)+20,20))
# if base == 'Fe':
# ax[(i,0)].set_yticklabels([-3,-2,-1,0,1,2,3,])
# else:
# ax[(i,0)].set_yticklabels([-12, -10, -8, -6, -4, -2, 0])
plt.minorticks_on()
fig.savefig('stellar_x_over_' + base + '_' + time_type +'_evolution.png')
plt.close()
return
def plot_mass_fraction_time_evolution():
"""
    Make a panel plot of the time evolution of the stellar mass fraction of
    each element (median with the Q1-Q3 range shaded).
"""
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
    dfiles = list(hdf5_data.keys())
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
# elements = elements + ['alpha']
for time_type in ['cumulative','10Myr']:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
i,j = 0,0
for e in elements:
            print("plotting " + e + " mass fraction time evolution")
index = (i,j)
t = data['mass_fraction_statistics'][time_type]['bins']
y = data['mass_fraction_statistics'][time_type][e]['median']
Q1 = data['mass_fraction_statistics'][time_type][e]['Q1']
Q3 = data['mass_fraction_statistics'][time_type][e]['Q3']
select = (y*0 == 0) # remove nan values
t = t[select]
t = t - t[0]
ax[index].plot( t, y[select], lw = line_width, ls = '-', color = 'black', label = r' ' + e +' ')
ax[index].fill_between(t, Q1[select], Q3[select], color = 'black', alpha = 0.5, lw = 0.5 * line_width)
ax[index].set_xlim(0.0, np.max(t))
ax[index].plot( [0.0,1000.0], [0.0,0.0], ls = ':', color = 'black', lw = line_width)
ax[index].legend(loc = 'upper right')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'Time (Myr)')
ax[(i,0)].set_ylabel(r'log(X Mass Fraction)')
ax[(i,0)].set_ylim(1.0E-10, 1.0E-4)
ax[(i,0)].semilogy()
# for j in np.arange(3):
# ax[(j,i)].set_xticklabels([])
# ax[(i,j+1)].set_yticklabels([])
# ax[(3,i)].set_xticklabels(np.arange(0,np.max(t)+20,20))
# if base == 'Fe':
# ax[(i,0)].set_yticklabels([-3,-2,-1,0,1,2,3,])
# else:
# ax[(i,0)].set_yticklabels([-12, -10, -8, -6, -4, -2, 0])
plt.minorticks_on()
fig.savefig('stellar_mass_fraction_' + time_type +'_evolution.png')
plt.close()
return
def plot_ratios_with_histograms(X='alpha',A='Fe',B='Fe',C='H'):
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
    dfiles = list(hdf5_data.keys())
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha'] + ['H']
age = data['Time'] - data['creation_time'] # age of all particles in this data set
# --------------------
check_elements = [x for x in [X,A,B,C] if (not (x in elements))]
if len(check_elements) > 0:
print(check_elements, " not in elements list")
print("available: ", elements)
raise ValueError
sep = 0.02
left, width = 0.125, 0.65
bottom, height = 0.1, 0.65
left_h = left + width + sep
bottom_h = bottom + height + sep
rect_scatter = [left,bottom,width,height]
# rect_colorbar =
# rect_histx = [left, bottom_h, width, 0.95 - bottom_h - (left-bottom)]
# rect_histy = [left_h, bottom, 0.95 - left_h, height]
# fig,ax = plt.subplots()
fig = plt.figure(1, figsize=(8,8))
# fig.set_size_inches(8,8)
ax_scatter = plt.axes(rect_scatter)
# ax_hist_x = plt.axes(rect_histx)
# ax_hist_y = plt.axes(rect_histy)
# ax_color = plt.axes(rect_colorbar)
x_values = data['abundances'][B][C]
y_values = data['abundances'][X][A]
age = age - np.min(age) # normalize
# scatter plot
p = ax_scatter.scatter(x_values, y_values,
s = point_size, lw = 2, c = age, cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
cb = fig.colorbar(p, ax = ax_scatter, orientation = 'horizontal', pad = 0.125, fraction = 0.046,
aspect = 40)
cb.set_label(r'Stellar Age (Myr)')
#
#
#
ax_scatter.set_xlim(-9,-1)
ax_scatter.set_ylim(-1.75,1.75)
ax_scatter.tick_params(axis='x',which='minor',bottom='on')
ax_scatter.tick_params(axis='y',which='minor',bottom='on')
ax_scatter.set_xlabel(r'log([' + B + '/' + C + '])')
ax_scatter.set_ylabel(r'log([' + X + '/' + A + '])')
plt.minorticks_on()
#
# find main plot and construct histograms
#
divider = make_axes_locatable(ax_scatter)
left, bottom, width, height = divider.get_position()
# width, height = divider.get_horizontal(), divider.get_vertical()
sep = 0.01
thickness = np.min( np.array([0.95 - left - width - sep, 0.95 - bottom - height - sep]))
rect_histx = [left, bottom + height + sep, width, thickness]
rect_histy = [left + width + sep, bottom, thickness, height]
ax_hist_x = plt.axes(rect_histx)
ax_hist_y = plt.axes(rect_histy)
# construct the histogram for the horizontal axis (goes up top)
nbins = 100
hist,bins = np.histogram(x_values, bins = nbins)
weights = np.ones(np.size(x_values)) * (1.0 / (1.0*np.max(hist)))
ax_hist_x.hist(x_values, color = 'C0', bins = nbins, weights = weights)
# plot_histogram(ax_hist_x, bins, hist / (1.0*np.max(hist)), color = 'black')
plt.minorticks_on()
# hist,bins = np.histogram(alpha, bins = 24)
# plot_histogram(ax_hist_y, bins, hist / (1.0*np.max(hist)), color = 'black', orientation = 'horizontal')
# now do the same for the vertical axis histogram
nbins = 50
hist,bins = np.histogram(y_values, bins = nbins)
weights = np.ones(np.size(y_values)) * (1.0 / (1.0*np.max(hist)))
ax_hist_y.hist(y_values, orientation='horizontal', color = 'C0', bins = nbins, weights = weights)
ax_hist_x.xaxis.set_major_formatter(NullFormatter())
ax_hist_y.yaxis.set_major_formatter(NullFormatter())
ax_hist_x.set_xlim(ax_scatter.get_xlim())
ax_hist_y.set_ylim(ax_scatter.get_ylim())
ticks = [0.0,0.25,0.5,0.75,1.0]
ax_hist_x.set_yticks(ticks)
ax_hist_y.set_xticks(ticks)
ax_hist_y.set_xticklabels(ticks, rotation = 270)
plt.minorticks_on()
# plt.tight_layout()
fig.savefig(X + '_over_' + A + '_vs_' + B + '_over_' + C + '_hist.png')
plt.close()
return
if __name__ == '__main__':
plot_mass_fraction_time_evolution() #
# plot_ratios_with_histograms('C','O','Fe','H') # C/O vs Fe/H
# plot_ratios_with_histograms('alpha','Mg','Mg','H')
# plot_ratios_with_histograms('alpha','Fe','Fe','H')
# plot_panel() # default [X/Fe] vs [Fe/H]
# plot_panel(A = 'Mg', B = 'Fe', C = 'H')
# plot_panel(A = 'Mg', B = 'Mg', C = 'Fe')
# plot_panel(A = 'O', B = 'Fe', C = 'H')
# plot_panel(A = 'O', B = 'O', C = 'Fe')
# plot_panel(A = 'Ba', B = 'Ba', C = 'Fe')
# plot_MDF(plot_base = ['H','Fe','O','Ba'])
# plot_time_evolution()
# plot_alpha_vs_fe_with_histograms()
# plot_alpha_vs_fe()
# plot_alpha_vs_fe_movie()
# plot_spatial_profiles(bins=np.arange(0,505,10))
# plot_spatial_profiles(field = 'Fe',abundance=True, bins = np.arange(0,505,10))
# plot_spatial_profiles(field = 'H', abundance=True, bins = np.arange(0,505,10))
| mit |
agrista/odoo-saas | addons/marketing_campaign/__openerp__.py | 260 | 3127 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Marketing Campaigns',
'version': '1.1',
'depends': ['marketing',
'document',
'email_template',
'decimal_precision'
],
'author': 'OpenERP SA',
'category': 'Marketing',
'description': """
This module provides leads automation through marketing campaigns (campaigns can in fact be defined on any resource, not just CRM Leads).
=========================================================================================================================================
The campaigns are dynamic and multi-channel. The process is as follows:
------------------------------------------------------------------------
* Design marketing campaigns like workflows, including email templates to
send, reports to print and send by email, custom actions
* Define input segments that will select the items that should enter the
      campaign (e.g. leads from certain countries).
* Run your campaign in simulation mode to test it real-time or accelerated,
and fine-tune it
* You may also start the real campaign in manual mode, where each action
requires manual validation
* Finally launch your campaign live, and watch the statistics as the
campaign does everything fully automatically.
While the campaign runs you can of course continue to fine-tune the parameters,
input segments, workflow.
**Note:** If you need demo data, you can install the marketing_campaign_crm_demo
module, but this will also install the CRM application as it depends on
CRM Leads.
""",
'website': 'https://www.odoo.com/page/lead-automation',
'data': [
'marketing_campaign_view.xml',
'marketing_campaign_data.xml',
'marketing_campaign_workflow.xml',
'report/campaign_analysis_view.xml',
'security/marketing_campaign_security.xml',
'security/ir.model.access.csv'
],
'demo': ['marketing_campaign_demo.xml'],
'test': ['test/marketing_campaign.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
rychipman/mongo-python-driver | pymongo/read_preferences.py | 27 | 12784 | # Copyright 2012-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for choosing which member of a replica set to read from."""
from collections import Mapping
from pymongo.errors import ConfigurationError
from pymongo.server_selectors import (member_with_tags_server_selector,
secondary_with_tags_server_selector,
writable_server_selector)
_PRIMARY = 0
_PRIMARY_PREFERRED = 1
_SECONDARY = 2
_SECONDARY_PREFERRED = 3
_NEAREST = 4
_MONGOS_MODES = (
'primary',
'primaryPreferred',
'secondary',
'secondaryPreferred',
'nearest',
)
def _validate_tag_sets(tag_sets):
"""Validate tag sets for a MongoReplicaSetClient.
"""
if tag_sets is None:
return tag_sets
if not isinstance(tag_sets, list):
raise TypeError((
"Tag sets %r invalid, must be a list") % (tag_sets,))
if len(tag_sets) == 0:
raise ValueError((
"Tag sets %r invalid, must be None or contain at least one set of"
" tags") % (tag_sets,))
for tags in tag_sets:
if not isinstance(tags, Mapping):
raise TypeError(
"Tag set %r invalid, must be an instance of dict, "
"bson.son.SON or other type that inherits from "
"collection.Mapping" % (tags,))
return tag_sets
class _ServerMode(object):
"""Base class for all read preferences.
"""
__slots__ = ("__mongos_mode", "__mode", "__tag_sets")
def __init__(self, mode, tag_sets=None):
if mode == _PRIMARY and tag_sets is not None:
raise ConfigurationError("Read preference primary "
"cannot be combined with tags")
self.__mongos_mode = _MONGOS_MODES[mode]
self.__mode = mode
self.__tag_sets = _validate_tag_sets(tag_sets)
@property
def name(self):
"""The name of this read preference.
"""
return self.__class__.__name__
@property
def document(self):
"""Read preference as a document.
"""
if self.__tag_sets in (None, [{}]):
return {'mode': self.__mongos_mode}
return {'mode': self.__mongos_mode, 'tags': self.__tag_sets}
@property
def mode(self):
"""The mode of this read preference instance.
"""
return self.__mode
@property
def tag_sets(self):
"""Set ``tag_sets`` to a list of dictionaries like [{'dc': 'ny'}] to
read only from members whose ``dc`` tag has the value ``"ny"``.
To specify a priority-order for tag sets, provide a list of
tag sets: ``[{'dc': 'ny'}, {'dc': 'la'}, {}]``. A final, empty tag
set, ``{}``, means "read from any member that matches the mode,
ignoring tags." MongoReplicaSetClient tries each set of tags in turn
until it finds a set of tags with at least one matching member.
.. seealso:: `Data-Center Awareness
<http://www.mongodb.org/display/DOCS/Data+Center+Awareness>`_
"""
return list(self.__tag_sets) if self.__tag_sets else [{}]
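    # For example (a minimal sketch of the tag-set semantics described above, using
    # the Secondary class defined below): prefer members tagged dc=ny, then dc=la,
    # then fall back to any secondary:
    #
    #     Secondary(tag_sets=[{'dc': 'ny'}, {'dc': 'la'}, {}])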
def __repr__(self):
return "%s(tag_sets=%r)" % (
self.name, self.__tag_sets)
def __eq__(self, other):
if isinstance(other, _ServerMode):
return (self.mode == other.mode and
self.tag_sets == other.tag_sets)
return NotImplemented
def __ne__(self, other):
return not self == other
def __getstate__(self):
"""Return value of object for pickling.
Needed explicitly because __slots__() defined.
"""
return {'mode': self.__mode, 'tag_sets': self.__tag_sets}
def __setstate__(self, value):
"""Restore from pickling."""
self.__mode = value['mode']
self.__mongos_mode = _MONGOS_MODES[self.__mode]
self.__tag_sets = _validate_tag_sets(value['tag_sets'])
class Primary(_ServerMode):
"""Primary read preference.
* When directly connected to one mongod queries are allowed if the server
is standalone or a replica set primary.
* When connected to a mongos queries are sent to the primary of a shard.
* When connected to a replica set queries are sent to the primary of
the replica set.
"""
def __init__(self):
super(Primary, self).__init__(_PRIMARY)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
return writable_server_selector(server_descriptions)
def __repr__(self):
return "Primary()"
def __eq__(self, other):
if isinstance(other, _ServerMode):
return other.mode == _PRIMARY
return NotImplemented
class PrimaryPreferred(_ServerMode):
"""PrimaryPreferred read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are sent to the primary of a shard if
available, otherwise a shard secondary.
* When connected to a replica set queries are sent to the primary if
available, otherwise a secondary.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use if the primary is not
available.
"""
def __init__(self, tag_sets=None):
super(PrimaryPreferred, self).__init__(_PRIMARY_PREFERRED, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
writable_servers = writable_server_selector(server_descriptions)
if writable_servers:
return writable_servers
else:
return secondary_with_tags_server_selector(
self.tag_sets,
server_descriptions)
class Secondary(_ServerMode):
"""Secondary read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are distributed among shard
secondaries. An error is raised if no secondaries are available.
* When connected to a replica set queries are distributed among
secondaries. An error is raised if no secondaries are available.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use with this read_preference
"""
def __init__(self, tag_sets=None):
super(Secondary, self).__init__(_SECONDARY, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
return secondary_with_tags_server_selector(
self.tag_sets,
server_descriptions)
class SecondaryPreferred(_ServerMode):
"""SecondaryPreferred read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are distributed among shard
secondaries, or the shard primary if no secondary is available.
* When connected to a replica set queries are distributed among
secondaries, or the primary if no secondary is available.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use with this read_preference
"""
def __init__(self, tag_sets=None):
super(SecondaryPreferred, self).__init__(_SECONDARY_PREFERRED, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
secondaries = secondary_with_tags_server_selector(
self.tag_sets,
server_descriptions)
if secondaries:
return secondaries
else:
return writable_server_selector(server_descriptions)
class Nearest(_ServerMode):
"""Nearest read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are distributed among all members of
a shard.
* When connected to a replica set queries are distributed among all
members.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use with this read_preference
"""
def __init__(self, tag_sets=None):
super(Nearest, self).__init__(_NEAREST, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
return member_with_tags_server_selector(
self.tag_sets or [{}],
server_descriptions)
_ALL_READ_PREFERENCES = (Primary, PrimaryPreferred,
Secondary, SecondaryPreferred, Nearest)
def make_read_preference(mode, tag_sets):
if mode == _PRIMARY:
if tag_sets not in (None, [{}]):
raise ConfigurationError("Read preference primary "
"cannot be combined with tags")
return Primary()
return _ALL_READ_PREFERENCES[mode](tag_sets)
_MODES = (
'PRIMARY',
'PRIMARY_PREFERRED',
'SECONDARY',
'SECONDARY_PREFERRED',
'NEAREST',
)
class ReadPreference(object):
"""An enum that defines the read preference modes supported by PyMongo.
See :doc:`/examples/high_availability` for code examples.
A read preference is used in three cases:
:class:`~pymongo.mongo_client.MongoClient` connected to a single mongod:
- ``PRIMARY``: Queries are allowed if the server is standalone or a replica
set primary.
- All other modes allow queries to standalone servers, to a replica set
primary, or to replica set secondaries.
:class:`~pymongo.mongo_client.MongoClient` initialized with the
``replicaSet`` option:
- ``PRIMARY``: Read from the primary. This is the default, and provides the
strongest consistency. If no primary is available, raise
:class:`~pymongo.errors.AutoReconnect`.
- ``PRIMARY_PREFERRED``: Read from the primary if available, or if there is
none, read from a secondary.
- ``SECONDARY``: Read from a secondary. If no secondary is available,
raise :class:`~pymongo.errors.AutoReconnect`.
- ``SECONDARY_PREFERRED``: Read from a secondary if available, otherwise
from the primary.
- ``NEAREST``: Read from any member.
:class:`~pymongo.mongo_client.MongoClient` connected to a mongos, with a
sharded cluster of replica sets:
- ``PRIMARY``: Read from the primary of the shard, or raise
:class:`~pymongo.errors.OperationFailure` if there is none.
This is the default.
- ``PRIMARY_PREFERRED``: Read from the primary of the shard, or if there is
none, read from a secondary of the shard.
- ``SECONDARY``: Read from a secondary of the shard, or raise
:class:`~pymongo.errors.OperationFailure` if there is none.
- ``SECONDARY_PREFERRED``: Read from a secondary of the shard if available,
otherwise from the shard primary.
- ``NEAREST``: Read from any shard member.
"""
PRIMARY = Primary()
PRIMARY_PREFERRED = PrimaryPreferred()
SECONDARY = Secondary()
SECONDARY_PREFERRED = SecondaryPreferred()
NEAREST = Nearest()
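# Example usage (a minimal sketch; MongoClient and get_database()/get_collection()
# belong to the PyMongo 3.x client API and are not defined in this module):
#
#     from pymongo import MongoClient
#     client = MongoClient()
#     coll = client.get_database('test').get_collection(
#         'data', read_preference=ReadPreference.SECONDARY_PREFERRED)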
def read_pref_mode_from_name(name):
"""Get the read preference mode from mongos/uri name.
"""
return _MONGOS_MODES.index(name)
class MovingAverage(object):
"""Tracks an exponentially-weighted moving average."""
def __init__(self):
self.average = None
def add_sample(self, sample):
if sample < 0:
# Likely system time change while waiting for ismaster response
# and not using time.monotonic. Ignore it, the next one will
# probably be valid.
return
if self.average is None:
self.average = sample
else:
# The Server Selection Spec requires an exponentially weighted
# average with alpha = 0.2.
self.average = 0.8 * self.average + 0.2 * sample
def get(self):
"""Get the calculated average, or None if no samples yet."""
return self.average
def reset(self):
self.average = None
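# Worked example of the weighted update above (a minimal sketch; with alpha = 0.2 the
# second sample gives 0.8 * 10.0 + 0.2 * 20.0 = 12.0):
if __name__ == "__main__":
    avg = MovingAverage()
    avg.add_sample(10.0)  # first sample initializes the average to 10.0
    avg.add_sample(20.0)  # 0.8 * 10.0 + 0.2 * 20.0
    print(avg.get())      # -> 12.0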
| apache-2.0 |
cainmatt/django | django/contrib/gis/gdal/srs.py | 366 | 12043 | """
The Spatial Reference class, represents OGR Spatial Reference objects.
Example:
>>> from django.contrib.gis.gdal import SpatialReference
>>> srs = SpatialReference('WGS84')
>>> print(srs)
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4326"]]
>>> print(srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> print(srs.ellipsoid)
(6378137.0, 6356752.3142451793, 298.25722356300003)
>>> print(srs.projected, srs.geographic)
False True
>>> srs.import_epsg(32140)
>>> print(srs.name)
NAD83 / Texas South Central
"""
from ctypes import byref, c_char_p, c_int
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import SRSException
from django.contrib.gis.gdal.prototypes import srs as capi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class SpatialReference(GDALBase):
"""
A wrapper for the OGRSpatialReference object. According to the GDAL Web site,
the SpatialReference object "provide[s] services to represent coordinate
systems (projections and datums) and to transform between them."
"""
def __init__(self, srs_input='', srs_type='user'):
"""
Creates a GDAL OSR Spatial Reference object from the given input.
        The input may be a string of OGC Well Known Text (WKT), an integer
EPSG code, a PROJ.4 string, and/or a projection "well known" shorthand
string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83').
"""
if srs_type == 'wkt':
self.ptr = capi.new_srs(c_char_p(b''))
self.import_wkt(srs_input)
return
elif isinstance(srs_input, six.string_types):
# Encoding to ASCII if unicode passed in.
if isinstance(srs_input, six.text_type):
srs_input = srs_input.encode('ascii')
try:
# If SRID is a string, e.g., '4326', then make acceptable
# as user input.
srid = int(srs_input)
srs_input = 'EPSG:%d' % srid
except ValueError:
pass
elif isinstance(srs_input, six.integer_types):
# EPSG integer code was input.
srs_type = 'epsg'
elif isinstance(srs_input, self.ptr_type):
srs = srs_input
srs_type = 'ogr'
else:
raise TypeError('Invalid SRS type "%s"' % srs_type)
if srs_type == 'ogr':
# Input is already an SRS pointer.
srs = srs_input
else:
# Creating a new SRS pointer, using the string buffer.
buf = c_char_p(b'')
srs = capi.new_srs(buf)
# If the pointer is NULL, throw an exception.
if not srs:
raise SRSException('Could not create spatial reference from: %s' % srs_input)
else:
self.ptr = srs
# Importing from either the user input string or an integer SRID.
if srs_type == 'user':
self.import_user_input(srs_input)
elif srs_type == 'epsg':
self.import_epsg(srs_input)
def __del__(self):
"Destroys this spatial reference."
if self._ptr and capi:
capi.release_srs(self._ptr)
def __getitem__(self, target):
"""
Returns the value of the given string attribute node, None if the node
doesn't exist. Can also take a tuple as a parameter, (target, child),
where child is the index of the attribute in the WKT. For example:
>>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]'
>>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326
>>> print(srs['GEOGCS'])
WGS 84
>>> print(srs['DATUM'])
WGS_1984
>>> print(srs['AUTHORITY'])
EPSG
>>> print(srs['AUTHORITY', 1]) # The authority value
4326
>>> print(srs['TOWGS84', 4]) # the fourth value in this wkt
0
        >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbol.
EPSG
>>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units
9122
"""
if isinstance(target, tuple):
return self.attr_value(*target)
else:
return self.attr_value(target)
def __str__(self):
"The string representation uses 'pretty' WKT."
return self.pretty_wkt
# #### SpatialReference Methods ####
def attr_value(self, target, index=0):
"""
The attribute value for the given target node (e.g. 'PROJCS'). The index
keyword specifies an index of the child node to return.
"""
if not isinstance(target, six.string_types) or not isinstance(index, int):
raise TypeError
return capi.get_attr_value(self.ptr, force_bytes(target), index)
def auth_name(self, target):
"Returns the authority name for the given string target node."
return capi.get_auth_name(self.ptr, force_bytes(target))
def auth_code(self, target):
"Returns the authority code for the given string target node."
return capi.get_auth_code(self.ptr, force_bytes(target))
def clone(self):
"Returns a clone of this SpatialReference object."
return SpatialReference(capi.clone_srs(self.ptr))
def from_esri(self):
"Morphs this SpatialReference from ESRI's format to EPSG."
capi.morph_from_esri(self.ptr)
def identify_epsg(self):
"""
This method inspects the WKT of this SpatialReference, and will
add EPSG authority nodes where an EPSG identifier is applicable.
"""
capi.identify_epsg(self.ptr)
def to_esri(self):
"Morphs this SpatialReference to ESRI's format."
capi.morph_to_esri(self.ptr)
def validate(self):
"Checks to see if the given spatial reference is valid."
capi.srs_validate(self.ptr)
# #### Name & SRID properties ####
@property
def name(self):
"Returns the name of this Spatial Reference."
if self.projected:
return self.attr_value('PROJCS')
elif self.geographic:
return self.attr_value('GEOGCS')
elif self.local:
return self.attr_value('LOCAL_CS')
else:
return None
@property
def srid(self):
"Returns the SRID of top-level authority, or None if undefined."
try:
return int(self.attr_value('AUTHORITY', 1))
except (TypeError, ValueError):
return None
# #### Unit Properties ####
@property
def linear_name(self):
"Returns the name of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return name
@property
def linear_units(self):
"Returns the value of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return units
@property
def angular_name(self):
"Returns the name of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return name
@property
def angular_units(self):
"Returns the value of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return units
@property
def units(self):
"""
Returns a 2-tuple of the units value and the units name,
        and will automatically determine whether to return the linear
or angular units.
"""
units, name = None, None
if self.projected or self.local:
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
elif self.geographic:
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
if name is not None:
name = force_text(name)
return (units, name)
# #### Spheroid/Ellipsoid Properties ####
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening)
"""
return (self.semi_major, self.semi_minor, self.inverse_flattening)
@property
def semi_major(self):
"Returns the Semi Major Axis for this Spatial Reference."
return capi.semi_major(self.ptr, byref(c_int()))
@property
def semi_minor(self):
"Returns the Semi Minor Axis for this Spatial Reference."
return capi.semi_minor(self.ptr, byref(c_int()))
@property
def inverse_flattening(self):
"Returns the Inverse Flattening for this Spatial Reference."
return capi.invflattening(self.ptr, byref(c_int()))
# #### Boolean Properties ####
@property
def geographic(self):
"""
Returns True if this SpatialReference is geographic
(root node is GEOGCS).
"""
return bool(capi.isgeographic(self.ptr))
@property
def local(self):
"Returns True if this SpatialReference is local (root node is LOCAL_CS)."
return bool(capi.islocal(self.ptr))
@property
def projected(self):
"""
Returns True if this SpatialReference is a projected coordinate system
(root node is PROJCS).
"""
return bool(capi.isprojected(self.ptr))
# #### Import Routines #####
def import_epsg(self, epsg):
"Imports the Spatial Reference from the EPSG code (an integer)."
capi.from_epsg(self.ptr, epsg)
def import_proj(self, proj):
"Imports the Spatial Reference from a PROJ.4 string."
capi.from_proj(self.ptr, proj)
def import_user_input(self, user_input):
"Imports the Spatial Reference from the given user input string."
capi.from_user_input(self.ptr, force_bytes(user_input))
def import_wkt(self, wkt):
"Imports the Spatial Reference from OGC WKT (string)"
capi.from_wkt(self.ptr, byref(c_char_p(wkt)))
def import_xml(self, xml):
"Imports the Spatial Reference from an XML string."
capi.from_xml(self.ptr, xml)
# #### Export Properties ####
@property
def wkt(self):
"Returns the WKT representation of this Spatial Reference."
return capi.to_wkt(self.ptr, byref(c_char_p()))
@property
def pretty_wkt(self, simplify=0):
"Returns the 'pretty' representation of the WKT."
return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify)
@property
def proj(self):
"Returns the PROJ.4 representation for this Spatial Reference."
return capi.to_proj(self.ptr, byref(c_char_p()))
@property
def proj4(self):
"Alias for proj()."
return self.proj
@property
def xml(self, dialect=''):
"Returns the XML representation of this Spatial Reference."
return capi.to_xml(self.ptr, byref(c_char_p()), dialect)
class CoordTransform(GDALBase):
"The coordinate system transformation object."
def __init__(self, source, target):
"Initializes on a source and target SpatialReference objects."
if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference):
raise TypeError('source and target must be of type SpatialReference')
self.ptr = capi.new_ct(source._ptr, target._ptr)
self._srs1_name = source.name
self._srs2_name = target.name
def __del__(self):
"Deletes this Coordinate Transformation object."
if self._ptr and capi:
capi.destroy_ct(self._ptr)
def __str__(self):
return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
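# Example usage (a minimal sketch; assumes GDAL is available and uses OGRGeometry from
# django.contrib.gis.gdal, which is not defined in this module):
#
#     from django.contrib.gis.gdal import CoordTransform, OGRGeometry, SpatialReference
#     ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(32140))
#     point = OGRGeometry('POINT(-95.36 29.76)', srs='WGS84')
#     point.transform(ct)  # reprojects the point in place to EPSG:32140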
| bsd-3-clause |
MounirMesselmeni/django | django/contrib/auth/migrations/0008_alter_user_username_max_length.py | 26 | 1030 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0007_alter_validators_add_error_messages'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(
error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 254 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=254,
unique=True,
validators=[
django.core.validators.RegexValidator(
'^[\\w.@+-]+$', 'Enter a valid username. '
'This value may contain only letters, numbers and @/./+/-/_ characters.'
),
],
verbose_name='username',
),
),
]
| bsd-3-clause |
tumbl3w33d/ansible | lib/ansible/modules/crypto/openssl_certificate.py | 2 | 115826 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
# Copyright: (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: openssl_certificate
version_added: "2.4"
short_description: Generate and/or check OpenSSL certificates
description:
- This module allows one to (re)generate OpenSSL certificates.
    - It implements a notion of provider (i.e. C(selfsigned), C(ownca), C(acme), C(assertonly), C(entrust))
for your certificate.
- The C(assertonly) provider is intended for use cases where one is only interested in
checking properties of a supplied certificate. Please note that this provider has been
deprecated in Ansible 2.9 and will be removed in Ansible 2.13. See the examples on how
to emulate C(assertonly) usage with M(openssl_certificate_info), M(openssl_csr_info),
M(openssl_privatekey_info) and M(assert). This also allows more flexible checks than
the ones offered by the C(assertonly) provider.
- The C(ownca) provider is intended for generating OpenSSL certificate signed with your own
CA (Certificate Authority) certificate (self-signed certificate).
- Many properties that can be specified in this module are for validation of an
existing or newly generated certificate. The proper place to specify them, if you
want to receive a certificate with these properties is a CSR (Certificate Signing Request).
- "Please note that the module regenerates existing certificate if it doesn't match the module's
options, or if it seems to be corrupt. If you are concerned that this could overwrite
your existing certificate, consider using the I(backup) option."
- It uses the pyOpenSSL or cryptography python library to interact with OpenSSL.
- If both the cryptography and PyOpenSSL libraries are available (and meet the minimum version requirements)
cryptography will be preferred as a backend over PyOpenSSL (unless the backend is forced with C(select_crypto_backend)).
Please note that the PyOpenSSL backend was deprecated in Ansible 2.9 and will be removed in Ansible 2.13.
requirements:
- PyOpenSSL >= 0.15 or cryptography >= 1.6 (if using C(selfsigned) or C(assertonly) provider)
- acme-tiny (if using the C(acme) provider)
author:
- Yanis Guenane (@Spredzy)
- Markus Teufelberger (@MarkusTeufelberger)
options:
state:
description:
- Whether the certificate should exist or not, taking action if the state is different from what is stated.
type: str
default: present
choices: [ absent, present ]
path:
description:
- Remote absolute path where the generated certificate file should be created or is already located.
type: path
required: true
provider:
description:
- Name of the provider to use to generate/retrieve the OpenSSL certificate.
- The C(assertonly) provider will not generate files and fail if the certificate file is missing.
- The C(assertonly) provider has been deprecated in Ansible 2.9 and will be removed in Ansible 2.13.
Please see the examples on how to emulate it with M(openssl_certificate_info), M(openssl_csr_info),
M(openssl_privatekey_info) and M(assert).
- "The C(entrust) provider was added for Ansible 2.9 and requires credentials for the
L(https://www.entrustdatacard.com/products/categories/ssl-certificates,Entrust Certificate Services) (ECS) API."
type: str
required: true
choices: [ acme, assertonly, entrust, ownca, selfsigned ]
force:
description:
- Generate the certificate, even if it already exists.
type: bool
default: no
csr_path:
description:
- Path to the Certificate Signing Request (CSR) used to generate this certificate.
- This is not required in C(assertonly) mode.
type: path
privatekey_path:
description:
- Path to the private key to use when signing the certificate.
type: path
privatekey_passphrase:
description:
- The passphrase for the I(privatekey_path).
- This is required if the private key is password protected.
type: str
selfsigned_version:
description:
- Version of the C(selfsigned) certificate.
- Nowadays it should almost always be C(3).
- This is only used by the C(selfsigned) provider.
type: int
default: 3
version_added: "2.5"
selfsigned_digest:
description:
- Digest algorithm to be used when self-signing the certificate.
- This is only used by the C(selfsigned) provider.
type: str
default: sha256
selfsigned_not_before:
description:
- The point in time the certificate is valid from.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will start being valid from now.
- This is only used by the C(selfsigned) provider.
type: str
default: +0s
aliases: [ selfsigned_notBefore ]
selfsigned_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will stop being valid 10 years from now.
- This is only used by the C(selfsigned) provider.
type: str
default: +3650d
aliases: [ selfsigned_notAfter ]
selfsigned_create_subject_key_identifier:
description:
- Whether to create the Subject Key Identifier (SKI) from the public key.
- A value of C(create_if_not_provided) (default) only creates a SKI when the CSR does not
provide one.
- A value of C(always_create) always creates a SKI. If the CSR provides one, that one is
ignored.
- A value of C(never_create) never creates a SKI. If the CSR provides one, that one is used.
- This is only used by the C(selfsigned) provider.
- Note that this is only supported if the C(cryptography) backend is used!
type: str
choices: [create_if_not_provided, always_create, never_create]
default: create_if_not_provided
version_added: "2.9"
ownca_path:
description:
- Remote absolute path of the CA (Certificate Authority) certificate.
- This is only used by the C(ownca) provider.
type: path
version_added: "2.7"
ownca_privatekey_path:
description:
- Path to the CA (Certificate Authority) private key to use when signing the certificate.
- This is only used by the C(ownca) provider.
type: path
version_added: "2.7"
ownca_privatekey_passphrase:
description:
- The passphrase for the I(ownca_privatekey_path).
- This is only used by the C(ownca) provider.
type: str
version_added: "2.7"
ownca_digest:
description:
- The digest algorithm to be used for the C(ownca) certificate.
- This is only used by the C(ownca) provider.
type: str
default: sha256
version_added: "2.7"
ownca_version:
description:
- The version of the C(ownca) certificate.
- Nowadays it should almost always be C(3).
- This is only used by the C(ownca) provider.
type: int
default: 3
version_added: "2.7"
ownca_not_before:
description:
- The point in time the certificate is valid from.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will start being valid from now.
- This is only used by the C(ownca) provider.
type: str
default: +0s
version_added: "2.7"
ownca_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will stop being valid 10 years from now.
- This is only used by the C(ownca) provider.
type: str
default: +3650d
version_added: "2.7"
ownca_create_subject_key_identifier:
description:
- Whether to create the Subject Key Identifier (SKI) from the public key.
- A value of C(create_if_not_provided) (default) only creates a SKI when the CSR does not
provide one.
- A value of C(always_create) always creates a SKI. If the CSR provides one, that one is
ignored.
- A value of C(never_create) never creates a SKI. If the CSR provides one, that one is used.
- This is only used by the C(ownca) provider.
- Note that this is only supported if the C(cryptography) backend is used!
type: str
choices: [create_if_not_provided, always_create, never_create]
default: create_if_not_provided
version_added: "2.9"
ownca_create_authority_key_identifier:
description:
            - Create an Authority Key Identifier from the CA's certificate. If the CSR provided
              an authority key identifier, it is ignored.
- The Authority Key Identifier is generated from the CA certificate's Subject Key Identifier,
if available. If it is not available, the CA certificate's public key will be used.
- This is only used by the C(ownca) provider.
- Note that this is only supported if the C(cryptography) backend is used!
type: bool
default: yes
version_added: "2.9"
acme_accountkey_path:
description:
- The path to the accountkey for the C(acme) provider.
- This is only used by the C(acme) provider.
type: path
acme_challenge_path:
description:
- The path to the ACME challenge directory that is served on U(http://<HOST>:80/.well-known/acme-challenge/)
- This is only used by the C(acme) provider.
type: path
acme_chain:
description:
- Include the intermediate certificate to the generated certificate
- This is only used by the C(acme) provider.
- Note that this is only available for older versions of C(acme-tiny).
New versions include the chain automatically, and setting I(acme_chain) to C(yes) results in an error.
type: bool
default: no
version_added: "2.5"
signature_algorithms:
description:
- A list of algorithms that you would accept the certificate to be signed with
(e.g. ['sha256WithRSAEncryption', 'sha512WithRSAEncryption']).
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
issuer:
description:
- The key/value pairs that must be present in the issuer name field of the certificate.
- If you need to specify more than one value with the same key, use a list as value.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: dict
issuer_strict:
description:
- If set to C(yes), the I(issuer) field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
version_added: "2.5"
subject:
description:
- The key/value pairs that must be present in the subject name field of the certificate.
- If you need to specify more than one value with the same key, use a list as value.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: dict
subject_strict:
description:
- If set to C(yes), the I(subject) field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
version_added: "2.5"
has_expired:
description:
- Checks if the certificate is expired/not expired at the time the module is executed.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
version:
description:
- The version of the certificate.
- Nowadays it should almost always be 3.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: int
valid_at:
description:
- The certificate must be valid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
invalid_at:
description:
- The certificate must be invalid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
not_before:
description:
- The certificate must start to become valid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
aliases: [ notBefore ]
not_after:
description:
- The certificate must expire at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
aliases: [ notAfter ]
valid_in:
description:
- The certificate must still be valid at this relative time offset from now.
            - Valid format is C([+-]timespec | number_of_seconds) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using this parameter, this module is NOT idempotent.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
key_usage:
description:
- The I(key_usage) extension field must contain all these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
aliases: [ keyUsage ]
key_usage_strict:
description:
- If set to C(yes), the I(key_usage) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
aliases: [ keyUsage_strict ]
extended_key_usage:
description:
- The I(extended_key_usage) extension field must contain all these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
aliases: [ extendedKeyUsage ]
extended_key_usage_strict:
description:
- If set to C(yes), the I(extended_key_usage) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
aliases: [ extendedKeyUsage_strict ]
subject_alt_name:
description:
- The I(subject_alt_name) extension field must contain these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
aliases: [ subjectAltName ]
subject_alt_name_strict:
description:
- If set to C(yes), the I(subject_alt_name) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
aliases: [ subjectAltName_strict ]
select_crypto_backend:
description:
- Determines which crypto backend to use.
- The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl).
- If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library.
- If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
- Please note that the C(pyopenssl) backend has been deprecated in Ansible 2.9, and will be removed in Ansible 2.13.
From that point on, only the C(cryptography) backend will be available.
type: str
default: auto
choices: [ auto, cryptography, pyopenssl ]
version_added: "2.8"
backup:
description:
- Create a backup file including a timestamp so you can get the original
certificate back if you overwrote it with a new one by accident.
- This is not used by the C(assertonly) provider.
type: bool
default: no
version_added: "2.8"
entrust_cert_type:
description:
- Specify the type of certificate requested.
- This is only used by the C(entrust) provider.
type: str
default: STANDARD_SSL
choices: [ 'STANDARD_SSL', 'ADVANTAGE_SSL', 'UC_SSL', 'EV_SSL', 'WILDCARD_SSL', 'PRIVATE_SSL', 'PD_SSL', 'CDS_ENT_LITE', 'CDS_ENT_PRO', 'SMIME_ENT' ]
version_added: "2.9"
entrust_requester_email:
description:
- The email of the requester of the certificate (for tracking purposes).
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_requester_name:
description:
- The name of the requester of the certificate (for tracking purposes).
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_requester_phone:
description:
- The phone number of the requester of the certificate (for tracking purposes).
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_api_user:
description:
- The username for authentication to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_api_key:
description:
- The key (password) for authentication to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_api_client_cert_path:
description:
- The path to the client certificate used to authenticate to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: path
version_added: "2.9"
entrust_api_client_cert_key_path:
description:
- The path to the private key of the client certificate used to authenticate to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: path
version_added: "2.9"
entrust_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as an absolute timestamp.
- A valid absolute time format is C(ASN.1 TIME) such as C(2019-06-18).
            - A valid relative time format is C([+-]timespec) where timespec can be an integer + C([w | d | h | m | s]), such as C(+365d) or C(+32w1d2h).
- Time will always be interpreted as UTC.
- Note that only the date (day, month, year) is supported for specifying the expiry date of the issued certificate.
- The full date-time is adjusted to EST (GMT -5:00) before issuance, which may result in a certificate with an expiration date one day
earlier than expected if a relative time is used.
- The minimum certificate lifetime is 90 days, and maximum is three years.
            - If this value is not specified, the certificate will stop being valid 365 days from the date of issue.
- This is only used by the C(entrust) provider.
type: str
default: +365d
version_added: "2.9"
entrust_api_specification_path:
description:
- The path to the specification file defining the Entrust Certificate Services (ECS) API configuration.
- You can use this to keep a local copy of the specification to avoid downloading it every time the module is used.
- This is only used by the C(entrust) provider.
type: path
default: https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml
version_added: "2.9"
extends_documentation_fragment: files
notes:
- All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
- Date specified should be UTC. Minutes and seconds are mandatory.
    - For security reasons, when you use the C(ownca) provider, you should NOT run M(openssl_certificate) on
a target machine, but on a dedicated CA machine. It is recommended not to store the CA private key
on the target machine. Once signed, the certificate can be moved to the target machine.
seealso:
- module: openssl_csr
- module: openssl_dhparam
- module: openssl_pkcs12
- module: openssl_privatekey
- module: openssl_publickey
'''
EXAMPLES = r'''
- name: Generate a Self Signed OpenSSL certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
privatekey_path: /etc/ssl/private/ansible.com.pem
csr_path: /etc/ssl/csr/ansible.com.csr
provider: selfsigned
- name: Generate an OpenSSL certificate signed with your own CA certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
ownca_path: /etc/ssl/crt/ansible_CA.crt
ownca_privatekey_path: /etc/ssl/private/ansible_CA.pem
provider: ownca
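# Illustrative sketch (re-using the placeholder paths from the previous task): the backup
# option keeps a timestamped copy of an existing certificate before it is overwritten.
- name: Generate an OpenSSL certificate signed with your own CA certificate, backing up any existing certificate
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    csr_path: /etc/ssl/csr/ansible.com.csr
    ownca_path: /etc/ssl/crt/ansible_CA.crt
    ownca_privatekey_path: /etc/ssl/private/ansible_CA.pem
    provider: ownca
    backup: yes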
- name: Generate a Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
- name: Force (re-)generate a new Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
force: yes
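# Illustrative sketch: acme_chain only works with older versions of acme-tiny; newer
# versions include the chain automatically and fail when this option is set.
- name: Generate a Let's Encrypt Certificate including the intermediate certificate (older acme-tiny only)
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    csr_path: /etc/ssl/csr/ansible.com.csr
    provider: acme
    acme_accountkey_path: /etc/ssl/private/ansible.com.pem
    acme_challenge_path: /etc/ssl/challenges/ansible.com/
    acme_chain: yes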
- name: Generate an Entrust certificate via the Entrust Certificate Services (ECS) API
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: entrust
entrust_requester_name: Jo Doe
entrust_requester_email: jdoe@ansible.com
entrust_requester_phone: 555-555-5555
entrust_cert_type: STANDARD_SSL
entrust_api_user: apiusername
entrust_api_key: a^lv*32!cd9LnT
entrust_api_client_cert_path: /etc/ssl/entrust/ecs-client.crt
entrust_api_client_cert_key_path: /etc/ssl/entrust/ecs-key.crt
entrust_api_specification_path: /etc/ssl/entrust/api-docs/cms-api-2.1.0.yaml
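# Illustrative sketch: entrust_not_after accepts a relative timespec (here the documented
# 90-day minimum lifetime) or an absolute date such as 2019-06-18; the default is +365d.
- name: Generate an Entrust certificate that expires 90 days after issuance
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    csr_path: /etc/ssl/csr/ansible.com.csr
    provider: entrust
    entrust_not_after: +90d
    entrust_requester_name: Jo Doe
    entrust_requester_email: jdoe@ansible.com
    entrust_requester_phone: 555-555-5555
    entrust_cert_type: STANDARD_SSL
    entrust_api_user: apiusername
    entrust_api_key: a^lv*32!cd9LnT
    entrust_api_client_cert_path: /etc/ssl/entrust/ecs-client.crt
    entrust_api_client_cert_key_path: /etc/ssl/entrust/ecs-key.crt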
# The following example shows one assertonly usage using all existing options for
# assertonly, and shows how to emulate the behavior with the openssl_certificate_info,
# openssl_csr_info, openssl_privatekey_info and assert modules:
- openssl_certificate:
provider: assertonly
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
privatekey_path: /etc/ssl/csr/ansible.com.key
signature_algorithms:
- sha256WithRSAEncryption
- sha512WithRSAEncryption
subject:
commonName: ansible.com
subject_strict: yes
issuer:
commonName: ansible.com
issuer_strict: yes
has_expired: no
version: 3
key_usage:
- Data Encipherment
key_usage_strict: yes
extended_key_usage:
- DVCS
extended_key_usage_strict: yes
subject_alt_name:
- dns:ansible.com
subject_alt_name_strict: yes
not_before: 20190331202428Z
not_after: 20190413202428Z
valid_at: "+1d10h"
invalid_at: 20200331202428Z
valid_in: 10 # in ten seconds
- openssl_certificate_info:
path: /etc/ssl/crt/ansible.com.crt
# for valid_at, invalid_at and valid_in
valid_at:
one_day_ten_hours: "+1d10h"
fixed_timestamp: 20200331202428Z
ten_seconds: "+10"
register: result
- openssl_csr_info:
# Verifies that the CSR signature is valid; module will fail if not
path: /etc/ssl/csr/ansible.com.csr
register: result_csr
- openssl_privatekey_info:
path: /etc/ssl/csr/ansible.com.key
register: result_privatekey
- assert:
that:
# When private key is specified for assertonly, this will be checked:
- result.public_key == result_privatekey.public_key
# When CSR is specified for assertonly, this will be checked:
- result.public_key == result_csr.public_key
- result.subject_ordered == result_csr.subject_ordered
- result.extensions_by_oid == result_csr.extensions_by_oid
# signature_algorithms check
- "result.signature_algorithm == 'sha256WithRSAEncryption' or result.signature_algorithm == 'sha512WithRSAEncryption'"
# subject and subject_strict
- "result.subject.commonName == 'ansible.com'"
- "result.subject | length == 1" # the number must be the number of entries you check for
# issuer and issuer_strict
- "result.issuer.commonName == 'ansible.com'"
- "result.issuer | length == 1" # the number must be the number of entries you check for
# has_expired
- not result.expired
# version
- result.version == 3
# key_usage and key_usage_strict
- "'Data Encipherment' in result.key_usage"
- "result.key_usage | length == 1" # the number must be the number of entries you check for
# extended_key_usage and extended_key_usage_strict
- "'DVCS' in result.extended_key_usage"
- "result.extended_key_usage | length == 1" # the number must be the number of entries you check for
# subject_alt_name and subject_alt_name_strict
- "'dns:ansible.com' in result.subject_alt_name"
- "result.subject_alt_name | length == 1" # the number must be the number of entries you check for
# not_before and not_after
- "result.not_before == '20190331202428Z'"
- "result.not_after == '20190413202428Z'"
# valid_at, invalid_at and valid_in
- "result.valid_at.one_day_ten_hours" # for valid_at
- "not result.valid_at.fixed_timestamp" # for invalid_at
- "result.valid_at.ten_seconds" # for valid_in
# Examples for some checks one could use the assertonly provider for:
# (Please note that assertonly has been deprecated!)
# How to use the assertonly provider to implement and trigger your own custom certificate generation workflow:
- name: Check if a certificate is currently still valid, ignoring failures
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: no
ignore_errors: yes
register: validity_check
- name: Run custom task(s) to get a new, valid certificate in case the initial check failed
command: superspecialSSL recreate /etc/ssl/crt/example.com.crt
when: validity_check.failed
- name: Check the new certificate again for validity with the same parameters, this time failing the play if it is still invalid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: no
when: validity_check.failed
# Some other checks that assertonly could be used for:
- name: Verify that an existing certificate was issued by the Let's Encrypt CA and is currently still valid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
issuer:
O: Let's Encrypt
has_expired: no
- name: Ensure that a certificate uses a modern signature algorithm (no SHA1, MD5 or DSA)
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
signature_algorithms:
- sha224WithRSAEncryption
- sha256WithRSAEncryption
- sha384WithRSAEncryption
- sha512WithRSAEncryption
- sha224WithECDSAEncryption
- sha256WithECDSAEncryption
- sha384WithECDSAEncryption
- sha512WithECDSAEncryption
- name: Ensure that the existing certificate belongs to the specified private key
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
privatekey_path: /etc/ssl/private/example.com.pem
provider: assertonly
- name: Ensure that the existing certificate is still valid at the winter solstice 2017
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_at: 20171221162800Z
- name: Ensure that the existing certificate is still valid 2 weeks (1209600 seconds) from now
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_in: 1209600
- name: Ensure that the existing certificate is only used for digital signatures and encrypting other keys
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
key_usage:
- digitalSignature
- keyEncipherment
key_usage_strict: true
- name: Ensure that the existing certificate can be used for client authentication
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- name: Ensure that the existing certificate can only be used for client authentication and time stamping
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- 1.3.6.1.5.5.7.3.8
extended_key_usage_strict: true
- name: Ensure that the existing certificate has a certain domain in its subjectAltName
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
subject_alt_name:
- www.example.com
- test.example.com
'''
RETURN = r'''
filename:
description: Path to the generated Certificate
returned: changed or success
type: str
sample: /etc/ssl/crt/www.ansible.com.crt
backup_file:
description: Name of backup file created.
returned: changed and if I(backup) is C(yes)
type: str
sample: /path/to/www.ansible.com.crt.2019-03-09@11:22~
'''
from random import randint
import abc
import datetime
import time
import os
import traceback
from distutils.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.compat import ipaddress as compat_ipaddress
from ansible.module_utils.ecs.api import ECSClient, RestOperationException, SessionConfigurationException
MINIMAL_CRYPTOGRAPHY_VERSION = '1.6'
MINIMAL_PYOPENSSL_VERSION = '0.15'
PYOPENSSL_IMP_ERR = None
try:
import OpenSSL
from OpenSSL import crypto
PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__)
except ImportError:
PYOPENSSL_IMP_ERR = traceback.format_exc()
PYOPENSSL_FOUND = False
else:
PYOPENSSL_FOUND = True
CRYPTOGRAPHY_IMP_ERR = None
try:
import cryptography
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509 import NameAttribute, Name
from cryptography.x509.oid import NameOID
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
CRYPTOGRAPHY_FOUND = False
else:
CRYPTOGRAPHY_FOUND = True
class CertificateError(crypto_utils.OpenSSLObjectError):
pass
class Certificate(crypto_utils.OpenSSLObject):
def __init__(self, module, backend):
super(Certificate, self).__init__(
module.params['path'],
module.params['state'],
module.params['force'],
module.check_mode
)
self.provider = module.params['provider']
self.privatekey_path = module.params['privatekey_path']
self.privatekey_passphrase = module.params['privatekey_passphrase']
self.csr_path = module.params['csr_path']
self.cert = None
self.privatekey = None
self.csr = None
self.backend = backend
self.module = module
# The following are default values which make sure check() works as
# before if providers do not explicitly change these properties.
self.create_subject_key_identifier = 'never_create'
self.create_authority_key_identifier = False
self.backup = module.params['backup']
self.backup_file = None
def get_relative_time_option(self, input_string, input_name):
"""Return an ASN1 formatted string if a relative timespec
or an ASN1 formatted string is provided."""
result = to_native(input_string)
if result is None:
raise CertificateError(
'The timespec "%s" for %s is not valid' %
                (input_string, input_name))
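        # A leading '+' or '-' marks a relative timespec; convert it to an absolute point
        # in time (an ASN.1 TIME string for pyOpenSSL, a datetime for cryptography).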
if result.startswith("+") or result.startswith("-"):
result_datetime = crypto_utils.convert_relative_to_datetime(
result)
if self.backend == 'pyopenssl':
return result_datetime.strftime("%Y%m%d%H%M%SZ")
elif self.backend == 'cryptography':
return result_datetime
if self.backend == 'cryptography':
for date_fmt in ['%Y%m%d%H%M%SZ', '%Y%m%d%H%MZ', '%Y%m%d%H%M%S%z', '%Y%m%d%H%M%z']:
try:
return datetime.datetime.strptime(result, date_fmt)
except ValueError:
pass
raise CertificateError(
'The time spec "%s" for %s is invalid' %
(input_string, input_name)
)
return input_string
def _validate_privatekey(self):
if self.backend == 'pyopenssl':
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
elif self.backend == 'cryptography':
return crypto_utils.cryptography_compare_public_keys(self.cert.public_key(), self.privatekey.public_key())
def _validate_csr(self):
if self.backend == 'pyopenssl':
# Verify that CSR is signed by certificate's private key
try:
self.csr.verify(self.cert.get_pubkey())
except OpenSSL.crypto.Error:
return False
# Check subject
if self.csr.get_subject() != self.cert.get_subject():
return False
# Check extensions
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
for extension_number in range(0, cert_extension_count):
cert_extension = self.cert.get_extension(extension_number)
csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
if cert_extension.get_data() != list(csr_extension)[0].get_data():
return False
return True
elif self.backend == 'cryptography':
# Verify that CSR is signed by certificate's private key
if not self.csr.is_signature_valid:
return False
if not crypto_utils.cryptography_compare_public_keys(self.csr.public_key(), self.cert.public_key()):
return False
# Check subject
if self.csr.subject != self.cert.subject:
return False
# Check extensions
cert_exts = list(self.cert.extensions)
csr_exts = list(self.csr.extensions)
if self.create_subject_key_identifier != 'never_create':
# Filter out SubjectKeyIdentifier extension before comparison
cert_exts = list(filter(lambda x: not isinstance(x.value, x509.SubjectKeyIdentifier), cert_exts))
csr_exts = list(filter(lambda x: not isinstance(x.value, x509.SubjectKeyIdentifier), csr_exts))
if self.create_authority_key_identifier:
# Filter out AuthorityKeyIdentifier extension before comparison
cert_exts = list(filter(lambda x: not isinstance(x.value, x509.AuthorityKeyIdentifier), cert_exts))
csr_exts = list(filter(lambda x: not isinstance(x.value, x509.AuthorityKeyIdentifier), csr_exts))
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = self.csr.extensions.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def remove(self, module):
if self.backup:
self.backup_file = module.backup_local(self.path)
super(Certificate, self).remove(module)
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
state_and_perms = super(Certificate, self).check(module, perms_required)
if not state_and_perms:
return False
try:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
except Exception as dummy:
return False
if self.privatekey_path:
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase,
backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateError(exc)
if not self._validate_privatekey():
return False
if self.csr_path:
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
if not self._validate_csr():
return False
# Check SubjectKeyIdentifier
if self.backend == 'cryptography' and self.create_subject_key_identifier != 'never_create':
# Get hold of certificate's SKI
try:
ext = self.cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
except cryptography.x509.ExtensionNotFound as dummy:
return False
# Get hold of CSR's SKI for 'create_if_not_provided'
csr_ext = None
if self.create_subject_key_identifier == 'create_if_not_provided':
try:
csr_ext = self.csr.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
except cryptography.x509.ExtensionNotFound as dummy:
pass
if csr_ext is None:
# If CSR had no SKI, or we chose to ignore it ('always_create'), compare with created SKI
if ext.value.digest != x509.SubjectKeyIdentifier.from_public_key(self.cert.public_key()).digest:
return False
else:
# If CSR had SKI and we didn't ignore it ('create_if_not_provided'), compare SKIs
if ext.value.digest != csr_ext.value.digest:
return False
return True
class CertificateAbsent(Certificate):
def __init__(self, module):
super(CertificateAbsent, self).__init__(module, 'cryptography') # backend doesn't matter
def generate(self, module):
pass
def dump(self, check_mode=False):
# Use only for absent
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
class SelfSignedCertificateCryptography(Certificate):
"""Generate the self-signed certificate, using the cryptography backend"""
def __init__(self, module):
super(SelfSignedCertificateCryptography, self).__init__(module, 'cryptography')
self.create_subject_key_identifier = module.params['selfsigned_create_subject_key_identifier']
self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
self.digest = crypto_utils.select_message_digest(module.params['selfsigned_digest'])
self.version = module.params['selfsigned_version']
self.serial_number = x509.random_serial_number()
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key file {0} does not exist'.format(self.privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
self._module = module
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase, backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=to_native(exc))
if crypto_utils.cryptography_key_needs_digest_for_signing(self.privatekey):
if self.digest is None:
raise CertificateError(
'The digest %s is not supported with the cryptography backend' % module.params['selfsigned_digest']
)
else:
self.digest = None
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
try:
cert_builder = x509.CertificateBuilder()
cert_builder = cert_builder.subject_name(self.csr.subject)
cert_builder = cert_builder.issuer_name(self.csr.subject)
cert_builder = cert_builder.serial_number(self.serial_number)
cert_builder = cert_builder.not_valid_before(self.notBefore)
cert_builder = cert_builder.not_valid_after(self.notAfter)
cert_builder = cert_builder.public_key(self.privatekey.public_key())
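                # Copy the extensions from the CSR; a SubjectKeyIdentifier found in the CSR is
                # kept unless 'always_create' was requested, in which case one derived from the
                # public key is added below instead.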
has_ski = False
for extension in self.csr.extensions:
if isinstance(extension.value, x509.SubjectKeyIdentifier):
if self.create_subject_key_identifier == 'always_create':
continue
has_ski = True
cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
if not has_ski and self.create_subject_key_identifier != 'never_create':
cert_builder = cert_builder.add_extension(
x509.SubjectKeyIdentifier.from_public_key(self.privatekey.public_key()),
critical=False
)
except ValueError as e:
raise CertificateError(str(e))
try:
certificate = cert_builder.sign(
private_key=self.privatekey, algorithm=self.digest,
backend=default_backend()
)
except TypeError as e:
if str(e) == 'Algorithm must be a registered hash algorithm.' and self.digest is None:
module.fail_json(msg='Signing with Ed25519 and Ed448 keys requires cryptography 2.8 or newer.')
raise
self.cert = certificate
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
self.changed = True
else:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.cert.serial_number,
})
return result
class SelfSignedCertificate(Certificate):
"""Generate the self-signed certificate."""
def __init__(self, module):
super(SelfSignedCertificate, self).__init__(module, 'pyopenssl')
if module.params['selfsigned_create_subject_key_identifier'] != 'create_if_not_provided':
module.fail_json(msg='selfsigned_create_subject_key_identifier cannot be used with the pyOpenSSL backend!')
self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
self.digest = module.params['selfsigned_digest']
self.version = module.params['selfsigned_version']
self.serial_number = randint(1000, 99999)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key file {0} does not exist'.format(self.privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
cert.set_notBefore(to_bytes(self.notBefore))
cert.set_notAfter(to_bytes(self.notAfter))
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.csr.get_subject())
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.privatekey, self.digest)
self.cert = cert
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore,
'notAfter': self.notAfter,
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
class OwnCACertificateCryptography(Certificate):
"""Generate the own CA certificate. Using the cryptography backend"""
def __init__(self, module):
super(OwnCACertificateCryptography, self).__init__(module, 'cryptography')
self.create_subject_key_identifier = module.params['ownca_create_subject_key_identifier']
self.create_authority_key_identifier = module.params['ownca_create_authority_key_identifier']
self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
self.digest = crypto_utils.select_message_digest(module.params['ownca_digest'])
self.version = module.params['ownca_version']
self.serial_number = x509.random_serial_number()
self.ca_cert_path = module.params['ownca_path']
self.ca_privatekey_path = module.params['ownca_privatekey_path']
self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate file {0} does not exist'.format(self.ca_cert_path)
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key file {0} does not exist'.format(self.ca_privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path, backend=self.backend)
try:
self.ca_private_key = crypto_utils.load_privatekey(
self.ca_privatekey_path, self.ca_privatekey_passphrase, backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
if crypto_utils.cryptography_key_needs_digest_for_signing(self.ca_private_key):
if self.digest is None:
raise CertificateError(
'The digest %s is not supported with the cryptography backend' % module.params['ownca_digest']
)
else:
self.digest = None
def generate(self, module):
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate %s does not exist' % self.ca_cert_path
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key %s does not exist' % self.ca_privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert_builder = x509.CertificateBuilder()
cert_builder = cert_builder.subject_name(self.csr.subject)
cert_builder = cert_builder.issuer_name(self.ca_cert.subject)
cert_builder = cert_builder.serial_number(self.serial_number)
cert_builder = cert_builder.not_valid_before(self.notBefore)
cert_builder = cert_builder.not_valid_after(self.notAfter)
cert_builder = cert_builder.public_key(self.csr.public_key())
has_ski = False
for extension in self.csr.extensions:
if isinstance(extension.value, x509.SubjectKeyIdentifier):
if self.create_subject_key_identifier == 'always_create':
continue
has_ski = True
if self.create_authority_key_identifier and isinstance(extension.value, x509.AuthorityKeyIdentifier):
continue
cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
if not has_ski and self.create_subject_key_identifier != 'never_create':
cert_builder = cert_builder.add_extension(
x509.SubjectKeyIdentifier.from_public_key(self.csr.public_key()),
critical=False
)
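            # Add an AuthorityKeyIdentifier derived from the CA certificate's SubjectKeyIdentifier
            # if the CA certificate carries one, otherwise from the CA's public key.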
if self.create_authority_key_identifier:
try:
ext = self.ca_cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
cert_builder = cert_builder.add_extension(
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext.value)
if CRYPTOGRAPHY_VERSION >= LooseVersion('2.7') else
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext),
critical=False
)
except cryptography.x509.ExtensionNotFound:
cert_builder = cert_builder.add_extension(
x509.AuthorityKeyIdentifier.from_issuer_public_key(self.ca_cert.public_key()),
critical=False
)
try:
certificate = cert_builder.sign(
private_key=self.ca_private_key, algorithm=self.digest,
backend=default_backend()
)
except TypeError as e:
if str(e) == 'Algorithm must be a registered hash algorithm.' and self.digest is None:
module.fail_json(msg='Signing with Ed25519 and Ed448 keys requires cryptography 2.8 or newer.')
raise
self.cert = certificate
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
self.changed = True
else:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
if not super(OwnCACertificateCryptography, self).check(module, perms_required):
return False
# Check AuthorityKeyIdentifier
if self.create_authority_key_identifier:
try:
ext = self.ca_cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
expected_ext = (
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext.value)
if CRYPTOGRAPHY_VERSION >= LooseVersion('2.7') else
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext)
)
except cryptography.x509.ExtensionNotFound:
expected_ext = x509.AuthorityKeyIdentifier.from_issuer_public_key(self.ca_cert.public_key())
try:
ext = self.cert.extensions.get_extension_for_class(x509.AuthorityKeyIdentifier)
if ext.value != expected_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
'ca_cert': self.ca_cert_path,
'ca_privatekey': self.ca_privatekey_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.cert.serial_number,
})
return result
class OwnCACertificate(Certificate):
"""Generate the own CA certificate."""
def __init__(self, module):
super(OwnCACertificate, self).__init__(module, 'pyopenssl')
self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
self.digest = module.params['ownca_digest']
self.version = module.params['ownca_version']
self.serial_number = randint(1000, 99999)
if module.params['ownca_create_subject_key_identifier'] != 'create_if_not_provided':
module.fail_json(msg='ownca_create_subject_key_identifier cannot be used with the pyOpenSSL backend!')
if module.params['ownca_create_authority_key_identifier']:
module.warn('ownca_create_authority_key_identifier is ignored by the pyOpenSSL backend!')
self.ca_cert_path = module.params['ownca_path']
self.ca_privatekey_path = module.params['ownca_privatekey_path']
self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate file {0} does not exist'.format(self.ca_cert_path)
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key file {0} does not exist'.format(self.ca_privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path)
try:
self.ca_privatekey = crypto_utils.load_privatekey(
self.ca_privatekey_path, self.ca_privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate %s does not exist' % self.ca_cert_path
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key %s does not exist' % self.ca_privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
cert.set_notBefore(to_bytes(self.notBefore))
cert.set_notAfter(to_bytes(self.notAfter))
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.ca_cert.get_subject())
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.ca_privatekey, self.digest)
self.cert = cert
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
'ca_cert': self.ca_cert_path,
'ca_privatekey': self.ca_privatekey_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore,
'notAfter': self.notAfter,
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
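# Helpers for the assertonly provider: with equality=False they test whether every
# element/key of `subset` occurs in `superset`; with equality=True they require an
# exact match.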
def compare_sets(subset, superset, equality=False):
if equality:
return set(subset) == set(superset)
else:
return all(x in superset for x in subset)
def compare_dicts(subset, superset, equality=False):
if equality:
return subset == superset
else:
return all(superset.get(x) == v for x, v in subset.items())
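# Sentinel returned by the _validate_*() helpers when the certificate does not carry
# the extension being checked at all.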
NO_EXTENSION = 'no extension'
class AssertOnlyCertificateBase(Certificate):
def __init__(self, module, backend):
super(AssertOnlyCertificateBase, self).__init__(module, backend)
self.signature_algorithms = module.params['signature_algorithms']
if module.params['subject']:
self.subject = crypto_utils.parse_name_field(module.params['subject'])
else:
self.subject = []
self.subject_strict = module.params['subject_strict']
if module.params['issuer']:
self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
else:
self.issuer = []
self.issuer_strict = module.params['issuer_strict']
self.has_expired = module.params['has_expired']
self.version = module.params['version']
self.key_usage = module.params['key_usage']
self.key_usage_strict = module.params['key_usage_strict']
self.extended_key_usage = module.params['extended_key_usage']
self.extended_key_usage_strict = module.params['extended_key_usage_strict']
self.subject_alt_name = module.params['subject_alt_name']
self.subject_alt_name_strict = module.params['subject_alt_name_strict']
self.not_before = module.params['not_before']
self.not_after = module.params['not_after']
self.valid_at = module.params['valid_at']
self.invalid_at = module.params['invalid_at']
self.valid_in = module.params['valid_in']
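        # A bare number for valid_in is taken as seconds from now and normalized
        # into a '+<seconds>s' relative timespec.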
if self.valid_in and not self.valid_in.startswith("+") and not self.valid_in.startswith("-"):
try:
int(self.valid_in)
except ValueError:
module.fail_json(msg='The supplied value for "valid_in" (%s) is not an integer or a valid timespec' % self.valid_in)
self.valid_in = "+" + self.valid_in + "s"
# Load objects
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
if self.privatekey_path is not None:
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase,
backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateError(exc)
if self.csr_path is not None:
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
@abc.abstractmethod
def _validate_privatekey(self):
pass
@abc.abstractmethod
def _validate_csr_signature(self):
pass
@abc.abstractmethod
def _validate_csr_subject(self):
pass
@abc.abstractmethod
def _validate_csr_extensions(self):
pass
@abc.abstractmethod
def _validate_signature_algorithms(self):
pass
@abc.abstractmethod
def _validate_subject(self):
pass
@abc.abstractmethod
def _validate_issuer(self):
pass
@abc.abstractmethod
def _validate_has_expired(self):
pass
@abc.abstractmethod
def _validate_version(self):
pass
@abc.abstractmethod
def _validate_key_usage(self):
pass
@abc.abstractmethod
def _validate_extended_key_usage(self):
pass
@abc.abstractmethod
def _validate_subject_alt_name(self):
pass
@abc.abstractmethod
def _validate_not_before(self):
pass
@abc.abstractmethod
def _validate_not_after(self):
pass
@abc.abstractmethod
def _validate_valid_at(self):
pass
@abc.abstractmethod
def _validate_invalid_at(self):
pass
@abc.abstractmethod
def _validate_valid_in(self):
pass
def assertonly(self, module):
messages = []
if self.privatekey_path is not None:
if not self._validate_privatekey():
messages.append(
'Certificate %s and private key %s do not match' %
(self.path, self.privatekey_path)
)
if self.csr_path is not None:
if not self._validate_csr_signature():
messages.append(
'Certificate %s and CSR %s do not match: private key mismatch' %
(self.path, self.csr_path)
)
if not self._validate_csr_subject():
messages.append(
'Certificate %s and CSR %s do not match: subject mismatch' %
(self.path, self.csr_path)
)
if not self._validate_csr_extensions():
messages.append(
'Certificate %s and CSR %s do not match: extensions mismatch' %
(self.path, self.csr_path)
)
if self.signature_algorithms is not None:
wrong_alg = self._validate_signature_algorithms()
if wrong_alg:
messages.append(
'Invalid signature algorithm (got %s, expected one of %s)' %
(wrong_alg, self.signature_algorithms)
)
if self.subject is not None:
failure = self._validate_subject()
if failure:
dummy, cert_subject = failure
messages.append(
'Invalid subject component (got %s, expected all of %s to be present)' %
(cert_subject, self.subject)
)
if self.issuer is not None:
failure = self._validate_issuer()
if failure:
dummy, cert_issuer = failure
messages.append(
'Invalid issuer component (got %s, expected all of %s to be present)' % (cert_issuer, self.issuer)
)
if self.has_expired is not None:
cert_expired = self._validate_has_expired()
if cert_expired != self.has_expired:
messages.append(
'Certificate expiration check failed (certificate expiration is %s, expected %s)' %
(cert_expired, self.has_expired)
)
if self.version is not None:
cert_version = self._validate_version()
if cert_version != self.version:
messages.append(
'Invalid certificate version number (got %s, expected %s)' %
(cert_version, self.version)
)
if self.key_usage is not None:
failure = self._validate_key_usage()
if failure == NO_EXTENSION:
messages.append('Found no keyUsage extension')
elif failure:
dummy, cert_key_usage = failure
messages.append(
'Invalid keyUsage components (got %s, expected all of %s to be present)' %
(cert_key_usage, self.key_usage)
)
if self.extended_key_usage is not None:
failure = self._validate_extended_key_usage()
if failure == NO_EXTENSION:
messages.append('Found no extendedKeyUsage extension')
elif failure:
dummy, ext_cert_key_usage = failure
messages.append(
'Invalid extendedKeyUsage component (got %s, expected all of %s to be present)' % (ext_cert_key_usage, self.extended_key_usage)
)
if self.subject_alt_name is not None:
failure = self._validate_subject_alt_name()
if failure == NO_EXTENSION:
messages.append('Found no subjectAltName extension')
elif failure:
dummy, cert_san = failure
messages.append(
'Invalid subjectAltName component (got %s, expected all of %s to be present)' %
(cert_san, self.subject_alt_name)
)
if self.not_before is not None:
cert_not_valid_before = self._validate_not_before()
if cert_not_valid_before != self.get_relative_time_option(self.not_before, 'not_before'):
messages.append(
'Invalid not_before component (got %s, expected %s to be present)' %
(cert_not_valid_before, self.not_before)
)
if self.not_after is not None:
cert_not_valid_after = self._validate_not_after()
if cert_not_valid_after != self.get_relative_time_option(self.not_after, 'not_after'):
messages.append(
'Invalid not_after component (got %s, expected %s to be present)' %
(cert_not_valid_after, self.not_after)
)
if self.valid_at is not None:
not_before, valid_at, not_after = self._validate_valid_at()
if not (not_before <= valid_at <= not_after):
messages.append(
'Certificate is not valid for the specified date (%s) - not_before: %s - not_after: %s' %
(self.valid_at, not_before, not_after)
)
if self.invalid_at is not None:
not_before, invalid_at, not_after = self._validate_invalid_at()
if not_before <= invalid_at <= not_after:
messages.append(
'Certificate is not invalid for the specified date (%s) - not_before: %s - not_after: %s' %
(self.invalid_at, not_before, not_after)
)
if self.valid_in is not None:
not_before, valid_in, not_after = self._validate_valid_in()
if not not_before <= valid_in <= not_after:
messages.append(
'Certificate is not valid in %s from now (that would be %s) - not_before: %s - not_after: %s' %
(self.valid_in, valid_in, not_before, not_after)
)
return messages
def generate(self, module):
"""Don't generate anything - only assert"""
messages = self.assertonly(module)
if messages:
module.fail_json(msg=' | '.join(messages))
def check(self, module, perms_required=False):
"""Ensure the resource is in its desired state."""
messages = self.assertonly(module)
return len(messages) == 0
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
return result
class AssertOnlyCertificateCryptography(AssertOnlyCertificateBase):
"""Validate the supplied cert, using the cryptography backend"""
def __init__(self, module):
super(AssertOnlyCertificateCryptography, self).__init__(module, 'cryptography')
def _validate_privatekey(self):
return crypto_utils.cryptography_compare_public_keys(self.cert.public_key(), self.privatekey.public_key())
def _validate_csr_signature(self):
if not self.csr.is_signature_valid:
return False
return crypto_utils.cryptography_compare_public_keys(self.csr.public_key(), self.cert.public_key())
def _validate_csr_subject(self):
return self.csr.subject == self.cert.subject
def _validate_csr_extensions(self):
cert_exts = self.cert.extensions
csr_exts = self.csr.extensions
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = csr_exts.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def _validate_signature_algorithms(self):
if self.cert.signature_algorithm_oid._name not in self.signature_algorithms:
return self.cert.signature_algorithm_oid._name
def _validate_subject(self):
expected_subject = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(sub[0]), value=to_text(sub[1]))
for sub in self.subject])
cert_subject = self.cert.subject
if not compare_sets(expected_subject, cert_subject, self.subject_strict):
return expected_subject, cert_subject
def _validate_issuer(self):
expected_issuer = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(iss[0]), value=to_text(iss[1]))
for iss in self.issuer])
cert_issuer = self.cert.issuer
if not compare_sets(expected_issuer, cert_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
cert_not_after = self.cert.not_valid_after
cert_expired = cert_not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
if self.cert.version == x509.Version.v1:
return 1
if self.cert.version == x509.Version.v3:
return 3
return "unknown"
def _validate_key_usage(self):
try:
current_key_usage = self.cert.extensions.get_extension_for_class(x509.KeyUsage).value
test_key_usage = dict(
digital_signature=current_key_usage.digital_signature,
content_commitment=current_key_usage.content_commitment,
key_encipherment=current_key_usage.key_encipherment,
data_encipherment=current_key_usage.data_encipherment,
key_agreement=current_key_usage.key_agreement,
key_cert_sign=current_key_usage.key_cert_sign,
crl_sign=current_key_usage.crl_sign,
encipher_only=False,
decipher_only=False
)
if test_key_usage['key_agreement']:
test_key_usage.update(dict(
encipher_only=current_key_usage.encipher_only,
decipher_only=current_key_usage.decipher_only
))
key_usages = crypto_utils.cryptography_parse_key_usage_params(self.key_usage)
if not compare_dicts(key_usages, test_key_usage, self.key_usage_strict):
return self.key_usage, [k for k, v in test_key_usage.items() if v is True]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
try:
current_ext_keyusage = self.cert.extensions.get_extension_for_class(x509.ExtendedKeyUsage).value
usages = [crypto_utils.cryptography_name_to_oid(usage) for usage in self.extended_key_usage]
expected_ext_keyusage = x509.ExtendedKeyUsage(usages)
if not compare_sets(expected_ext_keyusage, current_ext_keyusage, self.extended_key_usage_strict):
return [eku.value for eku in expected_ext_keyusage], [eku.value for eku in current_ext_keyusage]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _validate_subject_alt_name(self):
try:
current_san = self.cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
expected_san = [crypto_utils.cryptography_get_name(san) for san in self.subject_alt_name]
if not compare_sets(expected_san, current_san, self.subject_alt_name_strict):
return self.subject_alt_name, current_san
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.not_valid_before
def _validate_not_after(self):
return self.cert.not_valid_after
def _validate_valid_at(self):
rt = self.get_relative_time_option(self.valid_at, 'valid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_invalid_at(self):
rt = self.get_relative_time_option(self.invalid_at, 'invalid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_valid_in(self):
valid_in_date = self.get_relative_time_option(self.valid_in, "valid_in")
return self.cert.not_valid_before, valid_in_date, self.cert.not_valid_after
class AssertOnlyCertificate(AssertOnlyCertificateBase):
"""validate the supplied certificate."""
def __init__(self, module):
super(AssertOnlyCertificate, self).__init__(module, 'pyopenssl')
# Ensure inputs are properly sanitized before comparison.
for param in ['signature_algorithms', 'key_usage', 'extended_key_usage',
'subject_alt_name', 'subject', 'issuer', 'not_before',
'not_after', 'valid_at', 'invalid_at']:
attr = getattr(self, param)
if isinstance(attr, list) and attr:
if isinstance(attr[0], str):
setattr(self, param, [to_bytes(item) for item in attr])
elif isinstance(attr[0], tuple):
setattr(self, param, [(to_bytes(item[0]), to_bytes(item[1])) for item in attr])
elif isinstance(attr, tuple):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, dict):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, str):
setattr(self, param, to_bytes(attr))
def _validate_privatekey(self):
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
def _validate_csr_signature(self):
try:
self.csr.verify(self.cert.get_pubkey())
except OpenSSL.crypto.Error:
return False
def _validate_csr_subject(self):
if self.csr.get_subject() != self.cert.get_subject():
return False
def _validate_csr_extensions(self):
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
for extension_number in range(0, cert_extension_count):
cert_extension = self.cert.get_extension(extension_number)
csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
if cert_extension.get_data() != list(csr_extension)[0].get_data():
return False
return True
def _validate_signature_algorithms(self):
if self.cert.get_signature_algorithm() not in self.signature_algorithms:
return self.cert.get_signature_algorithm()
def _validate_subject(self):
expected_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in self.subject]
cert_subject = self.cert.get_subject().get_components()
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in cert_subject]
if not compare_sets(expected_subject, current_subject, self.subject_strict):
return expected_subject, current_subject
def _validate_issuer(self):
expected_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in self.issuer]
cert_issuer = self.cert.get_issuer().get_components()
current_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in cert_issuer]
if not compare_sets(expected_issuer, current_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
        # The following 3 lines are the same as the current PyOpenSSL code for cert.has_expired().
        # Older versions of PyOpenSSL have a buggy implementation, so to avoid
        # issues with those we use the code from a more recent release here.
time_string = to_native(self.cert.get_notAfter())
not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
cert_expired = not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
# Version numbers in certs are off by one:
# v1: 0, v2: 1, v3: 2 ...
return self.cert.get_version() + 1
def _validate_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'keyUsage':
found = True
expected_extension = crypto.X509Extension(b"keyUsage", False, b', '.join(self.key_usage))
key_usage = [usage.strip() for usage in to_text(expected_extension, errors='surrogate_or_strict').split(',')]
current_ku = [usage.strip() for usage in to_text(extension, errors='surrogate_or_strict').split(',')]
if not compare_sets(key_usage, current_ku, self.key_usage_strict):
return self.key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'extendedKeyUsage':
found = True
extKeyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.extended_key_usage]
current_xku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if not compare_sets(extKeyUsage, current_xku, self.extended_key_usage_strict):
return self.extended_key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _normalize_san(self, san):
# Apparently OpenSSL returns 'IP address' not 'IP' as specifier when converting the subjectAltName to string
# although it won't accept this specifier when generating the CSR. (https://github.com/openssl/openssl/issues/4004)
if san.startswith('IP Address:'):
san = 'IP:' + san[len('IP Address:'):]
if san.startswith('IP:'):
ip = compat_ipaddress.ip_address(san[3:])
san = 'IP:{0}'.format(ip.compressed)
return san
def _validate_subject_alt_name(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'subjectAltName':
found = True
l_altnames = [self._normalize_san(altname.strip()) for altname in
to_text(extension, errors='surrogate_or_strict').split(', ')]
sans = [self._normalize_san(to_text(san, errors='surrogate_or_strict')) for san in self.subject_alt_name]
if not compare_sets(sans, l_altnames, self.subject_alt_name_strict):
return self.subject_alt_name, l_altnames
if not found:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.get_notBefore()
def _validate_not_after(self):
return self.cert.get_notAfter()
def _validate_valid_at(self):
rt = self.get_relative_time_option(self.valid_at, "valid_at")
rt = to_bytes(rt, errors='surrogate_or_strict')
return self.cert.get_notBefore(), rt, self.cert.get_notAfter()
def _validate_invalid_at(self):
rt = self.get_relative_time_option(self.invalid_at, "invalid_at")
rt = to_bytes(rt, errors='surrogate_or_strict')
return self.cert.get_notBefore(), rt, self.cert.get_notAfter()
def _validate_valid_in(self):
valid_in_asn1 = self.get_relative_time_option(self.valid_in, "valid_in")
valid_in_date = to_bytes(valid_in_asn1, errors='surrogate_or_strict')
return self.cert.get_notBefore(), valid_in_date, self.cert.get_notAfter()
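# Illustrative note on the validator convention used by the AssertOnly classes
# above: each _validate_* method returns None when the assertion holds and a
# value describing the mismatch otherwise (for example an (expected, current)
# pair, or NO_EXTENSION when a required extension is missing). A hedged sketch
# with a hypothetical checker instance:
#
#   failure = checker._validate_subject_alt_name()
#   if failure is not None:
#       expected, current = failure   # report what was requested vs. found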
class EntrustCertificate(Certificate):
"""Retrieve a certificate using Entrust (ECS)."""
def __init__(self, module, backend):
super(EntrustCertificate, self).__init__(module, backend)
self.trackingId = None
self.notAfter = self.get_relative_time_option(module.params['entrust_not_after'], 'entrust_not_after')
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
# ECS API defaults to using the validated organization tied to the account.
# We want to always force behavior of trying to use the organization provided in the CSR.
# To that end we need to parse out the organization from the CSR.
self.csr_org = None
if self.backend == 'pyopenssl':
csr_subject = self.csr.get_subject()
csr_subject_components = csr_subject.get_components()
for k, v in csr_subject_components:
if k.upper() == 'O':
# Entrust does not support multiple validated organizations in a single certificate
if self.csr_org is not None:
module.fail_json(msg=("Entrust provider does not currently support multiple validated organizations. Multiple organizations found in "
"Subject DN: '{0}'. ".format(csr_subject)))
else:
self.csr_org = v
elif self.backend == 'cryptography':
csr_subject_orgs = self.csr.subject.get_attributes_for_oid(NameOID.ORGANIZATION_NAME)
if len(csr_subject_orgs) == 1:
self.csr_org = csr_subject_orgs[0].value
elif len(csr_subject_orgs) > 1:
module.fail_json(msg=("Entrust provider does not currently support multiple validated organizations. Multiple organizations found in "
"Subject DN: '{0}'. ".format(self.csr.subject)))
# If no organization in the CSR, explicitly tell ECS that it should be blank in issued cert, not defaulted to
# organization tied to the account.
if self.csr_org is None:
self.csr_org = ''
try:
self.ecs_client = ECSClient(
entrust_api_user=module.params.get('entrust_api_user'),
entrust_api_key=module.params.get('entrust_api_key'),
entrust_api_cert=module.params.get('entrust_api_client_cert_path'),
entrust_api_cert_key=module.params.get('entrust_api_client_cert_key_path'),
entrust_api_specification_path=module.params.get('entrust_api_specification_path')
)
except SessionConfigurationException as e:
module.fail_json(msg='Failed to initialize Entrust Provider: {0}'.format(to_native(e.message)))
def generate(self, module):
if not self.check(module, perms_required=False) or self.force:
# Read the CSR that was generated for us
body = {}
with open(self.csr_path, 'r') as csr_file:
body['csr'] = csr_file.read()
body['certType'] = module.params['entrust_cert_type']
            # Handle expiration (defaults to 365 days from now if not specified)
expiry = self.notAfter
if not expiry:
gmt_now = datetime.datetime.fromtimestamp(time.mktime(time.gmtime()))
expiry = gmt_now + datetime.timedelta(days=365)
expiry_iso3339 = expiry.strftime("%Y-%m-%dT%H:%M:%S.00Z")
body['certExpiryDate'] = expiry_iso3339
body['org'] = self.csr_org
body['tracking'] = {
'requesterName': module.params['entrust_requester_name'],
'requesterEmail': module.params['entrust_requester_email'],
'requesterPhone': module.params['entrust_requester_phone'],
}
try:
result = self.ecs_client.NewCertRequest(Body=body)
self.trackingId = result.get('trackingId')
except RestOperationException as e:
module.fail_json(msg='Failed to request new certificate from Entrust Certificate Services (ECS): {0}'.format(to_native(e.message)))
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, to_bytes(result.get('endEntityCert')))
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
self.changed = True
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
parent_check = super(EntrustCertificate, self).check(module, perms_required)
try:
cert_details = self._get_cert_details()
except RestOperationException as e:
module.fail_json(msg='Failed to get status of existing certificate from Entrust Certificate Services (ECS): {0}.'.format(to_native(e.message)))
# Always issue a new certificate if the certificate is expired, suspended or revoked
status = cert_details.get('status', False)
if status == 'EXPIRED' or status == 'SUSPENDED' or status == 'REVOKED':
return False
# If the requested cert type was specified and it is for a different certificate type than the initial certificate, a new one is needed
if module.params['entrust_cert_type'] and cert_details.get('certType') and module.params['entrust_cert_type'] != cert_details.get('certType'):
return False
return parent_check
def _get_cert_details(self):
cert_details = {}
if self.cert:
serial_number = None
expiry = None
if self.backend == 'pyopenssl':
serial_number = "{0:X}".format(self.cert.get_serial_number())
time_string = to_native(self.cert.get_notAfter())
expiry = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
elif self.backend == 'cryptography':
serial_number = "{0:X}".format(self.cert.serial_number)
expiry = self.cert.not_valid_after
# get some information about the expiry of this certificate
expiry_iso3339 = expiry.strftime("%Y-%m-%dT%H:%M:%S.00Z")
cert_details['expiresAfter'] = expiry_iso3339
# If a trackingId is not already defined (from the result of a generate)
# use the serial number to identify the tracking Id
if self.trackingId is None and serial_number is not None:
cert_results = self.ecs_client.GetCertificates(serialNumber=serial_number).get('certificates', {})
                # Finding 0 or more than 1 result is a very unlikely case; it simply means we cannot perform additional checks
# on the 'state' as returned by Entrust Certificate Services (ECS). The general certificate validity is
# still checked as it is in the rest of the module.
if len(cert_results) == 1:
self.trackingId = cert_results[0].get('trackingId')
if self.trackingId is not None:
cert_details.update(self.ecs_client.GetCertificate(trackingId=self.trackingId))
return cert_details
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
if self.backup_file:
result['backup_file'] = self.backup_file
result.update(self._get_cert_details())
return result
class AcmeCertificate(Certificate):
"""Retrieve a certificate using the ACME protocol."""
# Since there's no real use of the backend,
# other than the 'self.check' function, we just pass the backend to the constructor
def __init__(self, module, backend):
super(AcmeCertificate, self).__init__(module, backend)
self.accountkey_path = module.params['acme_accountkey_path']
self.challenge_path = module.params['acme_challenge_path']
self.use_chain = module.params['acme_chain']
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not os.path.exists(self.accountkey_path):
raise CertificateError(
'The account key %s does not exist' % self.accountkey_path
)
if not os.path.exists(self.challenge_path):
raise CertificateError(
'The challenge path %s does not exist' % self.challenge_path
)
if not self.check(module, perms_required=False) or self.force:
acme_tiny_path = self.module.get_bin_path('acme-tiny', required=True)
command = [acme_tiny_path]
if self.use_chain:
command.append('--chain')
command.extend(['--account-key', self.accountkey_path])
command.extend(['--csr', self.csr_path])
command.extend(['--acme-dir', self.challenge_path])
try:
crt = module.run_command(command, check_rc=True)[1]
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, to_bytes(crt))
self.changed = True
except OSError as exc:
raise CertificateError(exc)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'accountkey': self.accountkey_path,
'csr': self.csr_path,
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
path=dict(type='path', required=True),
provider=dict(type='str', choices=['acme', 'assertonly', 'entrust', 'ownca', 'selfsigned']),
force=dict(type='bool', default=False,),
csr_path=dict(type='path'),
backup=dict(type='bool', default=False),
select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'cryptography', 'pyopenssl']),
# General properties of a certificate
privatekey_path=dict(type='path'),
privatekey_passphrase=dict(type='str', no_log=True),
# provider: assertonly
signature_algorithms=dict(type='list', elements='str', removed_in_version='2.13'),
subject=dict(type='dict', removed_in_version='2.13'),
subject_strict=dict(type='bool', default=False, removed_in_version='2.13'),
issuer=dict(type='dict', removed_in_version='2.13'),
issuer_strict=dict(type='bool', default=False, removed_in_version='2.13'),
has_expired=dict(type='bool', default=False, removed_in_version='2.13'),
version=dict(type='int', removed_in_version='2.13'),
key_usage=dict(type='list', elements='str', aliases=['keyUsage'], removed_in_version='2.13'),
key_usage_strict=dict(type='bool', default=False, aliases=['keyUsage_strict'], removed_in_version='2.13'),
extended_key_usage=dict(type='list', elements='str', aliases=['extendedKeyUsage'], removed_in_version='2.13'),
extended_key_usage_strict=dict(type='bool', default=False, aliases=['extendedKeyUsage_strict'], removed_in_version='2.13'),
subject_alt_name=dict(type='list', elements='str', aliases=['subjectAltName'], removed_in_version='2.13'),
subject_alt_name_strict=dict(type='bool', default=False, aliases=['subjectAltName_strict'], removed_in_version='2.13'),
not_before=dict(type='str', aliases=['notBefore'], removed_in_version='2.13'),
not_after=dict(type='str', aliases=['notAfter'], removed_in_version='2.13'),
valid_at=dict(type='str', removed_in_version='2.13'),
invalid_at=dict(type='str', removed_in_version='2.13'),
valid_in=dict(type='str', removed_in_version='2.13'),
# provider: selfsigned
selfsigned_version=dict(type='int', default=3),
selfsigned_digest=dict(type='str', default='sha256'),
selfsigned_not_before=dict(type='str', default='+0s', aliases=['selfsigned_notBefore']),
selfsigned_not_after=dict(type='str', default='+3650d', aliases=['selfsigned_notAfter']),
selfsigned_create_subject_key_identifier=dict(
type='str',
default='create_if_not_provided',
choices=['create_if_not_provided', 'always_create', 'never_create']
),
# provider: ownca
ownca_path=dict(type='path'),
ownca_privatekey_path=dict(type='path'),
ownca_privatekey_passphrase=dict(type='str', no_log=True),
ownca_digest=dict(type='str', default='sha256'),
ownca_version=dict(type='int', default=3),
ownca_not_before=dict(type='str', default='+0s'),
ownca_not_after=dict(type='str', default='+3650d'),
ownca_create_subject_key_identifier=dict(
type='str',
default='create_if_not_provided',
choices=['create_if_not_provided', 'always_create', 'never_create']
),
ownca_create_authority_key_identifier=dict(type='bool', default=True),
# provider: acme
acme_accountkey_path=dict(type='path'),
acme_challenge_path=dict(type='path'),
acme_chain=dict(type='bool', default=False),
# provider: entrust
entrust_cert_type=dict(type='str', default='STANDARD_SSL',
choices=['STANDARD_SSL', 'ADVANTAGE_SSL', 'UC_SSL', 'EV_SSL', 'WILDCARD_SSL',
'PRIVATE_SSL', 'PD_SSL', 'CDS_ENT_LITE', 'CDS_ENT_PRO', 'SMIME_ENT']),
entrust_requester_email=dict(type='str'),
entrust_requester_name=dict(type='str'),
entrust_requester_phone=dict(type='str'),
entrust_api_user=dict(type='str'),
entrust_api_key=dict(type='str', no_log=True),
entrust_api_client_cert_path=dict(type='path'),
entrust_api_client_cert_key_path=dict(type='path', no_log=True),
entrust_api_specification_path=dict(type='path', default='https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml'),
entrust_not_after=dict(type='str', default='+365d'),
),
supports_check_mode=True,
add_file_common_args=True,
required_if=[
['provider', 'entrust', ['entrust_requester_email', 'entrust_requester_name', 'entrust_requester_phone',
'entrust_api_user', 'entrust_api_key', 'entrust_api_client_cert_path',
'entrust_api_client_cert_key_path']]
]
)
try:
if module.params['state'] == 'absent':
certificate = CertificateAbsent(module)
else:
if module.params['provider'] != 'assertonly' and module.params['csr_path'] is None:
module.fail_json(msg='csr_path is required when provider is not assertonly')
base_dir = os.path.dirname(module.params['path']) or '.'
if not os.path.isdir(base_dir):
module.fail_json(
name=base_dir,
msg='The directory %s does not exist or the file is not a directory' % base_dir
)
provider = module.params['provider']
if provider == 'assertonly':
module.deprecate("The 'assertonly' provider is deprecated; please see the examples of "
"the 'openssl_certificate' module on how to replace it with other modules",
version='2.13')
backend = module.params['select_crypto_backend']
if backend == 'auto':
# Detect what backend we can use
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# If cryptography is available we'll use it
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.warn('crypto backend forced to pyopenssl. The cryptography library does not support v2 certificates')
backend = 'pyopenssl'
# Fail if no backend has been found
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= {0}'.format(MINIMAL_PYOPENSSL_VERSION)),
exception=PYOPENSSL_IMP_ERR)
if module.params['provider'] in ['selfsigned', 'ownca', 'assertonly']:
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15')
module.deprecate('The module is using the PyOpenSSL backend. This backend has been deprecated', version='2.13')
if provider == 'selfsigned':
certificate = SelfSignedCertificate(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'pyopenssl')
elif provider == 'ownca':
certificate = OwnCACertificate(module)
elif provider == 'entrust':
certificate = EntrustCertificate(module, 'pyopenssl')
else:
certificate = AssertOnlyCertificate(module)
elif backend == 'cryptography':
if not CRYPTOGRAPHY_FOUND:
module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
exception=CRYPTOGRAPHY_IMP_ERR)
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.fail_json(msg='The cryptography backend does not support v2 certificates, '
'use select_crypto_backend=pyopenssl for v2 certificates')
if provider == 'selfsigned':
certificate = SelfSignedCertificateCryptography(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'cryptography')
elif provider == 'ownca':
certificate = OwnCACertificateCryptography(module)
elif provider == 'entrust':
certificate = EntrustCertificate(module, 'cryptography')
else:
certificate = AssertOnlyCertificateCryptography(module)
if module.params['state'] == 'present':
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = module.params['force'] or not certificate.check(module)
module.exit_json(**result)
certificate.generate(module)
else:
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
certificate.remove(module)
result = certificate.dump()
module.exit_json(**result)
except crypto_utils.OpenSSLObjectError as exc:
module.fail_json(msg=to_native(exc))
if __name__ == "__main__":
main()
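# Hedged usage sketch (not part of the module): the argument spec above maps to
# playbook options roughly as in the hypothetical task below. The module name
# and file paths are assumptions for illustration only.
#
#   - name: Generate a self-signed certificate from an existing CSR
#     openssl_certificate:
#       path: /etc/ssl/crt/example.com.crt
#       csr_path: /etc/ssl/csr/example.com.csr
#       privatekey_path: /etc/ssl/private/example.com.pem
#       provider: selfsigned
#       selfsigned_not_after: +3650d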
| gpl-3.0 |
fritsvanveen/QGIS | python/plugins/processing/tools/spatialite.py | 3 | 4049 | # -*- coding: utf-8 -*-
"""
***************************************************************************
spatialite.py
---------------------
Date : November 2015
Copyright : (C) 2015 by René-Luc Dhont
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
from builtins import object
__author__ = 'René-Luc Dhont'
__date__ = 'November 2015'
__copyright__ = '(C) 2015, René-Luc Dhont'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import sqlite3 as sqlite  # provides the sqlite.* exception types referenced below
from qgis.utils import spatialite_connect
class DbError(Exception):
def __init__(self, message, query=None):
# Save error. funny that the variables are in utf-8
self.message = str(message, 'utf-8')
self.query = (str(query, 'utf-8') if query is not None else None)
def __str__(self):
return 'MESSAGE: %s\nQUERY: %s' % (self.message, self.query)
class GeoDB(object):
def __init__(self, uri=None):
self.uri = uri
self.dbname = uri.database()
try:
self.con = spatialite_connect(self.con_info())
except (sqlite.InterfaceError, sqlite.OperationalError) as e:
raise DbError(str(e))
self.has_spatialite = self.check_spatialite()
if not self.has_spatialite:
self.has_spatialite = self.init_spatialite()
def con_info(self):
return str(self.dbname)
def init_spatialite(self):
# Get spatialite version
c = self.con.cursor()
try:
self._exec_sql(c, u'SELECT spatialite_version()')
rep = c.fetchall()
v = [int(a) for a in rep[0][0].split('.')]
vv = v[0] * 100000 + v[1] * 1000 + v[2] * 10
# Add spatialite support
if vv >= 401000:
# 4.1 and above
sql = "SELECT initspatialmetadata(1)"
else:
# Under 4.1
sql = "SELECT initspatialmetadata()"
self._exec_sql_and_commit(sql)
except:
return False
finally:
self.con.close()
try:
self.con = spatialite_connect(self.con_info())
except (sqlite.InterfaceError, sqlite.OperationalError) as e:
raise DbError(str(e))
return self.check_spatialite()
def check_spatialite(self):
try:
c = self.con.cursor()
self._exec_sql(c, u"SELECT CheckSpatialMetaData()")
v = c.fetchone()[0]
self.has_geometry_columns = v == 1 or v == 3
self.has_spatialite4 = v == 3
except Exception:
self.has_geometry_columns = False
self.has_spatialite4 = False
self.has_geometry_columns_access = self.has_geometry_columns
return self.has_geometry_columns
def _exec_sql(self, cursor, sql):
try:
cursor.execute(sql)
except (sqlite.Error, sqlite.ProgrammingError, sqlite.Warning, sqlite.InterfaceError, sqlite.OperationalError) as e:
raise DbError(str(e), sql)
def _exec_sql_and_commit(self, sql):
"""Tries to execute and commit some action, on error it rolls
back the change.
"""
try:
c = self.con.cursor()
self._exec_sql(c, sql)
self.con.commit()
except DbError:
self.con.rollback()
raise
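# Hedged usage sketch (assumes a QGIS environment where qgis.core and
# qgis.utils are importable; the database path is hypothetical):
#
#   from qgis.core import QgsDataSourceUri
#
#   uri = QgsDataSourceUri()
#   uri.setDatabase('/tmp/example.sqlite')
#   db = GeoDB(uri=uri)
#   print(db.dbname, db.has_spatialite)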
| gpl-2.0 |
opencloudinfra/orchestrator | venv/Lib/site-packages/django/templatetags/cache.py | 471 | 3389 | from __future__ import unicode_literals
from django.core.cache import InvalidCacheBackendError, caches
from django.core.cache.utils import make_template_fragment_key
from django.template import (
Library, Node, TemplateSyntaxError, VariableDoesNotExist,
)
register = Library()
class CacheNode(Node):
def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name):
self.nodelist = nodelist
self.expire_time_var = expire_time_var
self.fragment_name = fragment_name
self.vary_on = vary_on
self.cache_name = cache_name
def render(self, context):
try:
expire_time = self.expire_time_var.resolve(context)
except VariableDoesNotExist:
raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
try:
expire_time = int(expire_time)
except (ValueError, TypeError):
raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % expire_time)
if self.cache_name:
try:
cache_name = self.cache_name.resolve(context)
except VariableDoesNotExist:
raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.cache_name.var)
try:
fragment_cache = caches[cache_name]
except InvalidCacheBackendError:
raise TemplateSyntaxError('Invalid cache name specified for cache tag: %r' % cache_name)
else:
try:
fragment_cache = caches['template_fragments']
except InvalidCacheBackendError:
fragment_cache = caches['default']
vary_on = [var.resolve(context) for var in self.vary_on]
cache_key = make_template_fragment_key(self.fragment_name, vary_on)
value = fragment_cache.get(cache_key)
if value is None:
value = self.nodelist.render(context)
fragment_cache.set(cache_key, value, expire_time)
return value
@register.tag('cache')
def do_cache(parser, token):
"""
This will cache the contents of a template fragment for a given amount
of time.
Usage::
{% load cache %}
{% cache [expire_time] [fragment_name] %}
.. some expensive processing ..
{% endcache %}
This tag also supports varying by a list of arguments::
{% load cache %}
{% cache [expire_time] [fragment_name] [var1] [var2] .. %}
.. some expensive processing ..
{% endcache %}
Optionally the cache to use may be specified thus::
{% cache .... using="cachename" %}
Each unique set of arguments will result in a unique cache entry.
"""
nodelist = parser.parse(('endcache',))
parser.delete_first_token()
tokens = token.split_contents()
if len(tokens) < 3:
raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % tokens[0])
if len(tokens) > 3 and tokens[-1].startswith('using='):
cache_name = parser.compile_filter(tokens[-1][len('using='):])
tokens = tokens[:-1]
else:
cache_name = None
return CacheNode(nodelist,
parser.compile_filter(tokens[1]),
tokens[2], # fragment_name can't be a variable.
[parser.compile_filter(t) for t in tokens[3:]],
cache_name,
)
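# Hedged, illustrative note (assumes a configured Django cache backend): a
# fragment cached by the {% cache %} tag can be invalidated programmatically
# with the same key-building helper imported above, e.g.:
#
#   from django.core.cache import cache
#
#   key = make_template_fragment_key('sidebar', [username])
#   cache.delete(key)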
| gpl-3.0 |
sernst/cauldron | cauldron/session/display/__init__.py | 1 | 23013 | import json as _json_io
import textwrap
import typing
from datetime import timedelta
import cauldron as _cd
from cauldron import environ
from cauldron import render
from cauldron.render import plots as render_plots
from cauldron.render import texts as render_texts
from cauldron.session import report
def _get_report() -> 'report.Report':
"""Fetches the report associated with the currently running step."""
return _cd.project.get_internal_project().current_step.report
def inspect(source: dict):
"""
Inspects the data and structure of the source dictionary object and
adds the results to the display for viewing.
:param source:
A dictionary object to be inspected.
:return:
"""
r = _get_report()
r.append_body(render.inspect(source))
def header(header_text: str, level: int = 1, expand_full: bool = False):
"""
Adds a text header to the display with the specified level.
:param header_text:
The text to display in the header.
:param level:
The level of the header, which corresponds to the html header
levels, such as <h1>, <h2>, ...
:param expand_full:
Whether or not the header will expand to fill the width of the entire
notebook page, or be constrained by automatic maximum page width. The
default value of False lines the header up with text displays.
"""
r = _get_report()
r.append_body(render.header(
header_text,
level=level,
expand_full=expand_full
))
def text(value: str, preformatted: bool = False):
"""
Adds text to the display. If the text is not preformatted, it will be
displayed in paragraph format. Preformatted text will be displayed
inside a pre tag with a monospace font.
:param value:
The text to display.
:param preformatted:
Whether or not to preserve the whitespace display of the text.
"""
if preformatted:
result = render_texts.preformatted_text(value)
else:
result = render_texts.text(value)
r = _get_report()
r.append_body(result)
r.stdout_interceptor.write_source(
'{}\n'.format(textwrap.dedent(value))
)
def markdown(
source: str = None,
source_path: str = None,
preserve_lines: bool = False,
font_size: float = None,
**kwargs
):
"""
Renders the specified source string or source file using markdown and
adds the resulting HTML to the notebook display.
:param source:
A markdown formatted string.
:param source_path:
A file containing markdown text.
:param preserve_lines:
If True, all line breaks will be treated as hard breaks. Use this
for pre-formatted markdown text where newlines should be retained
during rendering.
:param font_size:
Specifies a relative font size adjustment. The default value is 1.0,
which preserves the inherited font size values. Set it to a value
below 1.0 for smaller font-size rendering and greater than 1.0 for
larger font size rendering.
:param kwargs:
Any variable replacements to make within the string using Jinja2
templating syntax.
"""
r = _get_report()
result = render_texts.markdown(
source=source,
source_path=source_path,
preserve_lines=preserve_lines,
font_size=font_size,
**kwargs
)
r.library_includes += result['library_includes']
r.append_body(result['body'])
r.stdout_interceptor.write_source(
'{}\n'.format(textwrap.dedent(result['rendered']))
)
def json(**kwargs):
"""
    Adds the specified data to the output display window with the
specified key. This allows the user to make available arbitrary
JSON-compatible data to the display for runtime use.
:param kwargs:
Each keyword argument is added to the CD.data object with the
specified key and value.
"""
r = _get_report()
r.append_body(render.json(**kwargs))
r.stdout_interceptor.write_source(
'{}\n'.format(_json_io.dumps(kwargs, indent=2))
)
def plotly(
data: typing.Union[dict, list, typing.Any] = None,
layout: typing.Union[dict, typing.Any] = None,
scale: float = 0.5,
figure: typing.Union[dict, typing.Any] = None,
static: bool = False
):
"""
Creates a Plotly plot in the display with the specified data and
layout.
:param data:
The Plotly trace data to be plotted.
:param layout:
The layout data used for the plot.
:param scale:
The display scale with units of fractional screen height. A value
of 0.5 constrains the output to a maximum height equal to half the
height of browser window when viewed. Values below 1.0 are usually
recommended so the entire output can be viewed without scrolling.
:param figure:
In cases where you need to create a figure instead of separate data
and layout information, you can pass the figure here and leave the
data and layout values as None.
:param static:
If true, the plot will be created without interactivity.
This is useful if you have a lot of plots in your notebook.
"""
r = _get_report()
if not figure and not isinstance(data, (list, tuple)):
data = [data]
if 'plotly' not in r.library_includes:
r.library_includes.append('plotly')
r.append_body(render.plotly(
data=data,
layout=layout,
scale=scale,
figure=figure,
static=static
))
r.stdout_interceptor.write_source('[ADDED] Plotly plot\n')
def table(
data_frame,
scale: float = 0.7,
include_index: bool = False,
max_rows: int = 500,
sample_rows: typing.Optional[int] = None,
formats: typing.Union[
str,
typing.Callable[[typing.Any], str],
typing.Dict[
str,
typing.Union[str, typing.Callable[[typing.Any], str]]
]
] = None
):
"""
Adds the specified data frame to the display in a nicely formatted
scrolling table.
:param data_frame:
The pandas data frame to be rendered to a table.
:param scale:
The display scale with units of fractional screen height. A value
of 0.5 constrains the output to a maximum height equal to half the
height of browser window when viewed. Values below 1.0 are usually
recommended so the entire output can be viewed without scrolling.
:param include_index:
Whether or not the index column should be included in the displayed
output. The index column is not included by default because it is
often unnecessary extra information in the display of the data.
:param max_rows:
This argument exists to prevent accidentally writing very large data
frames to a table, which can cause the notebook display to become
sluggish or unresponsive. If you want to display large tables, you need
only increase the value of this argument.
:param sample_rows:
When set to a positive integer value, the DataFrame will be randomly
sampled to the specified number of rows when displayed in the table.
If the value here is larger than the number of rows in the DataFrame,
the sampling will have no effect and the entire DataFrame will be
displayed instead.
:param formats:
An optional dictionary that, when specified, should contain a mapping
between column names and formatting strings to apply to that column
for display purposes. For example, ``{'foo': '{:,.2f}%'}`` would
transform a column ``foo = [12.2121, 34.987123, 42.72839]`` to
display as ``foo = [12.21%, 34.99%, 42.73%]``. The formatters should
follow the standard Python string formatting guidelines the same as
the ``str.format()`` command having the value of the column as the only
positional argument in the format arguments. A string value can also
be specified for uniform formatting of all columns (or if displaying
a series with only a single value).
"""
r = _get_report()
r.append_body(render.table(
data_frame=data_frame,
scale=scale,
include_index=include_index,
max_rows=max_rows,
sample_rows=sample_rows,
formats=formats
))
r.stdout_interceptor.write_source('[ADDED] Table\n')
def svg(svg_dom: str, filename: str = None):
"""
Adds the specified SVG string to the display. If a filename is
included, the SVG data will also be saved to that filename within the
project results folder.
:param svg_dom:
The SVG string data to add to the display.
:param filename:
An optional filename where the SVG data should be saved within
the project results folder.
"""
r = _get_report()
r.append_body(render.svg(svg_dom))
r.stdout_interceptor.write_source('[ADDED] SVG\n')
if not filename:
return
if not filename.endswith('.svg'):
filename += '.svg'
r.files[filename] = svg_dom
def jinja(path: str, **kwargs):
"""
Renders the specified Jinja2 template to HTML and adds the output to the
display.
:param path:
The fully-qualified path to the template to be rendered.
:param kwargs:
Any keyword arguments that will be use as variable replacements within
the template.
"""
r = _get_report()
r.append_body(render.jinja(path, **kwargs))
r.stdout_interceptor.write_source('[ADDED] Jinja2 rendered HTML\n')
def whitespace(lines: float = 1.0):
"""
Adds the specified number of lines of whitespace.
:param lines:
The number of lines of whitespace to show.
"""
r = _get_report()
r.append_body(render.whitespace(lines))
r.stdout_interceptor.write_source('\n')
def image(
filename: str,
width: int = None,
height: int = None,
justify: str = 'left'
):
"""
Adds an image to the display. The image must be located within the
assets directory of the Cauldron notebook's folder.
:param filename:
Name of the file within the assets directory,
:param width:
Optional width in pixels for the image.
:param height:
Optional height in pixels for the image.
:param justify:
One of 'left', 'center' or 'right', which specifies how the image
is horizontally justified within the notebook display.
"""
r = _get_report()
path = '/'.join(['reports', r.project.uuid, 'latest', 'assets', filename])
r.append_body(render.image(path, width, height, justify))
r.stdout_interceptor.write_source('[ADDED] Image\n')
def html(dom: str):
"""
A string containing a valid HTML snippet.
:param dom:
The HTML string to add to the display.
"""
r = _get_report()
r.append_body(render.html(dom))
r.stdout_interceptor.write_source('[ADDED] HTML\n')
def workspace(show_values: bool = True, show_types: bool = True):
"""
Adds a list of the shared variables currently stored in the project
workspace.
:param show_values:
When true the values for each variable will be shown in addition to
their name.
:param show_types:
When true the data types for each shared variable will be shown in
addition to their name.
"""
r = _get_report()
data = {}
for key, value in r.project.shared.fetch(None).items():
if key.startswith('__cauldron_'):
continue
data[key] = value
r.append_body(render.status(data, values=show_values, types=show_types))
def pyplot(
figure=None,
scale: float = 0.8,
clear: bool = True,
aspect_ratio: typing.Union[list, tuple] = None
):
"""
Creates a matplotlib plot in the display for the specified figure. The size
of the plot is determined automatically to best fit the notebook.
:param figure:
The matplotlib figure to plot. If omitted, the currently active
figure will be used.
:param scale:
The display scale with units of fractional screen height. A value
of 0.5 constrains the output to a maximum height equal to half the
height of browser window when viewed. Values below 1.0 are usually
recommended so the entire output can be viewed without scrolling.
:param clear:
Clears the figure after it has been rendered. This is useful to
prevent persisting old plot data between repeated runs of the
project files. This can be disabled if the plot is going to be
used later in the project files.
:param aspect_ratio:
The aspect ratio for the displayed plot as a two-element list or
tuple. The first element is the width and the second element the
height. The units are "inches," which is an important consideration
for the display of text within the figure. If no aspect ratio is
specified, the currently assigned values to the plot will be used
instead.
"""
r = _get_report()
r.append_body(render_plots.pyplot(
figure,
scale=scale,
clear=clear,
aspect_ratio=aspect_ratio
))
r.stdout_interceptor.write_source('[ADDED] PyPlot plot\n')
def bokeh(model, scale: float = 0.7, responsive: bool = True):
"""
Adds a Bokeh plot object to the notebook display.
:param model:
The plot object to be added to the notebook display.
:param scale:
How tall the plot should be in the notebook as a fraction of screen
height. A number between 0.1 and 1.0. The default value is 0.7.
:param responsive:
Whether or not the plot should responsively scale to fill the width
of the notebook. The default is True.
"""
r = _get_report()
if 'bokeh' not in r.library_includes:
r.library_includes.append('bokeh')
r.append_body(render_plots.bokeh_plot(
model=model,
scale=scale,
responsive=responsive
))
r.stdout_interceptor.write_source('[ADDED] Bokeh plot\n')
def listing(
source: list,
ordered: bool = False,
expand_full: bool = False
):
"""
An unordered or ordered list of the specified *source* iterable where
each element is converted to a string representation for display.
:param source:
The iterable to display as a list.
:param ordered:
Whether or not the list should be ordered. If False, which is the
default, an unordered bulleted list is created.
:param expand_full:
Whether or not the list should expand to fill the screen horizontally.
When defaulted to False, the list is constrained to the center view
area of the screen along with other text. This can be useful to keep
lists aligned with the text flow.
"""
r = _get_report()
r.append_body(render.listing(
source=source,
ordered=ordered,
expand_full=expand_full
))
r.stdout_interceptor.write_source('[ADDED] Listing\n')
def list_grid(
source: list,
expand_full: bool = False,
column_count: int = 2,
row_spacing: float = 1.0
):
"""
    A multi-column list of the specified *source* iterable where
each element is converted to a string representation for display.
:param source:
The iterable to display as a list.
:param expand_full:
Whether or not the list should expand to fill the screen horizontally.
When defaulted to False, the list is constrained to the center view
area of the screen along with other text. This can be useful to keep
lists aligned with the text flow.
:param column_count:
The number of columns to display. The specified count is applicable to
        high-definition screens. For lower-definition screens the actual count
displayed may be fewer as the layout responds to less available
horizontal screen space.
:param row_spacing:
The number of lines of whitespace to include between each row in the
grid. Set this to 0 for tightly displayed lists.
"""
r = _get_report()
r.append_body(render.list_grid(
source=source,
expand_full=expand_full,
column_count=column_count,
row_spacing=row_spacing
))
r.stdout_interceptor.write_source('[ADDED] List grid\n')
def latex(source: str):
"""
Add a mathematical equation in latex math-mode syntax to the display.
Instead of the traditional backslash escape character, the @ character is
used instead to prevent backslash conflicts with Python strings. For
example, \\delta would be @delta.
:param source:
The string representing the latex equation to be rendered.
"""
r = _get_report()
if 'katex' not in r.library_includes:
r.library_includes.append('katex')
r.append_body(render_texts.latex(source.replace('@', '\\')))
r.stdout_interceptor.write_source('[ADDED] Latex equation\n')
def head(source, count: int = 5):
"""
Displays a specified number of elements in a source object of many
different possible types.
:param source:
DataFrames will show *count* rows of that DataFrame. A list, tuple or
other iterable, will show the first *count* rows. Dictionaries will
show *count* keys from the dictionary, which will be randomly selected
unless you are using an OrderedDict. Strings will show the first
*count* characters.
:param count:
The number of elements to show from the source.
"""
r = _get_report()
r.append_body(render_texts.head(source, count=count))
r.stdout_interceptor.write_source('[ADDED] Head\n')
def tail(source, count: int = 5):
"""
The opposite of the head function. Displays the last *count* elements of
the *source* object.
:param source:
DataFrames will show the last *count* rows of that DataFrame. A list,
tuple or other iterable, will show the last *count* rows. Dictionaries
will show *count* keys from the dictionary, which will be randomly
selected unless you are using an OrderedDict. Strings will show the
last *count* characters.
:param count:
The number of elements to show from the source.
"""
r = _get_report()
r.append_body(render_texts.tail(source, count=count))
r.stdout_interceptor.write_source('[ADDED] Tail\n')
def status(
message: str = None,
progress: float = None,
section_message: str = None,
section_progress: float = None,
):
"""
Updates the status display, which is only visible while a step is running.
This is useful for providing feedback and information during long-running
steps.
A section progress is also available for cases where long running tasks
consist of multiple tasks and you want to display sub-progress messages
within the context of the larger status.
Note: this is only supported when running in the Cauldron desktop
application.
:param message:
The status message you want to display. If left blank the previously
set status message will be retained. Should you desire to remove an
existing message, specify a blank string for this argument.
:param progress:
A number between zero and one that indicates the overall progress for
the current status. If no value is specified, the previously assigned
progress will be retained.
:param section_message:
The status message you want to display for a particular task within a
long-running step. If left blank the previously set section message
will be retained. Should you desire to remove an existing message,
specify a blank string for this argument.
:param section_progress:
A number between zero and one that indicates the progress for the
current section status. If no value is specified, the previously
assigned section progress value will be retained.
"""
environ.abort_thread()
r = _get_report()
step = _cd.project.get_internal_project().current_step
changes = 0
has_changed = step.progress_message != message
if message is not None and has_changed:
changes += 1
step.progress_message = message
    has_changed = step.progress != max(0, min(1, progress or 0))
if progress is not None and has_changed:
changes += 1
step.progress = max(0.0, min(1.0, progress))
has_changed = step.sub_progress_message != section_message
if section_message is not None and has_changed:
changes += 1
step.sub_progress_message = section_message
has_changed = step.sub_progress != max(0, min(1, section_progress or 0))
if section_progress is not None and has_changed:
changes += 1
step.sub_progress = section_progress
if changes > 0:
# update the timestamp to inform rendering that a status
# has changed and should be re-rendered into the step.
r.update_last_modified()
def code_block(
code: str = None,
path: str = None,
language_id: str = None,
title: str = None,
caption: str = None
):
"""
Adds a block of syntax highlighted code to the display from either
the supplied code argument, or from the code file specified
by the path argument.
:param code:
A string containing the code to be added to the display
:param path:
A path to a file containing code to be added to the display
:param language_id:
The language identifier that indicates what language should
be used by the syntax highlighter. Valid values are any of the
languages supported by the Pygments highlighter.
:param title:
If specified, the code block will include a title bar with the
value of this argument
:param caption:
If specified, the code block will include a caption box below the code
that contains the value of this argument
"""
environ.abort_thread()
r = _get_report()
r.append_body(render.code_block(
block=code,
path=path,
language=language_id,
title=title,
caption=caption
))
r.stdout_interceptor.write_source('{}\n'.format(code))
def elapsed():
"""
Displays the elapsed time since the step started running.
"""
environ.abort_thread()
step = _cd.project.get_internal_project().current_step
r = _get_report()
r.append_body(render.elapsed_time(step.elapsed_time))
result = '[ELAPSED]: {}\n'.format(timedelta(seconds=step.elapsed_time))
r.stdout_interceptor.write_source(result)
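# Hedged usage sketch (assumes execution inside a Cauldron notebook step, where
# this module is reachable as cd.display; the DataFrame contents are examples):
#
#   import cauldron as cd
#   import pandas as pd
#
#   cd.display.header('Results', level=2)
#   cd.display.markdown('The table below shows **sample** values.')
#   df = pd.DataFrame({'x': [1, 2, 3], 'y': [0.12, 0.34, 0.56]})
#   cd.display.table(df, scale=0.5, formats={'y': '{:.2f}'})
#   cd.display.status('Finished rendering', progress=1.0)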
| mit |
julienmalard/Tikon | pruebas/test_calibrador/test_spotpy.py | 1 | 1920 | import unittest
from warnings import warn as avisar
import scipy.stats as estad
from pruebas.test_central.rcrs.modelo_calib import generar
from tikon.calibrador.spotpy_ import EMV, RS, BDD, CMEDZ, MC, MLH, CAACAA, CAA, ECBUA, ERP, CMMC, CalibSpotPy
from tikon.ecs.aprioris import APrioriDist
class PruebaSpotPy(unittest.TestCase):
def test_algs(símismo):
for alg in [EMV, RS, BDD, CMEDZ, MC, MLH, CAACAA, CAA, ECBUA, ERP, CMMC]:
with símismo.subTest(alg.__name__):
gen = generar()
modelo = gen['modelo']
exper = gen['exper']
modelo.calibrar('calib', exper, calibrador=alg(), n_iter=30)
valid = modelo.simular('valid', exper, calibs=['calib']).validar()
if valid['ens'] < 0.90:
                    avisar('Algorithm {alg} did not perform very well.'.format(alg=alg.__name__))
def test_dists(símismo):
dists_aprioris = {
'Normal': estad.norm(1, 2),
'Uniforme': estad.uniform(0, 3),
'LogNormal': estad.lognorm(1, 0, 2),
'Chi2': estad.chi2(1, 0, 2),
'Exponencial': estad.expon(0, 2),
'Gamma': estad.gamma(1, 0, 1),
'Triang': estad.triang(0.5, 0, 2)
}
for nmbre in CalibSpotPy.dists_disp:
dist = dists_aprioris[nmbre]
with símismo.subTest(nmbre):
gen = generar()
modelo = gen['modelo']
exper = gen['exper']
coso = gen['coso']
apr = APrioriDist(dist)
coso.espec_apriori(apr, categ='categ', sub_categ='subcateg', ec='ec', prm='a')
modelo.calibrar('calib', exper, n_iter=30)
coso.borrar_aprioris()
valid = modelo.simular('valid', exper, calibs=['calib']).validar()
símismo.assertGreater(valid['ens'], 0.95)
| agpl-3.0 |
yangchandle/django_ecommerce | env/lib/python3.5/site-packages/pip/utils/build.py | 899 | 1312 | from __future__ import absolute_import
import os.path
import tempfile
from pip.utils import rmtree
class BuildDirectory(object):
def __init__(self, name=None, delete=None):
# If we were not given an explicit directory, and we were not given an
# explicit delete option, then we'll default to deleting.
if name is None and delete is None:
delete = True
if name is None:
# We realpath here because some systems have their default tmpdir
# symlinked to another directory. This tends to confuse build
# scripts, so we canonicalize the path by traversing potential
# symlinks here.
name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))
# If we were not given an explicit directory, and we were not given
# an explicit delete option, then we'll default to deleting.
if delete is None:
delete = True
self.name = name
self.delete = delete
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self.delete:
rmtree(self.name)
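# Hedged usage sketch (pip-internal helper; paths and the build step are
# hypothetical):
#
#   with BuildDirectory() as build_dir:
#       # build_dir is a freshly created temporary directory (for example
#       # /tmp/pip-build-XXXXXX) and is removed on exit because delete
#       # defaulted to True.
#       run_build(build_dir)
#
#   keep = BuildDirectory(name='/tmp/my-build', delete=False)  # left on disk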
| mit |
spreeker/democracygame | external_apps/docutils-snapshot/docutils/parsers/rst/directives/html.py | 61 | 3223 | # $Id: html.py 4667 2006-07-12 21:40:56Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Directives for typically HTML-specific constructs.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils import nodes, utils
from docutils.parsers.rst import Directive
from docutils.parsers.rst import states
from docutils.transforms import components
class MetaBody(states.SpecializedBody):
class meta(nodes.Special, nodes.PreBibliographic, nodes.Element):
"""HTML-specific "meta" element."""
pass
def field_marker(self, match, context, next_state):
"""Meta element."""
node, blank_finish = self.parsemeta(match)
self.parent += node
return [], next_state, []
def parsemeta(self, match):
name = self.parse_field_marker(match)
indented, indent, line_offset, blank_finish = \
self.state_machine.get_first_known_indented(match.end())
node = self.meta()
pending = nodes.pending(components.Filter,
{'component': 'writer',
'format': 'html',
'nodes': [node]})
node['content'] = ' '.join(indented)
if not indented:
line = self.state_machine.line
msg = self.reporter.info(
'No content for meta tag "%s".' % name,
nodes.literal_block(line, line),
line=self.state_machine.abs_line_number())
return msg, blank_finish
tokens = name.split()
try:
attname, val = utils.extract_name_value(tokens[0])[0]
node[attname.lower()] = val
except utils.NameValueError:
node['name'] = tokens[0]
for token in tokens[1:]:
try:
attname, val = utils.extract_name_value(token)[0]
node[attname.lower()] = val
except utils.NameValueError, detail:
line = self.state_machine.line
msg = self.reporter.error(
'Error parsing meta tag attribute "%s": %s.'
% (token, detail), nodes.literal_block(line, line),
line=self.state_machine.abs_line_number())
return msg, blank_finish
self.document.note_pending(pending)
return pending, blank_finish
class Meta(Directive):
has_content = True
SMkwargs = {'state_classes': (MetaBody,)}
def run(self):
self.assert_has_content()
node = nodes.Element()
new_line_offset, blank_finish = self.state.nested_list_parse(
self.content, self.content_offset, node,
initial_state='MetaBody', blank_finish=1,
state_machine_kwargs=self.SMkwargs)
if (new_line_offset - self.content_offset) != len(self.content):
# incomplete parse of block?
error = self.state_machine.reporter.error(
'Invalid meta directive.',
nodes.literal_block(self.block_text, self.block_text),
line=self.lineno)
node += error
return node.children
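# Illustrative reStructuredText input handled by the Meta directive above
# (standard docutils "meta" directive syntax; the values are examples only):
#
#   .. meta::
#      :description: The reStructuredText plaintext markup language
#      :keywords lang=en: plaintext, markup
#
# Each field is turned into a <meta name="..." content="..."> element by the
# HTML writer.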
| bsd-3-clause |
aaxelb/SHARE | share/harvesters/org_dataone.py | 3 | 1426 | from furl import furl
from lxml import etree
from share.harvest import BaseHarvester
class DataOneHarvester(BaseHarvester):
VERSION = 1
def do_harvest(self, start_date, end_date):
end_date = end_date.format('YYYY-MM-DDT00:00:00', formatter='alternative') + 'Z'
start_date = start_date.format('YYYY-MM-DDT00:00:00', formatter='alternative') + 'Z'
url = furl(self.config.base_url).set(query_params={
'q': 'dateModified:[{} TO {}]'.format(start_date, end_date),
'start': 0,
'rows': 1
}).url
return self.fetch_records(url, start_date, end_date)
def fetch_records(self, url, start_date, end_date):
resp = self.requests.get(url)
doc = etree.XML(resp.content)
total_records = int(doc.xpath("//result/@numFound")[0])
records_processed = 0
while records_processed < total_records:
response = self.requests.get(furl(url).set(query_params={
'q': 'dateModified:[{} TO {}]'.format(start_date, end_date),
'start': records_processed,
'rows': 1000
}).url)
docs = etree.XML(response.content).xpath('//doc')
for doc in docs:
doc_id = doc.xpath("str[@name='id']")[0].text
doc = etree.tostring(doc)
yield (doc_id, doc)
records_processed += len(docs)
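# Illustrative request shape (assumes base_url points at a DataONE Solr query
# endpoint). fetch_records pages through results 1000 rows at a time with URLs
# equivalent to:
#
#   <base_url>?q=dateModified:[2020-01-01T00:00:00Z TO 2020-01-02T00:00:00Z]&start=0&rows=1000
#
# incrementing "start" by the number of docs returned until numFound is reached.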
| apache-2.0 |
shehzaadn/phoenix | bin/sqlline-thin.py | 4 | 6499 | #!/usr/bin/env python
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
import os
import subprocess
import sys
import phoenix_utils
import atexit
import urlparse
import argparse
global childProc
childProc = None
def kill_child():
if childProc is not None:
childProc.terminate()
childProc.kill()
if os.name != 'nt':
os.system("reset")
atexit.register(kill_child)
parser = argparse.ArgumentParser(description='Launches the Apache Phoenix Thin Client.')
# Positional argument "url" is optional
parser.add_argument('url', nargs='?', help='The URL to the Phoenix Query Server.', default='http://localhost:8765')
# Positional argument "sqlfile" is optional
parser.add_argument('sqlfile', nargs='?', help='A file of SQL commands to execute.', default='')
# Avatica wire authentication
parser.add_argument('-a', '--authentication', help='Mechanism for HTTP authentication.', choices=('SPNEGO', 'BASIC', 'DIGEST', 'NONE'), default='')
# Avatica wire serialization
parser.add_argument('-s', '--serialization', help='Serialization type for HTTP API.', choices=('PROTOBUF', 'JSON'), default=None)
# Avatica authentication
parser.add_argument('-au', '--auth-user', help='Username for HTTP authentication.')
parser.add_argument('-ap', '--auth-password', help='Password for HTTP authentication.')
# Common arguments across sqlline.py and sqlline-thin.py
phoenix_utils.common_sqlline_args(parser)
# Parse the args
args=parser.parse_args()
phoenix_utils.setPath()
url = args.url
sqlfile = args.sqlfile
serialization_key = 'phoenix.queryserver.serialization'
def cleanup_url(url):
parsed = urlparse.urlparse(url)
if parsed.scheme == "":
url = "http://" + url
parsed = urlparse.urlparse(url)
if ":" not in parsed.netloc:
url = url + ":8765"
return url
def get_serialization():
default_serialization='PROTOBUF'
env=os.environ.copy()
if os.name == 'posix':
hbase_exec_name = 'hbase'
elif os.name == 'nt':
hbase_exec_name = 'hbase.cmd'
else:
print 'Unknown platform "%s", defaulting to HBase executable of "hbase"' % os.name
hbase_exec_name = 'hbase'
hbase_cmd = phoenix_utils.which(hbase_exec_name)
if hbase_cmd is None:
print 'Failed to find hbase executable on PATH, defaulting serialization to %s.' % default_serialization
return default_serialization
env['HBASE_CONF_DIR'] = phoenix_utils.hbase_conf_dir
proc = subprocess.Popen([hbase_cmd, 'org.apache.hadoop.hbase.util.HBaseConfTool', serialization_key],
env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
if proc.returncode != 0:
print 'Failed to extract serialization from hbase-site.xml, defaulting to %s.' % default_serialization
return default_serialization
# Don't expect this to happen, but give a default value just in case
if stdout is None:
return default_serialization
stdout = stdout.strip()
if stdout == 'null':
return default_serialization
return stdout
url = cleanup_url(url)
if sqlfile != "":
sqlfile = "--run=" + sqlfile
colorSetting = args.color
# disable color setting for windows OS
if os.name == 'nt':
colorSetting = "false"
# HBase configuration folder path (where hbase-site.xml reside) for
# HBase/Phoenix client side property override
hbase_config_path = os.getenv('HBASE_CONF_DIR', phoenix_utils.current_dir)
serialization = args.serialization if args.serialization else get_serialization()
java_home = os.getenv('JAVA_HOME')
# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
hbase_env_path = None
hbase_env_cmd = None
if os.name == 'posix':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
elif os.name == 'nt':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
if not hbase_env_path or not hbase_env_cmd:
print >> sys.stderr, "hbase-env file unknown on platform %s" % os.name
sys.exit(-1)
hbase_env = {}
if os.path.isfile(hbase_env_path):
p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
for x in p.stdout:
(k, _, v) = x.partition('=')
hbase_env[k.strip()] = v.strip()
if hbase_env.has_key('JAVA_HOME'):
java_home = hbase_env['JAVA_HOME']
if java_home:
java = os.path.join(java_home, 'bin', 'java')
else:
java = 'java'
jdbc_url = 'jdbc:phoenix:thin:url=' + url + ';serialization=' + serialization
if args.authentication:
jdbc_url += ';authentication=' + args.authentication
if args.auth_user:
jdbc_url += ';avatica_user=' + args.auth_user
if args.auth_password:
jdbc_url += ';avatica_password=' + args.auth_password
java_cmd = java + ' $PHOENIX_OPTS ' + \
' -cp "' + phoenix_utils.hbase_conf_dir + os.pathsep + phoenix_utils.phoenix_thin_client_jar + \
os.pathsep + phoenix_utils.hadoop_conf + os.pathsep + phoenix_utils.hadoop_classpath + '" -Dlog4j.configuration=file:' + \
os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
" org.apache.phoenix.queryserver.client.SqllineWrapper -d org.apache.phoenix.queryserver.client.Driver " + \
' -u "' + jdbc_url + '"' + " -n none -p none " + \
" --color=" + colorSetting + " --fastConnect=" + args.fastconnect + " --verbose=" + args.verbose + \
" --incremental=false --isolation=TRANSACTION_READ_COMMITTED " + sqlfile
exitcode = subprocess.call(java_cmd, shell=True)
sys.exit(exitcode)
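# Example invocations (illustrative only; host names and file paths below are
# placeholders, not part of the original script):
#
#   sqlline-thin.py                                      # defaults to http://localhost:8765
#   sqlline-thin.py http://queryserver.example.com:8765  # explicit Query Server URL
#   sqlline-thin.py http://queryserver.example.com:8765 /tmp/queries.sql -s JSON
#
# The last form runs the statements from /tmp/queries.sql and forces JSON
# serialization instead of the value derived from hbase-site.xml.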
| apache-2.0 |
fanchao01/spider | mini_spider/log.py | 1 | 2513 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
This module provides configuration file management services in an i18n environment.
"""
import os
import logging
import logging.handlers
_LOG_FORMAT = "%(levelname)s: %(asctime)s: %(filename)s:%(lineno)d * %(thread)d %(message)s"
_LOG_DATEFMT = "%m-%d %H:%M:%S"
def init_log(log_path, level=logging.INFO, when="D", backup=7,
format=_LOG_FORMAT, datefmt=_LOG_DATEFMT):
"""
init_log - initialize log module
Args:
log_path - Log file path prefix.
Log data will go to two files: log_path.log and log_path.log.wf
Any non-exist parent directories will be created automatically
level - msg above the level will be displayed
DEBUG < INFO < WARNING < ERROR < CRITICAL
the default value is logging.INFO
when - how to split the log file by time interval
'S' : Seconds
'M' : Minutes
'H' : Hours
'D' : Days
'W' : Week day
default value: 'D'
format - format of the log
default format:
%(levelname)s: %(asctime)s: %(filename)s:%(lineno)d * %(thread)d %(message)s
INFO: 12-09 18:02:42: log.py:40 * 139814749787872 HELLO WORLD
backup - how many backup file to keep
default value: 7
Raises:
OSError: fail to create log directories
IOError: fail to open log file
"""
formatter = logging.Formatter(format, datefmt)
logger = logging.getLogger()
logger.setLevel(level)
    log_dir = os.path.dirname(log_path)
    if log_dir and not os.path.isdir(log_dir):
        os.makedirs(log_dir)
handler = logging.handlers.TimedRotatingFileHandler(log_path + ".log",
when=when,
backupCount=backup)
handler.setLevel(level)
handler.setFormatter(formatter)
logger.addHandler(handler)
handler = logging.handlers.TimedRotatingFileHandler(log_path + ".log.wf",
when=when,
backupCount=backup)
handler.setLevel(logging.WARNING)
handler.setFormatter(formatter)
logger.addHandler(handler)
if __name__ == '__main__':
init_log('./log')
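    # Illustration (assumes the current directory is writable): after init_log,
    # INFO and above go to ./log.log, while WARNING and above are duplicated
    # into ./log.log.wf by the second handler configured above.
    logging.info('hello from the init_log demo')
    logging.warning('this message also lands in log.log.wf')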
| gpl-2.0 |
LubyRuffy/pr0bescan | plugins/hostcdn.py | 2 | 1504 | #/usr/bin/env python
#coding=utf-8
from libs.core.common import logging,runtime
from libs.core.common import print_color
import libs.DNS as DNS
log = logging.getLogger(__name__)
def output(target):
customHeaders = ['x-powered-by-360wzb',
'x-powered-by-anquanbao','x-cache','webluker-edge',
'powered-by-chinacache']
cnames = ['360wzb','incapdns','aqb.so']
target.iscdn = False
print_color('Test CDN for %s'%target.ip, 2)
print_color('Test CDN for %s with HTTP header'%target.f_domain, 2)
if any('cdn' in header for header in target.header):
target.iscdn = True
if not target.iscdn:
flag = set(target.header).intersection(set(customHeaders))
target.iscdn = True if len(flag) else None
if not target.iscdn and target.f_domain:
try:
print_color('Test CDN for %s with CNAME'%target.f_domain, 2)
r = DNS.DnsRequest(target.f_domain, qtype="CNAME",
server=['8.8.8.8'], protocol='tcp', timeout=10)
res = r.req()
if len(res.answers) > 0:
cname = res.answers[0]['data']
                # worth noting: substring match against known CDN CNAME markers
if any(cname_str in cname for cname_str in cnames):
target.iscdn = True
except:
log.exception('exception')
print_color(__name__+' faild', 0)
if target.iscdn:
print_color(target.iscdn, 1)
print('')
| gpl-2.0 |
cosmoharrigan/opencog | opencog/python/learning/incremental_learner/incremental_learner.py | 34 | 7004 | __author__ = 'raminbarati'
import networkx as nx
from util import switch
from modification import Modification
class Runnable:
"""INTERFACE"""
def run(self):
pass
class IncrementalLearner:
"""INTERFACE"""
def construct_join_tree(self, graph):
pass
def construct_mpd_tree(self, joinTree, moralisedGraph):
pass
def incremental_compilation(self, modificationList):
pass
# pretty self explanatory
class IncrementalLearnerBase(object, Runnable, IncrementalLearner):
"""ABSTRACT, INCOMPLETE"""
def __init__(self, old_network):
self._old_network = old_network
self._graph_m = None
self._jt = None
self._jt_mpd = None
self._initialized = False
self._marked = []
def moralize(self,directed_graph):
gm = directed_graph.to_undirected()
for node in directed_graph.nodes_iter():
pres = directed_graph.predecessors(node)
for i in range(0,len(pres),1):
for j in range(i+1,len(pres),1):
gm.add_edge(pres[i],pres[j])
return gm
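    # Example: for the DAG a -> c <- b, moralize() drops edge directions and
    # adds the undirected edge (a, b), because a and b are co-parents of c.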
# input graph should not be directed, use moralize first
def triangulate(self, graph):
# implemented in a child class
pass
def thin_out_graph(self, graph):
# override in a child class if triangulation isn't minimal
return graph
def clique_decomposition(self, graph):
cluster_graph = nx.Graph()
cliques = list(graph.subgraph(c) for c in nx.find_cliques(graph))
while cliques:
clique_i = cliques.pop()
for clique in cliques:
clique_j = clique
shared = set(clique_i).intersection(set(clique_j))
if len(shared) > 0:
cluster_graph.add_edge(clique_i, clique_j, {'label':shared, 'weight':1.0/len(shared)})
j_tree = nx.minimum_spanning_tree(cluster_graph)
return j_tree
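    # Note: edges are weighted 1/|separator|, so the minimum spanning tree
    # prefers larger separators, which is what makes the result a valid join
    # (junction) tree over the maximal cliques.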
def construct_join_tree(self, graph):
graph_m = self.moralize(graph)
graph_t = self.triangulate(graph_m)
graph_min = self.thin_out_graph(graph_t)
jt_min = self.clique_decomposition(graph_min)
return jt_min
def construct_mpd_tree(self, jt_min, graph_m):
def is_complete(nbunch):
sub_g = graph_m.subgraph(nbunch)
n = len(nbunch)
if n == 1:
return True
m = sub_g.size()
if n*(n-1)/2 == m:
return True
return False
def aggregate(node_i,node_j):
union = set(node_i).union(set(node_j))
sub_g = graph_m.subgraph(union)
jt_mpd.add_node(sub_g)
sub_g_n = set(sub_g)
neigh = set(jt_mpd[node_i]).union(jt_mpd[node_j])
for n_i in neigh:
sep = set(n_i).intersection(sub_g_n)
jt_mpd.add_edge(n_i,sub_g, {'label':sep})
jt_mpd.remove_node(node_i)
jt_mpd.remove_node(node_j)
jt_mpd = jt_min.copy()
while True:
nodes = jt_mpd.nodes()
complete = True
for node in nodes:
for neighbor in jt_mpd[node]:
seperator = jt_mpd[neighbor][node]['label']
if not is_complete(seperator):
complete = False
aggregate(neighbor,node)
break
if not complete:
break
if complete:
break
return jt_mpd
def incremental_compilation(self, modificationList):
for modification in modificationList:
L = self.modify_moral_graph(modification)
for case in switch(modification.type):
if case(Modification.ADD_NODE):
self.add_node(modification.data)
break
if case(Modification.REMOVE_NODE):
self.remove_node(modification.data)
break
if case(Modification.ADD_LINK):
self.mark_affected_mps_by_add_link(L)
break
if case(Modification.REMOVE_LINK):
self.mark_affected_mps_by_remove_link(None,None,L)
break
if case():
pass
raise Exception("not implemented")
def modify_moral_graph(self, modification):
L = []
for case in switch(modification.type):
if case(Modification.ADD_NODE):
self._graph_m.add_node(modification.data)
break
if case(Modification.REMOVE_NODE):
self._graph_m.remove_node(modification.data)
break
if case(Modification.ADD_LINK):
pair = set(modification.data)
parents = set(self._old_network.predecessors(modification.data[1]))
nodes = pair.union(parents)
subgraph = self._graph_m.subgraph(nodes)
complement = nx.complement(subgraph)
for edge in complement.edges_iter():
L.append(edge)
break
if case(Modification.REMOVE_LINK):
head = modification.data[1]
tail = modification.data[0]
children_head = set(self._old_network.successors(head))
children_tail = set(self._old_network.successors(tail))
if len(children_tail.intersection(children_head)) <= 0:
                    self._graph_m.remove_edge(tail, head)
L.append(modification.data)
for parent in self._old_network.predecessors_iter(head):
if parent == tail: continue
children_z_i = set(self._old_network.successors(parent)).intersection(children_tail)
if not len(children_z_i) == 1: continue
if head not in children_z_i: continue
if not self._old_network.has_edge(parent,tail): continue
if self._old_network.has_edge(tail, parent): continue
self._graph_m.remove_edge(tail,parent)
L.append((tail,parent))
break
if case():
raise Exception('Not a defined modification')
return L
def connect(self, clusterTree, cluster_i, cluster_j):
raise Exception("not implemented")
def mark_affected_mps_by_remove_link(self, mps_y, mps_z, linkList):
raise Exception("not implemented")
def remove_node(self, node, mps_x = None, mps_y = None):
raise Exception("not implemented")
def add_node(self, node):
C_x = nx.Graph()
C_x.add_node(node, {'marked':False})
self._jt.add_node(C_x)
self._jt_mpd.add_node(C_x)
def mark_affected_mps_by_add_link(self, linkList):
raise Exception("not implemented") | agpl-3.0 |
mpasternak/django-interval-field | interval/fields.py | 2 | 5391 | # -*- encoding: utf-8 -*-
from django.db import models
from django.db.models.fields.subclassing import SubfieldBase
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from datetime import timedelta
import six
from interval.forms import IntervalFormField
day_seconds = 24 * 60 * 60
microseconds = 1000000
def formatError(value):
raise ValueError(
"please use [[DD]D days,]HH:MM:SS[.ms] instead of %r" % value)
def timedelta_topgsqlstring(value):
buf = []
for attr in ['days', 'seconds', 'microseconds']:
v = getattr(value, attr)
if v:
buf.append('%i %s' % (v, attr.upper()))
if not buf:
return '0'
return " ".join(buf)
def timedelta_tobigint(value):
return (
value.days * day_seconds * microseconds
+ value.seconds * microseconds
+ value.microseconds
)
def range_check(value, name, min=None, max=None):
try:
value = int(value)
except (TypeError, ValueError):
raise ValueError("%s is not an integer" % value)
if min is not None:
if value < min:
raise ValueError("%s is less than %s" % (value, min))
if max is not None:
if value > max:
raise ValueError("%s is more than %s" % (value, max))
return value
class IntervalField(six.with_metaclass(SubfieldBase, models.Field)):
"""This is a field, which maps to Python's datetime.timedelta.
For PostgreSQL, its type is INTERVAL - a native interval type.
- http://www.postgresql.org/docs/8.4/static/datatype-datetime.html
For other databases, its type is BIGINT and timedelta value is stored
as number of seconds * 1000000 .
"""
description = _("interval")
def __init__(
self, verbose_name=None, min_value=None, max_value=None, format=None,
*args, **kw):
models.Field.__init__(
self, verbose_name=verbose_name, *args, **kw)
self.min_value = min_value
self.max_value = max_value
self.format = format
if self.min_value is not None and self.max_value is not None:
if self.min_value >= self.max_value:
raise ValueError('min_value >= max_value')
def db_type(self, connection):
if connection.settings_dict['ENGINE'].find('postgresql') >= 0 or \
connection.settings_dict['ENGINE'].find('postgis') >= 0:
return 'INTERVAL'
return 'BIGINT'
def to_python(self, value):
if isinstance(value, timedelta):
# psycopg2 will return a timedelta() for INTERVAL type column
# in database
return value
        if value is None or value == '':
return None
# string forms: in form like "X days, HH:MM:SS.ms" (can be used in
# fixture files)
if isinstance(value, six.string_types) and value.find(":") >= 0:
days = 0
if value.find("days,") >= 0 or value.find("day,") >= 0:
if value.find("days,") >= 0:
days, value = value.split("days,")
else:
days, value = value.split("day,")
value = value.strip()
try:
days = int(days.strip())
except ValueError:
formatError(value)
days = range_check(days, "days", 0)
try:
h, m, s = value.split(":")
except ValueError:
formatError(value)
h = range_check(h, "hours", 0)
m = range_check(m, "minutes", 0, 59)
if s.find(".") >= 0:
s, ms = s.split(".")
else:
ms = "0"
s = range_check(s, "seconds", 0, 59)
l = len(ms)
ms = range_check(ms, "microseconds", 0, microseconds)
ms = ms * (microseconds / (10 ** l))
return timedelta(
days=days, hours=h, minutes=m,
seconds=s, microseconds=ms)
# other database backends:
return timedelta(seconds=float(value) / microseconds)
def get_db_prep_value(self, value, connection, prepared=False):
        if value is None or value == '':
return None
if connection.settings_dict['ENGINE'].find('postgresql') >= 0 or \
connection.settings_dict['ENGINE'].find('postgis') >= 0:
if isinstance(value, six.string_types):
# Can happen, when using south migrations
return value
return timedelta_topgsqlstring(value)
return timedelta_tobigint(value)
def formfield(self, form_class=IntervalFormField, **kwargs):
defaults = {'min_value': self.min_value,
'max_value': self.max_value,
'format': self.format or 'DHMS',
'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text}
if self.has_default():
defaults['initial'] = self.default
defaults.update(kwargs)
return form_class(**defaults)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^interval\.fields\.IntervalField"])
except ImportError:
pass
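# Usage sketch (hypothetical model, not part of this module): the field accepts
# and returns datetime.timedelta values, stored as INTERVAL on PostgreSQL and
# as a microsecond BIGINT on other backends.
#
#   from datetime import timedelta
#   from django.db import models
#   from interval.fields import IntervalField
#
#   class Task(models.Model):
#       estimated_time = IntervalField(
#           min_value=timedelta(minutes=1),
#           max_value=timedelta(days=7),
#           null=True, blank=True)
#
# Fixtures may also use the string form "2 days, 03:15:00.250000", which
# to_python() above parses back into a timedelta.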
| mit |
wschwa/Mr-Orange-Sick-Beard | lib/dateutil/zoneinfo/__init__.py | 265 | 2575 | """
Copyright (c) 2003-2005 Gustavo Niemeyer <gustavo@niemeyer.net>
This module offers extensions to the standard python 2.3+
datetime module.
"""
from dateutil.tz import tzfile
from tarfile import TarFile
import os
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
__license__ = "PSF License"
__all__ = ["setcachesize", "gettz", "rebuild"]
CACHE = []
CACHESIZE = 10
class tzfile(tzfile):
def __reduce__(self):
return (gettz, (self._filename,))
def getzoneinfofile():
filenames = os.listdir(os.path.join(os.path.dirname(__file__)))
filenames.sort()
filenames.reverse()
for entry in filenames:
if entry.startswith("zoneinfo") and ".tar." in entry:
return os.path.join(os.path.dirname(__file__), entry)
return None
ZONEINFOFILE = getzoneinfofile()
del getzoneinfofile
def setcachesize(size):
global CACHESIZE, CACHE
CACHESIZE = size
del CACHE[size:]
def gettz(name):
tzinfo = None
if ZONEINFOFILE:
for cachedname, tzinfo in CACHE:
if cachedname == name:
break
else:
tf = TarFile.open(ZONEINFOFILE)
try:
zonefile = tf.extractfile(name)
except KeyError:
tzinfo = None
else:
tzinfo = tzfile(zonefile)
tf.close()
CACHE.insert(0, (name, tzinfo))
del CACHE[CACHESIZE:]
return tzinfo
def rebuild(filename, tag=None, format="gz"):
import tempfile, shutil
tmpdir = tempfile.mkdtemp()
zonedir = os.path.join(tmpdir, "zoneinfo")
moduledir = os.path.dirname(__file__)
if tag: tag = "-"+tag
targetname = "zoneinfo%s.tar.%s" % (tag, format)
try:
tf = TarFile.open(filename)
for name in tf.getnames():
if not (name.endswith(".sh") or
name.endswith(".tab") or
name == "leapseconds"):
tf.extract(name, tmpdir)
filepath = os.path.join(tmpdir, name)
os.system("zic -d %s %s" % (zonedir, filepath))
tf.close()
target = os.path.join(moduledir, targetname)
for entry in os.listdir(moduledir):
if entry.startswith("zoneinfo") and ".tar." in entry:
os.unlink(os.path.join(moduledir, entry))
tf = TarFile.open(target, "w:%s" % format)
for entry in os.listdir(zonedir):
entrypath = os.path.join(zonedir, entry)
tf.add(entrypath, entry)
tf.close()
finally:
shutil.rmtree(tmpdir)
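if __name__ == "__main__":
    # Small illustrative check (the zone name is just an example): gettz()
    # returns a tzfile built from the bundled zoneinfo tarball, or None when
    # the name is not present in the archive.
    print(gettz("America/New_York"))
    print(gettz("Not/A_Zone"))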
| gpl-3.0 |
stonewell/wxglterm | src/utils/app_config.py | 1 | 1301 | import os
import json
class DictQuery(dict):
def get(self, path, default=None):
try:
return self.__get(path, default)
        except Exception:
import logging
logging.exception('get failed')
def __get(self, path, default=None):
keys = path.split("/")
val = None
for key in keys:
# skip empty keys for // and path start with /
if len(key) == 0:
continue
if val:
if isinstance(val, list):
val = [v.get(key, default)
if v else None for v in val]
else:
val = val.get(key, default)
else:
val = dict.get(self, key, default)
if not val or val == default:
                break
if isinstance(val, list) or isinstance(val, dict):
return json.dumps(val)
return val if val is not None else default
def load_config(config_path):
if not os.path.exists(config_path):
msg = 'unable to find the config file:{}'.format(config_path)
raise ValueError(msg)
with open(config_path) as f:
return DictQuery(json.load(f))
def load_config_from_string(data):
return DictQuery(json.loads(data))
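if __name__ == '__main__':
    # Minimal demonstration of the path-style lookup (the sample data is made up):
    cfg = load_config_from_string('{"term": {"font": {"size": 14, "name": "mono"}}}')
    print(cfg.get('term/font/size'))        # prints 14
    print(cfg.get('/term/font/name'))       # leading slash is tolerated; prints mono
    print(cfg.get('term/missing', 'none'))  # falls back to the default; prints none
    print(cfg.get('term/font'))             # dict values come back serialized as JSON text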
| mit |
liveblog/superdesk | server/apps/planning.py | 10 | 1989 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from superdesk.notification import push_notification
from superdesk.resource import Resource
from apps.archive.common import on_create_item
from superdesk.services import BaseService
import superdesk
def init_app(app):
endpoint_name = 'planning'
service = PlanningService(endpoint_name, backend=superdesk.get_backend())
PlanningResource(endpoint_name, app=app, service=service)
class PlanningResource(Resource):
schema = {
'guid': {
'type': 'string',
'unique': True
},
'language': {
'type': 'string'
},
'headline': {
'type': 'string'
},
'slugline': {
'type': 'string'
},
'description_text': {
'type': 'string',
'nullable': True
},
'firstcreated': {
'type': 'datetime'
},
'urgency': {
'type': 'integer'
},
'desk': Resource.rel('desks', True)
}
item_url = 'regex("[\w,.:-]+")'
datasource = {'search_backend': 'elastic'}
resource_methods = ['GET', 'POST']
privileges = {'POST': 'planning', 'PATCH': 'planning'}
class PlanningService(BaseService):
def on_create(self, docs):
on_create_item(docs)
def on_created(self, docs):
push_notification('planning', created=1)
def on_updated(self, updates, original):
push_notification('planning', updated=1)
def on_deleted(self, doc):
push_notification('planning', deleted=1)
superdesk.privilege(name='planning',
label='Planning Management',
description='User can plan and cover.')
| agpl-3.0 |
jinnykoo/wuyisj | src/oscar/apps/order/south_migrations/0008_auto__add_field_orderdiscount_category.py | 16 | 33182 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'OrderDiscount.category'
db.add_column('order_orderdiscount', 'category',
self.gf('django.db.models.fields.CharField')(default='Basket', max_length=64),
keep_default=False)
def backwards(self, orm):
# Deleting field 'OrderDiscount.category'
db.delete_column('order_orderdiscount', 'category')
models = {
'address.country': {
'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'},
'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.communicationevent': {
'Meta': {'object_name': 'CommunicationEvent'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customer.CommunicationEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': "orm['order.Order']"})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': "orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['{0}']".format(AUTH_USER_MODEL)})
},
'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': "orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['{0}']".format(AUTH_USER_MODEL), 'null': 'True'})
},
'order.paymentevent': {
'Meta': {'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.PaymentEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': "orm['order.Order']"})
},
'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.PaymentEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.paymenteventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date']", 'object_name': 'ShippingEvent'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.ShippingEventQuantity']", 'symmetrical': 'False'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['{0}']".format(AUTH_USER_MODEL)})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['order']
| bsd-3-clause |
Brian-Tomasik/leveraged_investing | TaxRates.py | 1 | 1393 | class TaxRates(object):
"""Investor's tax rates"""
def __init__(self, short_term_cap_gains_rate=.28,
long_term_cap_gains_rate=.15, state_income_tax=.05):
self.__short_term_cap_gains_rate = short_term_cap_gains_rate
self.__long_term_cap_gains_rate = long_term_cap_gains_rate
self.__state_income_tax = state_income_tax
def short_term_cap_gains_rate_plus_state(self):
return self.__short_term_cap_gains_rate + self.__state_income_tax
def long_term_cap_gains_rate_plus_state(self):
return self.__long_term_cap_gains_rate + self.__state_income_tax
def income_tax_rate_plus_state(self):
"""Income tax rate is same as short-term cap-gains rate."""
return self.short_term_cap_gains_rate_plus_state()
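    # Worked example with the default rates above: the combined short-term (and
    # ordinary income) rate is 0.28 + 0.05 = 0.33, and the combined long-term
    # rate is 0.15 + 0.05 = 0.20.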
@property
def short_term_cap_gains_rate(self):
return self.__short_term_cap_gains_rate
@property
def long_term_cap_gains_rate(self):
return self.__long_term_cap_gains_rate
@property
def state_income_tax(self):
"""Since most states don't distinguish short- vs. long-term
capital gains (see http://www.aaii.com/journal/article/capital-pains-rules-for-capital-losses.touch ,
section 'State Income Taxes'), this can probably just be
one number, without distinguishing short- vs. long-term."""
return self.__state_income_tax | unlicense |
SujaySKumar/django | tests/apps/tests.py | 68 | 16296 | from __future__ import unicode_literals
import os
import warnings
from unittest import skipUnless
from django.apps import AppConfig, apps
from django.apps.registry import Apps
from django.contrib.admin.models import LogEntry
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import extend_sys_path
from django.utils import six
from django.utils._os import upath
from .default_config_app.apps import CustomConfig
from .models import SoAlternative, TotallyNormal, new_apps
# Small list with a variety of cases for tests that iterate on installed apps.
# Intentionally not in alphabetical order to check if the order is preserved.
SOME_INSTALLED_APPS = [
'apps.apps.MyAdmin',
'apps.apps.MyAuth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
SOME_INSTALLED_APPS_NAMES = [
'django.contrib.admin',
'django.contrib.auth',
] + SOME_INSTALLED_APPS[2:]
HERE = os.path.dirname(upath(__file__))
class AppsTests(SimpleTestCase):
def test_singleton_master(self):
"""
Ensures that only one master registry can exist.
"""
with self.assertRaises(RuntimeError):
Apps(installed_apps=None)
def test_ready(self):
"""
Tests the ready property of the master registry.
"""
# The master app registry is always ready when the tests run.
self.assertTrue(apps.ready)
# Non-master app registries are populated in __init__.
self.assertTrue(Apps().ready)
def test_bad_app_config(self):
"""
Tests when INSTALLED_APPS contains an incorrect app config.
"""
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['apps.apps.BadConfig']):
pass
def test_not_an_app_config(self):
"""
Tests when INSTALLED_APPS contains a class that isn't an app config.
"""
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['apps.apps.NotAConfig']):
pass
def test_no_such_app(self):
"""
Tests when INSTALLED_APPS contains an app that doesn't exist, either
directly or via an app config.
"""
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['there is no such app']):
pass
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['apps.apps.NoSuchApp']):
pass
def test_no_such_app_config(self):
"""
Tests when INSTALLED_APPS contains an entry that doesn't exist.
"""
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['apps.apps.NoSuchConfig']):
pass
def test_default_app_config(self):
with self.settings(INSTALLED_APPS=['apps.default_config_app']):
config = apps.get_app_config('default_config_app')
self.assertIsInstance(config, CustomConfig)
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_app_configs(self):
"""
Tests apps.get_app_configs().
"""
app_configs = apps.get_app_configs()
self.assertListEqual(
[app_config.name for app_config in app_configs],
SOME_INSTALLED_APPS_NAMES)
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_app_config(self):
"""
Tests apps.get_app_config().
"""
app_config = apps.get_app_config('admin')
self.assertEqual(app_config.name, 'django.contrib.admin')
app_config = apps.get_app_config('staticfiles')
self.assertEqual(app_config.name, 'django.contrib.staticfiles')
with self.assertRaises(LookupError):
apps.get_app_config('webdesign')
msg = "No installed app with label 'django.contrib.auth'. Did you mean 'myauth'"
with self.assertRaisesMessage(LookupError, msg):
apps.get_app_config('django.contrib.auth')
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_is_installed(self):
"""
Tests apps.is_installed().
"""
self.assertTrue(apps.is_installed('django.contrib.admin'))
self.assertTrue(apps.is_installed('django.contrib.auth'))
self.assertTrue(apps.is_installed('django.contrib.staticfiles'))
self.assertFalse(apps.is_installed('django.contrib.webdesign'))
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_model(self):
"""
Tests apps.get_model().
"""
self.assertEqual(apps.get_model('admin', 'LogEntry'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('admin', 'LogExit')
# App label is case-sensitive, Model name is case-insensitive.
self.assertEqual(apps.get_model('admin', 'loGentrY'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('Admin', 'LogEntry')
# A single argument is accepted.
self.assertEqual(apps.get_model('admin.LogEntry'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('admin.LogExit')
with self.assertRaises(ValueError):
apps.get_model('admin_LogEntry')
@override_settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig'])
def test_relabeling(self):
self.assertEqual(apps.get_app_config('relabeled').name, 'apps')
def test_duplicate_labels(self):
with six.assertRaisesRegex(self, ImproperlyConfigured, "Application labels aren't unique"):
with self.settings(INSTALLED_APPS=['apps.apps.PlainAppsConfig', 'apps']):
pass
def test_duplicate_names(self):
with six.assertRaisesRegex(self, ImproperlyConfigured, "Application names aren't unique"):
with self.settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig', 'apps']):
pass
def test_import_exception_is_not_masked(self):
"""
App discovery should preserve stack traces. Regression test for #22920.
"""
with six.assertRaisesRegex(self, ImportError, "Oops"):
with self.settings(INSTALLED_APPS=['import_error_package']):
pass
def test_models_py(self):
"""
Tests that the models in the models.py file were loaded correctly.
"""
self.assertEqual(apps.get_model("apps", "TotallyNormal"), TotallyNormal)
with self.assertRaises(LookupError):
apps.get_model("apps", "SoAlternative")
with self.assertRaises(LookupError):
new_apps.get_model("apps", "TotallyNormal")
self.assertEqual(new_apps.get_model("apps", "SoAlternative"), SoAlternative)
def test_dynamic_load(self):
"""
Makes a new model at runtime and ensures it goes into the right place.
"""
old_models = list(apps.get_app_config("apps").get_models())
# Construct a new model in a new app registry
body = {}
new_apps = Apps(["apps"])
meta_contents = {
'app_label': "apps",
'apps': new_apps,
}
meta = type(str("Meta"), tuple(), meta_contents)
body['Meta'] = meta
body['__module__'] = TotallyNormal.__module__
temp_model = type(str("SouthPonies"), (models.Model,), body)
# Make sure it appeared in the right place!
self.assertListEqual(list(apps.get_app_config("apps").get_models()), old_models)
with self.assertRaises(LookupError):
apps.get_model("apps", "SouthPonies")
self.assertEqual(new_apps.get_model("apps", "SouthPonies"), temp_model)
def test_model_clash(self):
"""
Test for behavior when two models clash in the app registry.
"""
new_apps = Apps(["apps"])
meta_contents = {
'app_label': "apps",
'apps': new_apps,
}
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__
type(str("SouthPonies"), (models.Model,), body)
# When __name__ and __module__ match we assume the module
# was reloaded and issue a warning. This use-case is
# useful in a REPL. Refs #23621.
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__
with warnings.catch_warnings(record=True) as w:
type(str("SouthPonies"), (models.Model,), body)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(str(w[-1].message),
"Model 'apps.southponies' was already registered. "
"Reloading models is not advised as it can lead to inconsistencies, "
"most notably with related models.")
# If it doesn't appear to be a reloaded module then we expect
# a RuntimeError.
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__ + '.whatever'
with six.assertRaisesRegex(self, RuntimeError,
"Conflicting 'southponies' models in application 'apps':.*"):
type(str("SouthPonies"), (models.Model,), body)
def test_get_containing_app_config_apps_not_ready(self):
"""
apps.get_containing_app_config() should raise an exception if
apps.apps_ready isn't True.
"""
apps.apps_ready = False
try:
with self.assertRaisesMessage(AppRegistryNotReady, "Apps aren't loaded yet"):
apps.get_containing_app_config('foo')
finally:
apps.apps_ready = True
def test_lazy_model_operation(self):
"""
Tests apps.lazy_model_operation().
"""
model_classes = []
initial_pending = set(apps._pending_operations)
def test_func(*models):
model_classes[:] = models
class LazyA(models.Model):
pass
# Test models appearing twice, and models appearing consecutively
model_keys = [('apps', model_name) for model_name in ['lazya', 'lazyb', 'lazyb', 'lazyc', 'lazya']]
apps.lazy_model_operation(test_func, *model_keys)
# LazyA shouldn't be waited on since it's already registered,
# and LazyC shouldn't be waited on until LazyB exists.
self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyb')})
# Test that multiple operations can wait on the same model
apps.lazy_model_operation(test_func, ('apps', 'lazyb'))
class LazyB(models.Model):
pass
self.assertListEqual(model_classes, [LazyB])
# Now we are just waiting on LazyC.
self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyc')})
class LazyC(models.Model):
pass
# Everything should be loaded - make sure the callback was executed properly.
self.assertListEqual(model_classes, [LazyA, LazyB, LazyB, LazyC, LazyA])
class Stub(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class AppConfigTests(SimpleTestCase):
"""Unit tests for AppConfig class."""
def test_path_set_explicitly(self):
"""If subclass sets path as class attr, no module attributes needed."""
class MyAppConfig(AppConfig):
path = 'foo'
ac = MyAppConfig('label', Stub())
self.assertEqual(ac.path, 'foo')
def test_explicit_path_overrides(self):
"""If path set as class attr, overrides __path__ and __file__."""
class MyAppConfig(AppConfig):
path = 'foo'
ac = MyAppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'foo')
def test_dunder_path(self):
"""If single element in __path__, use it (in preference to __file__)."""
ac = AppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'a')
def test_no_dunder_path_fallback_to_dunder_file(self):
"""If there is no __path__ attr, use __file__."""
ac = AppConfig('label', Stub(__file__='b/__init__.py'))
self.assertEqual(ac.path, 'b')
def test_empty_dunder_path_fallback_to_dunder_file(self):
"""If the __path__ attr is empty, use __file__ if set."""
ac = AppConfig('label', Stub(__path__=[], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'b')
def test_multiple_dunder_path_fallback_to_dunder_file(self):
"""If the __path__ attr is length>1, use __file__ if set."""
ac = AppConfig('label', Stub(__path__=['a', 'b'], __file__='c/__init__.py'))
self.assertEqual(ac.path, 'c')
def test_no_dunder_path_or_dunder_file(self):
"""If there is no __path__ or __file__, raise ImproperlyConfigured."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub())
def test_empty_dunder_path_no_dunder_file(self):
"""If the __path__ attr is empty and there is no __file__, raise."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub(__path__=[]))
def test_multiple_dunder_path_no_dunder_file(self):
"""If the __path__ attr is length>1 and there is no __file__, raise."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub(__path__=['a', 'b']))
def test_duplicate_dunder_path_no_dunder_file(self):
"""
If the __path__ attr contains duplicate paths and there is no
__file__, the duplicates should be deduplicated (#25246).
"""
ac = AppConfig('label', Stub(__path__=['a', 'a']))
self.assertEqual(ac.path, 'a')
@skipUnless(six.PY3, "Namespace packages sans __init__.py were added in Python 3.3")
class NamespacePackageAppTests(SimpleTestCase):
# We need nsapp to be top-level so our multiple-paths tests can add another
# location for it (if it's inside a normal package with an __init__.py, that
# isn't possible). In order to avoid cluttering the already-full tests/ dir
# (which is on sys.path), we add these new entries to sys.path temporarily.
base_location = os.path.join(HERE, 'namespace_package_base')
other_location = os.path.join(HERE, 'namespace_package_other_base')
app_path = os.path.join(base_location, 'nsapp')
def test_single_path(self):
"""
A Py3.3+ namespace package can be an app if it has only one path.
"""
with extend_sys_path(self.base_location):
with self.settings(INSTALLED_APPS=['nsapp']):
app_config = apps.get_app_config('nsapp')
self.assertEqual(app_config.path, upath(self.app_path))
def test_multiple_paths(self):
"""
A Py3.3+ namespace package with multiple locations cannot be an app.
(Because then we wouldn't know where to load its templates, static
assets, etc. from.)
"""
# Temporarily add two directories to sys.path that both contain
# components of the "nsapp" package.
with extend_sys_path(self.base_location, self.other_location):
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['nsapp']):
pass
def test_multiple_paths_explicit_path(self):
"""
Multiple locations are OK only if the app config has an explicit path.
"""
# Temporarily add two directories to sys.path that both contain
# components of the "nsapp" package.
with extend_sys_path(self.base_location, self.other_location):
with self.settings(INSTALLED_APPS=['nsapp.apps.NSAppConfig']):
app_config = apps.get_app_config('nsapp')
self.assertEqual(app_config.path, upath(self.app_path))
| bsd-3-clause |
grburgess/astromodels | astromodels/tests/test_template_model.py | 2 | 4653 | import pytest
import os
import numpy as np
from astromodels.functions.template_model import TemplateModel, TemplateModelFactory, MissingDataFile
from astromodels.functions.functions import Band, Powerlaw
from astromodels import Model, PointSource, clone_model, load_model
import pickle
__author__ = 'giacomov'
def get_comparison_function():
mo = Band()
mo.K = 1
return mo
@pytest.mark.slow
def test_template_factory_1D():
mo = get_comparison_function()
energies = np.logspace(1, 3, 50)
t = TemplateModelFactory('__test1D', 'A test template', energies, ['alpha'])
alpha_grid = np.linspace(-1.5, 1, 15)
#beta_grid = np.linspace(-3.5, -1.6, 15)
#xp_grid = np.logspace(1, 3, 20)
t.define_parameter_grid('alpha', alpha_grid)
for a in alpha_grid:
mo.alpha = a
mo.beta = -2.5
mo.xp = 300.
t.add_interpolation_data(mo(energies), alpha=a)
print("Data has been prepared")
t.save_data(overwrite=True)
@pytest.mark.slow
def test_template_factory():
mo = get_comparison_function()
energies = np.logspace(1, 3, 50)
t = TemplateModelFactory('__test', 'A test template', energies, ['alpha', 'xp', 'beta'])
alpha_grid = np.linspace(-1.5, 1, 15)
beta_grid = np.linspace(-3.5, -1.6, 15)
xp_grid = np.logspace(1, 3, 20)
t.define_parameter_grid('alpha', alpha_grid)
t.define_parameter_grid('beta', beta_grid)
t.define_parameter_grid('xp', xp_grid)
for a in alpha_grid:
for b in beta_grid:
for xp in xp_grid:
mo.alpha = a
mo.beta = b
mo.xp = xp
t.add_interpolation_data(mo(energies), alpha=a, xp=xp, beta=b)
print("Data has been prepared")
t.save_data(overwrite=True)
tm = TemplateModel('__test1D')
tm(energies)
# This will be run second, so the template will exist
@pytest.mark.slow
def test_template_function():
tm = TemplateModel('__test')
mo = get_comparison_function()
new_alpha_grid = np.linspace(-1.5, 1, 15)
new_beta_grid = np.linspace(-3.5, -1.6, 15)
new_xp_grid = np.logspace(1, 3, 15)
new_energies = np.logspace(1, 3, 40)
tm.K = 1
mo.K = 1
for a in new_alpha_grid:
for b in new_beta_grid:
for xp in new_xp_grid:
mo.alpha = a
mo.beta = b
mo.xp = xp
tm.alpha = a
tm.beta = b
tm.xp = xp
res1 = mo(new_energies)
res2 = tm(new_energies)
deltas = np.abs((res2 - res1) / res1)
idx = deltas > 0.1
if np.any(idx):
raise AssertionError("Interpolation precision @ %s is %s, "
"worse than 10 percent, "
"with parameters %s!" % (new_energies[idx], deltas[idx], [a,b,xp]))
@pytest.mark.slow
def test_input_output():
tm = TemplateModel('__test')
tm.alpha = -0.95
tm.beta = -2.23
fake_source = PointSource("test", ra=0.0, dec=0.0, spectral_shape=tm)
fake_model = Model(fake_source)
clone = clone_model(fake_model)
assert clone.get_number_of_point_sources() == 1
assert tm.data_file == clone.test.spectrum.main.shape.data_file
assert clone.test.spectrum.main.shape.alpha.value == tm.alpha.value
assert clone.test.spectrum.main.shape.beta.value == tm.beta.value
xx = np.linspace(1, 10, 100)
assert np.allclose(clone.test.spectrum.main.shape(xx), fake_model.test.spectrum.main.shape(xx))
# Test pickling
dump = pickle.dumps(clone)
clone2 = pickle.loads(dump)
assert clone2.get_number_of_point_sources() == 1
assert tm.data_file == clone2.test.spectrum.main.shape.data_file
assert np.allclose(clone2.test.spectrum.main.shape(xx), fake_model.test.spectrum.main.shape(xx))
# Test pickling with other functions
new_shape = tm * Powerlaw()
new_shape.index_2 = -2.256
dump2 = pickle.dumps(new_shape)
clone3 = pickle.loads(dump2)
assert clone3.index_2.value == new_shape.index_2.value
# Now save to disk and reload
fake_source2 = PointSource("test", ra=0.0, dec=0.0, spectral_shape=new_shape)
fake_model2 = Model(fake_source2)
fake_model2.save("__test.yml", overwrite=True)
# Now try to reload
reloaded_model = load_model("__test.yml")
assert reloaded_model.get_number_of_point_sources() == 1
assert np.allclose(fake_model2.test.spectrum.main.shape(xx), reloaded_model.test.spectrum.main.shape(xx))
os.remove("__test.yml")
| bsd-3-clause |
yifanzh/ohmypaw | cogs/streams.py | 5 | 26330 | from discord.ext import commands
from .utils.dataIO import dataIO
from .utils.chat_formatting import escape_mass_mentions
from .utils import checks
from collections import defaultdict
from string import ascii_letters
from random import choice
import discord
import os
import re
import aiohttp
import asyncio
import logging
import json
class StreamsError(Exception):
pass
class StreamNotFound(StreamsError):
pass
class APIError(StreamsError):
pass
class InvalidCredentials(StreamsError):
pass
class OfflineStream(StreamsError):
pass
class Streams:
"""Streams
Alerts for a variety of streaming services"""
def __init__(self, bot):
self.bot = bot
self.twitch_streams = dataIO.load_json("data/streams/twitch.json")
self.hitbox_streams = dataIO.load_json("data/streams/hitbox.json")
self.mixer_streams = dataIO.load_json("data/streams/beam.json")
self.picarto_streams = dataIO.load_json("data/streams/picarto.json")
settings = dataIO.load_json("data/streams/settings.json")
self.settings = defaultdict(dict, settings)
self.messages_cache = defaultdict(list)
@commands.command()
async def hitbox(self, stream: str):
"""Checks if hitbox stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.hitbox_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.command(pass_context=True)
async def twitch(self, ctx, stream: str):
"""Checks if twitch stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)'
stream = re.sub(regex, '', stream)
try:
data = await self.fetch_twitch_ids(stream, raise_if_none=True)
embed = await self.twitch_online(data[0]["_id"])
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
except InvalidCredentials:
await self.bot.say("Owner: Client-ID is invalid or not set. "
"See `{}streamset twitchtoken`"
"".format(ctx.prefix))
else:
await self.bot.say(embed=embed)
@commands.command()
async def mixer(self, stream: str):
"""Checks if mixer stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.mixer_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.command()
async def picarto(self, stream: str):
"""Checks if picarto stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.picarto_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.group(pass_context=True, no_pm=True)
@checks.mod_or_permissions(manage_server=True)
async def streamalert(self, ctx):
"""Adds/removes stream alerts from the current channel"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@streamalert.command(name="twitch", pass_context=True)
async def twitch_alert(self, ctx, stream: str):
"""Adds/removes twitch alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
data = await self.fetch_twitch_ids(stream, raise_if_none=True)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except InvalidCredentials:
await self.bot.say("Owner: Client-ID is invalid or not set. "
"See `{}streamset twitchtoken`"
"".format(ctx.prefix))
return
enabled = self.enable_or_disable_if_active(self.twitch_streams,
stream,
channel,
_id=data[0]["_id"])
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
@streamalert.command(name="hitbox", pass_context=True)
async def hitbox_alert(self, ctx, stream: str):
"""Adds/removes hitbox alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.hitbox_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.hitbox_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
@streamalert.command(name="mixer", pass_context=True)
async def mixer_alert(self, ctx, stream: str):
"""Adds/removes mixer alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.mixer_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.mixer_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
@streamalert.command(name="picarto", pass_context=True)
async def picarto_alert(self, ctx, stream: str):
"""Adds/removes picarto alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.picarto_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.picarto_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
@streamalert.command(name="stop", pass_context=True)
async def stop_alert(self, ctx):
"""Stops all streams alerts in the current channel"""
channel = ctx.message.channel
streams = (
self.hitbox_streams,
self.twitch_streams,
self.mixer_streams,
self.picarto_streams
)
for stream_type in streams:
to_delete = []
for s in stream_type:
if channel.id in s["CHANNELS"]:
s["CHANNELS"].remove(channel.id)
if not s["CHANNELS"]:
to_delete.append(s)
for s in to_delete:
stream_type.remove(s)
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
await self.bot.say("There will be no more stream alerts in this "
"channel.")
@commands.group(pass_context=True)
async def streamset(self, ctx):
"""Stream settings"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@streamset.command()
@checks.is_owner()
async def twitchtoken(self, token : str):
"""Sets the Client ID for twitch
To do this, follow these steps:
1. Go to this page: https://dev.twitch.tv/dashboard/apps.
2. Click 'Register Your Application'
3. Enter a name, set the OAuth Redirect URI to 'http://localhost', and
select an Application Category of your choosing.
4. Click 'Register', and on the following page, copy the Client ID.
5. Paste the Client ID into this command. Done!
"""
self.settings["TWITCH_TOKEN"] = token
dataIO.save_json("data/streams/settings.json", self.settings)
await self.bot.say('Twitch Client-ID set.')
@streamset.command(pass_context=True, no_pm=True)
@checks.admin()
async def mention(self, ctx, *, mention_type : str):
"""Sets mentions for stream alerts
Types: everyone, here, none"""
server = ctx.message.server
mention_type = mention_type.lower()
if mention_type in ("everyone", "here"):
self.settings[server.id]["MENTION"] = "@" + mention_type
await self.bot.say("When a stream is online @\u200b{} will be "
"mentioned.".format(mention_type))
elif mention_type == "none":
self.settings[server.id]["MENTION"] = ""
await self.bot.say("Mentions disabled.")
else:
await self.bot.send_cmd_help(ctx)
dataIO.save_json("data/streams/settings.json", self.settings)
@streamset.command(pass_context=True, no_pm=True)
@checks.admin()
async def autodelete(self, ctx):
"""Toggles automatic notification deletion for streams that go offline"""
server = ctx.message.server
settings = self.settings[server.id]
current = settings.get("AUTODELETE", True)
settings["AUTODELETE"] = not current
if settings["AUTODELETE"]:
await self.bot.say("Notifications will be automatically deleted "
"once the stream goes offline.")
else:
await self.bot.say("Notifications won't be deleted anymore.")
dataIO.save_json("data/streams/settings.json", self.settings)
async def hitbox_online(self, stream):
url = "https://api.hitbox.tv/media/live/" + stream
async with aiohttp.get(url) as r:
data = await r.json(encoding='utf-8')
if "livestream" not in data:
raise StreamNotFound()
elif data["livestream"][0]["media_is_live"] == "0":
raise OfflineStream()
elif data["livestream"][0]["media_is_live"] == "1":
return self.hitbox_embed(data)
raise APIError()
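# Note (added for clarity): hitbox_online and the twitch/mixer/picarto
# helpers below share the same contract -- each returns a discord.Embed when
# the stream is live and raises OfflineStream, StreamNotFound or APIError
# otherwise.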
async def twitch_online(self, stream):
session = aiohttp.ClientSession()
url = "https://api.twitch.tv/kraken/streams/" + stream
header = {
'Client-ID': self.settings.get("TWITCH_TOKEN", ""),
'Accept': 'application/vnd.twitchtv.v5+json'
}
async with session.get(url, headers=header) as r:
data = await r.json(encoding='utf-8')
await session.close()
if r.status == 200:
if data["stream"] is None:
raise OfflineStream()
return self.twitch_embed(data)
elif r.status == 400:
raise InvalidCredentials()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def mixer_online(self, stream):
url = "https://mixer.com/api/v1/channels/" + stream
async with aiohttp.get(url) as r:
data = await r.json(encoding='utf-8')
if r.status == 200:
if data["online"] is True:
return self.mixer_embed(data)
else:
raise OfflineStream()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def picarto_online(self, stream):
url = "https://api.picarto.tv/v1/channel/name/" + stream
async with aiohttp.get(url) as r:
data = await r.text(encoding='utf-8')
if r.status == 200:
data = json.loads(data)
if data["online"] is True:
return self.picarto_embed(data)
else:
raise OfflineStream()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def fetch_twitch_ids(self, *streams, raise_if_none=False):
def chunks(l):
for i in range(0, len(l), 100):
yield l[i:i + 100]
base_url = "https://api.twitch.tv/kraken/users?login="
header = {
'Client-ID': self.settings.get("TWITCH_TOKEN", ""),
'Accept': 'application/vnd.twitchtv.v5+json'
}
results = []
for streams_list in chunks(streams):
session = aiohttp.ClientSession()
url = base_url + ",".join(streams_list)
async with session.get(url, headers=header) as r:
data = await r.json(encoding='utf-8')
if r.status == 200:
results.extend(data["users"])
elif r.status == 400:
raise InvalidCredentials()
else:
raise APIError()
await session.close()
if not results and raise_if_none:
raise StreamNotFound()
return results
def twitch_embed(self, data):
channel = data["stream"]["channel"]
url = channel["url"]
logo = channel["logo"]
if logo is None:
logo = "https://static-cdn.jtvnw.net/jtv_user_pictures/xarth/404_user_70x70.png"
status = channel["status"]
if not status:
status = "Untitled broadcast"
embed = discord.Embed(title=status, url=url)
embed.set_author(name=channel["display_name"])
embed.add_field(name="Followers", value=channel["followers"])
embed.add_field(name="Total views", value=channel["views"])
embed.set_thumbnail(url=logo)
if data["stream"]["preview"]["medium"]:
embed.set_image(url=data["stream"]["preview"]["medium"] + self.rnd_attr())
if channel["game"]:
embed.set_footer(text="Playing: " + channel["game"])
embed.color = 0x6441A4
return embed
def hitbox_embed(self, data):
base_url = "https://edge.sf.hitbox.tv"
livestream = data["livestream"][0]
channel = livestream["channel"]
url = channel["channel_link"]
embed = discord.Embed(title=livestream["media_status"], url=url)
embed.set_author(name=livestream["media_name"])
embed.add_field(name="Followers", value=channel["followers"])
#embed.add_field(name="Views", value=channel["views"])
embed.set_thumbnail(url=base_url + channel["user_logo"])
if livestream["media_thumbnail"]:
embed.set_image(url=base_url + livestream["media_thumbnail"] + self.rnd_attr())
embed.set_footer(text="Playing: " + livestream["category_name"])
embed.color = 0x98CB00
return embed
def mixer_embed(self, data):
default_avatar = ("https://mixer.com/_latest/assets/images/main/"
"avatars/default.jpg")
user = data["user"]
url = "https://mixer.com/" + data["token"]
embed = discord.Embed(title=data["name"], url=url)
embed.set_author(name=user["username"])
embed.add_field(name="Followers", value=data["numFollowers"])
embed.add_field(name="Total views", value=data["viewersTotal"])
if user["avatarUrl"]:
embed.set_thumbnail(url=user["avatarUrl"])
else:
embed.set_thumbnail(url=default_avatar)
if data["thumbnail"]:
embed.set_image(url=data["thumbnail"]["url"] + self.rnd_attr())
embed.color = 0x4C90F3
if data["type"] is not None:
embed.set_footer(text="Playing: " + data["type"]["name"])
return embed
def picarto_embed(self, data):
avatar = ("https://picarto.tv/user_data/usrimg/{}/dsdefault.jpg{}"
"".format(data["name"].lower(), self.rnd_attr()))
url = "https://picarto.tv/" + data["name"]
thumbnail = data["thumbnails"]["web"]
embed = discord.Embed(title=data["title"], url=url)
embed.set_author(name=data["name"])
embed.set_image(url=thumbnail + self.rnd_attr())
embed.add_field(name="Followers", value=data["followers"])
embed.add_field(name="Total views", value=data["viewers_total"])
embed.set_thumbnail(url=avatar)
embed.color = 0x132332
data["tags"] = ", ".join(data["tags"])
if not data["tags"]:
data["tags"] = "None"
if data["adult"]:
data["adult"] = "NSFW | "
else:
data["adult"] = ""
embed.color = 0x4C90F3
embed.set_footer(text="{adult}Category: {category} | Tags: {tags}"
"".format(**data))
return embed
def enable_or_disable_if_active(self, streams, stream, channel, _id=None):
"""Returns True if enabled or False if disabled"""
for i, s in enumerate(streams):
stream_id = s.get("ID")
if stream_id and _id: # ID is available, matching by ID is
if stream_id != _id: # preferable
continue
else: # ID unavailable, matching by name
if s["NAME"] != stream:
continue
if channel.id in s["CHANNELS"]:
streams[i]["CHANNELS"].remove(channel.id)
if not s["CHANNELS"]:
streams.remove(s)
return False
else:
streams[i]["CHANNELS"].append(channel.id)
return True
data = {"CHANNELS": [channel.id],
"NAME": stream,
"ALREADY_ONLINE": False}
if _id:
data["ID"] = _id
streams.append(data)
return True
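# Note (added for clarity): each entry in the persisted stream lists is a
# plain dict roughly of the form
# {"NAME": "<stream name>", "CHANNELS": ["<channel id>", ...],
#  "ALREADY_ONLINE": False, "ID": "<twitch user id, twitch streams only>"};
# the helper above toggles the calling channel's id in "CHANNELS".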
async def stream_checker(self):
CHECK_DELAY = 60
try:
await self._migration_twitch_v5()
except InvalidCredentials:
print("Error during convertion of twitch usernames to IDs: "
"invalid token")
except Exception as e:
print("Error during convertion of twitch usernames to IDs: "
"{}".format(e))
while self == self.bot.get_cog("Streams"):
save = False
streams = ((self.twitch_streams, self.twitch_online),
(self.hitbox_streams, self.hitbox_online),
(self.mixer_streams, self.mixer_online),
(self.picarto_streams, self.picarto_online))
for streams_list, parser in streams:
if parser == self.twitch_online:
_type = "ID"
else:
_type = "NAME"
for stream in streams_list:
if _type not in stream:
continue
key = (parser, stream[_type])
try:
embed = await parser(stream[_type])
except OfflineStream:
if stream["ALREADY_ONLINE"]:
stream["ALREADY_ONLINE"] = False
save = True
await self.delete_old_notifications(key)
except: # We don't want our task to die
continue
else:
if stream["ALREADY_ONLINE"]:
continue
save = True
stream["ALREADY_ONLINE"] = True
messages_sent = []
for channel_id in stream["CHANNELS"]:
channel = self.bot.get_channel(channel_id)
if channel is None:
continue
mention = self.settings.get(channel.server.id, {}).get("MENTION", "")
can_speak = channel.permissions_for(channel.server.me).send_messages
message = mention + " {} is live!".format(stream["NAME"])
if channel and can_speak:
m = await self.bot.send_message(channel, message, embed=embed)
messages_sent.append(m)
self.messages_cache[key] = messages_sent
await asyncio.sleep(0.5)
if save:
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
await asyncio.sleep(CHECK_DELAY)
async def delete_old_notifications(self, key):
for message in self.messages_cache[key]:
server = message.server
settings = self.settings.get(server.id, {})
is_enabled = settings.get("AUTODELETE", True)
try:
if is_enabled:
await self.bot.delete_message(message)
except:
pass
del self.messages_cache[key]
def rnd_attr(self):
"""Avoids Discord's caching"""
return "?rnd=" + "".join([choice(ascii_letters) for i in range(6)])
async def _migration_twitch_v5(self):
# Migration of old twitch streams to API v5
to_convert = []
for stream in self.twitch_streams:
if "ID" not in stream:
to_convert.append(stream["NAME"])
if not to_convert:
return
results = await self.fetch_twitch_ids(*to_convert)
for stream in self.twitch_streams:
for result in results:
if stream["NAME"].lower() == result["name"].lower():
stream["ID"] = result["_id"]
# We might as well delete the invalid / renamed ones
self.twitch_streams = [s for s in self.twitch_streams if "ID" in s]
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
def check_folders():
if not os.path.exists("data/streams"):
print("Creating data/streams folder...")
os.makedirs("data/streams")
def check_files():
stream_files = (
"twitch.json",
"hitbox.json",
"beam.json",
"picarto.json"
)
for filename in stream_files:
if not dataIO.is_valid_json("data/streams/" + filename):
print("Creating empty {}...".format(filename))
dataIO.save_json("data/streams/" + filename, [])
f = "data/streams/settings.json"
if not dataIO.is_valid_json(f):
print("Creating empty settings.json...")
dataIO.save_json(f, {})
def setup(bot):
logger = logging.getLogger('aiohttp.client')
logger.setLevel(50) # Stops warning spam
check_folders()
check_files()
n = Streams(bot)
loop = asyncio.get_event_loop()
loop.create_task(n.stream_checker())
bot.add_cog(n)
| gpl-3.0 |
solo2101/Tilo-Menu | src/lib/tilo/backend.py | 1 | 3706 | #!/usr/bin/env python
# This application is released under the GNU General Public License
# v3 (or, at your option, any later version). You can find the full
# text of the license under http://www.gnu.org/licenses/gpl.txt.
# By using, editing and/or distributing this software you agree to
# the terms and conditions of this license.
# Thank you for using free software!
#
#(c) Whise 2009 <helderfraga@gmail.com>
#
# backend for saving and loading settings
# Part of the Tilo
import os
try:
from gi.repository import GConf
gconf_client = GConf.Client.get_default()
BACKEND = 'gconf'
print "gconf backend"
except:
BACKEND = 'xml'
import xml.dom.minidom
print "xml backend"
HomeDirectory = os.path.expanduser("~")
ConfigDirectory = HomeDirectory + '/.tilo'
gconf_app_key = '/apps/tilo'
def save_setting(name,value):
if BACKEND == 'gconf':
if isinstance(value, int) or isinstance(value, float):
gconf_client.set_int(gconf_app_key + '/' + name , int(value))
elif isinstance(value, str):
gconf_client.set_string(gconf_app_key + '/' + name , str(value))
elif isinstance(value, list):
gconf_client.set_list(gconf_app_key + '/' + name ,1, value)
elif BACKEND == 'xml':
if name == '': return
if os.path.isfile(ConfigDirectory + "/.Tilo-Settings.xml"):
XMLSettings = xml.dom.minidom.parse(ConfigDirectory + "/.Tilo-Settings.xml")
XBase = XMLSettings.getElementsByTagName('Tilo')[0]
else:
XMLSettings = xml.dom.minidom.Document()
XBase = XMLSettings.createElement('Tilo')
try:
node = XMLSettings.getElementsByTagName('settings')[0]
except:
node = XMLSettings.createElement('settings')
node.setAttribute(name, str(value))
XBase.appendChild(node)
XMLSettings.appendChild(XBase)
file = open(ConfigDirectory + "/.Tilo-Settings.xml","w")
XMLSettings.writexml(file, " ", "", "", "UTF-8")
XMLSettings.unlink()
else:
pass
def load_setting(name):
if BACKEND == 'gconf':
try:
typ = gconf_client.get_without_default(gconf_app_key + "/" + name).type
if typ == 1:
return gconf_client.get_string(gconf_app_key + "/" + name)
elif typ == 2:
return gconf_client.get_int(gconf_app_key + "/" + name)
elif typ == 6:
return gconf_client.get_list(gconf_app_key + "/" + name,1)
else:
if name == 'favorites': return []
return None
except :
if name == 'favorites': return []
return None
elif BACKEND == 'xml':
if os.path.isfile(ConfigDirectory + "/.Tilo-Settings.xml"):
XMLSettings = xml.dom.minidom.parse(ConfigDirectory + "/.Tilo-Settings.xml")
#print XMLSettings.getElementsByTagName('Tilo')[0].childNodes[0].localName
x = XMLSettings.getElementsByTagName('Tilo')[0].getElementsByTagName("settings")[0]
try:
x = x.attributes[name].value
try:
a = int(x)
except:
if str(x).find('[]') != -1 and name == 'favorites': return []
if str(x).find(':') != -1:
x = str(x).replace(" u'","").replace("u'","").replace("[","").replace("]","").replace("'","").replace('"','"')
a = x.split(',')
print a
else:
a = str(x)
return a
except:
if name == 'favorites': return []
return None
else:
return None
else:
pass
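# Illustrative usage (added sketch; the key name is hypothetical):
#
# save_setting('menu_theme', 'default')  # GConf key /apps/tilo/menu_theme, or
#                                         # ~/.tilo/.Tilo-Settings.xml with the xml backend
# theme = load_setting('menu_theme')      # stored value, or None when missing
#                                         # ('favorites' falls back to [] instead)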
def get_default_mail_client():
if BACKEND == 'gconf':
return gconf_client.get_string("/desktop/mate/url-handlers/mailto/command")
elif BACKEND == 'xml':
return "xdg-open mailto:"
else:
pass
def get_default_internet_browser():
if BACKEND == 'gconf':
return gconf_client.get_string("/desktop/mate/url-handlers/http/command")#"/desktop/mate/applications/browser/exec")
elif BACKEND == 'xml':
return "xdg-open http:"
else:
pass
| gpl-2.0 |
gangadhar-kadam/mic-wnframework | webnotes/widgets/form/assign_to.py | 6 | 5492 | # Copyright (c) 2012 Web Notes Technologies Pvt Ltd (http://erpnext.com)
#
# MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import unicode_literals
"""assign/unassign to ToDo"""
import webnotes
@webnotes.whitelist()
def get(args=None):
"""get assigned to"""
if not args:
args = webnotes.form_dict
return webnotes.conn.sql("""select owner from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
order by modified desc limit 5""", args, as_dict=1)
@webnotes.whitelist()
def add(args=None):
"""add in someone's to do list"""
if not args:
args = webnotes.form_dict
if webnotes.conn.sql("""select owner from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
and owner=%(assign_to)s""", args):
webnotes.msgprint("Already in todo")
return
else:
from webnotes.model.doc import Document
from webnotes.utils import nowdate
d = Document("ToDo")
d.owner = args['assign_to']
d.reference_type = args['doctype']
d.reference_name = args['name']
d.description = args['description']
d.priority = args.get('priority', 'Medium')
d.date = args.get('date', nowdate())
d.assigned_by = args.get('assigned_by', webnotes.user.name)
d.save(1)
# set assigned_to if field exists
from webnotes.model.meta import has_field
if has_field(args['doctype'], "assigned_to"):
webnotes.conn.set_value(args['doctype'], args['name'], "assigned_to", args['assign_to'])
# notify
if not args.get("no_notification"):
notify_assignment(d.assigned_by, d.owner, d.reference_type, d.reference_name, action='ASSIGN', description=args.get("description"), notify=args.get('notify'))
# update feed
try:
import home
from webnotes.utils import get_fullname
home.make_feed('Assignment', d.reference_type, d.reference_name, webnotes.session['user'],
'[%s] Assigned to %s' % (d.priority, get_fullname(d.owner)), '#C78F58')
except ImportError, e:
pass
return get(args)
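# Illustrative call (added sketch; the doctype, name and user values are
# hypothetical):
#
# add({
#     "doctype": "Task",
#     "name": "TASK00001",
#     "assign_to": "user@example.com",
#     "description": "Please review",
#     "priority": "High",
# })
#
# When invoked over HTTP the same fields are read from webnotes.form_dict.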
@webnotes.whitelist()
def remove(doctype, name, assign_to):
"""remove from todo"""
res = webnotes.conn.sql("""\
select assigned_by, owner, reference_type, reference_name from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
and owner=%(assign_to)s""", locals())
webnotes.conn.sql("""delete from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
and owner=%(assign_to)s""", locals())
# clear assigned_to if field exists
from webnotes.model.meta import has_field
if has_field(doctype, "assigned_to"):
webnotes.conn.set_value(doctype, name, "assigned_to", None)
if res and res[0]: notify_assignment(res[0][0], res[0][1], res[0][2], res[0][3])
return get({"doctype": doctype, "name": name})
def clear(doctype, name):
for assign_to in webnotes.conn.sql_list("""select owner from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s""", locals()):
remove(doctype, name, assign_to)
def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE',
description=None, notify=0):
"""
Notify assignee that there is a change in assignment
"""
if not (assigned_by and owner and doc_type and doc_name): return
# self assignment / closing - no message
if assigned_by==owner:
return
from webnotes.boot import get_fullnames
user_info = get_fullnames()
# Search for email address in description -- i.e. assignee
from webnotes.utils import get_url_to_form
assignment = get_url_to_form(doc_type, doc_name, label="%s: %s" % (doc_type, doc_name))
if action=='CLOSE':
if owner == webnotes.session.get('user'):
arg = {
'contact': assigned_by,
'txt': "The task %s, that you assigned to %s, has been \
closed." % (assignment,
user_info.get(owner, {}).get('fullname'))
}
else:
arg = {
'contact': assigned_by,
'txt': "The task %s, that you assigned to %s, \
has been closed by %s." % (assignment,
user_info.get(owner, {}).get('fullname'),
user_info.get(webnotes.session.get('user'),
{}).get('fullname'))
}
else:
arg = {
'contact': owner,
'txt': "A new task, %s, has been assigned to you by %s. %s" \
% (assignment,
user_info.get(webnotes.session.get('user'), {}).get('fullname'),
description and ("<p>Description: " + description + "</p>") or ""),
'notify': notify
}
arg["parenttype"] = "Assignment"
from core.page.messages import messages
import json
messages.post(json.dumps(arg))
| mit |
chromium/chromium | third_party/pylint/pylint/checkers/similar.py | 64 | 14174 | # pylint: disable=W0622
# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""a similarities / code duplication command line tool and pylint checker
"""
from __future__ import print_function
import sys
from collections import defaultdict
from logilab.common.ureports import Table
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker, table_lines_from_stats
import six
from six.moves import zip
class Similar(object):
"""finds copy-pasted lines of code in a project"""
def __init__(self, min_lines=4, ignore_comments=False,
ignore_docstrings=False, ignore_imports=False):
self.min_lines = min_lines
self.ignore_comments = ignore_comments
self.ignore_docstrings = ignore_docstrings
self.ignore_imports = ignore_imports
self.linesets = []
def append_stream(self, streamid, stream, encoding=None):
"""append a file to search for similarities"""
if encoding is None:
readlines = stream.readlines
else:
readlines = lambda: [line.decode(encoding) for line in stream]
try:
self.linesets.append(LineSet(streamid,
readlines(),
self.ignore_comments,
self.ignore_docstrings,
self.ignore_imports))
except UnicodeDecodeError:
pass
def run(self):
"""start looking for similarities and display results on stdout"""
self._display_sims(self._compute_sims())
def _compute_sims(self):
"""compute similarities in appended files"""
no_duplicates = defaultdict(list)
for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
duplicate = no_duplicates[num]
for couples in duplicate:
if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
couples.add((lineset1, idx1))
couples.add((lineset2, idx2))
break
else:
duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
sims = []
for num, ensembles in six.iteritems(no_duplicates):
for couples in ensembles:
sims.append((num, couples))
sims.sort()
sims.reverse()
return sims
def _display_sims(self, sims):
"""display computed similarities on stdout"""
nb_lignes_dupliquees = 0
for num, couples in sims:
print()
print(num, "similar lines in", len(couples), "files")
couples = sorted(couples)
for lineset, idx in couples:
print("==%s:%s" % (lineset.name, idx))
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
print(" ", line.rstrip())
nb_lignes_dupliquees += num * (len(couples)-1)
nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
print("TOTAL lines=%s duplicates=%s percent=%.2f" \
% (nb_total_lignes, nb_lignes_dupliquees,
nb_lignes_dupliquees*100. / nb_total_lignes))
def _find_common(self, lineset1, lineset2):
"""find similarities in the two given linesets"""
lines1 = lineset1.enumerate_stripped
lines2 = lineset2.enumerate_stripped
find = lineset2.find
index1 = 0
min_lines = self.min_lines
while index1 < len(lineset1):
skip = 1
num = 0
for index2 in find(lineset1[index1]):
non_blank = 0
for num, ((_, line1), (_, line2)) in enumerate(
zip(lines1(index1), lines2(index2))):
if line1 != line2:
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
skip = max(skip, num)
break
if line1:
non_blank += 1
else:
# we may have reached the end
num += 1
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
skip = max(skip, num)
index1 += skip
def _iter_sims(self):
"""iterate on similarities among all files, by making a cartesian
product
"""
for idx, lineset in enumerate(self.linesets[:-1]):
for lineset2 in self.linesets[idx+1:]:
for sim in self._find_common(lineset, lineset2):
yield sim
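# Illustrative programmatic usage (added sketch; the file names are
# placeholders -- the Run() helper at the bottom of this module does
# essentially the same thing):
#
# sim = Similar(min_lines=4, ignore_comments=True)
# for filename in ("a.py", "b.py"):
#     with open(filename) as stream:
#         sim.append_stream(filename, stream)
# sim.run()  # prints each duplicated block and a TOTAL summary line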
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
"""return lines with leading/trailing whitespace and any ignored code
features removed
"""
strippedlines = []
docstring = None
for line in lines:
line = line.strip()
if ignore_docstrings:
if not docstring and \
(line.startswith('"""') or line.startswith("'''")):
docstring = line[:3]
line = line[3:]
if docstring:
if line.endswith(docstring):
docstring = None
line = ''
if ignore_imports:
if line.startswith("import ") or line.startswith("from "):
line = ''
if ignore_comments:
# XXX should use regex in checkers/format to avoid cutting
# at a "#" in a string
line = line.split('#', 1)[0].strip()
strippedlines.append(line)
return strippedlines
class LineSet(object):
"""Holds and indexes all the lines of a single source file"""
def __init__(self, name, lines, ignore_comments=False,
ignore_docstrings=False, ignore_imports=False):
self.name = name
self._real_lines = lines
self._stripped_lines = stripped_lines(lines, ignore_comments,
ignore_docstrings,
ignore_imports)
self._index = self._mk_index()
def __str__(self):
return '<Lineset for %s>' % self.name
def __len__(self):
return len(self._real_lines)
def __getitem__(self, index):
return self._stripped_lines[index]
def __lt__(self, other):
return self.name < other.name
def __hash__(self):
return id(self)
def enumerate_stripped(self, start_at=0):
"""return an iterator on stripped lines, starting from a given index
if specified, else 0
"""
idx = start_at
if start_at:
lines = self._stripped_lines[start_at:]
else:
lines = self._stripped_lines
for line in lines:
#if line:
yield idx, line
idx += 1
def find(self, stripped_line):
"""return positions of the given stripped line in this set"""
return self._index.get(stripped_line, ())
def _mk_index(self):
"""create the index for this set"""
index = defaultdict(list)
for line_no, line in enumerate(self._stripped_lines):
if line:
index[line].append(line_no)
return index
MSGS = {'R0801': ('Similar lines in %s files\n%s',
'duplicate-code',
'Indicates that a set of similar lines has been detected \
among multiple files. This usually means that the code should \
be refactored to avoid this duplication.')}
def report_similarities(sect, stats, old_stats):
"""make a layout with some stats about duplication"""
lines = ['', 'now', 'previous', 'difference']
lines += table_lines_from_stats(stats, old_stats,
('nb_duplicated_lines',
'percent_duplicated_lines'))
sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
"""checks for similarities and duplicated code. This computation may be
memory / CPU intensive, so you should disable it if you experience some
problems.
"""
__implements__ = (IRawChecker,)
# configuration section name
name = 'similarities'
# messages
msgs = MSGS
# configuration options
# for available dict keys/values see the optik parser 'add_option' method
options = (('min-similarity-lines',
{'default' : 4, 'type' : "int", 'metavar' : '<int>',
'help' : 'Minimum lines number of a similarity.'}),
('ignore-comments',
{'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore comments when computing similarities.'}
),
('ignore-docstrings',
{'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore docstrings when computing similarities.'}
),
('ignore-imports',
{'default' : False, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore imports when computing similarities.'}
),
)
# reports
reports = (('RP0801', 'Duplication', report_similarities),)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
Similar.__init__(self, min_lines=4,
ignore_comments=True, ignore_docstrings=True)
self.stats = None
def set_option(self, optname, value, action=None, optdict=None):
"""method called to set an option (registered in the options list)
overridden to report options setting to Similar
"""
BaseChecker.set_option(self, optname, value, action, optdict)
if optname == 'min-similarity-lines':
self.min_lines = self.config.min_similarity_lines
elif optname == 'ignore-comments':
self.ignore_comments = self.config.ignore_comments
elif optname == 'ignore-docstrings':
self.ignore_docstrings = self.config.ignore_docstrings
elif optname == 'ignore-imports':
self.ignore_imports = self.config.ignore_imports
def open(self):
"""init the checkers: reset linesets and statistics information"""
self.linesets = []
self.stats = self.linter.add_stats(nb_duplicated_lines=0,
percent_duplicated_lines=0)
def process_module(self, node):
"""process a module
the module's content is accessible via the stream object
stream must implement the readlines method
"""
with node.stream() as stream:
self.append_stream(self.linter.current_name,
stream,
node.file_encoding)
def close(self):
"""compute and display similarities on closing (i.e. end of parsing)"""
total = sum([len(lineset) for lineset in self.linesets])
duplicated = 0
stats = self.stats
for num, couples in self._compute_sims():
msg = []
for lineset, idx in couples:
msg.append("==%s:%s" % (lineset.name, idx))
msg.sort()
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
msg.append(line.rstrip())
self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
duplicated += num * (len(couples) - 1)
stats['nb_duplicated_lines'] = duplicated
stats['percent_duplicated_lines'] = total and duplicated * 100. / total
def register(linter):
"""required method to auto register this checker """
linter.register_checker(SimilarChecker(linter))
def usage(status=0):
"""display command line usage information"""
print("finds copy pasted blocks in a set of files")
print()
print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
sys.exit(status)
def Run(argv=None):
"""standalone command line access point"""
if argv is None:
argv = sys.argv[1:]
from getopt import getopt
s_opts = 'hdi'
l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
'ignore-docstrings')
min_lines = 4
ignore_comments = False
ignore_docstrings = False
ignore_imports = False
opts, args = getopt(argv, s_opts, l_opts)
for opt, val in opts:
if opt in ('-d', '--duplicates'):
min_lines = int(val)
elif opt in ('-h', '--help'):
usage()
elif opt in ('-i', '--ignore-comments'):
ignore_comments = True
elif opt in ('--ignore-docstrings',):
ignore_docstrings = True
elif opt in ('--ignore-imports',):
ignore_imports = True
if not args:
usage(1)
sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
for filename in args:
with open(filename) as stream:
sim.append_stream(filename, stream)
sim.run()
sys.exit(0)
if __name__ == '__main__':
Run()
| bsd-3-clause |
ff94315/hiwifi-openwrt-HC5661-HC5761 | staging_dir/target-mipsel_r2_uClibc-0.9.33.2/root-ralink/usr/lib/python2.7/tokenize.py | 122 | 16465 | """Tokenization help for Python programs.
generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators.
Older entry points
tokenize_loop(readline, tokeneater)
tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger')
import string, re
from token import *
import token
__all__ = [x for x in dir(token) if not x.startswith("_")]
__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
del x
del token
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'
Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
Binnumber = r'0[bB][01]+[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
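# For illustration (examples added, not part of the original module): Number
# matches Python 2 literals such as 0x1fL, 0o17, 0b101, 42L, 3.14, .5e-10
# and 2.5j.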
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
r"//=?",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
tokenprog, pseudoprog, single3prog, double3prog = map(
re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
"'''": single3prog, '"""': double3prog,
"r'''": single3prog, 'r"""': double3prog,
"u'''": single3prog, 'u"""': double3prog,
"ur'''": single3prog, 'ur"""': double3prog,
"R'''": single3prog, 'R"""': double3prog,
"U'''": single3prog, 'U"""': double3prog,
"uR'''": single3prog, 'uR"""': double3prog,
"Ur'''": single3prog, 'Ur"""': double3prog,
"UR'''": single3prog, 'UR"""': double3prog,
"b'''": single3prog, 'b"""': double3prog,
"br'''": single3prog, 'br"""': double3prog,
"B'''": single3prog, 'B"""': double3prog,
"bR'''": single3prog, 'bR"""': double3prog,
"Br'''": single3prog, 'Br"""': double3prog,
"BR'''": single3prog, 'BR"""': double3prog,
'r': None, 'R': None, 'u': None, 'U': None,
'b': None, 'B': None}
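# Note (added for clarity): endprogs maps an opening quote, optionally with a
# string prefix, to the compiled pattern matching the rest of that string;
# bare prefix characters map to None and are resolved once the actual quote
# character is seen.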
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"u'''", 'u"""', "U'''", 'U"""',
"ur'''", 'ur"""', "Ur'''", 'Ur"""',
"uR'''", 'uR"""', "UR'''", 'UR"""',
"b'''", 'b"""', "B'''", 'B"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""'):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"u'", 'u"', "U'", 'U"',
"ur'", 'ur"', "Ur'", 'Ur"',
"uR'", 'uR"', "UR'", 'UR"',
"b'", 'b"', "B'", 'B"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"' ):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
srow, scol = srow_scol
erow, ecol = erow_ecol
print "%d,%d-%d,%d:\t%s\t%s" % \
(srow, scol, erow, ecol, tok_name[type], repr(token))
def tokenize(readline, tokeneater=printtoken):
"""
The tokenize() function accepts two parameters: one representing the
input stream, and one providing an output mechanism for tokenize().
The first parameter, readline, must be a callable object which provides
the same interface as the readline() method of built-in file objects.
Each call to the function should return one line of input as a string.
The second parameter, tokeneater, must also be a callable object. It is
called once for each token, with five arguments, corresponding to the
tuples generated by generate_tokens().
"""
try:
tokenize_loop(readline, tokeneater)
except StopTokenizing:
pass
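# Usage sketch (illustrative, not part of the original module): tokenize() drives
# a callback that takes the same five arguments printtoken() receives above.
#     def count_names(type, token, start, end, line):
#         if type == NAME:
#             counts[token] = counts.get(token, 0) + 1
#     counts = {}
#     tokenize(open('some_module.py').readline, count_names)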
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
for token_info in generate_tokens(readline):
tokeneater(*token_info)
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
def add_whitespace(self, start):
row, col = start
assert row <= self.prev_row
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
for t in iterable:
if len(t) == 2:
self.compat(t, iterable)
break
tok_type, token, start, end, line = t
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
startline = False
indents = []
toks_append = self.tokens.append
toknum, tokval = token
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum in (NEWLINE, NL):
startline = True
prevstring = False
for tok in iterable:
toknum, tokval = tok[:2]
if toknum in (NAME, NUMBER):
tokval += ' '
# Insert a space between two consecutive strings
if toknum == STRING:
if prevstring:
tokval = ' ' + tokval
prevstring = True
else:
prevstring = False
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
def untokenize(iterable):
"""Transform tokens back into Python source code.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)
readline = iter(newcode.splitlines(1)).next
t2 = [tok[:2] for tok in generate_tokens(readline)]
assert t1 == t2
"""
ut = Untokenizer()
return ut.untokenize(iterable)
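# Usage sketch (illustrative): a limited two-tuple round trip; spacing in the
# regenerated source is approximate, but tokenizing it again yields the same
# (type, string) stream, as the docstring's invariant promises.
#     from StringIO import StringIO
#     toks = [t[:2] for t in generate_tokens(StringIO("x = 1 + 2\n").readline)]
#     newcode = untokenize(toks)        # e.g. 'x =1 +2 \n'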
def generate_tokens(readline):
"""
    The generate_tokens() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile).next # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
"""
lnum = parenlev = continued = 0
namechars, numchars = string.ascii_letters + '_', '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
while 1: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = ''
lnum += 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError, ("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield (STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield (ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ':
column += 1
elif line[pos] == '\t':
column = (column//tabsize + 1)*tabsize
elif line[pos] == '\f':
column = 0
else:
break
pos += 1
if pos == max:
break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield (COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield (NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError, ("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = pseudoprog.match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
token, initial = line[start:end], line[start]
if initial in numchars or \
(initial == '.' and token != '.'): # ordinary number
yield (NUMBER, token, spos, epos, line)
elif initial in '\r\n':
yield (NL if parenlev > 0 else NEWLINE,
token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield (COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = endprogs[token]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield (STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = (endprogs[initial] or endprogs[token[1]] or
endprogs[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield (STRING, token, spos, epos, line)
elif initial in namechars: # ordinary name
yield (NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
continued = 1
else:
if initial in '([{':
parenlev += 1
elif initial in ')]}':
parenlev -= 1
yield (OP, token, spos, epos, line)
else:
yield (ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos += 1
for indent in indents[1:]: # pop remaining indent levels
yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
if __name__ == '__main__': # testing
import sys
if len(sys.argv) > 1:
tokenize(open(sys.argv[1]).readline)
else:
tokenize(sys.stdin.readline)
| gpl-2.0 |
shobhitka/linux-kernel | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
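# Illustration (not part of the original script): each 4-byte chunk is one
# little-endian 32-bit value, so b"\x0a\x00\x00\x00" read at position 0 unpacks
# to 10 and is printed as "0=10" (index in hex, value in decimal).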
import sys
import struct
i = 0
while True:
buf = sys.stdin.read(4)
if len(buf) == 0:
break
elif len(buf) != 4:
sys.stdout.write("\n")
sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
sys.exit(1)
if i > 0:
sys.stdout.write(" ")
sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
i += 1
sys.stdout.write("\n")
| gpl-2.0 |
pando85/gourmet | gourmet/plugins/field_editor/__init__.py | 7 | 1175 | from gourmet.plugin import ToolPlugin
import fieldEditor
import gtk
from gettext import gettext as _
class FieldEditorPlugin (ToolPlugin):
menu_items = '''<placeholder name="DataTool">
<menuitem action="FieldEditor"/>
</placeholder>
'''
def setup_action_groups (self):
self.action_group = gtk.ActionGroup('FieldEditorPluginActionGroup')
self.action_group.add_actions([
('FieldEditor',None,_('Field Editor'),
None,_('Edit fields across multiple recipes at a time.'),self.show_field_editor
),
])
self.action_groups.append(self.action_group)
def show_field_editor (self, *args):
from gourmet.GourmetRecipeManager import get_application
self.app = get_application()
self.field_editor = fieldEditor.FieldEditor(self.app.rd, self.app)
self.field_editor.valueDialog.connect('response',self.response_cb)
self.field_editor.show()
def response_cb (self, d, r):
if r==gtk.RESPONSE_APPLY:
self.app.update_attribute_models()
plugins = [FieldEditorPlugin]
| gpl-2.0 |
christian-posta/openshift-ansible | filter_plugins/oo_filters.py | 14 | 14087 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
'''
Custom filters for use in openshift-ansible
'''
from ansible import errors
from operator import itemgetter
import pdb
import re
import json
class FilterModule(object):
''' Custom ansible filters '''
@staticmethod
def oo_pdb(arg):
''' This pops you into a pdb instance where arg is the data passed in
from the filter.
Ex: "{{ hostvars | oo_pdb }}"
'''
pdb.set_trace()
return arg
@staticmethod
def get_attr(data, attribute=None):
''' This looks up dictionary attributes of the form a.b.c and returns
the value.
Ex: data = {'a': {'b': {'c': 5}}}
attribute = "a.b.c"
returns 5
'''
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
ptr = data
for attr in attribute.split('.'):
ptr = ptr[attr]
return ptr
@staticmethod
def oo_flatten(data):
''' This filter plugin will flatten a list of lists
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to flatten a List")
return [item for sublist in data for item in sublist]
@staticmethod
def oo_collect(data, attribute=None, filters=None):
''' This takes a list of dict and collects all attributes specified into a
list. If filter is specified then we will include all items that
match _ALL_ of filters. If a dict entry is missing the key in a
filter it will be excluded from the match.
Ex: data = [ {'a':1, 'b':5, 'z': 'z'}, # True, return
{'a':2, 'z': 'z'}, # True, return
{'a':3, 'z': 'z'}, # True, return
                         {'a':4, 'z': 'b'}, # FAILED, obj['z'] != filters['z']
]
attribute = 'a'
filters = {'z': 'z'}
returns [1, 2, 3]
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to filter on a List")
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
if filters is not None:
if not issubclass(type(filters), dict):
raise errors.AnsibleFilterError("|failed expects filter to be a"
" dict")
retval = [FilterModule.get_attr(d, attribute) for d in data if (
all([d.get(key, None) == filters[key] for key in filters]))]
else:
retval = [FilterModule.get_attr(d, attribute) for d in data]
return retval
@staticmethod
def oo_select_keys_from_list(data, keys):
''' This returns a list, which contains the value portions for the keys
            Ex: data = [ { 'a':1, 'b':2, 'c':3 } ]
keys = ['a', 'c']
returns [1, 3]
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
if not issubclass(type(keys), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [FilterModule.oo_select_keys(item, keys) for item in data]
return FilterModule.oo_flatten(retval)
@staticmethod
def oo_select_keys(data, keys):
''' This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
'''
if not issubclass(type(data), dict):
raise errors.AnsibleFilterError("|failed expects to filter on a dict")
if not issubclass(type(keys), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [data[key] for key in keys if data.has_key(key)]
return retval
@staticmethod
def oo_prepend_strings_in_list(data, prepend):
''' This takes a list of strings and prepends a string to each item in the
list
Ex: data = ['cart', 'tree']
prepend = 'apple-'
returns ['apple-cart', 'apple-tree']
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not all(isinstance(x, basestring) for x in data):
raise errors.AnsibleFilterError("|failed expects first param is a list"
" of strings")
retval = [prepend + s for s in data]
return retval
@staticmethod
def oo_combine_key_value(data, joiner='='):
'''Take a list of dict in the form of { 'key': 'value'} and
arrange them as a list of strings ['key=value']
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
rval = []
for item in data:
rval.append("%s%s%s" % (item['key'], joiner, item['value']))
return rval
@staticmethod
def oo_combine_dict(data, in_joiner='=', out_joiner=' '):
'''Take a dict in the form of { 'key': 'value', 'key': 'value' } and
arrange them as a string 'key=value key=value'
'''
if not issubclass(type(data), dict):
raise errors.AnsibleFilterError("|failed expects first param is a dict")
return out_joiner.join([in_joiner.join([k, v]) for k, v in data.items()])
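    # Illustrative example (same spirit as the docstrings above; not in the
    # original source):
    #   FilterModule.oo_combine_dict({'region': 'us-east-1', 'zone': 'a'})
    #   returns 'region=us-east-1 zone=a' (order follows dict iteration order)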
@staticmethod
def oo_ami_selector(data, image_name):
''' This takes a list of amis and an image name and attempts to return
the latest ami.
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not data:
return None
else:
if image_name is None or not image_name.endswith('_*'):
ami = sorted(data, key=itemgetter('name'), reverse=True)[0]
return ami['ami_id']
else:
ami_info = [(ami, ami['name'].split('_')[-1]) for ami in data]
ami = sorted(ami_info, key=itemgetter(1), reverse=True)[0][0]
return ami['ami_id']
@staticmethod
def oo_ec2_volume_definition(data, host_type, docker_ephemeral=False):
''' This takes a dictionary of volume definitions and returns a valid ec2
volume definition based on the host_type and the values in the
dictionary.
The dictionary should look similar to this:
{ 'master':
{ 'root':
{ 'volume_size': 10, 'device_type': 'gp2',
'iops': 500
}
},
'node':
{ 'root':
{ 'volume_size': 10, 'device_type': 'io1',
'iops': 1000
},
'docker':
{ 'volume_size': 40, 'device_type': 'gp2',
'iops': 500, 'ephemeral': 'true'
}
}
}
'''
if not issubclass(type(data), dict):
raise errors.AnsibleFilterError("|failed expects first param is a dict")
if host_type not in ['master', 'node', 'etcd']:
raise errors.AnsibleFilterError("|failed expects etcd, master or node"
" as the host type")
root_vol = data[host_type]['root']
root_vol['device_name'] = '/dev/sda1'
root_vol['delete_on_termination'] = True
if root_vol['device_type'] != 'io1':
root_vol.pop('iops', None)
if host_type == 'node':
docker_vol = data[host_type]['docker']
docker_vol['device_name'] = '/dev/xvdb'
docker_vol['delete_on_termination'] = True
if docker_vol['device_type'] != 'io1':
docker_vol.pop('iops', None)
if docker_ephemeral:
docker_vol.pop('device_type', None)
docker_vol.pop('delete_on_termination', None)
docker_vol['ephemeral'] = 'ephemeral0'
return [root_vol, docker_vol]
elif host_type == 'etcd':
etcd_vol = data[host_type]['etcd']
etcd_vol['device_name'] = '/dev/xvdb'
etcd_vol['delete_on_termination'] = True
if etcd_vol['device_type'] != 'io1':
etcd_vol.pop('iops', None)
return [root_vol, etcd_vol]
return [root_vol]
@staticmethod
def oo_split(string, separator=','):
''' This splits the input string into a list
'''
return string.split(separator)
@staticmethod
def oo_filter_list(data, filter_attr=None):
''' This returns a list, which contains all items where filter_attr
evaluates to true
Ex: data = [ { a: 1, b: True },
{ a: 3, b: False },
{ a: 5, b: True } ]
filter_attr = 'b'
returns [ { a: 1, b: True },
{ a: 5, b: True } ]
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
if not issubclass(type(filter_attr), str):
raise errors.AnsibleFilterError("|failed expects filter_attr is a str")
# Gather up the values for the list of keys passed in
return [x for x in data if x[filter_attr]]
@staticmethod
def oo_parse_heat_stack_outputs(data):
''' Formats the HEAT stack output into a usable form
The goal is to transform something like this:
+---------------+-------------------------------------------------+
| Property | Value |
+---------------+-------------------------------------------------+
| capabilities | [] | |
| creation_time | 2015-06-26T12:26:26Z | |
| description | OpenShift cluster | |
| … | … |
| outputs | [ |
| | { |
| | "output_value": "value_A" |
| | "description": "This is the value of Key_A" |
| | "output_key": "Key_A" |
| | }, |
| | { |
| | "output_value": [ |
| | "value_B1", |
| | "value_B2" |
| | ], |
| | "description": "This is the value of Key_B" |
| | "output_key": "Key_B" |
| | }, |
| | ] |
| parameters | { |
| … | … |
+---------------+-------------------------------------------------+
into something like this:
{
"Key_A": "value_A",
"Key_B": [
"value_B1",
"value_B2"
]
}
'''
# Extract the “outputs” JSON snippet from the pretty-printed array
in_outputs = False
outputs = ''
line_regex = re.compile(r'\|\s*(.*?)\s*\|\s*(.*?)\s*\|')
for line in data['stdout_lines']:
match = line_regex.match(line)
if match:
if match.group(1) == 'outputs':
in_outputs = True
elif match.group(1) != '':
in_outputs = False
if in_outputs:
outputs += match.group(2)
outputs = json.loads(outputs)
# Revamp the “outputs” to put it in the form of a “Key: value” map
revamped_outputs = {}
for output in outputs:
revamped_outputs[output['output_key']] = output['output_value']
return revamped_outputs
def filters(self):
''' returns a mapping of filters to methods '''
return {
"oo_select_keys": self.oo_select_keys,
"oo_select_keys_from_list": self.oo_select_keys_from_list,
"oo_collect": self.oo_collect,
"oo_flatten": self.oo_flatten,
"oo_pdb": self.oo_pdb,
"oo_prepend_strings_in_list": self.oo_prepend_strings_in_list,
"oo_ami_selector": self.oo_ami_selector,
"oo_ec2_volume_definition": self.oo_ec2_volume_definition,
"oo_combine_key_value": self.oo_combine_key_value,
"oo_combine_dict": self.oo_combine_dict,
"oo_split": self.oo_split,
"oo_filter_list": self.oo_filter_list,
"oo_parse_heat_stack_outputs": self.oo_parse_heat_stack_outputs
}
| apache-2.0 |
updownlife/multipleK | dependencies/biopython-1.65/build/lib.linux-x86_64-2.7/Bio/PopGen/FDist/Utils.py | 3 | 6908 | # Copyright 2007 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from Bio.PopGen.GenePop import FileParser
import Bio.PopGen.FDist
# Quite a few utility functions could be done (like remove pop,
# add locus, etc...). The recommended strategy is convert back
# and forth from/to GenePop and use GenePop Utils
def convert_genepop_to_fdist(gp_rec, report_pops=None):
"""Converts a GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (either standard or big)
Returns:
FDist record.
"""
if hasattr(gp_rec, "populations"):
return _convert_genepop_to_fdist(gp_rec)
else:
return _convert_genepop_to_fdist_big(gp_rec, report_pops)
def _convert_genepop_to_fdist(gp_rec):
"""Converts a standard GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (Standard)
Returns:
FDist record.
"""
fd_rec = Bio.PopGen.FDist.Record()
fd_rec.data_org = 0
fd_rec.num_loci = len(gp_rec.loci_list)
fd_rec.num_pops = len(gp_rec.populations)
for lc_i in range(len(gp_rec.loci_list)):
alleles = []
pop_data = []
for pop_i in range(len(gp_rec.populations)):
for indiv in gp_rec.populations[pop_i]:
for al in indiv[1][lc_i]:
if al is not None and al not in alleles:
alleles.append(al)
alleles.sort() # Dominance requires this
# here we go again (necessary...)
for pop_i in range(len(gp_rec.populations)):
allele_counts = {}
for indiv in gp_rec.populations[pop_i]:
for al in indiv[1][lc_i]:
if al is not None:
count = allele_counts.get(al, 0)
allele_counts[al] = count + 1
allele_array = [] # We need the same order as in alleles
for allele in alleles:
allele_array.append(allele_counts.get(allele, 0))
pop_data.append(allele_array)
fd_rec.loci_data.append((len(alleles), pop_data))
return fd_rec
def _convert_genepop_to_fdist_big(gp_rec, report_pops=None):
"""Converts a big GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (Big)
Returns:
FDist record.
"""
fd_rec = Bio.PopGen.FDist.Record()
fd_rec.data_org = 1
fd_rec.num_loci = len(gp_rec.loci_list)
num_loci = len(gp_rec.loci_list)
loci = []
for i in range(num_loci):
loci.append(set())
pops = []
work_rec = FileParser.read(gp_rec.fname)
lParser = work_rec.get_individual()
def init_pop():
my_pop = []
for i in range(num_loci):
my_pop.append({})
return my_pop
curr_pop = init_pop()
num_pops = 1
if report_pops:
report_pops(num_pops)
while lParser:
if lParser is not True:
for loci_pos in range(num_loci):
for al in lParser[1][loci_pos]:
if al is not None:
loci[loci_pos].add(al)
curr_pop[loci_pos][al]= curr_pop[loci_pos].get(al, 0)+1
else:
pops.append(curr_pop)
num_pops += 1
if report_pops:
report_pops(num_pops)
curr_pop = init_pop()
lParser = work_rec.get_individual()
work_rec._handle.close() # TODO - Needs a proper fix
pops.append(curr_pop)
fd_rec.num_pops = num_pops
for loci_pos in range(num_loci):
alleles = sorted(loci[loci_pos])
loci_rec = [len(alleles), []]
for pop in pops:
pop_rec = []
for allele in alleles:
pop_rec.append(pop[loci_pos].get(allele, 0))
loci_rec[1].append(pop_rec)
fd_rec.loci_data.append(tuple(loci_rec))
return fd_rec
def _convert_genepop_to_fdist_big_old(gp_rec, report_loci=None):
"""Converts a big GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (Big)
Returns:
FDist record.
"""
fd_rec = Bio.PopGen.FDist.Record()
def countPops(rec):
f2 = FileParser.read(rec.fname)
popCnt = 1
while f2.skip_population():
popCnt += 1
return popCnt
fd_rec.data_org = 0
fd_rec.num_loci = len(gp_rec.loci_list)
work_rec0 = FileParser.read(gp_rec.fname)
fd_rec.num_pops = countPops(work_rec0)
num_loci = len(gp_rec.loci_list)
for lc_i in range(num_loci):
if report_loci:
report_loci(lc_i, num_loci)
work_rec = FileParser.read(gp_rec.fname)
work_rec2 = FileParser.read(gp_rec.fname)
alleles = []
pop_data = []
lParser = work_rec.get_individual()
while lParser:
if lParser is not True:
for al in lParser[1][lc_i]:
if al is not None and al not in alleles:
alleles.append(al)
lParser = work_rec.get_individual()
# here we go again (necessary...)
alleles.sort()
def process_pop(pop_data, alleles, allele_counts):
allele_array = [] # We need the same order as in alleles
for allele in alleles:
allele_array.append(allele_counts.get(allele, 0))
pop_data.append(allele_array)
lParser = work_rec2.get_individual()
allele_counts = {}
for allele in alleles:
allele_counts[allele] = 0
allele_counts[None]=0
while lParser:
if lParser is True:
process_pop(pop_data, alleles, allele_counts)
allele_counts = {}
for allele in alleles:
allele_counts[allele] = 0
allele_counts[None]=0
else:
for al in lParser[1][lc_i]:
allele_counts[al] += 1
lParser = work_rec2.get_individual()
process_pop(pop_data, alleles, allele_counts)
fd_rec.loci_data.append((len(alleles), pop_data))
return fd_rec
def approximate_fst(desired_fst, simulated_fst, parameter_fst,
max_run_fst=1, min_run_fst=0, limit=0.005):
"""Calculates the next Fst attempt in order to approximate a
desired Fst.
"""
if abs(simulated_fst - desired_fst) < limit:
return parameter_fst, max_run_fst, min_run_fst
if simulated_fst > desired_fst:
max_run_fst = parameter_fst
next_parameter_fst = (min_run_fst + parameter_fst)/2
else:
min_run_fst = parameter_fst
next_parameter_fst = (max_run_fst + parameter_fst)/2
return next_parameter_fst, max_run_fst, min_run_fst
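# Usage sketch (illustrative, values made up): one bisection step towards a
# desired Fst of 0.10 when the last run simulated 0.14 with parameter 0.12:
#   next_fst, max_run, min_run = approximate_fst(0.10, 0.14, 0.12,
#                                                max_run_fst=1, min_run_fst=0)
#   # simulated > desired, so the window shrinks to (0, 0.12) and
#   # next_fst == (0 + 0.12) / 2 == 0.06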
| gpl-2.0 |
andrius-preimantas/odoo | addons/mrp/report/price.py | 39 | 11481 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import openerp
from openerp.report.interface import report_rml
from openerp.tools import to_xml
from openerp.report import report_sxw
from datetime import datetime
from openerp.tools.translate import _
class report_custom(report_rml):
def create_xml(self, cr, uid, ids, datas, context=None):
number = (datas.get('form', False) and datas['form']['number']) or 1
registry = openerp.registry(cr.dbname)
product_pool = registry.get('product.product')
product_uom_pool = registry.get('product.uom')
workcenter_pool = registry.get('mrp.workcenter')
user_pool = registry.get('res.users')
bom_pool = registry.get('mrp.bom')
pricelist_pool = registry.get('product.pricelist')
rml_obj=report_sxw.rml_parse(cr, uid, product_pool._name,context)
rml_obj.localcontext.update({'lang':context.get('lang',False)})
company_currency = user_pool.browse(cr, uid, uid).company_id.currency_id
company_currency_symbol = company_currency.symbol or company_currency.name
def process_bom(bom, currency_id, factor=1):
xml = '<row>'
sum = 0
sum_strd = 0
prod = product_pool.browse(cr, uid, bom['product_id'])
prod_name = to_xml(bom['name'])
prod_qtty = factor * bom['product_qty']
product_uom = product_uom_pool.browse(cr, uid, bom['product_uom'], context=context)
product_uom_name = to_xml(product_uom.name)
main_sp_price, main_sp_name , main_strd_price = '','',''
sellers, sellers_price = '',''
if prod.seller_id:
main_sp_name = '- <b>'+ to_xml(prod.seller_id.name) +'</b>\r\n'
pricelist = prod.seller_id.property_product_pricelist_purchase
price = pricelist_pool.price_get(cr,uid,[pricelist.id],
prod.id, number*prod_qtty or 1.0, prod.seller_id.id, {
'uom': prod.uom_po_id.id,
'date': time.strftime('%Y-%m-%d'),
})[pricelist.id]
main_sp_price = """<b>"""+rml_obj.formatLang(price)+' '+ (company_currency_symbol)+"""</b>\r\n"""
sum += prod_qtty*price
std_price = product_uom_pool._compute_price(cr, uid, prod.uom_id.id, prod.standard_price, to_uom_id=product_uom.id)
main_strd_price = str(std_price) + '\r\n'
sum_strd = prod_qtty*std_price
for seller_id in prod.seller_ids:
if seller_id.name.id == prod.seller_id.id:
continue
sellers += '- <i>'+ to_xml(seller_id.name.name) +'</i>\r\n'
pricelist = seller_id.name.property_product_pricelist_purchase
price = pricelist_pool.price_get(cr,uid,[pricelist.id],
prod.id, number*prod_qtty or 1.0, seller_id.name.id, {
'uom': prod.uom_po_id.id,
'date': time.strftime('%Y-%m-%d'),
})[pricelist.id]
sellers_price += """<i>"""+rml_obj.formatLang(price) +' '+ (company_currency_symbol) +"""</i>\r\n"""
xml += """<col para='yes'> """+ prod_name +""" </col>
<col para='yes'> """+ main_sp_name + sellers + """ </col>
<col f='yes'>"""+ rml_obj.formatLang(prod_qtty) +' '+ product_uom_name +"""</col>
<col f='yes'>"""+ rml_obj.formatLang(float(main_strd_price)) +' '+ (company_currency_symbol) +"""</col>
<col f='yes'>""" + main_sp_price + sellers_price + """</col>'"""
xml += '</row>'
return xml, sum, sum_strd
def process_workcenter(wrk):
workcenter = workcenter_pool.browse(cr, uid, wrk['workcenter_id'])
cost_cycle = wrk['cycle']*workcenter.costs_cycle
cost_hour = wrk['hour']*workcenter.costs_hour
total = cost_cycle + cost_hour
xml = '<row>'
xml += "<col para='yes'>" + to_xml(workcenter.name) + '</col>'
xml += "<col/>"
xml += """<col f='yes'>"""+rml_obj.formatLang(cost_cycle)+' '+ (company_currency_symbol) + """</col>"""
xml += """<col f='yes'>"""+rml_obj.formatLang(cost_hour)+' '+ (company_currency_symbol) + """</col>"""
xml += """<col f='yes'>"""+rml_obj.formatLang(cost_hour + cost_cycle)+' '+ (company_currency_symbol) + """</col>"""
xml += '</row>'
return xml, total
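        # Illustrative numbers (not from the original source): a work centre
        # costing 2.0 per cycle and 30.0 per hour, used for 3 cycles and 0.5
        # hours, contributes 3*2.0 + 0.5*30.0 = 21.0 to the "Work Cost" column.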
xml = ''
config_start = """
<config>
<date>""" + to_xml(rml_obj.formatLang(datetime.now().strftime('%Y-%m-%d %H:%M:%S'),date_time=True)) + """</date>
<company>%s</company>
<PageSize>210.00mm,297.00mm</PageSize>
<PageWidth>595.27</PageWidth>
<PageHeight>841.88</PageHeight>
<tableSize>55.00mm,58.00mm,29.00mm,29.00mm,29.00mm</tableSize>
""" % to_xml(user_pool.browse(cr, uid, uid).company_id.name)
config_stop = """
<report-footer>Generated by OpenERP</report-footer>
</config>
"""
workcenter_header = """
<lines style='header'>
<row>
<col>%s</col>
<col t='yes'/>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
</row>
</lines>
""" % (_('Work Center name'), _('Cycles Cost'), _('Hourly Cost'),_('Work Cost'))
prod_header = """
<row>
<col>%s</col>
<col>%s</col>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
</row>
""" % (_('Components'), _('Components suppliers'), _('Quantity'),_('Cost Price per Unit of Measure'), _('Supplier Price per Unit of Measure'))
purchase_price_digits = rml_obj.get_digits(dp='Product Price')
for product in product_pool.browse(cr, uid, ids, context=context):
product_uom_name = to_xml(product.uom_id.name)
bom_id = bom_pool._bom_find(cr, uid, product.uom_id.id, product_id=product.id)
title = "<title>%s</title>" %(_("Cost Structure"))
title += "<title>%s</title>" % (to_xml(product.name))
xml += "<lines style='header'>" + title + prod_header + "</lines>"
if not bom_id:
total_strd = number * product.standard_price
total = number * product_pool.price_get(cr, uid, [product.id], 'standard_price')[product.id]
xml += """<lines style='lines'><row>
<col para='yes'>-</col>
<col para='yes'>-</col>
<col para='yes'>-</col>
<col para='yes'>-</col>
<col para='yes'>-</col>
</row></lines>"""
xml += """<lines style='total'> <row>
<col> """ + _('Total Cost of %s %s') % (str(number), product_uom_name) + """: </col>
<col/>
<col f='yes'/>
<col t='yes'>"""+ rml_obj.formatLang(total_strd, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
<col t='yes'>"""+ rml_obj.formatLang(total, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
</row></lines>'"""
else:
bom = bom_pool.browse(cr, uid, bom_id, context=context)
factor = number * product.uom_id.factor / bom.product_uom.factor
sub_boms = bom_pool._bom_explode(cr, uid, bom, product, factor / bom.product_qty)
total = 0
total_strd = 0
parent_bom = {
'product_qty': bom.product_qty,
'name': bom.product_id.name,
'product_uom': bom.product_uom.id,
'product_id': bom.product_id.id
}
xml_tmp = ''
for sub_bom in (sub_boms and sub_boms[0]) or [parent_bom]:
txt, sum, sum_strd = process_bom(sub_bom, company_currency.id)
xml_tmp += txt
total += sum
total_strd += sum_strd
xml += "<lines style='lines'>" + xml_tmp + '</lines>'
xml += """<lines style='sub_total'> <row>
<col> """ + _('Components Cost of %s %s') % (str(number), product_uom_name) + """: </col>
<col/>
<col t='yes'/>
<col t='yes'>"""+ rml_obj.formatLang(total_strd, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
<col t='yes'></col>
</row></lines>'"""
total2 = 0
xml_tmp = ''
for wrk in (sub_boms and sub_boms[1]):
txt, sum = process_workcenter(wrk)
xml_tmp += txt
total2 += sum
if xml_tmp:
xml += workcenter_header
xml += "<lines style='lines'>" + xml_tmp + '</lines>'
xml += """<lines style='sub_total'> <row>
<col> """ + _('Work Cost of %s %s') % (str(number), product_uom_name) +""": </col>
<col/>
<col/>
<col/>
<col t='yes'>"""+ rml_obj.formatLang(total2, digits=purchase_price_digits) +' '+ (company_currency_symbol) +"""</col>
</row></lines>'"""
xml += """<lines style='total'> <row>
<col> """ + _('Total Cost of %s %s') % (str(number), product_uom_name) + """: </col>
<col/>
<col t='yes'/>
<col t='yes'>"""+ rml_obj.formatLang(total_strd+total2, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
<col t='yes'></col>
</row></lines>'"""
xml = '<?xml version="1.0" ?><report>' + config_start + config_stop + xml + '</report>'
return xml
report_custom('report.product.price', 'product.product', '', 'addons/mrp/report/price.xsl')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jobscore/sync-engine | inbox/test/api/test_auth.py | 3 | 1866 | import json
from base64 import b64encode
from inbox.test.util.base import generic_account, db # noqa
from inbox.test.api.base import new_api_client # noqa
def test_no_auth(db, generic_account): # noqa
# Because we're using the generic_account namespace
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {}
response = api_client.get_raw('/account')
assert response.status_code == 401
def test_basic_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
response = api_client.get_raw('/account')
assert response.status_code == 200
resp_data = json.loads(response.data)
assert resp_data['id'] == generic_account.namespace.public_id
def test_bearer_token_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {
'Authorization': 'Bearer {}'
.format(generic_account.namespace.public_id)}
response = api_client.get_raw('/account')
assert response.status_code == 200
resp_data = json.loads(response.data)
assert resp_data['id'] == generic_account.namespace.public_id
BAD_TOKEN = '1234567890abcdefg'
def test_invalid_basic_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {'Authorization': 'Basic {}'
.format(b64encode(BAD_TOKEN + ':'))}
response = api_client.get_raw('/account')
assert response.status_code == 401
def test_invalid_bearer_token_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {
'Authorization': 'Bearer {}'.format(BAD_TOKEN)}
response = api_client.get_raw('/account')
assert response.status_code == 401
| agpl-3.0 |
utecht/random-lines | random-lines.py | 1 | 1121 | #!/usr/bin/env python
import random
import argparse
import sys
parser = argparse.ArgumentParser(description='Return random lines of file')
parser.add_argument('file', type=argparse.FileType('r'), help='the input file')
parser.add_argument('-n', '--num', type=int, help='number of lines to return')
parser.add_argument('-p', '--percent', type=float, help='percent of lines to return, i.e. 0.1 for 10 percent')
parser.add_argument('-o', '--output', type=argparse.FileType('w'), help='an output file')
args = parser.parse_args()
if args.num is None and args.percent is None:
print('Need a num or percent')
exit(1)
elif args.num and args.percent:
print('Only pass a num or a percent')
exit(1)
lines_pulled = 0
num_lines = sum(1 for line in open(args.file.name))
if args.num:
lines_pulled = args.num
elif args.percent:
lines_pulled = int(num_lines * args.percent)
if args.output:
output = args.output
else:
output = sys.stdout
# Sample distinct 0-based indexes so no line is drawn twice and the indexes
# line up with enumerate() below.
pull_lines = set(random.sample(range(num_lines), min(lines_pulled, num_lines)))
for i, line in enumerate(args.file):
if i in pull_lines:
output.write(line)
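# Usage sketch (illustrative):
#   ./random-lines.py -n 100 big.log -o sample.log    # 100 random lines
#   ./random-lines.py -p 0.1 big.log > sample.log     # roughly 10% of the lines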
| gpl-2.0 |
ptisserand/ansible | lib/ansible/modules/cloud/amazon/route53_health_check.py | 23 | 13078 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: route53_health_check
short_description: add or delete health-checks in Amazons Route53 DNS service
description:
- Creates and deletes DNS Health checks in Amazons Route53 service
- Only the port, resource_path, string_match and request_interval are
considered when updating existing health-checks.
version_added: "2.0"
options:
state:
description:
- Specifies the action to take.
required: true
choices: [ 'present', 'absent' ]
ip_address:
description:
- IP address of the end-point to check. Either this or `fqdn` has to be
provided.
port:
description:
- The port on the endpoint on which you want Amazon Route 53 to perform
health checks. Required for TCP checks.
type:
description:
- The type of health check that you want to create, which indicates how
Amazon Route 53 determines whether an endpoint is healthy.
required: true
choices: [ 'HTTP', 'HTTPS', 'HTTP_STR_MATCH', 'HTTPS_STR_MATCH', 'TCP' ]
resource_path:
description:
- The path that you want Amazon Route 53 to request when performing
health checks. The path can be any value for which your endpoint will
return an HTTP status code of 2xx or 3xx when the endpoint is healthy,
for example the file /docs/route53-health-check.html.
- Required for all checks except TCP.
- The path must begin with a /
- Maximum 255 characters.
fqdn:
description:
- Domain name of the endpoint to check. Either this or `ip_address` has
to be provided. When both are given the `fqdn` is used in the `Host:`
header of the HTTP request.
string_match:
description:
- If the check type is HTTP_STR_MATCH or HTTP_STR_MATCH, the string
that you want Amazon Route 53 to search for in the response body from
the specified resource. If the string appears in the first 5120 bytes
of the response body, Amazon Route 53 considers the resource healthy.
request_interval:
description:
- The number of seconds between the time that Amazon Route 53 gets a
response from your endpoint and the time that it sends the next
health-check request.
required: true
default: 30
choices: [ 10, 30 ]
failure_threshold:
description:
- The number of consecutive health checks that an endpoint must pass or
fail for Amazon Route 53 to change the current status of the endpoint
from unhealthy to healthy or vice versa.
required: true
default: 3
choices: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]
author: "zimbatm (@zimbatm)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Create a health-check for host1.example.com and use it in record
- route53_health_check:
state: present
fqdn: host1.example.com
type: HTTP_STR_MATCH
resource_path: /
string_match: "Hello"
request_interval: 10
failure_threshold: 2
register: my_health_check
- route53:
action: create
zone: "example.com"
type: CNAME
record: "www.example.com"
value: host1.example.com
ttl: 30
# Routing policy
identifier: "host1@www"
weight: 100
health_check: "{{ my_health_check.health_check.id }}"
# Delete health-check
- route53_health_check:
state: absent
fqdn: host1.example.com
'''
import uuid
try:
import boto
import boto.ec2
from boto import route53
from boto.route53 import Route53Connection, exception
from boto.route53.healthcheck import HealthCheck
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info
# Things that can't get changed:
# protocol
# ip_address or domain
# request_interval
# string_match if not previously enabled
def find_health_check(conn, wanted):
"""Searches for health checks that have the exact same set of immutable values"""
for check in conn.get_list_health_checks().HealthChecks:
config = check.HealthCheckConfig
if (
config.get('IPAddress') == wanted.ip_addr and
config.get('FullyQualifiedDomainName') == wanted.fqdn and
config.get('Type') == wanted.hc_type and
config.get('RequestInterval') == str(wanted.request_interval) and
config.get('Port') == str(wanted.port)
):
return check
return None
def to_health_check(config):
return HealthCheck(
config.get('IPAddress'),
int(config.get('Port')),
config.get('Type'),
config.get('ResourcePath'),
fqdn=config.get('FullyQualifiedDomainName'),
string_match=config.get('SearchString'),
request_interval=int(config.get('RequestInterval')),
failure_threshold=int(config.get('FailureThreshold')),
)
def health_check_diff(a, b):
a = a.__dict__
b = b.__dict__
if a == b:
return {}
diff = {}
for key in set(a.keys()) | set(b.keys()):
if a.get(key) != b.get(key):
diff[key] = b.get(key)
return diff
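# Illustrative sketch (hypothetical values): the diff maps attribute names to
# the *new* values, so two HealthCheck objects that differ only in
# failure_threshold would give
#   health_check_diff(old_check, new_check) == {'failure_threshold': 5}
# and an empty dict means nothing needs updating.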
def to_template_params(health_check):
params = {
'ip_addr_part': '',
'port': health_check.port,
'type': health_check.hc_type,
'resource_path_part': '',
'fqdn_part': '',
'string_match_part': '',
'request_interval': health_check.request_interval,
'failure_threshold': health_check.failure_threshold,
}
if health_check.ip_addr:
params['ip_addr_part'] = HealthCheck.XMLIpAddrPart % {'ip_addr': health_check.ip_addr}
if health_check.resource_path:
params['resource_path_part'] = XMLResourcePathPart % {'resource_path': health_check.resource_path}
if health_check.fqdn:
params['fqdn_part'] = HealthCheck.XMLFQDNPart % {'fqdn': health_check.fqdn}
if health_check.string_match:
params['string_match_part'] = HealthCheck.XMLStringMatchPart % {'string_match': health_check.string_match}
return params
XMLResourcePathPart = """<ResourcePath>%(resource_path)s</ResourcePath>"""
POSTXMLBody = """
<CreateHealthCheckRequest xmlns="%(xmlns)s">
<CallerReference>%(caller_ref)s</CallerReference>
<HealthCheckConfig>
%(ip_addr_part)s
<Port>%(port)s</Port>
<Type>%(type)s</Type>
%(resource_path_part)s
%(fqdn_part)s
%(string_match_part)s
<RequestInterval>%(request_interval)s</RequestInterval>
<FailureThreshold>%(failure_threshold)s</FailureThreshold>
</HealthCheckConfig>
</CreateHealthCheckRequest>
"""
UPDATEHCXMLBody = """
<UpdateHealthCheckRequest xmlns="%(xmlns)s">
<HealthCheckVersion>%(health_check_version)s</HealthCheckVersion>
%(ip_addr_part)s
<Port>%(port)s</Port>
%(resource_path_part)s
%(fqdn_part)s
%(string_match_part)s
<FailureThreshold>%(failure_threshold)i</FailureThreshold>
</UpdateHealthCheckRequest>
"""
def create_health_check(conn, health_check, caller_ref=None):
if caller_ref is None:
caller_ref = str(uuid.uuid4())
uri = '/%s/healthcheck' % conn.Version
params = to_template_params(health_check)
params.update(xmlns=conn.XMLNameSpace, caller_ref=caller_ref)
xml_body = POSTXMLBody % params
response = conn.make_request('POST', uri, {'Content-Type': 'text/xml'}, xml_body)
body = response.read()
boto.log.debug(body)
if response.status == 201:
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
else:
raise exception.DNSServerError(response.status, response.reason, body)
def update_health_check(conn, health_check_id, health_check_version, health_check):
uri = '/%s/healthcheck/%s' % (conn.Version, health_check_id)
params = to_template_params(health_check)
params.update(
xmlns=conn.XMLNameSpace,
health_check_version=health_check_version,
)
xml_body = UPDATEHCXMLBody % params
response = conn.make_request('POST', uri, {'Content-Type': 'text/xml'}, xml_body)
body = response.read()
boto.log.debug(body)
if response.status not in (200, 204):
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(choices=['present', 'absent'], default='present'),
ip_address=dict(),
port=dict(type='int'),
type=dict(required=True, choices=['HTTP', 'HTTPS', 'HTTP_STR_MATCH', 'HTTPS_STR_MATCH', 'TCP']),
resource_path=dict(),
fqdn=dict(),
string_match=dict(),
request_interval=dict(type='int', choices=[10, 30], default=30),
failure_threshold=dict(type='int', choices=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], default=3),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto 2.27.0+ required for this module')
state_in = module.params.get('state')
ip_addr_in = module.params.get('ip_address')
port_in = module.params.get('port')
type_in = module.params.get('type')
resource_path_in = module.params.get('resource_path')
fqdn_in = module.params.get('fqdn')
string_match_in = module.params.get('string_match')
request_interval_in = module.params.get('request_interval')
failure_threshold_in = module.params.get('failure_threshold')
if ip_addr_in is None and fqdn_in is None:
module.fail_json(msg="parameter 'ip_address' or 'fqdn' is required")
# Default port
if port_in is None:
if type_in in ['HTTP', 'HTTP_STR_MATCH']:
port_in = 80
elif type_in in ['HTTPS', 'HTTPS_STR_MATCH']:
port_in = 443
else:
module.fail_json(msg="parameter 'port' is required for 'type' TCP")
# string_match in relation with type
if type_in in ['HTTP_STR_MATCH', 'HTTPS_STR_MATCH']:
if string_match_in is None:
module.fail_json(msg="parameter 'string_match' is required for the HTTP(S)_STR_MATCH types")
elif len(string_match_in) > 255:
module.fail_json(msg="parameter 'string_match' is limited to 255 characters max")
elif string_match_in:
module.fail_json(msg="parameter 'string_match' argument is only for the HTTP(S)_STR_MATCH types")
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
# connect to the route53 endpoint
try:
conn = Route53Connection(**aws_connect_kwargs)
except boto.exception.BotoServerError as e:
module.fail_json(msg=e.error_message)
changed = False
action = None
check_id = None
wanted_config = HealthCheck(ip_addr_in, port_in, type_in, resource_path_in, fqdn_in, string_match_in, request_interval_in, failure_threshold_in)
existing_check = find_health_check(conn, wanted_config)
if existing_check:
check_id = existing_check.Id
existing_config = to_health_check(existing_check.HealthCheckConfig)
if state_in == 'present':
if existing_check is None:
action = "create"
check_id = create_health_check(conn, wanted_config).HealthCheck.Id
changed = True
else:
diff = health_check_diff(existing_config, wanted_config)
if diff:
action = "update"
update_health_check(conn, existing_check.Id, int(existing_check.HealthCheckVersion), wanted_config)
changed = True
elif state_in == 'absent':
if check_id:
action = "delete"
conn.delete_health_check(check_id)
changed = True
else:
module.fail_json(msg="Logic Error: Unknown state")
module.exit_json(changed=changed, health_check=dict(id=check_id), action=action)
if __name__ == '__main__':
main()
| gpl-3.0 |
ldiary/pytest-testbook | setup.py | 1 | 1591 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import codecs
from setuptools import setup
directory_name = os.path.dirname(__file__)
with codecs.open(os.path.join(directory_name, 'pytest_testbook', '__init__.py'), encoding='utf-8') as fd:
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
def read(fname):
file_path = os.path.join(directory_name, fname)
return codecs.open(file_path, encoding='utf-8').read()
setup(
name='pytest-testbook',
version=VERSION,
author='Ernesto D. Luzon Jr.',
author_email='raise_a_bug_in_myrepo@github.com',
maintainer='Ernesto D. Luzon Jr.',
maintainer_email='please_raise_a_bug_in_myrepo@github.com',
license='MIT',
url='https://github.com/ldiary/pytest-testbook',
description='A plugin to run tests written in Jupyter notebook',
long_description=read('README.rst'),
packages=["pytest_testbook"],
install_requires=[
'marigoso',
'jupyter',
],
classifiers=[
"Development Status :: 3 - Alpha",
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testbook = pytest_testbook.plugin',
],
},
)
| mit |
piranna/pyfilesystem | fs/s3fs.py | 7 | 29233 | """
fs.s3fs
=======
**Currently only avaiable on Python2 due to boto not being available for Python3**
FS subclass accessing files in Amazon S3
This module provides the class 'S3FS', which implements the FS filesystem
interface for objects stored in Amazon Simple Storage Service (S3).
"""
import os
import datetime
import tempfile
from fnmatch import fnmatch
import stat as statinfo
import boto.s3.connection
from boto.s3.prefix import Prefix
from boto.exception import S3ResponseError
from fs.base import *
from fs.path import *
from fs.errors import *
from fs.remote import *
from fs.filelike import LimitBytesFile
from fs import iotools
import six
# Boto is not thread-safe, so we need to use a per-thread S3 connection.
if hasattr(threading,"local"):
thread_local = threading.local
else:
class thread_local(object):
def __init__(self):
self._map = {}
def __getattr__(self,attr):
try:
return self._map[(threading.currentThread(),attr)]
except KeyError:
raise AttributeError, attr
def __setattr__(self,attr,value):
self._map[(threading.currentThread(),attr)] = value
class S3FS(FS):
"""A filesystem stored in Amazon S3.
This class provides the FS interface for files stored in Amazon's Simple
Storage Service (S3). It should be instantiated with the name of the
S3 bucket to use, and optionally a prefix under which the files should
be stored.
Local temporary files are used when opening files from this filesystem,
and any changes are only pushed back into S3 when the files are closed
or flushed.
"""
_meta = {'thread_safe': True,
'virtual': False,
'read_only': False,
'unicode_paths': True,
'case_insensitive_paths': False,
'network': True,
'atomic.move': True,
'atomic.copy': True,
'atomic.makedir': True,
'atomic.rename': False,
'atomic.setcontents': True
}
class meta:
PATH_MAX = None
NAME_MAX = None
def __init__(self, bucket, prefix="", aws_access_key=None, aws_secret_key=None, separator="/", thread_synchronize=True, key_sync_timeout=1):
"""Constructor for S3FS objects.
S3FS objects require the name of the S3 bucket in which to store
files, and can optionally be given a prefix under which the files
should be stored. The AWS public and private keys may be specified
as additional arguments; if they are not specified they will be
read from the two environment variables AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY.
The keyword argument 'key_sync_timeout' specifies the maximum
time in seconds that the filesystem will spend trying to confirm
that a newly-uploaded S3 key is available for reading. For no
timeout set it to zero. To disable these checks entirely (and
thus reduce the filesystem's consistency guarantees to those of
S3's "eventual consistency" model) set it to None.
By default the path separator is "/", but this can be overridden
by specifying the keyword 'separator' in the constructor.
"""
self._bucket_name = bucket
self._access_keys = (aws_access_key,aws_secret_key)
self._separator = separator
self._key_sync_timeout = key_sync_timeout
# Normalise prefix to this form: path/to/files/
prefix = normpath(prefix)
while prefix.startswith(separator):
prefix = prefix[1:]
if not prefix.endswith(separator) and prefix != "":
prefix = prefix + separator
if isinstance(prefix,unicode):
prefix = prefix.encode("utf8")
if aws_access_key is None:
if "AWS_ACCESS_KEY_ID" not in os.environ:
raise CreateFailedError("AWS_ACCESS_KEY_ID not set")
if aws_secret_key is None:
if "AWS_SECRET_ACCESS_KEY" not in os.environ:
raise CreateFailedError("AWS_SECRET_ACCESS_KEY not set")
self._prefix = prefix
self._tlocal = thread_local()
super(S3FS, self).__init__(thread_synchronize=thread_synchronize)
# Make _s3conn and _s3bukt properties that are created on demand,
# since they cannot be stored during pickling.
def _s3conn(self):
try:
(c,ctime) = self._tlocal.s3conn
if time.time() - ctime > 60:
raise AttributeError
return c
except AttributeError:
c = boto.s3.connection.S3Connection(*self._access_keys)
self._tlocal.s3conn = (c,time.time())
return c
_s3conn = property(_s3conn)
def _s3bukt(self):
try:
(b,ctime) = self._tlocal.s3bukt
if time.time() - ctime > 60:
raise AttributeError
return b
except AttributeError:
try:
# Validate by listing the bucket if there is no prefix.
# If there is a prefix, validate by listing only the prefix
# itself, to avoid errors when an IAM policy has been applied.
if self._prefix:
b = self._s3conn.get_bucket(self._bucket_name, validate=0)
b.get_key(self._prefix)
else:
b = self._s3conn.get_bucket(self._bucket_name, validate=1)
except S3ResponseError, e:
if "404 Not Found" not in str(e):
raise
b = self._s3conn.create_bucket(self._bucket_name)
self._tlocal.s3bukt = (b,time.time())
return b
_s3bukt = property(_s3bukt)
def __getstate__(self):
state = super(S3FS,self).__getstate__()
del state['_tlocal']
return state
def __setstate__(self,state):
super(S3FS,self).__setstate__(state)
self._tlocal = thread_local()
def __repr__(self):
args = (self.__class__.__name__,self._bucket_name,self._prefix)
return '<%s: %s:%s>' % args
__str__ = __repr__
def _s3path(self,path):
"""Get the absolute path to a file stored in S3."""
path = relpath(normpath(path))
path = self._separator.join(iteratepath(path))
s3path = self._prefix + path
if s3path and s3path[-1] == self._separator:
s3path = s3path[:-1]
if isinstance(s3path,unicode):
s3path = s3path.encode("utf8")
return s3path
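    # Illustrative mapping (assuming the FS was created with prefix "backups/"
    # and the default "/" separator):
    #   self._s3path("/photos/cat.jpg")  ->  "backups/photos/cat.jpg"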
def _uns3path(self,s3path,roots3path=None):
"""Get the local path for a file stored in S3.
This is essentially the opposite of self._s3path().
"""
if roots3path is None:
roots3path = self._s3path("")
i = len(roots3path)
return s3path[i:]
def _sync_key(self,k):
"""Synchronise on contents of the given key.
Since S3 only offers "eventual consistency" of data, it is possible
to create a key but be unable to read it back straight away. This
method works around that limitation by polling the key until it reads
back the value expected by the given key.
Note that this could easily fail if the key is modified by another
program, meaning the content will never be as specified in the given
        key. This is the reason for the timeout argument to the constructor.
"""
timeout = self._key_sync_timeout
if timeout is None:
return k
k2 = self._s3bukt.get_key(k.name)
t = time.time()
while k2 is None or k2.etag != k.etag:
if timeout > 0:
if t + timeout < time.time():
break
time.sleep(0.1)
k2 = self._s3bukt.get_key(k.name)
return k2
def _sync_set_contents(self,key,contents):
"""Synchronously set the contents of a key."""
if isinstance(key,basestring):
key = self._s3bukt.new_key(key)
if isinstance(contents,basestring):
key.set_contents_from_string(contents)
elif hasattr(contents,"md5"):
hexmd5 = contents.md5
b64md5 = hexmd5.decode("hex").encode("base64").strip()
key.set_contents_from_file(contents,md5=(hexmd5,b64md5))
else:
try:
contents.seek(0)
except (AttributeError,EnvironmentError):
tf = tempfile.TemporaryFile()
data = contents.read(524288)
while data:
tf.write(data)
data = contents.read(524288)
tf.seek(0)
key.set_contents_from_file(tf)
else:
key.set_contents_from_file(contents)
return self._sync_key(key)
def makepublic(self, path):
"""Mark given path as publicly accessible using HTTP(S)"""
s3path = self._s3path(path)
k = self._s3bukt.get_key(s3path)
k.make_public()
def getpathurl(self, path, allow_none=False, expires=3600):
"""Returns a url that corresponds to the given path."""
s3path = self._s3path(path)
k = self._s3bukt.get_key(s3path)
# Is there AllUsers group with READ permissions?
is_public = True in [grant.permission == 'READ' and
grant.uri == 'http://acs.amazonaws.com/groups/global/AllUsers'
for grant in k.get_acl().acl.grants]
url = k.generate_url(expires, force_http=is_public)
if url is None:
if not allow_none:
raise NoPathURLError(path=path)
return None
if is_public:
# Strip the signed query string; it is not needed for a public resource
url = url.split('?')[0]
return url
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
s3path = self._s3path(path)
if isinstance(data, six.text_type):
data = data.encode(encoding=encoding, errors=errors)
self._sync_set_contents(s3path, data)
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Open the named file in the given mode.
This method downloads the file contents into a local temporary file
so that it can be worked on efficiently. Any changes made to the
file are only sent back to S3 when the file is flushed or closed.
"""
if self.isdir(path):
raise ResourceInvalidError(path)
s3path = self._s3path(path)
# Truncate the file if requested
if "w" in mode:
k = self._sync_set_contents(s3path,"")
else:
k = self._s3bukt.get_key(s3path)
if k is None:
# Create the file if it's missing
if "w" not in mode and "a" not in mode:
raise ResourceNotFoundError(path)
if not self.isdir(dirname(path)):
raise ParentDirectoryMissingError(path)
k = self._sync_set_contents(s3path,"")
# Make sure nothing tries to read past end of socket data
f = LimitBytesFile(k.size,k,"r")
# For streaming reads, return the key object directly
if mode == "r-":
return f
# For everything else, use a RemoteFileBuffer.
# This will take care of closing the socket when it's done.
return RemoteFileBuffer(self,path,mode,f)
def exists(self,path):
"""Check whether a path exists."""
s3path = self._s3path(path)
s3pathD = s3path + self._separator
# The root directory always exists
if self._prefix.startswith(s3path):
return True
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
for k in ks:
# A regular file
if _eq_utf8(k.name,s3path):
return True
# A directory
if _eq_utf8(k.name,s3pathD):
return True
return False
def isdir(self,path):
"""Check whether a path exists and is a directory."""
s3path = self._s3path(path) + self._separator
# Root is always a directory
if s3path == "/" or s3path == self._prefix:
return True
# Use a list request so that we return true if there are any files
# in that directory. This avoids requiring a special file for the
# directory itself, which other tools may not create.
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
try:
iter(ks).next()
except StopIteration:
return False
else:
return True
def isfile(self,path):
"""Check whether a path exists and is a regular file."""
s3path = self._s3path(path)
# Root is never a file
if self._prefix.startswith(s3path):
return False
k = self._s3bukt.get_key(s3path)
if k is not None:
return True
return False
def listdir(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
"""List contents of a directory."""
return list(self.ilistdir(path,wildcard,full,absolute,
dirs_only,files_only))
def listdirinfo(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
return list(self.ilistdirinfo(path,wildcard,full,absolute,
dirs_only,files_only))
def ilistdir(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
"""List contents of a directory."""
keys = self._iter_keys(path)
entries = self._filter_keys(path,keys,wildcard,full,absolute,
dirs_only,files_only)
return (nm for (nm,k) in entries)
def ilistdirinfo(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
keys = self._iter_keys(path)
entries = self._filter_keys(path,keys,wildcard,full,absolute,
dirs_only,files_only)
return ((nm,self._get_key_info(k,nm)) for (nm,k) in entries)
def _iter_keys(self,path):
"""Iterator over keys contained in the given directory.
This generator yields (name,key) pairs for each entry in the given
directory. If the path is not a directory, it raises the appropriate
error.
"""
s3path = self._s3path(path) + self._separator
if s3path == "/":
s3path = ""
isDir = False
for k in self._s3bukt.list(prefix=s3path,delimiter=self._separator):
if not isDir:
isDir = True
# Skip over the entry for the directory itself, if it exists
name = self._uns3path(k.name,s3path)
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if name.endswith(self._separator):
name = name[:-1]
yield (name,k)
if not isDir:
if s3path != self._prefix:
if self.isfile(path):
msg = "that's not a directory: %(path)s"
raise ResourceInvalidError(path,msg=msg)
raise ResourceNotFoundError(path)
def _key_is_dir(self, k):
if isinstance(k,Prefix):
return True
if k.name.endswith(self._separator):
return True
return False
def _filter_keys(self,path,keys,wildcard,full,absolute,
dirs_only,files_only):
"""Filter out keys not matching the given criteria.
Given a (name,key) iterator as returned by _iter_keys, this method
applies the given filtering criteria and returns a filtered iterator.
"""
sep = self._separator
if dirs_only and files_only:
raise ValueError("dirs_only and files_only can not both be True")
if dirs_only:
keys = ((nm,k) for (nm,k) in keys if self._key_is_dir(k))
elif files_only:
keys = ((nm,k) for (nm,k) in keys if not self._key_is_dir(k))
if wildcard is not None:
if callable(wildcard):
keys = ((nm,k) for (nm,k) in keys if wildcard(nm))
else:
keys = ((nm,k) for (nm,k) in keys if fnmatch(nm,wildcard))
if full:
return ((relpath(pathjoin(path, nm)),k) for (nm,k) in keys)
elif absolute:
return ((abspath(pathjoin(path, nm)),k) for (nm,k) in keys)
return keys
def makedir(self,path,recursive=False,allow_recreate=False):
"""Create a directory at the given path.
The 'mode' argument is accepted for compatibility with the standard
FS interface, but is currently ignored.
"""
s3path = self._s3path(path)
s3pathD = s3path + self._separator
if s3pathD == self._prefix:
if allow_recreate:
return
msg = "Can not create a directory that already exists"\
" (try allow_recreate=True): %(path)s"
raise DestinationExistsError(path, msg=msg)
s3pathP = self._s3path(dirname(path))
if s3pathP:
s3pathP = s3pathP + self._separator
# Check various preconditions using list of parent dir
ks = self._s3bukt.list(prefix=s3pathP,delimiter=self._separator)
if s3pathP == self._prefix:
parentExists = True
else:
parentExists = False
for k in ks:
if not parentExists:
parentExists = True
if _eq_utf8(k.name,s3path):
# It's already a file
msg = "Destination exists as a regular file: %(path)s"
raise ResourceInvalidError(path, msg=msg)
if _eq_utf8(k.name,s3pathD):
# It's already a directory
if allow_recreate:
return
msg = "Can not create a directory that already exists"\
" (try allow_recreate=True): %(path)s"
raise DestinationExistsError(path, msg=msg)
# Create parent if required
if not parentExists:
if recursive:
self.makedir(dirname(path),recursive,allow_recreate)
else:
msg = "Parent directory does not exist: %(path)s"
raise ParentDirectoryMissingError(path, msg=msg)
# Create an empty file representing the directory
self._sync_set_contents(s3pathD,"")
def remove(self,path):
"""Remove the file at the given path."""
s3path = self._s3path(path)
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
for k in ks:
if _eq_utf8(k.name,s3path):
break
if _startswith_utf8(k.name,s3path + "/"):
msg = "that's not a file: %(path)s"
raise ResourceInvalidError(path,msg=msg)
else:
raise ResourceNotFoundError(path)
self._s3bukt.delete_key(s3path)
k = self._s3bukt.get_key(s3path)
while k:
k = self._s3bukt.get_key(s3path)
def removedir(self,path,recursive=False,force=False):
"""Remove the directory at the given path."""
if normpath(path) in ('', '/'):
raise RemoveRootError(path)
s3path = self._s3path(path)
if s3path != self._prefix:
s3path = s3path + self._separator
if force:
# If we will be forcibly removing any directory contents, we
# might as well get the un-delimited list straight away.
ks = self._s3bukt.list(prefix=s3path)
else:
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
# Fail if the directory is not empty, or remove them if forced
found = False
for k in ks:
found = True
if not _eq_utf8(k.name,s3path):
if not force:
raise DirectoryNotEmptyError(path)
self._s3bukt.delete_key(k.name)
if not found:
if self.isfile(path):
msg = "removedir() called on a regular file: %(path)s"
raise ResourceInvalidError(path,msg=msg)
if path not in ("","/"):
raise ResourceNotFoundError(path)
self._s3bukt.delete_key(s3path)
if recursive and path not in ("","/"):
pdir = dirname(path)
try:
self.removedir(pdir,recursive=True,force=False)
except DirectoryNotEmptyError:
pass
def rename(self,src,dst):
"""Rename the file at 'src' to 'dst'."""
# Actually, in S3 'rename' is exactly the same as 'move'
if self.isfile(src):
self.move(src,dst)
else:
self.movedir(src,dst)
def getinfo(self,path):
s3path = self._s3path(path)
if path in ("","/"):
k = Prefix(bucket=self._s3bukt,name="/")
else:
k = self._s3bukt.get_key(s3path)
if k is None:
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
for k in ks:
if isinstance(k,Prefix):
break
else:
raise ResourceNotFoundError(path)
return self._get_key_info(k,path)
def _get_key_info(self,key,name=None):
info = {}
if name is not None:
info["name"] = basename(name)
else:
info["name"] = basename(self._uns3key(k.name))
if self._key_is_dir(key):
info["st_mode"] = 0700 | statinfo.S_IFDIR
else:
info["st_mode"] = 0700 | statinfo.S_IFREG
if hasattr(key,"size"):
info['size'] = int(key.size)
etag = getattr(key,"etag",None)
if etag is not None:
if isinstance(etag,unicode):
etag = etag.encode("utf8")
info['etag'] = etag.strip('"').strip("'")
if hasattr(key,"last_modified"):
# TODO: does S3 use any other formats?
fmt = "%a, %d %b %Y %H:%M:%S %Z"
try:
mtime = datetime.datetime.strptime(key.last_modified,fmt)
info['modified_time'] = mtime
except ValueError:
pass
return info
def desc(self,path):
return "No description available"
def copy(self,src,dst,overwrite=False,chunk_size=16384):
"""Copy a file from 'src' to 'dst'.
src -- The source path
dst -- The destination path
overwrite -- If True, then the destination may be overwritten
(if a file exists at that location). If False then an exception will be
thrown if the destination exists
chunk_size -- Size of chunks to use in copy (ignored by S3)
"""
s3path_dst = self._s3path(dst)
s3path_dstD = s3path_dst + self._separator
# Check for various preconditions.
ks = self._s3bukt.list(prefix=s3path_dst,delimiter=self._separator)
dstOK = False
for k in ks:
# It exists as a regular file
if _eq_utf8(k.name,s3path_dst):
if not overwrite:
raise DestinationExistsError(dst)
dstOK = True
break
# Check if it refers to a directory. If so, we copy *into* it.
# Since S3 lists in lexicographic order, subsequent iterations
# of the loop will check for the existence of the new filename.
if _eq_utf8(k.name,s3path_dstD):
nm = basename(src)
dst = pathjoin(dirname(dst),nm)
s3path_dst = s3path_dstD + nm
dstOK = True
if not dstOK and not self.isdir(dirname(dst)):
msg = "Destination directory does not exist: %(path)s"
raise ParentDirectoryMissingError(dst,msg=msg)
# OK, now we can copy the file.
s3path_src = self._s3path(src)
try:
self._s3bukt.copy_key(s3path_dst,self._bucket_name,s3path_src)
except S3ResponseError, e:
if "404 Not Found" in str(e):
msg = "Source is not a file: %(path)s"
raise ResourceInvalidError(src, msg=msg)
raise e
else:
k = self._s3bukt.get_key(s3path_dst)
while k is None:
k = self._s3bukt.get_key(s3path_dst)
self._sync_key(k)
def move(self,src,dst,overwrite=False,chunk_size=16384):
"""Move a file from one location to another."""
self.copy(src,dst,overwrite=overwrite)
self._s3bukt.delete_key(self._s3path(src))
def walkfiles(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False ):
if search != "breadth" or dir_wildcard is not None:
args = (wildcard,dir_wildcard,search,ignore_errors)
for item in super(S3FS,self).walkfiles(path,*args):
yield item
else:
prefix = self._s3path(path)
for k in self._s3bukt.list(prefix=prefix):
name = relpath(self._uns3path(k.name,prefix))
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if not k.name.endswith(self._separator):
if wildcard is not None:
if callable(wildcard):
if not wildcard(basename(name)):
continue
else:
if not fnmatch(basename(name),wildcard):
continue
yield pathjoin(path,name)
def walkinfo(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False ):
if search != "breadth" or dir_wildcard is not None:
args = (wildcard,dir_wildcard,search,ignore_errors)
for item in super(S3FS,self).walkfiles(path,*args):
yield (item,self.getinfo(item))
else:
prefix = self._s3path(path)
for k in self._s3bukt.list(prefix=prefix):
name = relpath(self._uns3path(k.name,prefix))
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if wildcard is not None:
if callable(wildcard):
if not wildcard(basename(name)):
continue
else:
if not fnmatch(basename(name),wildcard):
continue
yield (pathjoin(path,name),self._get_key_info(k,name))
def walkfilesinfo(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False ):
if search != "breadth" or dir_wildcard is not None:
args = (wildcard,dir_wildcard,search,ignore_errors)
for item in super(S3FS,self).walkfiles(path,*args):
yield (item,self.getinfo(item))
else:
prefix = self._s3path(path)
for k in self._s3bukt.list(prefix=prefix):
name = relpath(self._uns3path(k.name,prefix))
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if not k.name.endswith(self._separator):
if wildcard is not None:
if callable(wildcard):
if not wildcard(basename(name)):
continue
else:
if not fnmatch(basename(name),wildcard):
continue
yield (pathjoin(path,name),self._get_key_info(k,name))
def _eq_utf8(name1,name2):
if isinstance(name1,unicode):
name1 = name1.encode("utf8")
if isinstance(name2,unicode):
name2 = name2.encode("utf8")
return name1 == name2
def _startswith_utf8(name1,name2):
if isinstance(name1,unicode):
name1 = name1.encode("utf8")
if isinstance(name2,unicode):
name2 = name2.encode("utf8")
return name1.startswith(name2)
| bsd-3-clause |
matthewwardrop/formulaic | formulaic/materializers/transforms/scale.py | 1 | 1389 | import numpy
import scipy.sparse as spsparse
from formulaic.utils.stateful_transforms import stateful_transform
@stateful_transform
def scale(data, center=True, scale=True, ddof=1, _state=None):
data = numpy.array(data)
if 'ddof' not in _state:
_state['ddof'] = ddof
else:
ddof = _state['ddof']
# Handle centering
if 'center' not in _state:
if isinstance(center, bool) and center:
_state['center'] = numpy.mean(data, axis=0)
elif not isinstance(center, bool):
_state['center'] = numpy.array(center)
else:
_state['center'] = None
if _state['center'] is not None:
data = data - _state['center']
# Handle scaling
if 'scale' not in _state:
if isinstance(scale, bool) and scale:
_state['scale'] = numpy.sqrt(numpy.sum(data ** 2, axis=0) / (data.shape[0] - ddof))
elif not isinstance(scale, bool):
_state['scale'] = numpy.array(scale)
else:
_state['scale'] = None
if _state['scale'] is not None:
data = data / _state['scale']
return data
@scale.register(spsparse.spmatrix)
def _(data, *args, **kwargs):
assert data.shape[1] == 1
return scale(data.toarray()[:, 0], *args, **kwargs)
@stateful_transform
def center(data, _state=None):
return scale(data, scale=False, _state=_state)
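# Illustrative sketch (not part of the original module): the standardisation
# that scale() performs on a 1-D array, written out directly with ddof=1:
#   x = numpy.array([1.0, 2.0, 3.0, 4.0])
#   centered = x - x.mean()
#   scaled = centered / numpy.sqrt((centered ** 2).sum() / (len(x) - 1))
# The _state dict caches the center/scale constants so the same transform can
# be replayed consistently on new data.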
| mit |
erilyth/sugar | src/jarabe/view/pulsingicon.py | 1 | 7328 | # Copyright (C) 2008 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import math
from gi.repository import GObject
from sugar3.graphics.icon import Icon
from sugar3.graphics import style
from sugar3.graphics.icon import CanvasIcon
_INTERVAL = 100
_STEP = math.pi / 10 # must be a fraction of pi, for clean caching
_MINIMAL_ALPHA_VALUE = 0.33
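# The alpha pulse in Pulser.update() follows (cos(phase) + 1) / 2 rescaled
# into [_MINIMAL_ALPHA_VALUE, 1.0], so with the default _STEP of pi/10 the
# icon fades to roughly one-third opacity and back once every 20 timer ticks.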
class Pulser(object):
def __init__(self, icon, interval=_INTERVAL):
self._pulse_hid = None
self._icon = icon
self._interval = interval
self._phase = 0
self._start_scale = 1.0
self._end_scale = 1.0
self._zoom_steps = 1
self._current_zoom_step = 1
self._current_scale_step = 1
def set_zooming(self, start_scale, end_scale, zoom_steps):
""" Set start and end scale and number of steps in zoom animation """
self._start_scale = start_scale
self._end_scale = end_scale
self._zoom_steps = zoom_steps
self._current_scale_step = abs(self._start_scale - self._end_scale) / \
self._zoom_steps
self._icon.scale = self._start_scale
def start(self, restart=False):
if restart:
self._phase = 0
if self._pulse_hid is None:
self._pulse_hid = GObject.timeout_add(self._interval,
self.__pulse_cb)
if self._start_scale != self._end_scale:
self._icon.scale = self._start_scale + \
self._current_scale_step * self._current_zoom_step
def stop(self):
if self._pulse_hid is not None:
GObject.source_remove(self._pulse_hid)
self._pulse_hid = None
self._icon.xo_color = self._icon.get_base_color()
self._phase = 0
self._icon.alpha = 1.0
def update(self):
self._icon.xo_color = self._icon.base_color
self._icon.alpha = _MINIMAL_ALPHA_VALUE + \
(1 - _MINIMAL_ALPHA_VALUE) * (math.cos(self._phase) + 1) / 2
def __pulse_cb(self):
self._phase += _STEP
if self._current_zoom_step <= self._zoom_steps and \
self._start_scale != self._end_scale:
self._icon.scale = self._start_scale + \
self._current_scale_step * self._current_zoom_step
self._current_zoom_step += 1
self.update()
return True
class PulsingIcon(Icon):
__gtype_name__ = 'SugarPulsingIcon'
def __init__(self, interval=_INTERVAL, **kwargs):
self._pulser = Pulser(self, interval)
self._base_color = None
self._pulse_color = None
self._paused = False
self._pulsing = False
Icon.__init__(self, **kwargs)
self._palette = None
self.connect('destroy', self.__destroy_cb)
def set_pulse_color(self, pulse_color):
self._pulse_color = pulse_color
self._pulser.update()
def get_pulse_color(self):
return self._pulse_color
pulse_color = GObject.property(
type=object, getter=get_pulse_color, setter=set_pulse_color)
def set_base_color(self, base_color):
self._base_color = base_color
self._pulser.update()
def get_base_color(self):
return self._base_color
def set_zooming(self, start_size=style.SMALL_ICON_SIZE,
end_size=style.XLARGE_ICON_SIZE,
zoom_steps=10):
if start_size > end_size:
start_scale = 1.0
end_scale = float(end_size) / start_size
else:
start_scale = float(start_size) / end_size
end_scale = 1.0
self._pulser.set_zooming(start_scale, end_scale, zoom_steps)
base_color = GObject.property(
type=object, getter=get_base_color, setter=set_base_color)
def set_paused(self, paused):
self._paused = paused
if self._paused:
self._pulser.stop()
else:
self._pulser.start(restart=False)
def get_paused(self):
return self._paused
paused = GObject.property(
type=bool, default=False, getter=get_paused, setter=set_paused)
def set_pulsing(self, pulsing):
self._pulsing = pulsing
if self._pulsing:
self._pulser.start(restart=True)
else:
self._pulser.stop()
def get_pulsing(self):
return self._pulsing
pulsing = GObject.property(
type=bool, default=False, getter=get_pulsing, setter=set_pulsing)
def _get_palette(self):
return self._palette
def _set_palette(self, palette):
if self._palette is not None:
self._palette.props.invoker = None
self._palette = palette
palette = property(_get_palette, _set_palette)
def __destroy_cb(self, icon):
self._pulser.stop()
if self._palette is not None:
self._palette.destroy()
class EventPulsingIcon(CanvasIcon):
__gtype_name__ = 'SugarEventPulsingIcon'
def __init__(self, interval=_INTERVAL, **kwargs):
self._pulser = Pulser(self, interval)
self._base_color = None
self._pulse_color = None
self._paused = False
self._pulsing = False
CanvasIcon.__init__(self, **kwargs)
self.connect('destroy', self.__destroy_cb)
def __destroy_cb(self, box):
self._pulser.stop()
def set_pulse_color(self, pulse_color):
self._pulse_color = pulse_color
self._pulser.update()
def get_pulse_color(self):
return self._pulse_color
pulse_color = GObject.property(
type=object, getter=get_pulse_color, setter=set_pulse_color)
def set_base_color(self, base_color):
self._base_color = base_color
self._pulser.update()
def get_base_color(self):
return self._base_color
base_color = GObject.property(
type=object, getter=get_base_color, setter=set_base_color)
def set_paused(self, paused):
self._paused = paused
if self._paused:
self._pulser.stop()
elif self._pulsing:
self._pulser.start(restart=False)
def get_paused(self):
return self._paused
paused = GObject.property(
type=bool, default=False, getter=get_paused, setter=set_paused)
def set_pulsing(self, pulsing):
self._pulsing = pulsing
if self._paused:
return
if self._pulsing:
self._pulser.start(restart=True)
else:
self._pulser.stop()
def get_pulsing(self):
return self._pulsing
pulsing = GObject.property(
type=bool, default=False, getter=get_pulsing, setter=set_pulsing)
| gpl-2.0 |
rwgdrummer/maskgen | maskgen/analytics/dctAnalytic.py | 1 | 17525 | # =============================================================================
# Authors: PAR Government
# Organization: DARPA
#
# Copyright (c) 2016 PAR Government
# All rights reserved.
#
#
# adapted from https://github.com/enmasse/jpeg_read
#==============================================================================
import sys
from math import *
from Tkinter import *
import matplotlib.pyplot as plt
import numpy as np
import logging
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
def memoize (function):
# http://programmingzen.com/2009/05/18/memoization-in-ruby-and-python/
cache = {}
def decorated_function (*args):
try:
return cache[args]
except KeyError:
val = function (*args)
cache[args] = val
return val
return decorated_function
@memoize
def decodeBits (len, val):
""" Calculate the value from the "additional" bits in the huffman data. """
return val if (val & (1 << len - 1)) else val - ((1 << len) - 1)
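# Example values (illustrative): for len=3, decodeBits(3, 0b101) == 5 (high
# bit set, value used as-is), while decodeBits(3, 0b010) == -5 (high bit
# clear, so 2 - (2**3 - 1)), matching the JPEG convention for signed
# magnitude categories.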
def extractCoeffs (data):
dclum = []
dcchr1 = []
dcchr2 = []
aclum = []
acchr1 = []
acchr2 = []
for MCU in data:
lum = MCU[0]
chr1 = MCU[1]
chr2 = MCU[2]
for MCU_component in lum:
if len (MCU_component):
dclum.append (MCU_component[0])
aclum.extend (MCU_component[1:])
for MCU_component in chr1:
if len (MCU_component):
dcchr1.append (MCU_component[0])
acchr1.extend (MCU_component[1:])
for MCU_component in chr2:
if len (MCU_component):
dcchr2.append (MCU_component[0])
acchr2.extend (MCU_component[1:])
return (dclum, dcchr1, dcchr2, aclum, acchr1, acchr2)
def generateHuffmanCodes (huffsize):
""" Calculate the huffman code of each length. """
huffcode = []
k = 0
code = 0
# Magic
for i in range (len (huffsize)):
si = huffsize[i]
for k in range (si):
huffcode.append ((i + 1, code))
code += 1
code <<= 1
return huffcode
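# Example (illustrative): a huffsize beginning [0, 2, 1, ...] (no 1-bit codes,
# two 2-bit codes, one 3-bit code) yields [(2, 0b00), (2, 0b01), (3, 0b100)],
# i.e. canonical codes assigned in order of increasing length.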
def getBits (num, gen):
""" Get "num" bits from gen. """
out = 0
for i in range (num):
out <<= 1
val = gen.next ()
if val != []:
out += val & 0x01
else:
return []
return out
def mapHuffmanCodes (codes, values):
""" Map the huffman code to the right value. """
out = {}
for i in range (len (codes)):
out[codes[i]] = values[i]
return out
def readAPP (type, file):
""" Read APP marker. """
Lp = readWord (file)
Lp -= 2
# If APP0 try to read the JFIF header
# Not really necessary
if type == 0:
identifier = file.read (5)
Lp -= 5
version = file.read (2)
Lp -= 2
units = ord (file.read (1))
Lp -= 1
Xdensity = ord (file.read (1)) << 8
Xdensity |= ord (file.read (1))
Lp -= 2
Ydensity = ord (file.read (1)) << 8
Ydensity |= ord (file.read (1))
Lp -= 2
file.seek (Lp, 1)
def readByte (file):
""" Read a byte from file. """
return ord (file.read (1))
def readWord (file):
""" Read a 16 bit word from file. """
return ord (file.read (1)) << 8 | ord (file.read (1))
def restoreDC (data):
""" Restore the DC values. They are coded as the difference from the
previous DC value of the same component.
"""
out = []
dc_prev = [0 for x in range (len (data[0]))]
# For each MCU
for mcu in data:
# For each component
for comp_num in range (len (mcu)):
# For each DU
for du in range (len (mcu[comp_num])):
if mcu[comp_num][du]:
mcu[comp_num][du][0] += dc_prev[comp_num]
dc_prev[comp_num] = mcu[comp_num][du][0]
out.append (mcu)
return out
class JPEG_Reader:
""" Class for reading DCT coefficients from JPEG files. """
def __init__ (self):
self.huffman_ac_tables = [{}, {}, {}, {}]
self.huffman_dc_tables = [{}, {}, {}, {}]
self.q_table = [[], [], [], []]
self.XYP = 0, 0, 0
self.component = {}
self.num_components = 0
self.mcus_read = 0
self.dc = []
self.inline_dc = 0
self.bit_stream = []
self.EOI = False
def readDCT_Coeffs (self, filename):
""" Reads and returns DCT coefficients from the supplied JPEG file. """
self.__init__ ()
data = []
with open (filename, "rb") as inputFile:
in_char = inputFile.read (1)
while in_char:
if in_char == chr (0xff):
in_char = inputFile.read (1)
in_num = ord (in_char)
if 0xe0 <= in_num <= 0xef:
readAPP (in_num - 0xe0, inputFile)
elif in_num == 0xdb:
self.__readDQT (inputFile)
elif in_num == 0xdc:
self.__readDNL (inputFile)
elif in_num == 0xc4:
self.__readDHT (inputFile)
elif in_num == 0xc8:
print "JPG"
elif 0xc0 <= in_num <= 0xcf:
self.__readSOF (in_num - 0xc0, inputFile)
elif in_num == 0xda:
self.__readSOS (inputFile)
self.bit_stream = self.__readBit (inputFile)
while not self.EOI:
data.append (self.__readMCU ())
in_char = inputFile.read (1)
return extractCoeffs (data if self.inline_dc else restoreDC (data))
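# Illustrative usage (not part of the original file): the returned tuple is
# (dclum, dcchr1, dcchr2, aclum, acchr1, acchr2), so the luma DC coefficients
# used by getHist() below are simply
#   dclum = JPEG_Reader().readDCT_Coeffs('image.jpg')[0]  # placeholder path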
def __readBit (self, file):
""" A generator that reads one bit from file and handles markers and
byte stuffing.
"""
input = file.read (1)
while input and not self.EOI:
if input == chr (0xFF):
cmd = file.read (1)
if cmd:
# Byte stuffing
if cmd == chr (0x00):
input = chr (0xFF)
# End of image marker
elif cmd == chr (0xD9):
self.EOI = True
# Restart markers
elif 0xD0 <= ord (cmd) <= 0xD7 and self.inline_dc:
# Reset dc value
self.dc = [0 for i in range (self.num_components + 1)]
input = file.read (1)
else:
input = file.read (1)
#print "CMD: %x" % ord(cmd)
if not self.EOI:
for i in range (7, -1, -1):
# Output next bit
yield (ord (input) >> i) & 0x01
input = file.read (1)
while True:
yield []
def __readDHT (self, file):
""" Read and compute the huffman tables. """
# Read the marker length
Lh = readWord (file)
Lh -= 2
while Lh > 0:
huffsize = []
huffval = []
T = readByte (file)
Th = T & 0x0F
Tc = (T >> 4) & 0x0F
#print "Lh: %d Th: %d Tc: %d" % (Lh, Th, Tc)
Lh -= 1
# Read how many symbols of each length
# up to 16 bits
for i in range (16):
huffsize.append (readByte (file))
Lh -= 1
# Generate the huffman codes
huffcode = generateHuffmanCodes (huffsize)
#print "Huffcode", huffcode
# Read the values that should be mapped to huffman codes
for i in huffcode:
#print i
try:
huffval.append (readByte (file))
Lh -= 1
except TypeError:
continue
# Generate lookup tables
if Tc == 0:
self.huffman_dc_tables[Th] = mapHuffmanCodes (huffcode, huffval)
else:
self.huffman_ac_tables[Th] = mapHuffmanCodes (huffcode, huffval)
def __readDNL (self, file):
""" Read the DNL marker. Changes the number of lines. """
Ld = readWord (file)
Ld -= 2
NL = readWord (file)
Ld -= 2
X, Y, P = self.XYP
if Y == 0:
self.XYP = X, NL, P
def __readDQT (self, file):
""" Read the quantization table. The table is in zigzag order. """
Lq = readWord (file)
Lq -= 2
while Lq > 0:
table = []
Tq = readByte (file)
Pq = Tq >> 4
Tq &= 0xF
Lq -= 1
if Pq == 0:
for i in range (64):
table.append (readByte (file))
Lq -= 1
else:
for i in range (64):
val = readWord (file)
table.append (val)
Lq -= 2
self.q_table[Tq] = table
def __readDU (self, comp_num):
""" Read one data unit with component index comp_num. """
data = []
comp = self.component[comp_num]
huff_tbl = self.huffman_dc_tables[comp['Td']]
# Fill data with 64 coefficients
while len (data) < 64:
key = 0
for bits in range (1, 17):
key_len = []
key <<= 1
# Get one bit from bit_stream
val = getBits (1, self.bit_stream)
if val == []:
break
key |= val
# If huffman code exists
if huff_tbl.has_key ((bits, key)):
key_len = huff_tbl[(bits, key)]
break
# After getting the DC value switch to the AC table
huff_tbl = self.huffman_ac_tables[comp['Ta']]
if key_len == []:
#print (bits, key, bin(key)), "key not found"
break
# If ZRL fill with 16 zero coefficients
elif key_len == 0xF0:
for i in range (16):
data.append (0)
continue
# If not DC coefficient
if len (data) != 0:
# If End of block
if key_len == 0x00:
# Fill the rest of the DU with zeros
while len (data) < 64:
data.append (0)
break
# The first part of the AC key_len is the number of leading
# zeros
for i in range (key_len >> 4):
if len (data) < 64:
data.append (0)
key_len &= 0x0F
if len (data) >= 64:
break
if key_len != 0:
# The rest of key_len is the number of "additional" bits
val = getBits (key_len, self.bit_stream)
if val == []:
break
# Decode the additional bits
num = decodeBits (key_len, val)
# Experimental, doesn't work right
if len (data) == 0 and self.inline_dc:
# The DC coefficient value is added to the DC value from
# the corresponding DU in the previous MCU
num += self.dc[comp_num]
self.dc[comp_num] = num
data.append (num)
else:
data.append (0)
#if len(data) != 64:
#print "Wrong size", len(data)
return data
def __readMCU (self):
""" Read an MCU. """
comp_num = mcu = range (self.num_components)
# For each component
for i in comp_num:
comp = self.component[i + 1]
mcu[i] = []
# For each DU
for j in range (comp['H'] * comp['V']):
if not self.EOI:
mcu[i].append (self.__readDU (i + 1))
self.mcus_read += 1
return mcu
def __readSOF (self, type, file):
""" Read the start of frame marker. """
Lf = readWord (file) # Read the marker length
Lf -= 2
P = readByte (file) # Read the sample precision
Lf -= 1
Y = readWord (file) # Read number of lines
Lf -= 2
X = readWord (file) # Read the number of samples per line
Lf -= 2
Nf = readByte (file) # Read number of components
Lf -= 1
self.XYP = X, Y, P
#print self.XYP
while Lf > 0:
C = readByte (file) # Read component identifier
V = readByte (file) # Read sampling factors
Tq = readByte (file)
Lf -= 3
H = V >> 4
V &= 0xF
# Assign horizontal & vertical sampling factors and qtable
self.component[C] = { 'H' : H, 'V' : V, 'Tq' : Tq }
def __readSOS (self, file):
""" Read the start of scan marker. """
Ls = readWord (file)
Ls -= 2
Ns = readByte (file) # Read number of components in scan
Ls -= 1
for i in range (Ns):
Cs = readByte (file) # Read the scan component selector
Ls -= 1
Ta = readByte (file) # Read the huffman table selectors
Ls -= 1
Td = Ta >> 4
Ta &= 0xF
# Assign the DC huffman table
self.component[Cs]['Td'] = Td
# Assign the AC huffman table
self.component[Cs]['Ta'] = Ta
Ss = readByte (file) # Should be zero if baseline DCT
Ls -= 1
Se = readByte (file) # Should be 63 if baseline DCT
Ls -= 1
A = readByte (file) # Should be zero if baseline DCT
Ls -= 1
#print "Ns:%d Ss:%d Se:%d A:%02X" % (Ns, Ss, Se, A)
self.num_components = Ns
self.dc = [0 for i in range (self.num_components + 1)]
def dequantize (self, mcu):
""" Dequantize an MCU. """
out = mcu
# For each coefficient in each DU in each component, multiply by the
# corresponding value in the quantization table.
for c in range (len (out)):
for du in range (len (out[c])):
for i in range (len (out[c][du])):
out[c][du][i] *= self.q_table[self.component[c + 1]['Tq']][i]
return out
def getHist(filename):
try:
import JPEG_MetaInfoPy
hist, lowValue = JPEG_MetaInfoPy.generateHistogram(filename)
return np.asarray(hist),np.asarray(range(lowValue,lowValue+len(hist)+1))
except Exception as ex:
logging.getLogger('maskgen').warn('External JPEG_MetaInfoPy failed: {}'.format(str(ex)))
DC = JPEG_Reader().readDCT_Coeffs(filename)[0]
minDC = min(DC)
maxDC = max(DC)
binCount = maxDC - minDC + 1
return np.histogram (DC, bins=binCount,
range=(minDC, maxDC + 1))
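# Illustrative usage (not part of the original file): getHist returns the same
# (counts, bin_edges) pair as numpy.histogram, so the sigma that DCTView
# annotates is just counts.std():
#   counts, bin_edges = getHist('image.jpg')  # 'image.jpg' is a placeholder
#   sigma = counts.std()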
class JPEG_View:
def appliesTo (self, filename):
return filename.lower ().endswith (('jpg', 'jpeg'))
def draw (self, frame, filename):
fig = plt.figure ();
self._plotHistogram (fig, getHist(filename))
canvas = FigureCanvasTkAgg (fig, frame)
canvas.show ()
canvas.get_tk_widget ().pack (side=BOTTOM, fill=BOTH, expand=True)
def _labelSigma (self, figure, sigma):
""" Add a label of the value of sigma to the histogram plot. """
props = dict (boxstyle='round', facecolor='wheat', alpha=0.5)
figure.text (0.25, 0.85, '$\sigma=%.2f$' % (sigma),
fontsize=14, verticalalignment='top', bbox=props)
class DCTView (JPEG_View):
def screenName (self):
return 'JPG DCT Histogram'
def _plotHistogram (self, figure, histogram):
ordinates, abscissae = histogram
plt.bar (abscissae[:-1], ordinates, 1);
self._labelSigma (figure, ordinates.std ())
class FFT_DCTView (JPEG_View):
def screenName (self):
return 'FFT(JPG DCT Histogram)'
def _plotHistogram (self, figure, histogram):
# Calculate the DFT of the zero-meaned histogram values. The n/2+1
# positive frequencies are returned by rfft. Mirror the result back
# into ordinates.
#
mean = histogram[0].mean ()
posFreqs = abs (np.fft.rfft ([i - mean for i in histogram[0]]))
ordinates = list (reversed (posFreqs))
ordinates.extend (posFreqs[1:])
n = len (posFreqs)
abscissae = range (1 - n, n)
plt.plot (abscissae, ordinates, 'k')
plt.plot (abscissae, self.__hat (ordinates), 'r')
self._labelSigma (figure, np.std (ordinates))
def __hat (self, data):
length = len (data)
intercept1 = int (length * 0.425)
intercept2 = int (length * 0.575)
amp = max (data)
threshold = amp * 0.15
arr = np.full (length, threshold)
arr[intercept1:intercept2] = amp
return arr
if __name__ == "__main__":
DCTView ().draw (None, sys.argv[1])
FFT_DCTView ().draw (None, sys.argv[1]) | bsd-3-clause |
scikit-optimize/scikit-optimize | skopt/callbacks.py | 1 | 9377 | """Monitor and influence the optimization procedure via callbacks.
Callbacks are callables which are invoked after each iteration of the optimizer
and are passed the results "so far". Callbacks can monitor progress, or stop
the optimization early by returning `True`.
"""
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
from time import time
import numpy as np
from skopt.utils import dump
def check_callback(callback):
"""
Check if callback is a callable or a list of callables.
"""
if callback is not None:
if isinstance(callback, Callable):
return [callback]
elif (isinstance(callback, list) and
all([isinstance(c, Callable) for c in callback])):
return callback
else:
raise ValueError("callback should be either a callable or "
"a list of callables.")
else:
return []
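# Example (illustrative): all accepted forms normalise to a list --
# check_callback(cb) -> [cb], check_callback([cb1, cb2]) -> [cb1, cb2],
# check_callback(None) -> []; anything else raises ValueError.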
class VerboseCallback(object):
"""
Callback to control the verbosity.
Parameters
----------
n_init : int, optional
Number of points provided by the user which are yet to be
evaluated. This is equal to `len(x0)` when `y0` is None
n_random : int, optional
Number of points randomly chosen.
n_total : int
Total number of func calls.
Attributes
----------
iter_no : int
Number of iterations of the optimization routine.
"""
def __init__(self, n_total, n_init=0, n_random=0):
self.n_init = n_init
self.n_random = n_random
self.n_total = n_total
self.iter_no = 1
self._start_time = time()
self._print_info(start=True)
def _print_info(self, start=True):
iter_no = self.iter_no
if start:
status = "started"
eval_status = "Evaluating function"
search_status = "Searching for the next optimal point."
else:
status = "ended"
eval_status = "Evaluation done"
search_status = "Search finished for the next optimal point."
if iter_no <= self.n_init:
print("Iteration No: %d %s. %s at provided point."
% (iter_no, status, eval_status))
elif self.n_init < iter_no <= (self.n_random + self.n_init):
print("Iteration No: %d %s. %s at random point."
% (iter_no, status, eval_status))
else:
print("Iteration No: %d %s. %s"
% (iter_no, status, search_status))
def __call__(self, res):
"""
Parameters
----------
res : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
time_taken = time() - self._start_time
self._print_info(start=False)
curr_y = res.func_vals[-1]
curr_min = res.fun
print("Time taken: %0.4f" % time_taken)
print("Function value obtained: %0.4f" % curr_y)
print("Current minimum: %0.4f" % curr_min)
self.iter_no += 1
if self.iter_no <= self.n_total:
self._print_info(start=True)
self._start_time = time()
class TimerCallback(object):
"""
Log the elapsed time between each iteration of the minimization loop.
The time for each iteration is stored in the `iter_time` attribute which
you can inspect after the minimization has completed.
Attributes
----------
iter_time : list, shape (n_iter,)
`iter_time[i-1]` gives the time taken to complete iteration `i`
"""
def __init__(self):
self._time = time()
self.iter_time = []
def __call__(self, res):
"""
Parameters
----------
res : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
elapsed_time = time() - self._time
self.iter_time.append(elapsed_time)
self._time = time()
class EarlyStopper(object):
"""Decide to continue or not given the results so far.
The optimization procedure will be stopped if the callback returns True.
"""
def __call__(self, result):
"""
Parameters
----------
result : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
return self._criterion(result)
def _criterion(self, result):
"""Compute the decision to stop or not.
Classes inheriting from `EarlyStop` should use this method to
implement their decision logic.
Parameters
----------
result : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
Returns
-------
decision : boolean or None
Return True/False if the criterion can make a decision or `None` if
there is not enough data yet to make a decision.
"""
raise NotImplementedError("The _criterion method should be implemented"
" by subclasses of EarlyStopper.")
class DeltaXStopper(EarlyStopper):
"""Stop the optimization when ``|x1 - x2| < delta``
If the last two positions at which the objective has been evaluated
are less than `delta` apart stop the optimization procedure.
"""
def __init__(self, delta):
super(EarlyStopper, self).__init__()
self.delta = delta
def _criterion(self, result):
if len(result.x_iters) >= 2:
return result.space.distance(result.x_iters[-2],
result.x_iters[-1]) < self.delta
else:
return None
class DeltaYStopper(EarlyStopper):
"""Stop the optimization if the `n_best` minima are within `delta`
Stop the optimizer if the absolute difference between the `n_best`
objective values is less than `delta`.
"""
def __init__(self, delta, n_best=5):
super(EarlyStopper, self).__init__()
self.delta = delta
self.n_best = n_best
def _criterion(self, result):
if len(result.func_vals) >= self.n_best:
func_vals = np.sort(result.func_vals)
worst = func_vals[self.n_best - 1]
best = func_vals[0]
# worst is always larger, so no need for abs()
return worst - best < self.delta
else:
return None
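# Illustrative usage (not part of the original module): stoppers are passed
# through the minimizers' `callback` argument, e.g.
#   skopt.gp_minimize(obj_fun, dimensions,
#                     callback=[DeltaYStopper(1e-4, n_best=3)])
# and the optimization loop stops as soon as any callback returns True.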
class HollowIterationsStopper(EarlyStopper):
"""
Stop if the improvement over the last n iterations is below a threshold.
"""
def __init__(self, n_iterations, threshold=0):
super(HollowIterationsStopper, self).__init__()
self.n_iterations = n_iterations
self.threshold = abs(threshold)
def _criterion(self, result):
if len(result.func_vals) <= self.n_iterations:
return False
cummin = np.minimum.accumulate(result.func_vals)
return cummin[-self.n_iterations - 1] - cummin[-1] <= self.threshold
class DeadlineStopper(EarlyStopper):
"""
Stop the optimization before running out of a fixed budget of time.
Attributes
----------
iter_time : list, shape (n_iter,)
`iter_time[i-1]` gives the time taken to complete iteration `i`
Parameters
----------
total_time : float
fixed budget of time (seconds) that the optimization must
finish within.
"""
def __init__(self, total_time):
super(DeadlineStopper, self).__init__()
self._time = time()
self.iter_time = []
self.total_time = total_time
def _criterion(self, result):
elapsed_time = time() - self._time
self.iter_time.append(elapsed_time)
self._time = time()
if result.x_iters:
time_remaining = self.total_time - np.sum(self.iter_time)
return time_remaining <= np.max(self.iter_time)
else:
return None
class ThresholdStopper(EarlyStopper):
"""
Stop the optimization when the objective value is lower
than the given threshold.
"""
def __init__(self, threshold: float) -> None:
super(EarlyStopper, self).__init__()
self.threshold = threshold
def _criterion(self, result) -> bool:
return np.any([val <= self.threshold for val in result.func_vals])
class CheckpointSaver(object):
"""
Save current state after each iteration with :class:`skopt.dump`.
Examples
--------
>>> import skopt
>>> def obj_fun(x):
... return x[0]**2
>>> checkpoint_callback = skopt.callbacks.CheckpointSaver("./result.pkl")
>>> skopt.gp_minimize(obj_fun, [(-2, 2)], n_calls=10,
... callback=[checkpoint_callback]) # doctest: +SKIP
Parameters
----------
checkpoint_path : string
location where checkpoint will be saved to;
dump_options : string
options to pass on to `skopt.dump`, like `compress=9`
"""
def __init__(self, checkpoint_path, **dump_options):
self.checkpoint_path = checkpoint_path
self.dump_options = dump_options
def __call__(self, res):
"""
Parameters
----------
res : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
dump(res, self.checkpoint_path, **self.dump_options)
| bsd-3-clause |
bjackman/workload-automation | wlauto/resource_getters/standard.py | 1 | 23149 | # Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains the standard set of resource getters used by Workload Automation.
"""
import os
import sys
import shutil
import inspect
import httplib
import logging
import json
import requests
from wlauto import ResourceGetter, GetterPriority, Parameter, NO_ONE, settings, __file__ as __base_filepath
from wlauto.exceptions import ResourceError
from wlauto.utils.android import ApkInfo
from wlauto.utils.misc import ensure_directory_exists as _d, ensure_file_directory_exists as _f, sha256, urljoin
from wlauto.utils.types import boolean
from wlauto.utils.revent import ReventRecording
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
class PackageFileGetter(ResourceGetter):
name = 'package_file'
description = """
Looks for exactly one file with the specified extension in the owner's directory. If a version
is specified on invocation of get, it will filter the discovered file based on that version.
Versions are treated as case-insensitive.
"""
extension = None
def register(self):
self.resolver.register(self, self.extension, GetterPriority.package)
def get(self, resource, **kwargs):
resource_dir = os.path.dirname(sys.modules[resource.owner.__module__].__file__)
version = kwargs.get('version')
return get_from_location_by_extension(resource, resource_dir, self.extension, version)
class EnvironmentFileGetter(ResourceGetter):
name = 'environment_file'
description = """
Looks for exactly one file with the specified extension in the owner's directory. If a version
is specified on invocation of get, it will filter the discovered file based on that version.
Versions are treated as case-insensitive.
"""
extension = None
def register(self):
self.resolver.register(self, self.extension, GetterPriority.environment)
def get(self, resource, **kwargs):
resource_dir = resource.owner.dependencies_directory
version = kwargs.get('version')
return get_from_location_by_extension(resource, resource_dir, self.extension, version)
class ReventGetter(ResourceGetter):
"""Implements logic for identifying revent files."""
def get_base_location(self, resource):
raise NotImplementedError()
def register(self):
self.resolver.register(self, 'revent', GetterPriority.package)
def get(self, resource, **kwargs):
# name format: [model/device_name.stage.revent]
device_model = resource.owner.device.get_device_model()
wa_device_name = resource.owner.device.name
for name in [device_model, wa_device_name]:
if not name:
continue
filename = '.'.join([name, resource.stage, 'revent']).lower()
self.logger.debug('Trying to get {0}.'.format(str(filename)))
location = _d(os.path.join(self.get_base_location(resource), 'revent_files'))
for candidate in os.listdir(location):
if candidate.lower() == filename.lower():
path = os.path.join(location, candidate)
try:
ReventRecording(path).close() # Check valid recording
return path
except ValueError as e:
self.logger.warning(e.message)
class PackageApkGetter(PackageFileGetter):
name = 'package_apk'
extension = 'apk'
description = """
Uses the same dependency resolution mechanism as ``PackageFileGetter``.
"""
def get(self, resource, **kwargs):
resource_dir = os.path.dirname(sys.modules[resource.owner.__module__].__file__)
version = kwargs.get('version')
variant = kwargs.get('variant_name')
return get_from_location_by_extension(resource, resource_dir, self.extension, version, variant=variant)
class PackageJarGetter(PackageFileGetter):
name = 'package_jar'
extension = 'jar'
class PackageReventGetter(ReventGetter):
name = 'package_revent'
def get_base_location(self, resource):
return get_owner_path(resource)
class EnvironmentApkGetter(EnvironmentFileGetter):
name = 'environment_apk'
extension = 'apk'
description = """
Uses the same dependency resolution mechanism as ``EnvironmentFileGetter``.
"""
def get(self, resource, **kwargs):
resource_dir = resource.owner.dependencies_directory
version = kwargs.get('version')
variant = kwargs.get('variant_name')
return get_from_location_by_extension(resource, resource_dir, self.extension, version, variant=variant)
class EnvironmentJarGetter(EnvironmentFileGetter):
name = 'environment_jar'
extension = 'jar'
class EnvironmentReventGetter(ReventGetter):
name = 'enviroment_revent'
def get_base_location(self, resource):
return resource.owner.dependencies_directory
class ExecutableGetter(ResourceGetter):
name = 'exe_getter'
resource_type = 'executable'
priority = GetterPriority.environment
def get(self, resource, **kwargs):
if settings.binaries_repository:
path = os.path.join(settings.binaries_repository, resource.platform, resource.filename)
if os.path.isfile(path):
return path
class PackageExecutableGetter(ExecutableGetter):
name = 'package_exe_getter'
priority = GetterPriority.package
def get(self, resource, **kwargs):
path = os.path.join(get_owner_path(resource), 'bin', resource.platform, resource.filename)
if os.path.isfile(path):
return path
class EnvironmentExecutableGetter(ExecutableGetter):
name = 'env_exe_getter'
def get(self, resource, **kwargs):
paths = [
os.path.join(resource.owner.dependencies_directory, 'bin',
resource.platform, resource.filename),
os.path.join(settings.environment_root, 'bin',
resource.platform, resource.filename),
]
for path in paths:
if os.path.isfile(path):
return path
class DependencyFileGetter(ResourceGetter):
name = 'filer'
description = """
Gets resources from the specified mount point. Copies them to the local dependencies
directory, and returns the path to the local copy.
"""
resource_type = 'file'
relative_path = '' # May be overridden by subclasses.
priority = GetterPriority.remote
parameters = [
Parameter('mount_point', default='/', global_alias='remote_assets_path',
description='Local mount point for the remote filer.'),
]
def __init__(self, resolver, **kwargs):
super(DependencyFileGetter, self).__init__(resolver, **kwargs)
def get(self, resource, **kwargs):
force = kwargs.get('force')
remote_path = os.path.join(self.mount_point, self.relative_path, resource.path)
local_path = _f(os.path.join(settings.dependencies_directory, '__remote',
resource.owner.name, os.path.basename(resource.path)))
if not os.path.exists(local_path) or force:
if not os.path.exists(remote_path):
return None
self.logger.debug('Copying {} to {}'.format(remote_path, local_path))
shutil.copy(remote_path, local_path)
return local_path
class PackageCommonDependencyGetter(ResourceGetter):
name = 'packaged_common_dependency'
resource_type = 'file'
priority = GetterPriority.package - 1 # check after owner-specific locations
def get(self, resource, **kwargs):
path = os.path.join(settings.package_directory, 'common', resource.path)
if os.path.exists(path):
return path
class EnvironmentCommonDependencyGetter(ResourceGetter):
name = 'environment_common_dependency'
resource_type = 'file'
priority = GetterPriority.environment - 1 # check after owner-specific locations
def get(self, resource, **kwargs):
path = os.path.join(settings.dependencies_directory,
os.path.basename(resource.path))
if os.path.exists(path):
return path
class PackageDependencyGetter(ResourceGetter):
name = 'packaged_dependency'
resource_type = 'file'
priority = GetterPriority.package
def get(self, resource, **kwargs):
owner_path = inspect.getfile(resource.owner.__class__)
path = os.path.join(os.path.dirname(owner_path), resource.path)
if os.path.exists(path):
return path
class EnvironmentDependencyGetter(ResourceGetter):
name = 'environment_dependency'
resource_type = 'file'
priority = GetterPriority.environment
def get(self, resource, **kwargs):
path = os.path.join(resource.owner.dependencies_directory, os.path.basename(resource.path))
if os.path.exists(path):
return path
class ExtensionAssetGetter(DependencyFileGetter):
name = 'extension_asset'
resource_type = 'extension_asset'
class HttpGetter(ResourceGetter):
name = 'http_assets'
description = """
Downloads resources from a server based on an index fetched from the specified URL.
Given a URL, this will try to fetch ``<URL>/index.json``. The index file maps extension
names to a list of corresponding asset descriptions. Each asset description contains a path
(relative to the base URL) of the resource and a SHA256 hash, so that this Getter can
verify whether the resource on the remote has changed.
For example, let's assume we want to get the APK file for workload "foo", and that
assets are hosted at ``http://example.com/assets``. This Getter will first try to
download ``http://example.com/assets/index.json``. The index file may contain
something like ::
{
"foo": [
{
"path": "foo-app.apk",
"sha256": "b14530bb47e04ed655ac5e80e69beaa61c2020450e18638f54384332dffebe86"
},
{
"path": "subdir/some-other-asset.file",
"sha256": "48d9050e9802246d820625717b72f1c2ba431904b8484ca39befd68d1dbedfff"
}
]
}
This Getter will look through the list of assets for "foo" (in this case, two), checking
the paths until it finds one matching the resource (in this case, "foo-app.apk").
Finally, it will try to download that file relative to the base URL and extension name
(in this case, "http://example.com/assets/foo/foo-app.apk"). The downloaded version
will be cached locally, so that in the future, the getter will check the SHA256 hash
of the local file against the one advertised inside index.json, and provided that hasn't
changed, it won't try to download the file again.
"""
priority = GetterPriority.remote
resource_type = ['apk', 'file', 'jar', 'revent', 'executable']
parameters = [
Parameter('url', global_alias='remote_assets_url',
description="""URL of the index file for assets on an HTTP server."""),
Parameter('username',
description="""User name for authenticating with assets URL"""),
Parameter('password',
description="""Password for authenticationg with assets URL"""),
Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
description="""If ``True``, will always attempt to fetch assets from the remote, even if
a local cached copy is available."""),
Parameter('chunk_size', kind=int, default=1024,
description="""Chunk size for streaming large assets."""),
]
def __init__(self, resolver, **kwargs):
super(HttpGetter, self).__init__(resolver, **kwargs)
self.index = None
def get(self, resource, **kwargs):
if not resource.owner:
return # TODO: add support for unowned resources
if not self.index:
self.index = self.fetch_index()
asset = self.resolve_resource(resource)
if not asset:
return
return self.download_asset(asset, resource.owner.name)
def fetch_index(self):
if not self.url:
return {}
index_url = urljoin(self.url, 'index.json')
response = self.geturl(index_url)
if response.status_code != httplib.OK:
message = 'Could not fetch "{}"; received "{} {}"'
self.logger.error(message.format(index_url, response.status_code, response.reason))
return {}
return json.loads(response.content)
def download_asset(self, asset, owner_name):
url = urljoin(self.url, owner_name, asset['path'])
local_path = _f(os.path.join(settings.dependencies_directory, '__remote',
owner_name, asset['path'].replace('/', os.sep)))
if os.path.exists(local_path) and not self.always_fetch:
local_sha = sha256(local_path)
if local_sha == asset['sha256']:
self.logger.debug('Local SHA256 matches; not re-downloading')
return local_path
self.logger.debug('Downloading {}'.format(url))
response = self.geturl(url, stream=True)
if response.status_code != httplib.OK:
message = 'Could not download asset "{}"; received "{} {}"'
self.logger.warning(message.format(url, response.status_code, response.reason))
return
with open(local_path, 'wb') as wfh:
for chunk in response.iter_content(chunk_size=self.chunk_size):
wfh.write(chunk)
return local_path
def geturl(self, url, stream=False):
if self.username:
auth = (self.username, self.password)
else:
auth = None
return requests.get(url, auth=auth, stream=stream)
def resolve_resource(self, resource):
# pylint: disable=too-many-branches,too-many-locals
assets = self.index.get(resource.owner.name, {})
if not assets:
return {}
if resource.name in ['apk', 'jar']:
paths = [a['path'] for a in assets]
version = getattr(resource, 'version', None)
found = get_from_list_by_extension(resource, paths, resource.name, version)
if found:
for a in assets:
if a['path'] == found:
return a
elif resource.name == 'revent':
device_model = resource.owner.device.get_device_model()
wa_device_name = resource.owner.device.name
for name in [device_model, wa_device_name]:
if not name:
continue
filename = '.'.join([name, resource.stage, 'revent']).lower()
for asset in assets:
pathname = os.path.basename(asset['path']).lower()
if pathname == filename:
try:
ReventRecording(asset['path']).close() # Check valid recording
return asset
except ValueError as e:
self.logger.warning(e.message)
elif resource.name == 'executable':
platform = resource.platform
path = '/'.join(['bin', platform, resource.filename])
for asset in assets:
if asset['path'].lower() == path.lower():
return asset
else: # file
for asset in assets:
if asset['path'].lower() == resource.path.lower():
return asset
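# Illustrative sketch (not part of WA itself): a server-side index.json entry
# for an asset could be produced with the same sha256 helper imported above,
# e.g. {"path": "foo-app.apk", "sha256": sha256(local_apk_path)}, where
# local_apk_path is a hypothetical path on the server. download_asset()
# compares that value against the hash of the cached local copy.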
class RemoteFilerGetter(ResourceGetter):
name = 'filer_assets'
description = """
Finds resources on a (locally mounted) remote filer and caches them locally.
This assumes that the filer is mounted on the local machine (e.g. as a samba share).
"""
priority = GetterPriority.remote
resource_type = ['apk', 'file', 'jar', 'revent']
parameters = [
Parameter('remote_path', global_alias='remote_assets_path', default='',
description="""Path, on the local system, where the assets are located."""),
Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
description="""If ``True``, will always attempt to fetch assets from the remote, even if
a local cached copy is available."""),
]
def get(self, resource, **kwargs):
version = kwargs.get('version')
if resource.owner:
remote_path = os.path.join(self.remote_path, resource.owner.name)
local_path = os.path.join(settings.environment_root, '__filer', resource.owner.dependencies_directory)
message = 'resource={}, version={}, remote_path={}, local_path={}'
self.logger.debug(message.format(resource, version, remote_path, local_path))
return self.try_get_resource(resource, version, remote_path, local_path)
else:
result = None
for entry in os.listdir(remote_path):
remote_path = os.path.join(self.remote_path, entry)
local_path = os.path.join(settings.environment_root, '__filer', settings.dependencies_directory, entry)
result = self.try_get_resource(resource, version, remote_path, local_path)
if result:
break
return result
def try_get_resource(self, resource, version, remote_path, local_path):
if not self.always_fetch:
result = self.get_from(resource, version, local_path)
if result:
return result
if remote_path:
            # Didn't find it cached locally; now check the remote
result = self.get_from(resource, version, remote_path)
if not result:
return result
else: # remote path is not set
return None
# Found it remotely, cache locally, then return it
local_full_path = os.path.join(_d(local_path), os.path.basename(result))
self.logger.debug('cp {} {}'.format(result, local_full_path))
shutil.copy(result, local_full_path)
return local_full_path
def get_from(self, resource, version, location): # pylint: disable=no-self-use
# pylint: disable=too-many-branches
if resource.name in ['apk', 'jar']:
return get_from_location_by_extension(resource, location, resource.name, version)
elif resource.name == 'file':
filepath = os.path.join(location, resource.path)
if os.path.exists(filepath):
return filepath
elif resource.name == 'revent':
device_model = resource.owner.device.get_device_model()
wa_device_name = resource.owner.device.name
for name in [device_model, wa_device_name]:
if not name:
continue
filename = '.'.join([name, resource.stage, 'revent']).lower()
alternate_location = os.path.join(location, 'revent_files')
# There tends to be some confusion as to where revent files should
# be placed. This looks both in the extension's directory, and in
# 'revent_files' subdirectory under it, if it exists.
path = None
if os.path.isdir(alternate_location):
for candidate in os.listdir(alternate_location):
if candidate.lower() == filename.lower():
path = os.path.join(alternate_location, candidate)
if os.path.isdir(location):
for candidate in os.listdir(location):
if candidate.lower() == filename.lower():
path = os.path.join(location, candidate)
if path:
try:
ReventRecording(path).close() # Check valid recording
return path
except ValueError as e:
self.logger.warning(e.message)
else:
raise ValueError('Unexpected resource type: {}'.format(resource.name))
# Utility functions
def get_from_location_by_extension(resource, location, extension, version=None, variant=None):
try:
found_files = [os.path.join(location, f) for f in os.listdir(location)]
except OSError:
return None
try:
return get_from_list_by_extension(resource, found_files, extension, version, variant=variant)
except ResourceError:
raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
location,
resource.owner.name))
def get_from_list_by_extension(resource, filelist, extension, version=None, variant=None):
filelist = [ff for ff in filelist if os.path.splitext(ff)[1].lower().endswith('.' + extension)]
if variant:
filelist = [ff for ff in filelist if variant.lower() in os.path.basename(ff).lower()]
if version:
if extension == 'apk':
filelist = [ff for ff in filelist if version.lower() in ApkInfo(ff).version_name.lower()]
else:
filelist = [ff for ff in filelist if version.lower() in os.path.basename(ff).lower()]
if extension == 'apk':
filelist = [ff for ff in filelist if not ApkInfo(ff).native_code or resource.platform in ApkInfo(ff).native_code]
filelist = [ff for ff in filelist if resource.uiauto == ('com.arm.wlauto.uiauto' in ApkInfo(ff).package)]
if len(filelist) == 1:
return filelist[0]
elif not filelist:
return None
else:
raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
filelist,
resource.owner.name))
def get_owner_path(resource):
if resource.owner is NO_ONE:
return os.path.join(os.path.dirname(__base_filepath), 'common')
else:
return os.path.dirname(sys.modules[resource.owner.__module__].__file__)
| apache-2.0 |
thundernet8/WRGameVideos-Server | venv/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py | 475 | 9162 | """
lockfile.py - Platform-independent advisory file locks.
Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.
Usage:
>>> lock = LockFile('somefile')
>>> try:
... lock.acquire()
... except AlreadyLocked:
... print 'somefile', 'is locked already.'
... except LockFailed:
... print 'somefile', 'can\\'t be locked.'
... else:
... print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()
>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
... print lock.is_locked()
True
>>> print lock.is_locked()
False
>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
... lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False
Exceptions:
Error - base class for other exceptions
LockError - base class for all locking exceptions
AlreadyLocked - Another thread or process already holds the lock
LockFailed - Lock failed for some other reason
UnlockError - base class for all unlocking exceptions
AlreadyUnlocked - File was not locked.
NotMyLock - File was locked but not by the current thread/process
"""
from __future__ import absolute_import
import sys
import socket
import os
import threading
import time
import urllib
import warnings
import functools
# Work with PEP8 and non-PEP8 versions of threading module.
if not hasattr(threading, "current_thread"):
threading.current_thread = threading.currentThread
if not hasattr(threading.Thread, "get_name"):
threading.Thread.get_name = threading.Thread.getName
__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
'LinkLockFile', 'MkdirLockFile', 'SQLiteLockFile',
'LockBase', 'locked']
class Error(Exception):
"""
Base class for other exceptions.
>>> try:
... raise Error
... except Exception:
... pass
"""
pass
class LockError(Error):
"""
Base class for error arising from attempts to acquire the lock.
>>> try:
... raise LockError
... except Error:
... pass
"""
pass
class LockTimeout(LockError):
"""Raised when lock creation fails within a user-defined period of time.
>>> try:
... raise LockTimeout
... except LockError:
... pass
"""
pass
class AlreadyLocked(LockError):
"""Some other thread/process is locking the file.
>>> try:
... raise AlreadyLocked
... except LockError:
... pass
"""
pass
class LockFailed(LockError):
"""Lock file creation failed for some other reason.
>>> try:
... raise LockFailed
... except LockError:
... pass
"""
pass
class UnlockError(Error):
"""
Base class for errors arising from attempts to release the lock.
>>> try:
... raise UnlockError
... except Error:
... pass
"""
pass
class NotLocked(UnlockError):
"""Raised when an attempt is made to unlock an unlocked file.
>>> try:
... raise NotLocked
... except UnlockError:
... pass
"""
pass
class NotMyLock(UnlockError):
"""Raised when an attempt is made to unlock a file someone else locked.
>>> try:
... raise NotMyLock
... except UnlockError:
... pass
"""
pass
class LockBase:
"""Base class for platform-specific lock classes."""
def __init__(self, path, threaded=True, timeout=None):
"""
>>> lock = LockBase('somefile')
>>> lock = LockBase('somefile', threaded=False)
"""
self.path = path
self.lock_file = os.path.abspath(path) + ".lock"
self.hostname = socket.gethostname()
self.pid = os.getpid()
if threaded:
t = threading.current_thread()
# Thread objects in Python 2.4 and earlier do not have ident
            # attrs.  Work around that.
ident = getattr(t, "ident", hash(t))
self.tname = "-%x" % (ident & 0xffffffff)
else:
self.tname = ""
dirname = os.path.dirname(self.lock_file)
# unique name is mostly about the current process, but must
# also contain the path -- otherwise, two adjacent locked
# files conflict (one file gets locked, creating lock-file and
# unique file, the other one gets locked, creating lock-file
# and overwriting the already existing lock-file, then one
# gets unlocked, deleting both lock-file and unique file,
# finally the last lock errors out upon releasing.
self.unique_name = os.path.join(dirname,
"%s%s.%s%s" % (self.hostname,
self.tname,
self.pid,
hash(self.path)))
self.timeout = timeout
def acquire(self, timeout=None):
"""
Acquire the lock.
* If timeout is omitted (or None), wait forever trying to lock the
file.
* If timeout > 0, try to acquire the lock for that many seconds. If
the lock period expires and the file is still locked, raise
LockTimeout.
* If timeout <= 0, raise AlreadyLocked immediately if the file is
already locked.
"""
        raise NotImplementedError("implement in subclass")
def release(self):
"""
Release the lock.
If the file is not locked, raise NotLocked.
"""
        raise NotImplementedError("implement in subclass")
def is_locked(self):
"""
Tell whether or not the file is locked.
"""
        raise NotImplementedError("implement in subclass")
def i_am_locking(self):
"""
Return True if this object is locking the file.
"""
        raise NotImplementedError("implement in subclass")
def break_lock(self):
"""
Remove a lock. Useful if a locking thread failed to unlock.
"""
        raise NotImplementedError("implement in subclass")
def __enter__(self):
"""
Context manager support.
"""
self.acquire()
return self
def __exit__(self, *_exc):
"""
Context manager support.
"""
self.release()
def __repr__(self):
return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
self.path)
def _fl_helper(cls, mod, *args, **kwds):
warnings.warn("Import from %s module instead of lockfile package" % mod,
DeprecationWarning, stacklevel=2)
    # This is a bit funky, but it's only for a while.  The way the unit tests
# are constructed this function winds up as an unbound method, so it
# actually takes three args, not two. We want to toss out self.
if not isinstance(args[0], str):
# We are testing, avoid the first arg
args = args[1:]
if len(args) == 1 and not kwds:
kwds["threaded"] = True
return cls(*args, **kwds)
def LinkFileLock(*args, **kwds):
"""Factory function provided for backwards compatibility.
Do not use in new code. Instead, import LinkLockFile from the
lockfile.linklockfile module.
"""
from . import linklockfile
return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile",
*args, **kwds)
def MkdirFileLock(*args, **kwds):
"""Factory function provided for backwards compatibility.
Do not use in new code. Instead, import MkdirLockFile from the
lockfile.mkdirlockfile module.
"""
from . import mkdirlockfile
return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile",
*args, **kwds)
def SQLiteFileLock(*args, **kwds):
"""Factory function provided for backwards compatibility.
Do not use in new code. Instead, import SQLiteLockFile from the
lockfile.mkdirlockfile module.
"""
from . import sqlitelockfile
return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",
*args, **kwds)
def locked(path, timeout=None):
"""Decorator which enables locks for decorated function.
Arguments:
- path: path for lockfile.
- timeout (optional): Timeout for acquiring lock.
Usage:
@locked('/var/run/myname', timeout=0)
def myname(...):
...
"""
def decor(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
lock = FileLock(path, timeout=timeout)
lock.acquire()
try:
return func(*args, **kwargs)
finally:
lock.release()
return wrapper
return decor
if hasattr(os, "link"):
from . import linklockfile as _llf
LockFile = _llf.LinkLockFile
else:
from . import mkdirlockfile as _mlf
LockFile = _mlf.MkdirLockFile
FileLock = LockFile
| gpl-2.0 |
peterbraden/tensorflow | tensorflow/python/kernel_tests/sparse_matmul_op_test.py | 9 | 3990 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.matmul."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
def RandMatrix(rows, cols, tr):
if tr:
rows, cols = cols, rows
return (np.clip(np.random.uniform(low=-100.0, high=100.0, size=rows * cols),
0, 100) / 100).reshape([rows, cols]).astype(np.float32)
class SparseMatMulTest(tf.test.TestCase):
def _testCpuMatmul(self, x, y, tr_a=False, tr_b=False,
sp_a=True, sp_b=False):
x_mat = np.matrix(x)
if tr_a:
x_mat = np.transpose(x_mat)
y_mat = np.matrix(y)
if tr_b:
y_mat = np.transpose(y_mat)
np_ans = x_mat * y_mat
with self.test_session(use_gpu=False):
tf_ans = tf.matmul(x, y,
transpose_a=tr_a, transpose_b=tr_b,
a_is_sparse=sp_a,
b_is_sparse=sp_b)
out = tf_ans.eval()
self.assertAllClose(np_ans, out, rtol=1e-4, atol=1e-4)
self.assertShapeEqual(np_ans, tf_ans)
def testFloatBasic(self):
x = np.arange(0., 4.).reshape([4, 1]).astype(np.float32)
y = np.arange(-1., 1.).reshape([1, 2]).astype(np.float32)
self._testCpuMatmul(x, y)
# Tests setting one dimension to be a high value.
def testFloatLarge(self):
r1 = np.random.randint(6000, 20000)
r2 = np.random.randint(1, 10)
r3 = np.random.randint(1, 10)
for m, k, n in [(r1, r2, r3),
(r2, r1, r3),
(r2, r3, r1)]:
x = RandMatrix(m, k, False)
y = RandMatrix(k, n, False)
self._testCpuMatmul(x, y)
self._testCpuMatmul(x, y, sp_a=False, sp_b=True)
# Tests random sized matrices.
def testFloatRandom(self):
for _ in range(10):
for tr_a in [True, False]:
for tr_b in [True, False]:
for sp_a in [True, False]:
for sp_b in [True, False]:
n, k, m = np.random.randint(1, 100, size=3)
x = RandMatrix(n, k, tr_a)
y = RandMatrix(k, m, tr_b)
self._testCpuMatmul(x, y, tr_a, tr_b, sp_a, sp_b)
class MatMulGradientTest(tf.test.TestCase):
def _testGradients(self, tr_a, tr_b, sp_a, sp_b, name):
with self.test_session():
a = tf.constant(RandMatrix(3, 2, tr_a), dtype=tf.float32)
b = tf.constant(RandMatrix(2, 4, tr_b), dtype=tf.float32)
m = tf.matmul(a, b,
name=name,
transpose_a=tr_a,
transpose_b=tr_b,
a_is_sparse=sp_a,
b_is_sparse=sp_b)
err = (tf.test.compute_gradient_error(a, [2, 3]
if tr_a else [3, 2], m, [3, 4]) +
tf.test.compute_gradient_error(b, [4, 2]
if tr_b else [2, 4], m, [3, 4]))
print("sparse_matmul gradient err = ", err)
self.assertLess(err, 1e-3)
def testGradientInput(self):
for tr_a in [True, False]:
for tr_b in [True, False]:
for sp_a in [True, False]:
for sp_b in [True, False]:
name = "sparse_matmul_%s_%s_%s_%s" % (tr_a, tr_b, sp_a, sp_b)
self._testGradients(tr_a, tr_b, sp_a, sp_b, name)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
pythonalliance/uno2bot | errors.py | 1 | 1077 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Telegram bot to play UNO in group chats
# Copyright (c) 2016 Jannes Höke <uno@jhoeke.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class NoGameInChatError(Exception):
pass
class AlreadyJoinedError(Exception):
pass
class LobbyClosedError(Exception):
pass
class NotEnoughPlayersError(Exception):
pass
class DeckEmptyError(Exception):
pass
class PlayerLeftError(Exception):
pass
| agpl-3.0 |
partofthething/home-assistant | homeassistant/components/zha/core/const.py | 1 | 10279 | """All constants related to the ZHA component."""
import enum
import logging
from typing import List
import bellows.zigbee.application
from zigpy.config import CONF_DEVICE_PATH # noqa: F401 # pylint: disable=unused-import
import zigpy_cc.zigbee.application
import zigpy_deconz.zigbee.application
import zigpy_xbee.zigbee.application
import zigpy_zigate.zigbee.application
import zigpy_znp.zigbee.application
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate import DOMAIN as CLIMATE
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.number import DOMAIN as NUMBER
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from .typing import CALLABLE_T
ATTR_ARGS = "args"
ATTR_ATTRIBUTE = "attribute"
ATTR_ATTRIBUTE_ID = "attribute_id"
ATTR_ATTRIBUTE_NAME = "attribute_name"
ATTR_AVAILABLE = "available"
ATTR_CLUSTER_ID = "cluster_id"
ATTR_CLUSTER_TYPE = "cluster_type"
ATTR_COMMAND_TYPE = "command_type"
ATTR_DEVICE_IEEE = "device_ieee"
ATTR_DEVICE_TYPE = "device_type"
ATTR_ENDPOINTS = "endpoints"
ATTR_ENDPOINT_NAMES = "endpoint_names"
ATTR_ENDPOINT_ID = "endpoint_id"
ATTR_IEEE = "ieee"
ATTR_IN_CLUSTERS = "in_clusters"
ATTR_LAST_SEEN = "last_seen"
ATTR_LEVEL = "level"
ATTR_LQI = "lqi"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MANUFACTURER_CODE = "manufacturer_code"
ATTR_MEMBERS = "members"
ATTR_MODEL = "model"
ATTR_NEIGHBORS = "neighbors"
ATTR_NODE_DESCRIPTOR = "node_descriptor"
ATTR_NWK = "nwk"
ATTR_OUT_CLUSTERS = "out_clusters"
ATTR_POWER_SOURCE = "power_source"
ATTR_PROFILE_ID = "profile_id"
ATTR_QUIRK_APPLIED = "quirk_applied"
ATTR_QUIRK_CLASS = "quirk_class"
ATTR_RSSI = "rssi"
ATTR_SIGNATURE = "signature"
ATTR_TYPE = "type"
ATTR_UNIQUE_ID = "unique_id"
ATTR_VALUE = "value"
ATTR_WARNING_DEVICE_DURATION = "duration"
ATTR_WARNING_DEVICE_MODE = "mode"
ATTR_WARNING_DEVICE_STROBE = "strobe"
ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle"
ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity"
BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000]
BINDINGS = "bindings"
CHANNEL_ACCELEROMETER = "accelerometer"
CHANNEL_ANALOG_INPUT = "analog_input"
CHANNEL_ANALOG_OUTPUT = "analog_output"
CHANNEL_ATTRIBUTE = "attribute"
CHANNEL_BASIC = "basic"
CHANNEL_COLOR = "light_color"
CHANNEL_COVER = "window_covering"
CHANNEL_DOORLOCK = "door_lock"
CHANNEL_ELECTRICAL_MEASUREMENT = "electrical_measurement"
CHANNEL_EVENT_RELAY = "event_relay"
CHANNEL_FAN = "fan"
CHANNEL_HUMIDITY = "humidity"
CHANNEL_IAS_WD = "ias_wd"
CHANNEL_IDENTIFY = "identify"
CHANNEL_ILLUMINANCE = "illuminance"
CHANNEL_LEVEL = ATTR_LEVEL
CHANNEL_MULTISTATE_INPUT = "multistate_input"
CHANNEL_OCCUPANCY = "occupancy"
CHANNEL_ON_OFF = "on_off"
CHANNEL_POWER_CONFIGURATION = "power"
CHANNEL_PRESSURE = "pressure"
CHANNEL_SHADE = "shade"
CHANNEL_SMARTENERGY_METERING = "smartenergy_metering"
CHANNEL_TEMPERATURE = "temperature"
CHANNEL_THERMOSTAT = "thermostat"
CHANNEL_ZDO = "zdo"
CHANNEL_ZONE = ZONE = "ias_zone"
CLUSTER_COMMAND_SERVER = "server"
CLUSTER_COMMANDS_CLIENT = "client_commands"
CLUSTER_COMMANDS_SERVER = "server_commands"
CLUSTER_TYPE_IN = "in"
CLUSTER_TYPE_OUT = "out"
PLATFORMS = (
BINARY_SENSOR,
CLIMATE,
COVER,
DEVICE_TRACKER,
FAN,
LIGHT,
LOCK,
NUMBER,
SENSOR,
SWITCH,
)
CONF_BAUDRATE = "baudrate"
CONF_DATABASE = "database_path"
CONF_DEVICE_CONFIG = "device_config"
CONF_ENABLE_QUIRKS = "enable_quirks"
CONF_FLOWCONTROL = "flow_control"
CONF_RADIO_TYPE = "radio_type"
CONF_USB_PATH = "usb_path"
CONF_ZIGPY = "zigpy_config"
DATA_DEVICE_CONFIG = "zha_device_config"
DATA_ZHA = "zha"
DATA_ZHA_CONFIG = "config"
DATA_ZHA_BRIDGE_ID = "zha_bridge_id"
DATA_ZHA_CORE_EVENTS = "zha_core_events"
DATA_ZHA_DISPATCHERS = "zha_dispatchers"
DATA_ZHA_GATEWAY = "zha_gateway"
DATA_ZHA_PLATFORM_LOADED = "platform_loaded"
DEBUG_COMP_BELLOWS = "bellows"
DEBUG_COMP_ZHA = "homeassistant.components.zha"
DEBUG_COMP_ZIGPY = "zigpy"
DEBUG_COMP_ZIGPY_CC = "zigpy_cc"
DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz"
DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee"
DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate"
DEBUG_LEVEL_CURRENT = "current"
DEBUG_LEVEL_ORIGINAL = "original"
DEBUG_LEVELS = {
DEBUG_COMP_BELLOWS: logging.DEBUG,
DEBUG_COMP_ZHA: logging.DEBUG,
DEBUG_COMP_ZIGPY: logging.DEBUG,
DEBUG_COMP_ZIGPY_CC: logging.DEBUG,
DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG,
DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG,
DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG,
}
DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY]
DEFAULT_RADIO_TYPE = "ezsp"
DEFAULT_BAUDRATE = 57600
DEFAULT_DATABASE_NAME = "zigbee.db"
DEVICE_PAIRING_STATUS = "pairing_status"
DISCOVERY_KEY = "zha_discovery_info"
DOMAIN = "zha"
GROUP_ID = "group_id"
GROUP_IDS = "group_ids"
GROUP_NAME = "group_name"
MFG_CLUSTER_ID_START = 0xFC00
POWER_MAINS_POWERED = "Mains"
POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown"
class RadioType(enum.Enum):
# pylint: disable=invalid-name
"""Possible options for radio type."""
znp = (
"ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_znp.zigbee.application.ControllerApplication,
)
ezsp = (
"EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis",
bellows.zigbee.application.ControllerApplication,
)
deconz = (
"deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II",
zigpy_deconz.zigbee.application.ControllerApplication,
)
ti_cc = (
"Legacy TI_CC = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_cc.zigbee.application.ControllerApplication,
)
zigate = (
"ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi",
zigpy_zigate.zigbee.application.ControllerApplication,
)
xbee = (
"XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3",
zigpy_xbee.zigbee.application.ControllerApplication,
)
@classmethod
def list(cls) -> List[str]:
"""Return a list of descriptions."""
return [e.description for e in RadioType]
@classmethod
def get_by_description(cls, description: str) -> str:
"""Get radio by description."""
for radio in cls:
if radio.description == description:
return radio.name
raise ValueError
def __init__(self, description: str, controller_cls: CALLABLE_T):
"""Init instance."""
self._desc = description
self._ctrl_cls = controller_cls
@property
def controller(self) -> CALLABLE_T:
"""Return controller class."""
return self._ctrl_cls
@property
def description(self) -> str:
"""Return radio type description."""
return self._desc
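# Illustrative use of RadioType (a sketch based only on the definitions above; the
# chosen member is arbitrary): list() yields the human-readable descriptions,
# get_by_description() maps a description back to the member name, and .controller
# gives the zigpy ControllerApplication class to instantiate.
#
#   name = RadioType.get_by_description(RadioType.ezsp.description)  # -> "ezsp"
#   controller_cls = RadioType[name].controller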
REPORT_CONFIG_MAX_INT = 900
REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800
REPORT_CONFIG_MIN_INT = 30
REPORT_CONFIG_MIN_INT_ASAP = 1
REPORT_CONFIG_MIN_INT_IMMEDIATE = 0
REPORT_CONFIG_MIN_INT_OP = 5
REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600
REPORT_CONFIG_RPT_CHANGE = 1
REPORT_CONFIG_DEFAULT = (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_ASAP = (
REPORT_CONFIG_MIN_INT_ASAP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_BATTERY_SAVE = (
REPORT_CONFIG_MIN_INT_BATTERY_SAVE,
REPORT_CONFIG_MAX_INT_BATTERY_SAVE,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_IMMEDIATE = (
REPORT_CONFIG_MIN_INT_IMMEDIATE,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_OP = (
REPORT_CONFIG_MIN_INT_OP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
SENSOR_ACCELERATION = "acceleration"
SENSOR_BATTERY = "battery"
SENSOR_ELECTRICAL_MEASUREMENT = CHANNEL_ELECTRICAL_MEASUREMENT
SENSOR_GENERIC = "generic"
SENSOR_HUMIDITY = CHANNEL_HUMIDITY
SENSOR_ILLUMINANCE = CHANNEL_ILLUMINANCE
SENSOR_METERING = "metering"
SENSOR_OCCUPANCY = CHANNEL_OCCUPANCY
SENSOR_OPENING = "opening"
SENSOR_PRESSURE = CHANNEL_PRESSURE
SENSOR_TEMPERATURE = CHANNEL_TEMPERATURE
SENSOR_TYPE = "sensor_type"
SIGNAL_ADD_ENTITIES = "zha_add_new_entities"
SIGNAL_ATTR_UPDATED = "attribute_updated"
SIGNAL_AVAILABLE = "available"
SIGNAL_MOVE_LEVEL = "move_level"
SIGNAL_REMOVE = "remove"
SIGNAL_SET_LEVEL = "set_level"
SIGNAL_STATE_ATTR = "update_state_attribute"
SIGNAL_UPDATE_DEVICE = "{}_zha_update_device"
SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed"
SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change"
UNKNOWN = "unknown"
UNKNOWN_MANUFACTURER = "unk_manufacturer"
UNKNOWN_MODEL = "unk_model"
WARNING_DEVICE_MODE_STOP = 0
WARNING_DEVICE_MODE_BURGLAR = 1
WARNING_DEVICE_MODE_FIRE = 2
WARNING_DEVICE_MODE_EMERGENCY = 3
WARNING_DEVICE_MODE_POLICE_PANIC = 4
WARNING_DEVICE_MODE_FIRE_PANIC = 5
WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6
WARNING_DEVICE_STROBE_NO = 0
WARNING_DEVICE_STROBE_YES = 1
WARNING_DEVICE_SOUND_LOW = 0
WARNING_DEVICE_SOUND_MEDIUM = 1
WARNING_DEVICE_SOUND_HIGH = 2
WARNING_DEVICE_SOUND_VERY_HIGH = 3
WARNING_DEVICE_STROBE_LOW = 0x00
WARNING_DEVICE_STROBE_MEDIUM = 0x01
WARNING_DEVICE_STROBE_HIGH = 0x02
WARNING_DEVICE_STROBE_VERY_HIGH = 0x03
WARNING_DEVICE_SQUAWK_MODE_ARMED = 0
WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1
ZHA_DISCOVERY_NEW = "zha_discovery_new_{}"
ZHA_GW_MSG = "zha_gateway_message"
ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized"
ZHA_GW_MSG_DEVICE_INFO = "device_info"
ZHA_GW_MSG_DEVICE_JOINED = "device_joined"
ZHA_GW_MSG_DEVICE_REMOVED = "device_removed"
ZHA_GW_MSG_GROUP_ADDED = "group_added"
ZHA_GW_MSG_GROUP_INFO = "group_info"
ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added"
ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed"
ZHA_GW_MSG_GROUP_REMOVED = "group_removed"
ZHA_GW_MSG_LOG_ENTRY = "log_entry"
ZHA_GW_MSG_LOG_OUTPUT = "log_output"
ZHA_GW_MSG_RAW_INIT = "raw_device_initialized"
EFFECT_BLINK = 0x00
EFFECT_BREATHE = 0x01
EFFECT_OKAY = 0x02
EFFECT_DEFAULT_VARIANT = 0x00
| mit |
bsmedberg/socorro | webapp-django/bin/linting.py | 2 | 1511 | #!/usr/bin/env python
"""
Use like this:
find somedir | xargs check.py | python linting.py
or:
check.py somedir | python linting.py
or:
git ls-files somedir | python linting.py
"""
import os
import sys
# Enter any part of a warning that we deem OK.
# It can be a pep8 warning error code or any other part of a string.
#
# NOTE! Be as specific as you possibly can!
# Only blanket whole files if you desperately have to
#
EXCEPTIONS = (
    # has an exceptional use of `...import *`
'settings/base.py:4:',
# has a well known `...import *` trick that we like
'settings/__init__.py',
# all downloaded libs to be ignored
'/js/lib/',
# See https://bugzilla.mozilla.org/show_bug.cgi?id=997270
'/js/jquery/',
'/js/flot',
'/js/timeago/',
'jquery.tablesorter.min.js',
'async-local-storage-with-Promise.min.js',
'underscore-min.js',
'moment.min.js',
'jquery.metadata.js',
)
EXTENSIONS_ONLY = (
'.py',
# commented out until we clean up our .js files
# See https://bugzilla.mozilla.org/show_bug.cgi?id=997272
# '.js'
)
def main():
errors = 0
for line in sys.stdin:
if not line.strip():
continue
_, ext = os.path.splitext(line.split(':')[0])
if ext not in EXTENSIONS_ONLY:
continue
if [f for f in EXCEPTIONS if f in line]:
continue
errors += 1
sys.stderr.write(line)
return errors
if __name__ == '__main__':
sys.exit(main())
| mpl-2.0 |
ericholscher/django-tastypie | tastypie/validation.py | 47 | 3685 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.forms import ModelForm
from django.forms.models import model_to_dict
class Validation(object):
"""
A basic validation stub that does no validation.
"""
def __init__(self, **kwargs):
pass
def is_valid(self, bundle, request=None):
"""
Performs a check on the data within the bundle (and optionally the
request) to ensure it is valid.
Should return a dictionary of error messages. If the dictionary has
zero items, the data is considered valid. If there are errors, keys
in the dictionary should be field names and the values should be a list
of errors, even if there is only one.
"""
return {}
class FormValidation(Validation):
"""
A validation class that uses a Django ``Form`` to validate the data.
This class **DOES NOT** alter the data sent, only verifies it. If you
want to alter the data, please use the ``CleanedDataFormValidation`` class
instead.
This class requires a ``form_class`` argument, which should be a Django
``Form`` (or ``ModelForm``, though ``save`` will never be called) class.
This form will be used to validate the data in ``bundle.data``.
"""
def __init__(self, **kwargs):
if not 'form_class' in kwargs:
raise ImproperlyConfigured("You must provide a 'form_class' to 'FormValidation' classes.")
self.form_class = kwargs.pop('form_class')
super(FormValidation, self).__init__(**kwargs)
def form_args(self, bundle):
data = bundle.data
# Ensure we get a bound Form, regardless of the state of the bundle.
if data is None:
data = {}
kwargs = {'data': {}}
if hasattr(bundle.obj, 'pk'):
if issubclass(self.form_class, ModelForm):
kwargs['instance'] = bundle.obj
kwargs['data'] = model_to_dict(bundle.obj)
kwargs['data'].update(data)
return kwargs
def is_valid(self, bundle, request=None):
"""
        Performs a check on ``bundle.data`` to ensure it is valid.
If the form is valid, an empty list (all valid) will be returned. If
not, a list of errors will be returned.
"""
form = self.form_class(**self.form_args(bundle))
if form.is_valid():
return {}
# The data is invalid. Let's collect all the error messages & return
# them.
return form.errors
class CleanedDataFormValidation(FormValidation):
"""
A validation class that uses a Django ``Form`` to validate the data.
This class **ALTERS** data sent by the user!!!
This class requires a ``form_class`` argument, which should be a Django
``Form`` (or ``ModelForm``, though ``save`` will never be called) class.
This form will be used to validate the data in ``bundle.data``.
"""
def is_valid(self, bundle, request=None):
"""
        Checks ``bundle.data`` to ensure it is valid & replaces it with the
cleaned results.
If the form is valid, an empty list (all valid) will be returned. If
not, a list of errors will be returned.
"""
form = self.form_class(**self.form_args(bundle))
if form.is_valid():
# We're different here & relying on having a reference to the same
# bundle the rest of the process is using.
bundle.data = form.cleaned_data
return {}
# The data is invalid. Let's collect all the error messages & return
# them.
return form.errors
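# Illustrative wiring (a sketch; ``Note``, ``NoteForm`` and ``NoteResource`` are
# hypothetical names, not part of this module). A validation instance is attached to a
# resource via its ``Meta`` class; tastypie then calls ``is_valid`` on each bundle:
#
#   from django import forms
#   from tastypie.resources import ModelResource
#   from tastypie.validation import FormValidation
#
#   class NoteForm(forms.Form):
#       title = forms.CharField(max_length=100)
#
#   class NoteResource(ModelResource):
#       class Meta:
#           queryset = Note.objects.all()
#           validation = FormValidation(form_class=NoteForm)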
| bsd-3-clause |
suninsky/ReceiptOCR | Python/server/lib/python2.7/site-packages/selenium/webdriver/safari/service.py | 31 | 1854 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from selenium.webdriver.common import service, utils
from subprocess import PIPE
class Service(service.Service):
"""
Object that manages the starting and stopping of the SafariDriver
"""
def __init__(self, executable_path, port=0, quiet=False):
"""
Creates a new instance of the Service
:Args:
- executable_path : Path to the SafariDriver
- port : Port the service is running on """
if not os.path.exists(executable_path):
raise Exception("SafariDriver requires Safari 10 on OSX El Capitan or greater")
if port == 0:
port = utils.free_port()
self.quiet = quiet
log = PIPE
if quiet:
log = open(os.devnull, 'w')
service.Service.__init__(self, executable_path, port, log)
def command_line_args(self):
return ["-p", "%s" % self.port]
@property
def service_url(self):
"""
Gets the url of the SafariDriver Service
"""
return "http://localhost:%d" % self.port
| mit |
G33KS44n/mysql-5.6 | xtrabackup/test/kewpie/percona_tests/xtrabackup_disabled/bug766607_test.py | 24 | 8243 | #! /usr/bin/env python
# -*- mode: python; indent-tabs-mode: nil; -*-
# vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
#
# Copyright (C) 2011 Patrick Crews
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import shutil
from lib.util.mysqlBaseTestCase import mysqlBaseTestCase
server_requirements = [['--innodb-file-per-table']]
servers = []
server_manager = None
test_executor = None
# we explicitly use the --no-timestamp option
# here. We will be using a generic / vanilla backup dir
backup_path = None
class basicTest(mysqlBaseTestCase):
def setUp(self):
master_server = servers[0] # assumption that this is 'master'
backup_path = os.path.join(master_server.vardir, 'full_backup')
inc_backup_path = os.path.join(master_server.vardir, 'inc_backup')
# remove backup paths
for del_path in [backup_path, inc_backup_path]:
if os.path.exists(del_path):
shutil.rmtree(del_path)
def load_table(self, table_name, row_count, server):
queries = []
for i in range(row_count):
queries.append("INSERT INTO %s VALUES (%d, %d)" %(table_name,i, row_count))
retcode, result = self.execute_queries(queries, server)
self.assertEqual(retcode, 0, msg=result)
def test_ib_incremental(self):
self.servers = servers
logging = test_executor.logging
if servers[0].type not in ['mysql','percona']:
return
else:
innobackupex = test_executor.system_manager.innobackupex_path
xtrabackup = test_executor.system_manager.xtrabackup_path
master_server = servers[0] # assumption that this is 'master'
backup_path = os.path.join(master_server.vardir, 'full_backup')
inc_backup_path = os.path.join(master_server.vardir, 'inc_backup')
output_path = os.path.join(master_server.vardir, 'innobackupex.out')
exec_path = os.path.dirname(innobackupex)
table_name = "`test`"
# populate our server with a test bed
queries = ["DROP TABLE IF EXISTS %s" %(table_name)
,("CREATE TABLE %s "
"(`a` int(11) DEFAULT NULL, "
"`number` int(11) DEFAULT NULL) "
" ENGINE=InnoDB DEFAULT CHARSET=latin1"
%(table_name)
)
]
retcode, result = self.execute_queries(queries, master_server)
self.assertEqual(retcode, 0, msg = result)
row_count = 100
self.load_table(table_name, row_count, master_server)
# take a backup
cmd = [ xtrabackup
, "--defaults-file=%s" %master_server.cnf_file
, "--datadir=%s" %master_server.datadir
, "--backup"
, "--target-dir=%s" %backup_path
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
queries = [ "CREATE TABLE t(a int) ENGINE=InnoDB"
, "INSERT INTO t VALUES (1), (2), (3)"
, "FLUSH LOGS"
]
retcode, result = self.execute_queries(queries, master_server)
self.assertEqual(retcode,0,msg=result)
# stop / restart the server
master_server.stop()
master_server.start()
# Take an incremental backup
cmd = [ xtrabackup
, "--defaults-file=%s" %master_server.cnf_file
, "--datadir=%s" %master_server.datadir
, "--backup"
, "--target-dir=%s" %inc_backup_path
, "--incremental-basedir=%s" %backup_path
]
cmd = " ".join(cmd)
logging.test_debug(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertEqual(retcode,0,output)
# shutdown our server
master_server.stop()
# prepare our main backup
cmd = [ xtrabackup
, "--prepare"
, "--apply-log-only"
, "--datadir=%s" %master_server.datadir
, "--use-memory=500M"
, "--target-dir=%s" %backup_path
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
# prepare our incremental backup
cmd = [ xtrabackup
, "--prepare"
, "--apply-log-only"
, "--datadir=%s" %master_server.datadir
, "--use-memory=500M"
, "--target-dir=%s" %backup_path
, "--incremental-dir=%s" %(inc_backup_path)
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
# do final prepare on main backup
cmd = [ xtrabackup
, "--prepare"
, "--datadir=%s" %master_server.datadir
, "--use-memory=500M"
, "--target-dir=%s" %backup_path
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
# copy our data files back
for root, dirs, files in os.walk(backup_path):
if files:
file_info = root.split(backup_path)[1]
for file_name in files:
# We do a quick check to make sure
# no names start with '/' as os.path
# throws a hissy when it sees such things
if file_info.startswith('/'):
file_info = file_info[1:]
if file_name.startswith('/'):
file_name = file_name[1:]
to_path = os.path.join(master_server.datadir
, file_info
, file_name)
new_dir = os.path.dirname(to_path)
try:
if not os.path.exists(new_dir):
os.makedirs(new_dir)
except OSError, e:
logging.error("Could not create directory: %s | %s" %(new_dir, e))
try:
shutil.copy(os.path.join(root,file_name),to_path)
except IOError, e:
logging.error( "ERROR: Could not copy file: %s | %s" %(file_name, e))
# restart server (and ensure it doesn't crash)
master_server.start()
self.assertTrue(master_server.status==1, 'Server failed restart from restored datadir...')
# Get a checksum for our table
query = "SELECT * FROM t"
retcode, result = self.execute_query(query, master_server)
self.assertEqual(retcode, 0, msg=result)
expected_result= ((1L,), (2L,), (3L,))
self.assertEqual(result, expected_result, msg = "%s || %s" %(expected_result, result))
| gpl-2.0 |
cnits/CnitSymfony | vendor/doctrine/orm/docs/en/conf.py | 2448 | 6497 | # -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
| mit |
JijonHyuni/HyperKernel-JB | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display how packets are processed and how long each processing stage takes.
# It helps us to investigate networking or network device behavior.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only things related to the specified device
# debug: run in debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
# and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display the processing of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
# check if this hunk should be showed
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, rc ,dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
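# A freed skb can still be tracked in any of the three lists handled below:
# drop it from tx_queue_list if it was never handed to the driver, time-stamp
# and retire it to tx_free_list if it was already transmitted, or mark the
# matching rx record as finally handled by kfree_skb.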
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
| gpl-2.0 |
badloop/SickRage | lib/tornado/concurrent.py | 35 | 18000 | #!/usr/bin/env python
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities for working with threads and ``Futures``.
``Futures`` are a pattern for concurrent programming introduced in
Python 3.2 in the `concurrent.futures` package (this package has also
been backported to older versions of Python and can be installed with
``pip install futures``). Tornado will use `concurrent.futures.Future` if
it is available; otherwise it will use a compatible class defined in this
module.
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import platform
import traceback
import sys
from tornado.log import app_log
from tornado.stack_context import ExceptionStackContext, wrap
from tornado.util import raise_exc_info, ArgReplacer
try:
from concurrent import futures
except ImportError:
futures = None
# Can the garbage collector handle cycles that include __del__ methods?
# This is true in cpython beginning with version 3.4 (PEP 442).
_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
sys.version_info >= (3, 4))
class ReturnValueIgnoredError(Exception):
pass
# This class and associated code in the future object is derived
# from the Trollius project, a backport of asyncio to Python 2.x - 3.x
class _TracebackLogger(object):
"""Helper to log a traceback upon destruction if not cleared.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _TracebackLogger, and
then the _TracebackLogger would be included in a cycle, which is
what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
activate() is called, which call is delayed until after all the
Future's callbacks have run. Since usually a Future has at least
one callback (typically set by 'yield From') and usually that
    callback extracts the result or exception, thereby removing the need to
format the exception.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ('exc_info', 'formatted_tb')
def __init__(self, exc_info):
self.exc_info = exc_info
self.formatted_tb = None
def activate(self):
exc_info = self.exc_info
if exc_info is not None:
self.exc_info = None
self.formatted_tb = traceback.format_exception(*exc_info)
def clear(self):
self.exc_info = None
self.formatted_tb = None
def __del__(self):
if self.formatted_tb:
app_log.error('Future exception was never retrieved: %s',
''.join(self.formatted_tb).rstrip())
class Future(object):
"""Placeholder for an asynchronous result.
A ``Future`` encapsulates the result of an asynchronous
operation. In synchronous applications ``Futures`` are used
to wait for the result from a thread or process pool; in
Tornado they are normally used with `.IOLoop.add_future` or by
yielding them in a `.gen.coroutine`.
`tornado.concurrent.Future` is similar to
`concurrent.futures.Future`, but not thread-safe (and therefore
faster for use with single-threaded event loops).
In addition to ``exception`` and ``set_exception``, methods ``exc_info``
and ``set_exc_info`` are supported to capture tracebacks in Python 2.
The traceback is automatically available in Python 3, but in the
Python 2 futures backport this information is discarded.
This functionality was previously available in a separate class
``TracebackFuture``, which is now a deprecated alias for this class.
.. versionchanged:: 4.0
`tornado.concurrent.Future` is always a thread-unsafe ``Future``
with support for the ``exc_info`` methods. Previously it would
be an alias for the thread-safe `concurrent.futures.Future`
if that package was available and fall back to the thread-unsafe
implementation if it was not.
.. versionchanged:: 4.1
If a `.Future` contains an error but that error is never observed
(by calling ``result()``, ``exception()``, or ``exc_info()``),
a stack trace will be logged when the `.Future` is garbage collected.
This normally indicates an error in the application, but in cases
where it results in undesired logging it may be necessary to
suppress the logging by ensuring that the exception is observed:
``f.add_done_callback(lambda f: f.exception())``.
"""
def __init__(self):
self._done = False
self._result = None
self._exc_info = None
self._log_traceback = False # Used for Python >= 3.4
self._tb_logger = None # Used for Python <= 3.3
self._callbacks = []
def cancel(self):
"""Cancel the operation, if possible.
Tornado ``Futures`` do not support cancellation, so this method always
returns False.
"""
return False
def cancelled(self):
"""Returns True if the operation has been cancelled.
Tornado ``Futures`` do not support cancellation, so this method
always returns False.
"""
return False
def running(self):
"""Returns True if this operation is currently running."""
return not self._done
def done(self):
"""Returns True if the future has finished running."""
return self._done
def _clear_tb_log(self):
self._log_traceback = False
if self._tb_logger is not None:
self._tb_logger.clear()
self._tb_logger = None
def result(self, timeout=None):
"""If the operation succeeded, return its result. If it failed,
re-raise its exception.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._result is not None:
return self._result
if self._exc_info is not None:
raise_exc_info(self._exc_info)
self._check_done()
return self._result
def exception(self, timeout=None):
"""If the operation raised an exception, return the `Exception`
object. Otherwise returns None.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._exc_info is not None:
return self._exc_info[1]
else:
self._check_done()
return None
def add_done_callback(self, fn):
"""Attaches the given callback to the `Future`.
It will be invoked with the `Future` as its argument when the Future
has finished running and its result is available. In Tornado
consider using `.IOLoop.add_future` instead of calling
`add_done_callback` directly.
"""
if self._done:
fn(self)
else:
self._callbacks.append(fn)
def set_result(self, result):
"""Sets the result of a ``Future``.
It is undefined to call any of the ``set`` methods more than once
on the same object.
"""
self._result = result
self._set_done()
def set_exception(self, exception):
"""Sets the exception of a ``Future.``"""
self.set_exc_info(
(exception.__class__,
exception,
getattr(exception, '__traceback__', None)))
def exc_info(self):
"""Returns a tuple in the same format as `sys.exc_info` or None.
.. versionadded:: 4.0
"""
self._clear_tb_log()
return self._exc_info
def set_exc_info(self, exc_info):
"""Sets the exception information of a ``Future.``
Preserves tracebacks on Python 2.
.. versionadded:: 4.0
"""
self._exc_info = exc_info
self._log_traceback = True
if not _GC_CYCLE_FINALIZERS:
self._tb_logger = _TracebackLogger(exc_info)
try:
self._set_done()
finally:
# Activate the logger after all callbacks have had a
# chance to call result() or exception().
if self._log_traceback and self._tb_logger is not None:
self._tb_logger.activate()
self._exc_info = exc_info
def _check_done(self):
if not self._done:
raise Exception("DummyFuture does not support blocking for results")
def _set_done(self):
self._done = True
for cb in self._callbacks:
try:
cb(self)
except Exception:
app_log.exception('Exception in callback %r for %r',
cb, self)
self._callbacks = None
# On Python 3.3 or older, objects with a destructor part of a reference
# cycle are never destroyed. It's no longer the case on Python 3.4 thanks to
# the PEP 442.
if _GC_CYCLE_FINALIZERS:
def __del__(self):
if not self._log_traceback:
# set_exception() was not called, or result() or exception()
# has consumed the exception
return
tb = traceback.format_exception(*self._exc_info)
app_log.error('Future %r exception was never retrieved: %s',
self, ''.join(tb).rstrip())
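# Hedged usage sketch for the Future above (illustrative only, not part of the
# upstream module): a callback attached before completion fires as soon as
# set_result() is called.
#
#     def on_done(fut):
#         print("got:", fut.result())
#
#     fut = Future()
#     fut.add_done_callback(on_done)
#     fut.set_result(42)                  # invokes on_done -> "got: 42"
#     assert fut.done() and not fut.cancelled()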
TracebackFuture = Future
if futures is None:
FUTURES = Future
else:
FUTURES = (futures.Future, Future)
def is_future(x):
return isinstance(x, FUTURES)
class DummyExecutor(object):
def submit(self, fn, *args, **kwargs):
future = TracebackFuture()
try:
future.set_result(fn(*args, **kwargs))
except Exception:
future.set_exc_info(sys.exc_info())
return future
def shutdown(self, wait=True):
pass
dummy_executor = DummyExecutor()
def run_on_executor(*args, **kwargs):
"""Decorator to run a synchronous method asynchronously on an executor.
The decorated method may be called with a ``callback`` keyword
argument and returns a future.
The `.IOLoop` and executor to be used are determined by the ``io_loop``
and ``executor`` attributes of ``self``. To use different attributes,
pass keyword arguments to the decorator::
@run_on_executor(executor='_thread_pool')
def foo(self):
pass
.. versionchanged:: 4.2
Added keyword arguments to use alternative attributes.
"""
def run_on_executor_decorator(fn):
executor = kwargs.get("executor", "executor")
io_loop = kwargs.get("io_loop", "io_loop")
@functools.wraps(fn)
def wrapper(self, *args, **kwargs):
callback = kwargs.pop("callback", None)
future = getattr(self, executor).submit(fn, self, *args, **kwargs)
if callback:
getattr(self, io_loop).add_future(
future, lambda future: callback(future.result()))
return future
return wrapper
if args and kwargs:
raise ValueError("cannot combine positional and keyword args")
if len(args) == 1:
return run_on_executor_decorator(args[0])
elif len(args) != 0:
        raise ValueError("expected 1 argument, got %d" % len(args))
return run_on_executor_decorator
_NO_RESULT = object()
def return_future(f):
"""Decorator to make a function that returns via callback return a
`Future`.
The wrapped function should take a ``callback`` keyword argument
and invoke it with one argument when it has finished. To signal failure,
the function can simply raise an exception (which will be
captured by the `.StackContext` and passed along to the ``Future``).
From the caller's perspective, the callback argument is optional.
If one is given, it will be invoked when the function is complete
with `Future.result()` as an argument. If the function fails, the
callback will not be run and an exception will be raised into the
surrounding `.StackContext`.
If no callback is given, the caller should use the ``Future`` to
wait for the function to complete (perhaps by yielding it in a
`.gen.engine` function, or passing it to `.IOLoop.add_future`).
Usage:
.. testcode::
@return_future
def future_func(arg1, arg2, callback):
# Do stuff (possibly asynchronous)
callback(result)
@gen.engine
def caller(callback):
yield future_func(arg1, arg2)
callback()
..
Note that ``@return_future`` and ``@gen.engine`` can be applied to the
same function, provided ``@return_future`` appears first. However,
consider using ``@gen.coroutine`` instead of this combination.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = TracebackFuture()
callback, args, kwargs = replacer.replace(
lambda value=_NO_RESULT: future.set_result(value),
args, kwargs)
def handle_error(typ, value, tb):
future.set_exc_info((typ, value, tb))
return True
exc_info = None
with ExceptionStackContext(handle_error):
try:
result = f(*args, **kwargs)
if result is not None:
raise ReturnValueIgnoredError(
"@return_future should not be used with functions "
"that return values")
except:
exc_info = sys.exc_info()
raise
if exc_info is not None:
# If the initial synchronous part of f() raised an exception,
# go ahead and raise it to the caller directly without waiting
# for them to inspect the Future.
future.result()
# If the caller passed in a callback, schedule it to be called
# when the future resolves. It is important that this happens
# just before we return the future, or else we risk confusing
# stack contexts with multiple exceptions (one here with the
# immediate exception, and again when the future resolves and
# the callback triggers its exception by calling future.result()).
if callback is not None:
def run_callback(future):
result = future.result()
if result is _NO_RESULT:
callback()
else:
callback(future.result())
future.add_done_callback(wrap(run_callback))
return future
return wrapper
def chain_future(a, b):
"""Chain two futures together so that when one completes, so does the other.
The result (success or failure) of ``a`` will be copied to ``b``, unless
``b`` has already been completed or cancelled by the time ``a`` finishes.
"""
def copy(future):
assert future is a
if b.done():
return
if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture)
and a.exc_info() is not None):
b.set_exc_info(a.exc_info())
elif a.exception() is not None:
b.set_exception(a.exception())
else:
b.set_result(a.result())
a.add_done_callback(copy)
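# Hedged usage sketch for chain_future() (illustrative only, not part of the
# upstream module): propagate one future's outcome into another, e.g. to adapt
# an internal future to one already handed back to a caller.
#
#     a, b = Future(), Future()
#     chain_future(a, b)
#     a.set_result("done")
#     assert b.result() == "done"        # b completes with a's result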
| gpl-3.0 |
balloob/home-assistant | tests/components/alarmdecoder/test_config_flow.py | 5 | 13829 | """Test the AlarmDecoder config flow."""
from alarmdecoder.util import NoDeviceError
import pytest
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.alarmdecoder import config_flow
from homeassistant.components.alarmdecoder.const import (
CONF_ALT_NIGHT_MODE,
CONF_AUTO_BYPASS,
CONF_CODE_ARM_REQUIRED,
CONF_DEVICE_BAUD,
CONF_DEVICE_PATH,
CONF_RELAY_ADDR,
CONF_RELAY_CHAN,
CONF_ZONE_LOOP,
CONF_ZONE_NAME,
CONF_ZONE_NUMBER,
CONF_ZONE_RFID,
CONF_ZONE_TYPE,
DEFAULT_ARM_OPTIONS,
DEFAULT_ZONE_OPTIONS,
DOMAIN,
OPTIONS_ARM,
OPTIONS_ZONES,
PROTOCOL_SERIAL,
PROTOCOL_SOCKET,
)
from homeassistant.components.binary_sensor import DEVICE_CLASS_WINDOW
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PROTOCOL
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.mark.parametrize(
"protocol,connection,title",
[
(
PROTOCOL_SOCKET,
{
CONF_HOST: "alarmdecoder123",
CONF_PORT: 10001,
},
"alarmdecoder123:10001",
),
(
PROTOCOL_SERIAL,
{
CONF_DEVICE_PATH: "/dev/ttyUSB123",
CONF_DEVICE_BAUD: 115000,
},
"/dev/ttyUSB123",
),
],
)
async def test_setups(hass: HomeAssistant, protocol, connection, title):
"""Test flow for setting up the available AlarmDecoder protocols."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PROTOCOL: protocol},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "protocol"
with patch("homeassistant.components.alarmdecoder.config_flow.AdExt.open"), patch(
"homeassistant.components.alarmdecoder.config_flow.AdExt.close"
), patch(
"homeassistant.components.alarmdecoder.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.alarmdecoder.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], connection
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == title
assert result["data"] == {
**connection,
CONF_PROTOCOL: protocol,
}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_setup_connection_error(hass: HomeAssistant):
"""Test flow for setup with a connection error."""
port = 1001
host = "alarmdecoder"
protocol = PROTOCOL_SOCKET
connection_settings = {CONF_HOST: host, CONF_PORT: port}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PROTOCOL: protocol},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "protocol"
with patch(
"homeassistant.components.alarmdecoder.config_flow.AdExt.open",
side_effect=NoDeviceError,
), patch("homeassistant.components.alarmdecoder.config_flow.AdExt.close"):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], connection_settings
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_options_arm_flow(hass: HomeAssistant):
"""Test arm options flow."""
user_input = {
CONF_ALT_NIGHT_MODE: True,
CONF_AUTO_BYPASS: True,
CONF_CODE_ARM_REQUIRED: True,
}
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Arming Settings"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "arm_settings"
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=user_input,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: user_input,
OPTIONS_ZONES: DEFAULT_ZONE_OPTIONS,
}
async def test_options_zone_flow(hass: HomeAssistant):
"""Test options flow for adding/deleting zones."""
zone_number = "2"
zone_settings = {CONF_ZONE_NAME: "Front Entry", CONF_ZONE_TYPE: DEVICE_CLASS_WINDOW}
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Zones"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: zone_number},
)
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=zone_settings,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: DEFAULT_ARM_OPTIONS,
OPTIONS_ZONES: {zone_number: zone_settings},
}
# Make sure zone can be removed...
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Zones"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: zone_number},
)
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: DEFAULT_ARM_OPTIONS,
OPTIONS_ZONES: {},
}
async def test_options_zone_flow_validation(hass: HomeAssistant):
"""Test input validation for zone options flow."""
zone_number = "2"
zone_settings = {CONF_ZONE_NAME: "Front Entry", CONF_ZONE_TYPE: DEVICE_CLASS_WINDOW}
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Zones"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
# Zone Number must be int
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: "asd"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
assert result["errors"] == {CONF_ZONE_NUMBER: "int"}
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: zone_number},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
# CONF_RELAY_ADDR & CONF_RELAY_CHAN are inclusive
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_RELAY_ADDR: "1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {"base": "relay_inclusive"}
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_RELAY_CHAN: "1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {"base": "relay_inclusive"}
# CONF_RELAY_ADDR, CONF_RELAY_CHAN must be int
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_RELAY_ADDR: "abc", CONF_RELAY_CHAN: "abc"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {
CONF_RELAY_ADDR: "int",
CONF_RELAY_CHAN: "int",
}
# CONF_ZONE_LOOP depends on CONF_ZONE_RFID
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_ZONE_LOOP: "1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {CONF_ZONE_LOOP: "loop_rfid"}
# CONF_ZONE_LOOP must be int
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_ZONE_RFID: "rfid123", CONF_ZONE_LOOP: "ab"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {CONF_ZONE_LOOP: "int"}
# CONF_ZONE_LOOP must be between [1,4]
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_ZONE_RFID: "rfid123", CONF_ZONE_LOOP: "5"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {CONF_ZONE_LOOP: "loop_range"}
# All valid settings
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
**zone_settings,
CONF_ZONE_RFID: "rfid123",
CONF_ZONE_LOOP: "2",
CONF_RELAY_ADDR: "12",
CONF_RELAY_CHAN: "1",
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: DEFAULT_ARM_OPTIONS,
OPTIONS_ZONES: {
zone_number: {
**zone_settings,
CONF_ZONE_RFID: "rfid123",
CONF_ZONE_LOOP: 2,
CONF_RELAY_ADDR: 12,
CONF_RELAY_CHAN: 1,
}
},
}
@pytest.mark.parametrize(
"protocol,connection",
[
(
PROTOCOL_SOCKET,
{
CONF_HOST: "alarmdecoder123",
CONF_PORT: 10001,
},
),
(
PROTOCOL_SERIAL,
{
CONF_DEVICE_PATH: "/dev/ttyUSB123",
CONF_DEVICE_BAUD: 115000,
},
),
],
)
async def test_one_device_allowed(hass, protocol, connection):
"""Test that only one AlarmDecoder device is allowed."""
flow = config_flow.AlarmDecoderFlowHandler()
flow.hass = hass
MockConfigEntry(
domain=DOMAIN,
data=connection,
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PROTOCOL: protocol},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "protocol"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], connection
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
| apache-2.0 |
rsyvarth/simple-blog | lib/wtforms/ext/sqlalchemy/validators.py | 40 | 1225 | from __future__ import unicode_literals
import warnings
from wtforms import ValidationError
from sqlalchemy.orm.exc import NoResultFound
class Unique(object):
"""Checks field value unicity against specified table field.
:param get_session:
        A function that returns a SQLAlchemy Session.
:param model:
The model to check unicity against.
:param column:
The unique column.
:param message:
The error message.
"""
field_flags = ('unique', )
def __init__(self, get_session, model, column, message=None):
warnings.warn('The Unique validator will be removed in WTForms 1.1', DeprecationWarning)
self.get_session = get_session
self.model = model
self.column = column
self.message = message
def __call__(self, form, field):
try:
obj = self.get_session().query(self.model)\
.filter(self.column == field.data).one()
if not hasattr(form, '_obj') or not form._obj == obj:
if self.message is None:
self.message = field.gettext('Already exists.')
raise ValidationError(self.message)
except NoResultFound:
pass
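# Hedged usage sketch (illustrative, not part of the upstream module). The
# ``db_session`` factory and ``User`` model below are assumptions made up for
# the example:
#
#     from wtforms import Form, StringField
#
#     class UserForm(Form):
#         email = StringField('Email', validators=[
#             Unique(lambda: db_session, User, User.email,
#                    message='This email address is already registered.')
#         ])
#
# During validation the validator queries for User.email == field.data and
# raises ValidationError if a different row already holds that value.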
| mit |
ygol/dotfiles | bin/.venv-ansible-venv/lib/python2.6/site-packages/pip/exceptions.py | 398 | 1086 | """Exceptions used throughout package"""
class PipError(Exception):
"""Base pip exception"""
class InstallationError(PipError):
"""General exception during installation"""
class UninstallationError(PipError):
"""General exception during uninstallation"""
class DistributionNotFound(InstallationError):
"""Raised when a distribution cannot be found to satisfy a requirement"""
class BestVersionAlreadyInstalled(PipError):
"""Raised when the most up-to-date version of a package is already
installed. """
class BadCommand(PipError):
"""Raised when virtualenv or a command is not found"""
class CommandError(PipError):
"""Raised when there is an error in command-line arguments"""
class PreviousBuildDirError(PipError):
"""Raised when there's a previous conflicting build directory"""
class HashMismatch(InstallationError):
"""Distribution file hash values don't match."""
class InvalidWheelFilename(InstallationError):
"""Invalid wheel filename."""
class UnsupportedWheel(InstallationError):
"""Unsupported wheel."""
| mit |
taschik/ramcloud-load-manager | scripts/config.py | 2 | 3706 | #!/usr/bin/env python
# Copyright (c) 2011 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
This module defines a collection of variables that specify site-specific
configuration information such as names of RAMCloud hosts and the location
of RAMCloud binaries. This should be the only file you have to modify to
run RAMCloud scripts at your site.
"""
from common import captureSh
import os
import re
import subprocess
import sys
__all__ = ['coordinator_port', 'default_disk1','default_disk2', 'git_branch',
'hosts', 'obj_dir', 'obj_path', 'scripts_path', 'second_backup_port',
'server_port', 'top_path']
# git_branch is the name of the current git branch, which is used
# for purposes such as computing objDir.
try:
git_branch = re.search('^refs/heads/(.*)$',
captureSh('git symbolic-ref -q HEAD 2>/dev/null'))
except subprocess.CalledProcessError:
git_branch = None
obj_dir = 'obj'
else:
git_branch = git_branch.group(1)
obj_dir = 'obj.%s' % git_branch
# obj_dir is the name of the directory containing binaries for the current
# git branch (it's just a single name such as "obj.master", not a full path)
if git_branch == None:
obj_dir = 'obj'
else:
obj_dir = 'obj.%s' % git_branch
# The full path name of the directory containing this script file.
scripts_path = os.path.dirname(os.path.abspath(__file__))
# The full pathname of the parent of scriptsPath (the top-level directory
# of a RAMCloud source tree).
top_path = os.path.abspath(scripts_path + '/..')
# Add /usr/local/lib to LD_LIBRARY_PATH if it isn't already there (this was
# needed for CentOS 5.5, but should probably be deleted now).
try:
ld_library_path = os.environ['LD_LIBRARY_PATH'].split(':')
except KeyError:
ld_library_path = []
if '/usr/local/lib' not in ld_library_path:
ld_library_path.insert(0, '/usr/local/lib')
os.environ['LD_LIBRARY_PATH'] = ':'.join(ld_library_path)
# All of the hosts available for servers or clients; each entry
# consists of a name for the host (for ssh), an IP address
# to use for creating service locators, and an id for generating
# Ethernet addresses.
hosts = []
for i in range(1, 61):
hosts.append(('rc%02d' % i,
'192.168.1.%d' % (100 + i),
i))
# Host on which old master is run for running recoveries.
# Need not be a member of hosts
old_master_host = ('rcmaster', '192.168.1.1', 81)
# Full path to the directory containing RAMCloud executables.
obj_path = '%s/%s' % (top_path, obj_dir)
# Ports (for TCP, etc.) to use for each kind of server.
coordinator_port = 12246
server_port = 12247
second_backup_port = 12248
# Command-line argument specifying where the first backup on each
# server should store the segment replicas.
default_disk1 = '-f /dev/sda2'
# Command-line argument specifying where the second backup should
# store its segment replicas.
default_disk2 = '-f /dev/sdb2'
# Try to include local overrides.
try:
from localconfig import *
except:
pass
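# Example of what a site-specific localconfig.py override might look like
# (illustrative only; the host names, addresses and disk below are made up):
#
#     hosts = [('node%02d' % i, '10.0.0.%d' % (10 + i), i) for i in range(1, 9)]
#     old_master_host = ('nodemaster', '10.0.0.1', 81)
#     default_disk1 = '-f /dev/sdc2'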
| isc |
eliezerfot123/django-with-angular | django_with_angular_je/django_with_angular_je/settings.py | 1 | 2686 | """
Django settings for django_with_angular_je project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0q^+*z#p3in1od3$@4s_m4*#ohpo71454go_=%8na5dg6%uc33'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'django_with_angular_je.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_with_angular_je.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| gpl-2.0 |
seckyn/jaikuengine | common/management/commands/clean.py | 35 | 1150 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
import build
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'--skip-zip', action='store_true', dest='skip_zip', default=False,
help='Do not clean up zip files'
),
)
help = 'Cleans up the results of a build'
args = ''
requires_model_validation = False
def handle(self, *test_labels, **options):
skip_zip = options.get('skip_zip', False)
build.clean(skip_zip=skip_zip)
| apache-2.0 |
ubalance-team/magum | magum/magum.py | 1 | 26743 |
#!/usr/bin/python
"""
MAGUM python module (Beta 1.1.0)
MAGUM stands for (Magnetometer, Accelerometer and Gyroscope Udoo Management)
it includes some modules such as smbus, time, os, sys, subprocess etc.. to manage the udoo-neo
motion sensors over the I2C serial communication protocol.
Because the I2C device interface is opened R/W using smbus module,
users of this module usually MUST have ROOT permissions.
"""
# including necessary modules
import smbus
import time
import sys
import os
import shlex
import subprocess
import re
import math
from array import *
from .utils import _dataConvertion
from .utils import _regsExample
from .regs import *
class Magum:
""" Magum(gScaleRange,fsDouble,aScaleRange,noise) -> Magum
Return a new Magum object that is (optionally)
automatically initialized with the default values.
"""
_i2cBus = smbus.SMBus(3) # open communication to I2C channel 4
_calibrated = False # check calibration
accScale = None
gyrScale = None
gyrDouble = None
# Complementary Filter Attributes
compAux = 0
_cFAngleX = 0
_cFAngleY = 0
_cFAngleZ = 0
compAux = 0
def __init__(self,gScaleRange=None,fsDouble=None,aScaleRange=None,noise=None):
self.killDrivers(1)
self._initAm(aScaleRange,noise)
self._initG(gScaleRange,fsDouble)
# accelerometer and magnetometer initialization
def _initAm(self,scaleRange=None,noise=None):
self.toStandby('a')
if noise == 1 and scaleRange in (2,4):
regNoise = 0x0c
elif noise in (0,None):
regNoise = 0x00
else:
			print 'Error: incorrect low noise value, it can assume 1 (enabled) or 0 (disabled)'
sys.exit(1)
if scaleRange == 2:
self.setSensConf('a','A_XYZ_DATA_CFG',0x00) # set range to +/- 2g
elif scaleRange == 4:
self.setSensConf('a','A_XYZ_DATA_CFG',0x01) # set range to +/- 4g
elif scaleRange == 8:
self.setSensConf('a','A_XYZ_DATA_CFG',0x02) # set range to +/- 8g
elif scaleRange == None:
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,0x01) # set to active mode
time.sleep(.300) # sleep 300 ms
else:
			print 'Error: incorrect aScaleRange value, read the documentation for the correct config.'
sys.exit(1)
self.accScale = scaleRange
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,0x01 | regNoise) # set to active mode
time.sleep(.300) # sleep 300 ms
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,M_CTRL_REG1,0x03) # enable both accelerometer and magnetometer sensors
# gyroscope initialization
def _initG(self,scaleRange=None,fsDouble=None):
self.toStandby('g')
if fsDouble == 1:
self.gyrDouble = 2
self.setSensConf('g','G_CTRL_REG3',0x01)
elif fsDouble == 0:
self.gyrDouble = 1
self.setSensConf('g','G_CTRL_REG3',0x00)
else:
self.gyrDouble = 1
self.setSensConf('g','G_CTRL_REG3',0x00)
if scaleRange == 2000:
self.setSensConf('g','G_CTRL_REG0',0x00) # set range to +/- 2000dps (4000dps if CTRL_REG3 is set to double)
elif scaleRange == 1000:
self.setSensConf('g','G_CTRL_REG0',0x01) # set range to +/- 1000dps (2000dps if CTRL_REG3 is set to double)
elif scaleRange == 500:
self.setSensConf('g','G_CTRL_REG0',0x02) # set range to +/- 500dps (1000dps if CTRL_REG3 is set to double)
elif scaleRange == 250:
self.setSensConf('g','G_CTRL_REG0',0x03) # set range to +/- 250dps (500dps if CTRL_REG3 is set to double)
elif scaleRange == None:
self._i2cBus.write_byte_data(I2C_G_ADDRESS,A_CTRL_REG1,0x16) # set to active mode
time.sleep(.300) # sleep 300 ms
else:
			print 'Error: incorrect gScaleRange value, read the documentation for the correct config.'
sys.exit(1)
self.gyrScale = scaleRange
self._i2cBus.write_byte_data(I2C_G_ADDRESS,G_CTRL_REG1,0x16) # set to active mode
time.sleep(.300) # sleep 300ms
def toStandby(self,sensor):
if sensor in ('a','m'):
currReg = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1) # get current configuration
if currReg % 2 == 1:
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,currReg - 1) # set to standby_mode
if sensor in ('g'):
currReg = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG1) # get old configuration
currReg = currReg >> 2
currReg = currReg << 2
self._i2cBus.write_byte_data(I2C_G_ADDRESS,G_CTRL_REG1,currReg) # set to standby_mode
time.sleep(.300) # sleep 300ms
def toActive(self,sensor):
if sensor in ('a','m'):
currReg = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1) # get current configuration
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,currReg) # set to active_mode
if sensor in ('g'):
currReg = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG1) # get old configuration
currReg = currReg >> 2
currReg = currReg << 2
currReg = currReg + 2
self._i2cBus.write_byte_data(I2C_G_ADDRESS,G_CTRL_REG1,currReg) # set to active_mode
time.sleep(.300) # sleep 300ms
# enable/disable system drivers
def killDrivers(self,x):
proc1 = subprocess.Popen(shlex.split('lsmod'),stdout=subprocess.PIPE)
proc2 = subprocess.Popen(shlex.split('grep fxas2100x'),stdin=proc1.stdout,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
proc1.stdout.close() # Allow proc1 to receive a SIGPIPE if proc2 exits.
out1,err1=proc2.communicate()
proc1 = subprocess.Popen(shlex.split('lsmod'),stdout=subprocess.PIPE)
proc2 = subprocess.Popen(shlex.split('grep fxos8700'),stdin=proc1.stdout,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
proc1.stdout.close() # Allow proc1 to receive a SIGPIPE if proc2 exits.
out2,err2=proc2.communicate()
if x == 1:
if out1:
os.system('rmmod fxas2100x')
if out2:
os.system('rmmod fxos8700')
elif x == 0:
if not out1:
os.system('modprobe fxas2100x')
if not out2:
os.system('modprobe fxos8700')
else:
print "Error: wrong killDrivers(x) parameter.\n self.killDrivers(0): enable drivers \n killDrivers(1): disable drivers."
sys.exit(1)
# sensor calibration
def calibrateSens(self,samples):
acc_angle = array('i',[])
rate_gyr = array('i',[])
i = 0
sumX = 0
sumY = 0
sumZ = 0
gsumX = 0
gsumY = 0
gsumZ = 0
tarXvect = array('i',[])
tarYvect = array('i',[])
tarZvect = array('i',[])
gtarXvect = array('i',[])
gtarYvect = array('i',[])
gtarZvect = array('i',[])
gyrXangle = 0.0
gyrYangle = 0.0
gyrZangle = 0.0
accXangle = 0.0
accYangle = 0.0
accZangle = 0.0
axisOffset = array('i',[])
# sensors Calibration
		raw_input("CAUTION! Sensors calibration.\nSet your udoo-neo in a horizontal position and press Enter Key...\n")
perc = -1
while i<samples:
acc_angle = self.readAData()
rate_gyr = self.readGData()
factor = self.accScale/2
if acc_angle[0] >= 32768:
tarXvect.insert(i,int(acc_angle[0]-65536))
else:
tarXvect.insert(i,int(acc_angle[0]))
if acc_angle[1] >= 32768:
tarYvect.insert(i,int(acc_angle[1]-65536))
else:
tarYvect.insert(i,int(acc_angle[1]))
if acc_angle[2] >= 32768:
tarZvect.insert(i,int(acc_angle[2] - 65536 + 16384/factor))
else:
tarZvect.insert(i,int(acc_angle[2] + 16384/factor))
if rate_gyr[0] >= 32768:
gtarXvect.insert(i,int(rate_gyr[0]-65536))
else:
gtarXvect.insert(i,int(rate_gyr[0]))
if rate_gyr[1] >= 32768:
gtarYvect.insert(i,int(rate_gyr[1]-65536))
else:
gtarYvect.insert(i,int(rate_gyr[1]))
if rate_gyr[2] >= 32768:
gtarZvect.insert(i,int(rate_gyr[2] - 65536))
else:
gtarZvect.insert(i,int(rate_gyr[2]))
sumX += tarXvect[i]
sumY += tarYvect[i]
sumZ += tarZvect[i]
gsumX += gtarXvect[i]
gsumY += gtarYvect[i]
gsumZ += gtarZvect[i]
loading = int((i*100)/samples)
if loading != perc:
print "Calibration percentage: " + str(int(loading)) + "%"
perc = loading
i += 1
print "Calibration percentage: 100%\n"
avgX = sumX/samples
avgY = sumY/samples
avgZ = sumZ/samples
gavgX = gsumX/samples
gavgY = gsumY/samples
gavgZ = gsumZ/samples
axisOffset.insert(0,avgX)
axisOffset.insert(1,avgY)
axisOffset.insert(2,avgZ)
axisOffset.insert(3,gavgX)
axisOffset.insert(4,gavgY)
axisOffset.insert(5,gavgZ)
self._calibrated = True
return axisOffset
# set sensors configurations
def setSensConf(self,sensor,reg,hexVal):
self.toStandby(sensor)
if sensor == 'a':
if reg in A_CREGS_LIST:
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,COMPLETE_REGS_DICT[reg],hexVal)
else:
_regsExample('a')
if sensor == 'm':
if reg in M_CREGS_LIST:
if bool(is_hex(str(hexVal))):
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,COMPLETE_REGS_DICT[reg],hexVal)
else:
_regsExample('m')
if sensor == 'g':
if reg in G_CREG_LIST:
				self._i2cBus.write_byte_data(I2C_G_ADDRESS,COMPLETE_REGS_DICT[reg],hexVal)
else:
_regsExample('g')
time.sleep(.300) # sleep 300ms
self.toActive(sensor)
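	# Illustrative call (not in the original file, assuming `m` is a Magum
	# instance): m.setSensConf('a', 'A_XYZ_DATA_CFG', 0x01) selects the +/-4g
	# accelerometer range, the same write _initAm() performs for aScaleRange=4.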
# read accelerometer data
def readAData(self,uM=None):
axisList = array('f',[])
# getting x,y,z coordinate shifting first 8bit and adding
# (with the or operator) the others 8 bit to the address
xMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_X_MSB)
xLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_X_LSB)
xRaw = (xMsbRaw << 8 | xLsbRaw) # x axis
yMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Y_MSB)
yLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Y_LSB)
yRaw = (yMsbRaw << 8 | yLsbRaw) # y axis
zMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Z_MSB)
zLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Z_LSB)
zRaw = (zMsbRaw << 8 | zLsbRaw) # z axis
axisList.insert(0,xRaw)
axisList.insert(1,yRaw)
axisList.insert(2,zRaw)
axisList = _dataConvertion(self._i2cBus,"a",axisList,uM)
return axisList
# read magnetometer data
def readMData(self,uM=None):
axisList = array('f',[])
# getting x,y,z coordinate shifting first 8bit and adding
# (with the or operator) the others 8 bit to the address
xMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_X_MSB)
xLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_X_LSB)
xRaw = xMsbRaw << 8 | xLsbRaw # x axis
yMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Y_MSB)
yLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Y_LSB)
yRaw = yMsbRaw << 8 | yLsbRaw # y axis
zMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Z_MSB)
zLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Z_LSB)
zRaw = zMsbRaw << 8 | zLsbRaw # z axis
axisList.insert(0,xRaw)
axisList.insert(1,yRaw)
axisList.insert(2,zRaw)
axisList = _dataConvertion(self._i2cBus,'m',axisList,uM)
return axisList
# read gyroscope data
def readGData(self,uM=None):
axisList = array('f',[])
# getting x,y,z coordinate shifting first 8bit and adding
# (with the or operator) the others 8 bit to the address
xMsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_X_MSB)
xLsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_X_LSB)
xRaw = xMsbRaw << 8 | xLsbRaw # x axis
yMsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Y_MSB)
yLsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Y_LSB)
yRaw = yMsbRaw << 8 | yLsbRaw # y axis
zMsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Z_MSB)
zLsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Z_LSB)
zRaw = zMsbRaw << 8 | zLsbRaw # z axis
axisList.insert(0,xRaw)
axisList.insert(1,yRaw)
axisList.insert(2,zRaw)
axisList = _dataConvertion(self._i2cBus,"g",axisList,uM)
return axisList
def readTData(self,uM=None):
tempRaw= self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TEMP)
if tempRaw >= 128:
tempCels= float((tempRaw-256)*0.96)
else:
tempCels=float((tempRaw)*0.96)
if uM in (None, 'raw'):
return tempRaw
if uM == 'C':
return tempCels
if uM == 'K':
tempKelv= float(tempCels + 273.15)
return tempKelv
if uM == 'F':
tempFahr= float(float(tempCels * 1.8)+32)
return tempFahr
# complementary filter algorithm
def compFilter(self,DT,axisOffset):
exTime = 0.013 # execution time
if DT < exTime:
print "Error: DT is too small to sample the accelerometer and gyroscope data.\nDT must be greater than 0.013."
sys.exit(1)
else:
if self._calibrated == True:
highPass = DT / (DT + exTime)
rate_gyr = array('i',[])
acc_angle = array('i',[])
cFAngleAxis = array('f',[])
rate_gyr = self.readGData()
acc_angle = self.readAData()
factor = self.accScale/2
gFactor = float((self.gyrScale/(1000*32))*self.gyrDouble)
if acc_angle[0] >= 32768:
acc_angle[0] -= 65536
if acc_angle[1] >= 32768:
acc_angle[1] -= 65536
if acc_angle[2] >= 32768:
acc_angle[2] -= 65536
if rate_gyr[0] >= 32768:
rate_gyr[0] -= 65536
if rate_gyr[1] >= 32768:
rate_gyr[1] -= 65536
if rate_gyr[2] >= 32768:
rate_gyr[2] -= 65536
x = (((acc_angle[0] - axisOffset[0])/4) * 0.244 * factor)
y = (((acc_angle[1] - axisOffset[1])/4) * 0.244 * factor)
z = (((acc_angle[2] - axisOffset[2])/4) * 0.244 * factor)
x2 = x * x
y2 = y * y
z2 = z * z
accXangle = math.atan(x/math.sqrt(y2+z2))*(180/math.pi)
accYangle = math.atan(y/math.sqrt(x2+z2))*(180/math.pi)
accZangle = math.atan(z/math.sqrt(x2+y2))*(180/math.pi)
gyrXangle = float(((rate_gyr[0] - axisOffset[3]) * gFactor)/DT)
gyrYangle = float(((rate_gyr[1] - axisOffset[4]) * gFactor)/DT)
gyrZangle = float(((rate_gyr[2] - axisOffset[5]) * gFactor)/DT)
modGyr = (gyrXangle*gyrXangle) + (gyrYangle*gyrYangle) + (gyrZangle*gyrZangle)
# Only for the first time we get the position or if the base doesn't move
#if self.compAux == 0 || (math.fabs(gyrXangle) <= 5 && math.fabs(gyrYangle) <= 5 && math.fabs(gyrZangle) <= 5):
if self.compAux == 0:
self._cFAngleX = float(accXangle)
self._cFAngleY = float(accYangle)
self._cFAngleZ = float(accZangle)
self.compAux = 1
else: # Then we use the Complementary Filter
self._cFAngleX = (highPass) * (self._cFAngleX + gyrXangle * DT) + (1-highPass)*(accXangle)
self._cFAngleY = (highPass) * (self._cFAngleY + gyrYangle * DT) + (1-highPass)*(accYangle)
self._cFAngleZ = (highPass) * (self._cFAngleZ + gyrZangle * DT) + (1-highPass)*(accZangle)
cFAngleAxis.insert(0,self._cFAngleX)
cFAngleAxis.insert(1,self._cFAngleY*(-1))
cFAngleAxis.insert(2,self._cFAngleZ*(-1))
gyrXangle = float((rate_gyr[0] - axisOffset[3]) * gFactor)
gyrYangle = float((rate_gyr[1] - axisOffset[4]) * gFactor)
gyrZangle = float((rate_gyr[2] - axisOffset[5]) * gFactor)
time.sleep(DT-exTime)
return cFAngleAxis
else:
print "Error: failed calibration.\nMake sure to calibrate the sensors using calibrateSens(sensor,samples)"
sys.exit(1)
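	# Illustrative read loop for the complementary filter above (assumes `m` is
	# a Magum instance; DT must stay above the ~13 ms execution time):
	#
	#     offsets = m.calibrateSens(500)
	#     while True:
	#         x, y, z = m.compFilter(0.02, offsets)
	#         print "x %.1f  y %.1f  z %.1f" % (x, y, z)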
# Kalman Filter
# Note: this algorithm is under development, it may not work properly like a common Kalman Filter
# If you want to improve this algorithm join us on github at https://github.com/ubalance-team/magum
def kalmanFilter(self,DT,axis,axisOffset):
exTime = 0.012 # execution time
if DT < exTime:
			print "Error: DT is too small to sample the accelerometer and gyroscope data.\nDT must be greater than 0.012."
sys.exit(1)
else:
if self._calibrated == True:
rate_gyr = self.readGData()
acc_angle = self.readAData()
factor = self.accScale/2
gFactor = float((self.gyrScale/(1000*32))*self.gyrDouble)
if acc_angle[0] >= 32768:
acc_angle[0] -= 65536
if acc_angle[1] >= 32768:
acc_angle[1] -= 65536
if acc_angle[2] >= 32768:
acc_angle[2] -= 65536
if rate_gyr[0] >= 32768:
rate_gyr[0] -= 65536
if rate_gyr[1] >= 32768:
rate_gyr[1] -= 65536
if rate_gyr[2] >= 32768:
rate_gyr[2] -= 65536
x = (((acc_angle[0] - axisOffset[0])/4) * 0.244 * factor)
y = (((acc_angle[1] - axisOffset[1])/4) * 0.244 * factor)
z = (((acc_angle[2] - axisOffset[2])/4) * 0.244 * factor)
x2 = x * x
y2 = y * y
z2 = z * z
if axis == 'x':
accAngle = math.atan(x/math.sqrt(y2+z2))*(180/math.pi)
gyroRate = float((rate_gyr[0] - axisOffset[3]) * gFactor)
elif axis == 'y':
accAngle = math.atan(y/math.sqrt(x2+z2))*(180/math.pi)*(-1)
gyroRate = float((rate_gyr[1] - axisOffset[4]) * gFactor)
elif axis == 'z':
accAngle = math.atan(z/math.sqrt(x2+y2))*(180/math.pi)*(-1)
gyroRate = float((rate_gyr[2] - axisOffset[5]) * gFactor)
Q_angle = 0.01
Q_gyro = 0.0003
R_angle = 0.01
a_bias = 0
AP_00 = 0
AP_01 = 0
AP_10 = 0
AP_11 = 0
KFangle = 0.0
KFangle += DT * (gyroRate - a_bias)
AP_00 += - DT * (AP_10 + AP_01) + Q_angle * DT
AP_01 += - DT * AP_11
AP_10 += - DT * AP_11
AP_11 += + Q_gyro * DT
a = accAngle - KFangle
S = AP_00 + R_angle
K_0 = AP_00 / S
K_1 = AP_10 / S
KFangle += K_0 * a
a_bias += K_1 * a
AP_00 -= K_0 * AP_00
AP_01 -= K_0 * AP_01
AP_10 -= K_1 * AP_00
AP_11 -= K_1 * AP_01
time.sleep(DT-exTime)
return KFangle*float(180/math.pi)*0.9
else:
print "Error: failed calibration.\nMake sure to calibrate the sensors using calibrateSens(sensor,samples)"
sys.exit(1)
# Implementation of Sebastian Madgwick's "...efficient orientation filter for... inertial/magnetic sensor arrays"
# (see http://www.x-io.co.uk/category/open-source/ for examples and more details)
# which fuses acceleration, rotation rate, and magnetic moments to produce a quaternion-based estimate of absolute
# device orientation
def madgwickQuaternionFilter(self,aCompArray,gCompArray,mCompArray):
ax = aCompArray[0]
ay = aCompArray[1]
az = aCompArray[2]
mx = mCompArray[0]
my = mCompArray[1]
mz = mCompArray[2]
gx = gCompArray[0]
gy = gCompArray[1]
gz = gCompArray[2]
deltat = 0.001
gyroMeasError = math.pi * (5.0 / 180.0)
gyroMeasDrift = math.pi * (0.2 / 180.0)
beta = math.sqrt(3.0 / 4.0) * gyroMeasError
zeta = math.sqrt(3.0 / 4.0) * gyroMeasDrift
q = array('f',[])
q1 = 1.0
q2 = 0.0
q3 = 0.0
q4 = 0.0
norm = 0.0
hx = 0.0
hy = 0.0
_2bx = 0.0
_2bz = 0.0
s1 = 0.0
s2 = 0.0
s3 = 0.0
s4 = 0.0
qDot1 = 0.0
qDot2 = 0.0
qDot3 = 0.0
qDot4 = 0.0
# Auxiliary variables to avoid repeated arithmetic
_2q1mx = 0.0
_2q1my = 0.0
_2q1mz = 0.0
_2q2mx = 0.0
_4bx = 0.0
_4bz = 0.0
_2q1 = 2.0 * q1
_2q2 = 2.0 * q2
_2q3 = 2.0 * q3
_2q4 = 2.0 * q4
_2q1q3 = 2.0 * q1 * q3
_2q3q4 = 2.0 * q3 * q4
q1q1 = q1 * q1
q1q2 = q1 * q2
q1q3 = q1 * q3
q1q4 = q1 * q4
q2q2 = q2 * q2
q2q3 = q2 * q3
q2q4 = q2 * q4
q3q3 = q3 * q3
q3q4 = q3 * q4
q4q4 = q4 * q4
# Normalize accelerometer measurement
norm = math.sqrt(ax * ax + ay * ay + az * az)
if norm == 0.0: return # handle NaN
norm = 1.0/norm
ax *= norm
ay *= norm
az *= norm
# Normalize magnetometer measurement
norm = math.sqrt(mx * mx + my * my + mz * mz)
if norm == 0.0: return # handle NaN
norm = 1.0/norm
mx *= norm
my *= norm
mz *= norm
		# Reference direction of Earth's magnetic field
_2q1mx = 2.0 * q1 * mx
_2q1my = 2.0 * q1 * my
_2q1mz = 2.0 * q1 * mz
_2q2mx = 2.0 * q2 * mx
hx = mx * q1q1 - _2q1my * q4 + _2q1mz * q3 + mx * q2q2 + _2q2 * my * q3 + _2q2 * mz * q4 - mx * q3q3 - mx * q4q4
hy = _2q1mx * q4 + my * q1q1 - _2q1mz * q2 + _2q2mx * q3 - my * q2q2 + my * q3q3 + _2q3 * mz * q4 - my * q4q4
_2bx = math.sqrt(hx * hx + hy * hy)
_2bz = -_2q1mx * q3 + _2q1my * q2 + mz * q1q1 + _2q2mx * q4 - mz * q2q2 + _2q3 * my * q4 - mz * q3q3 + mz * q4q4
_4bx = 2.0 * _2bx
_4bz = 2.0 * _2bz
# Gradient descent algorithm corrective step
s1 = -_2q3 * (2.0 * q2q4 - _2q1q3 - ax) + _2q2 * (2.0 * q1q2 + _2q3q4 - ay) - _2bz * q3 * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (-_2bx * q4 + _2bz * q2) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + _2bx * q3 * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
s2 = _2q4 * (2.0 * q2q4 - _2q1q3 - ax) + _2q1 * (2.0 * q1q2 + _2q3q4 - ay) - 4.0 * q2 * (1.0 - 2.0 * q2q2 - 2.0 * q3q3 - az) + _2bz * q4 * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (_2bx * q3 + _2bz * q1) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + (_2bx * q4 - _4bz * q2) * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
s3 = -_2q1 * (2.0 * q2q4 - _2q1q3 - ax) + _2q4 * (2.0 * q1q2 + _2q3q4 - ay) - 4.0 * q3 * (1.0 - 2.0 * q2q2 - 2.0 * q3q3 - az) + (-_4bx * q3 - _2bz * q1) * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (_2bx * q2 + _2bz * q4) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + (_2bx * q1 - _4bz * q3) * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
s4 = _2q2 * (2.0 * q2q4 - _2q1q3 - ax) + _2q3 * (2.0 * q1q2 + _2q3q4 - ay) + (-_4bx * q4 + _2bz * q2) * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (-_2bx * q1 + _2bz * q3) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + _2bx * q2 * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
norm = math.sqrt(s1 * s1 + s2 * s2 + s3 * s3 + s4 * s4) # normalize step magnitude
norm = 1.0/norm
s1 *= norm
s2 *= norm
s3 *= norm
s4 *= norm
# Compute rate of change of quaternion
qDot1 = 0.5 * (-q2 * gx - q3 * gy - q4 * gz) - beta * s1
qDot2 = 0.5 * (q1 * gx + q3 * gz - q4 * gy) - beta * s2
qDot3 = 0.5 * (q1 * gy - q2 * gz + q4 * gx) - beta * s3
qDot4 = 0.5 * (q1 * gz + q2 * gy - q3 * gx) - beta * s4
# Integrate to yield quaternion
q1 += qDot1 * deltat
q2 += qDot2 * deltat
q3 += qDot3 * deltat
q4 += qDot4 * deltat
norm = math.sqrt(q1 * q1 + q2 * q2 + q3 * q3 + q4 * q4) # normalize quaternion
norm = 1.0/norm
q.insert(0,q1 * norm)
q.insert(1,q2 * norm)
q.insert(2,q3 * norm)
q.insert(3,q4 * norm)
return q
# Get the current sensor configuration registers
def getCurrentConf(self,sensor,screen = None):
if sensor == 'a':
config = [None] * 28
_regName = ['A_TRIG_CFG','A_CTRL_REG1','A_CTRL_REG2','A_CTRL_REG3','A_CTRL_REG4','A_CTRL_REG5','A_ASPL_COUNT','A_F_SETUP','A_XYZ_DATA_CFG','A_HP_FILTER_CUTOFF','A_PL_CFG',
'A_PL_COUNT','A_PL_BF_ZCOMP','A_PL_THS_REG','A_FFMT_CFG','A_FFMT_THS','A_FFMT_COUNT','A_VECM_CFG','A_VECM_THS_MSB','A_TRANSIENT_CFG',
'A_TRANSIENT_THS','A_TRANSIENT_COUNT','A_PULSE_CFG','A_PULSE_TMLT','A_PULSE_LTCY','A_OFF_X','A_OFF_Y','A_OFF_Z']
config[0] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRIG_CFG)
config[1] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1)
config[2] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG2)
config[3] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG3)
config[4] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG4)
config[5] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG5)
config[6] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_ASPL_COUNT)
config[7] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_F_SETUP)
config[8] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_XYZ_DATA_CFG)
config[9] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_HP_FILTER_CUTOFF)
config[10] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_CFG)
config[11] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_COUNT)
config[12] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_BF_ZCOMP)
config[13] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_THS_REG)
config[14] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_FFMT_CFG)
config[15] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_FFMT_THS)
config[16] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_FFMT_COUNT)
config[17] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_VECM_CFG)
config[18] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_VECM_THS_MSB)
config[19] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRANSIENT_CFG)
config[20] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRANSIENT_THS)
config[21] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRANSIENT_COUNT)
config[22] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PULSE_CFG)
config[23] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PULSE_TMLT)
config[24] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PULSE_LTCY)
config[25] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OFF_X)
config[26] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OFF_Y)
config[27] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OFF_Z)
if sensor == 'm':
config = [None] * 15
_regName = ['M_OFF_X_MSB','M_OFF_X_LSB','M_OFF_Y_MSB','M_OFF_Y_LSB','M_OFF_Z_MSB','M_OFF_Z_LSB','M_THS_CFG','M_THS_COUNT',
'M_CTRL_REG1','M_CTRL_REG2','M_CTRL_REG3','M_VECM_CFG','M_VECM_THS_MSB','M_VECM_THS_LSB','M_VECM_CNT']
config[0] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_X_MSB)
config[1] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_X_LSB)
config[2] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Y_MSB)
config[3] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Y_LSB)
config[4] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Z_MSB)
config[5] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Z_LSB)
config[6] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_THS_CFG)
config[7] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_THS_COUNT)
config[8] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_CTRL_REG1)
config[9] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_CTRL_REG2)
config[10] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_CTRL_REG3)
config[11] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_CFG)
config[12] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_THS_MSB)
config[13] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_THS_LSB)
config[14] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_CNT)
if sensor == 'g':
config = [None] * 8
_regName = ['G_F_SETUP','G_CTRL_REG0','G_RT_CFG','G_RT_THS','G_RT_COUNT','G_CTRL_REG1','G_CTRL_REG2','G_CTRL_REG3']
config[0] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_F_SETUP)
config[1] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG0)
config[2] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_RT_CFG)
config[3] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_RT_THS)
config[4] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_RT_COUNT)
config[5] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG1)
config[6] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG2)
config[7] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG3)
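# Optionally print each register name with its current value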
if screen == 1:
for i, reg in enumerate(_regName):
print reg + ': ' + '0x{:02x}'.format(config[i])
return config
| gpl-2.0 |
astraw/stdeb | stdeb/command/install_deb.py | 1 | 1027 | import glob
import os
import stdeb.util as util
from distutils.core import Command
__all__ = ['install_deb']
class install_deb(Command):
description = 'distutils command to install debian binary package'
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# generate .deb file
self.run_command('bdist_deb')
# get relevant options passed to sdist_dsc
sdist_dsc = self.get_finalized_command('sdist_dsc')
# locate the .deb files produced by bdist_deb in the dist directory
target_debs = glob.glob(os.path.join(sdist_dsc.dist_dir, '*.deb'))
if len(target_debs) == 0:
raise ValueError('could not find .deb file')
for target_deb in target_debs:
# define system command to execute (install .deb binary pkg)
syscmd = ['dpkg', '--install', target_deb]
util.process_command(syscmd)
| mit |
tian2992/flask-sockets | flask_sockets.py | 2 | 1627 | # -*- coding: utf-8 -*-
def log_request(self):
log = self.server.log
if log:
if hasattr(log, 'info'):
log.info(self.format_request() + '\n')
else:
log.write(self.format_request() + '\n')
# Monkeys are made for freedom.
try:
import gevent
from geventwebsocket.gunicorn.workers import GeventWebSocketWorker as Worker
except ImportError:
pass
if 'gevent' in locals():
# Freedom-Patch logger for Gunicorn.
if hasattr(gevent, 'pywsgi'):
gevent.pywsgi.WSGIHandler.log_request = log_request
class SocketMiddleware(object):
def __init__(self, wsgi_app, socket):
self.ws = socket
self.app = wsgi_app
def __call__(self, environ, start_response):
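# Websocket requests for a registered path go to their handler; all other requests fall through to the wrapped WSGI app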
path = environ['PATH_INFO']
if path in self.ws.url_map:
handler = self.ws.url_map[path]
environment = environ['wsgi.websocket']
handler(environment)
return []
else:
return self.app(environ, start_response)
class Sockets(object):
def __init__(self, app=None):
self.url_map = {}
if app:
self.init_app(app)
def init_app(self, app):
app.wsgi_app = SocketMiddleware(app.wsgi_app, self)
def route(self, rule, **options):
def decorator(f):
endpoint = options.pop('endpoint', None)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
def add_url_rule(self, rule, _, f, **options):
self.url_map[rule] = f
# CLI sugar.
if 'Worker' in locals():
worker = Worker
| mit |
rdipietro/tensorflow | tensorflow/g3doc/how_tos/adding_an_op/zero_out_1_test.py | 56 | 1441 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for version 1 of the zero_out op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tensorflow as tf
from tensorflow.g3doc.how_tos.adding_an_op import zero_out_op_1
class ZeroOut1Test(tf.test.TestCase):
def test(self):
with self.test_session():
result = zero_out_op_1.zero_out([5, 4, 3, 2, 1])
self.assertAllEqual(result.eval(), [5, 0, 0, 0, 0])
def testLoadTwice(self):
zero_out_loaded_again = tf.load_op_library(os.path.join(
tf.resource_loader.get_data_files_path(), 'zero_out_op_kernel_1.so'))
self.assertEqual(zero_out_loaded_again, zero_out_op_1._zero_out_module)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 |
steedos/odoo | addons/calendar/__init__.py | 391 | 1038 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import calendar
import controllers
import contacts
| agpl-3.0 |
yookoala/ibus-cangjie | src/engine.py | 1 | 16581 | # Copyright (c) 2012-2013 - The IBus Cangjie authors
#
# This file is part of ibus-cangjie, the IBus Cangjie input method engine.
#
# ibus-cangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ibus-cangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ibus-cangjie. If not, see <http://www.gnu.org/licenses/>.
__all__ = ["EngineCangjie", "EngineQuick"]
from operator import attrgetter
from gi.repository import GLib
from gi.repository import IBus
try:
import pycanberra
except ImportError:
# Too bad, the user won't get sound feedback on errors
pass
import cangjie
from .config import Config, properties
def is_inputnumber(keyval):
"""Is the `keyval` param a numeric input, e.g to select a candidate."""
return keyval in range(getattr(IBus, "0"), getattr(IBus, "9")+1)
class Engine(IBus.Engine):
"""The base class for Cangjie and Quick engines."""
def __init__(self):
super(Engine, self).__init__()
self.config = Config(IBus.Bus(), self.config_name,
self.on_value_changed)
self.current_input = ""
self.current_radicals = ""
self.clear_on_next_input = False
self.lookuptable = IBus.LookupTable()
self.lookuptable.set_page_size(9)
self.lookuptable.set_round(True)
self.lookuptable.set_orientation(IBus.Orientation.VERTICAL)
self.init_properties()
self.init_cangjie()
def init_properties(self):
self.prop_list = IBus.PropList()
for p in properties:
key = p["name"]
stored_value = self.config.read(key)
state = IBus.PropState.CHECKED if stored_value else IBus.PropState.UNCHECKED
try:
# Try the new constructor from IBus >= 1.5
prop = IBus.Property(key=key,
prop_type=IBus.PropType.TOGGLE,
label=p["label"],
icon='',
sensitive=True,
visible=True,
state=state,
sub_props=None)
except TypeError:
# IBus 1.4.x didn't have the GI overrides for the nice
# constructor, so let's do it the old, non-pythonic way.
# IBus.Property.new(key, type, label, icon, tooltip,
# sensitive, visible, state, sub_props)
prop = IBus.Property.new(key, IBus.PropType.TOGGLE,
IBus.Text.new_from_string(p["label"]),
'', IBus.Text.new_from_string(''),
True, True, state, None)
self.prop_list.append(prop)
def do_property_activate(self, prop_name, state):
active = state == IBus.PropState.CHECKED
self.config.write(prop_name, GLib.Variant("b", active))
def do_focus_in(self):
self.register_properties(self.prop_list)
def init_cangjie(self):
version = self.config.read("version").unpack()
version = getattr(cangjie.versions, "CANGJIE%d"%version)
filters = (cangjie.filters.BIG5 | cangjie.filters.HKSCS
| cangjie.filters.PUNCTUATION)
if self.config.read("include_allzh"):
filters |= cangjie.filters.CHINESE
if self.config.read("include_jp"):
filters |= cangjie.filters.KANJI
filters |= cangjie.filters.HIRAGANA
filters |= cangjie.filters.KATAKANA
if self.config.read("include_zhuyin"):
filters |= cangjie.filters.ZHUYIN
if self.config.read("include_symbols"):
filters |= cangjie.filters.SYMBOLS
self.cangjie = cangjie.Cangjie(version, filters)
def on_value_changed(self, config, section, name, value, data):
if section != self.config.config_section:
return
self.init_cangjie()
def do_focus_out (self):
"""Handle focus out event
This happens, for example, when switching between application windows
or input contexts.
Such events should clear the current input.
"""
self.clear_current_input()
def do_cancel_input(self):
"""Cancel the current input.
However, if there isn't any current input, then we shouldn't try to do
anything at all, so that the key can fulfill its original function.
"""
if not self.current_input:
return False
self.clear_current_input()
return True
def do_page_down(self):
"""Present the next page of candidates.
However, if there isn't any current input, then we shouldn't try to do
anything at all, so that the key can fulfill its original function.
"""
if not self.lookuptable.get_number_of_candidates():
return False
self.lookuptable.page_down()
self.update_lookup_table()
self.update_auxiliary_text()
return True
def do_page_up(self):
"""Present the previous page of candidates.
However, if there isn't any current input, then we shouldn't try to do
anything at all, so that the key can fulfill its original function.
"""
if not self.lookuptable.get_number_of_candidates():
return False
self.lookuptable.page_up()
self.update_lookup_table()
self.update_auxiliary_text()
return True
def do_backspace(self):
"""Go back from one input character.
This doesn't cancel the current input, only removes the last
user-inputted character from the current input, and clears the list of
candidates.
However, if there isn't any pre-edit, then we shouldn't handle the
backspace key at all, so that it can fulfill its original function:
deleting characters backwards.
"""
if not self.current_input:
return False
self.update_current_input(drop=1)
self.lookuptable.clear()
self.update_lookup_table()
return True
def do_space(self):
"""Handle the space key.
This is our "power key". It implements most of the behaviour behind
Cangjie and Quick.
It can be used to fetch the candidates if there are none, scroll to
the next page of candidates if appropriate or just commit the first
candidate when we have only one page.
Of course, it can also be used to input a "space" character.
"""
if not self.current_input:
return self.do_fullwidth_char(" ")
if not self.lookuptable.get_number_of_candidates():
try:
self.get_candidates()
except (cangjie.errors.CangjieNoCharsError,
cangjie.errors.CangjieInvalidInputError):
self.play_error_bell()
self.clear_on_next_input = True
return True
if self.lookuptable.get_number_of_candidates() <= 9:
self.do_select_candidate(1)
return True
self.do_page_down()
return True
def do_number(self, keyval):
"""Handle numeric input."""
if self.lookuptable.get_number_of_candidates():
return self.do_select_candidate(int(IBus.keyval_to_unicode(keyval)))
return self.do_fullwidth_char(IBus.keyval_to_unicode(keyval))
def do_other_key(self, keyval):
"""Handle all otherwise unhandled key presses."""
c = IBus.keyval_to_unicode(keyval)
if not c or c == '\n' or c == '\r':
return False
if not self.lookuptable.get_number_of_candidates() and \
self.current_input:
# FIXME: This is really ugly
if len(self.current_input) == 1 and \
not self.cangjie.is_input_key(self.current_input):
self.get_candidates(by_shortcode=True)
else:
self.get_candidates()
if self.lookuptable.get_number_of_candidates():
self.do_select_candidate(1)
return self.do_fullwidth_char(IBus.keyval_to_unicode(keyval))
def do_fullwidth_char(self, inputchar):
"""Commit the full-width version of an input character."""
if self.config.read("halfwidth_chars"):
return False
self.update_current_input(append=inputchar)
try:
self.get_candidates(code=inputchar, by_shortcode=True)
except cangjie.errors.CangjieNoCharsError:
self.clear_current_input()
return False
return True
def do_select_candidate(self, index):
"""Commit the selected candidate.
Parameter `index` is the number entered by the user corresponding to
the character she wishes to select on the current page.
Note: the user-visible index starts at 1, but the lookup table is
indexed from 0.
"""
page_index = self.lookuptable.get_cursor_pos()
selected = self.lookuptable.get_candidate(page_index+index-1)
self.commit_text(selected)
self.clear_current_input()
return True
def do_process_key_event(self, keyval, keycode, state):
"""Handle `process-key-event` events.
This event is fired when the user presses a key.
"""
# Ignore key release events
if (state & IBus.ModifierType.RELEASE_MASK):
return False
# Work around integer overflow bug on 32 bits systems:
# https://bugzilla.gnome.org/show_bug.cgi?id=693121
# The bug is fixed in pygobject 3.7.91, but many distributions will
# ship the previous version for some time. (e.g Fedora 18)
if (state & 1073741824):
return False
if state & (IBus.ModifierType.CONTROL_MASK |
IBus.ModifierType.MOD1_MASK):
# Ignore Alt+<key> and Ctrl+<key>
return False
if keyval == IBus.Escape:
return self.do_cancel_input()
if keyval == IBus.space:
return self.do_space()
if keyval == IBus.Page_Down:
return self.do_page_down()
if keyval == IBus.Page_Up:
return self.do_page_up()
if keyval == IBus.BackSpace:
return self.do_backspace()
if is_inputnumber(keyval):
return self.do_number(keyval)
c = IBus.keyval_to_unicode(keyval)
if c and c == "*":
return self.do_star()
if c and self.cangjie.is_input_key(c):
return self.do_inputchar(c)
return self.do_other_key(keyval)
def clear_current_input(self):
"""Clear the current input."""
self.current_input = ""
self.current_radicals = ""
self.clear_on_next_input = False
self.update_lookup_table()
self.update_auxiliary_text()
def update_current_input(self, append=None, drop=None):
"""Update the current input."""
if append is not None:
if self.clear_on_next_input:
self.clear_current_input()
if len(self.current_input) < self.input_max_len:
self.current_input += append
try:
self.current_radicals += self.cangjie.get_radical(append)
except cangjie.errors.CangjieInvalidInputError:
# That character doesn't have a radical
self.current_radicals += append
else:
self.play_error_bell()
elif drop is not None:
self.clear_on_next_input = False
self.current_input = self.current_input[:-drop]
self.current_radicals = self.current_radicals[:-drop]
else:
raise ValueError("You must specify either 'append' or 'drop'")
self.update_auxiliary_text()
def get_candidates(self, code=None, by_shortcode=False):
"""Get the candidates based on the user input.
If the optional `code` parameter is not specified, then use the
current input instead.
"""
self.lookuptable.clear()
num_candidates = 0
if not code:
code = self.current_input
if not by_shortcode:
chars = self.cangjie.get_characters(code)
else:
chars = self.cangjie.get_characters_by_shortcode(code)
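# Present candidates in order of decreasing character frequency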
for c in sorted(chars, key=attrgetter("frequency"), reverse=True):
self.lookuptable.append_candidate(IBus.Text.new_from_string(c.chchar))
num_candidates += 1
if num_candidates == 1:
self.do_select_candidate(1)
else:
# More than one candidate, display them
self.update_lookup_table()
def update_preedit_text(self):
"""Update the preedit text.
This is never used with Cangjie and Quick, so let's nullify it
completely, in case something else in the IBus machinery calls it.
"""
pass
def update_auxiliary_text(self):
"""Update the auxiliary text.
This should contain the radicals for the current input.
"""
text = IBus.Text.new_from_string(self.current_radicals)
super(Engine, self).update_auxiliary_text(text, len(self.current_radicals)>0)
def update_lookup_table(self):
"""Update the lookup table."""
if not self.current_input:
self.lookuptable.clear()
num_candidates = self.lookuptable.get_number_of_candidates()
super(Engine, self).update_lookup_table(self.lookuptable,
num_candidates>0)
def play_error_bell(self):
"""Play an error sound, to notify the user of invalid input."""
try:
if not hasattr(self, "canberra"):
self.canberra = pycanberra.Canberra()
self.canberra.play(1, pycanberra.CA_PROP_EVENT_ID, "dialog-error",
pycanberra.CA_PROP_MEDIA_ROLE, "error", None)
except:
# Playing a sound is a nice indication for the user, but if it
# fails, it should never take down the input system
pass
class EngineCangjie(Engine):
"""The Cangjie engine."""
__gtype_name__ = "EngineCangjie"
config_name = "cangjie"
input_max_len = 5
def do_inputchar(self, inputchar):
"""Handle user input of valid Cangjie input characters."""
if self.lookuptable.get_number_of_candidates():
self.do_select_candidate(1)
self.update_current_input(append=inputchar)
return True
def do_star(self):
"""Handle the star key (*)
For Cangjie, this can in some cases be a wildcard key.
"""
if self.current_input:
return self.do_inputchar("*")
return self.do_other_key(IBus.asterisk)
class EngineQuick(Engine):
"""The Quick engine."""
__gtype_name__ = "EngineQuick"
config_name = "quick"
input_max_len = 2
def do_inputchar(self, inputchar):
"""Handle user input of valid Cangjie input characters."""
if self.lookuptable.get_number_of_candidates():
self.do_select_candidate(1)
if len(self.current_input) < self.input_max_len:
self.update_current_input(append=inputchar)
# Now that we appended/committed, let's check the new length
if len(self.current_input) == self.input_max_len:
current_input = "*".join(self.current_input)
try:
self.get_candidates(current_input)
except cangjie.errors.CangjieNoCharsError:
self.play_error_bell()
self.clear_on_next_input = True
return True
def do_star(self):
"""Handle the star key (*)
For Quick, this should just be considered as any other key.
"""
return self.do_other_key(IBus.asterisk)
| gpl-3.0 |
TedaLIEz/sentry | src/sentry/migrations/0042_auto__add_projectcountbyminute__add_unique_projectcountbyminute_projec.py | 36 | 17255 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProjectCountByMinute'
db.create_table('sentry_projectcountbyminute', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'], null=True)),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('times_seen', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('time_spent_total', self.gf('django.db.models.fields.FloatField')(default=0)),
('time_spent_count', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal('sentry', ['ProjectCountByMinute'])
# Adding unique constraint on 'ProjectCountByMinute', fields ['project', 'date']
db.create_unique('sentry_projectcountbyminute', ['project_id', 'date'])
def backwards(self, orm):
# Removing unique constraint on 'ProjectCountByMinute', fields ['project', 'date']
db.delete_unique('sentry_projectcountbyminute', ['project_id', 'date'])
# Deleting model 'ProjectCountByMinute'
db.delete_table('sentry_projectcountbyminute')
models = {
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.messagecountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'MessageCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.messagefiltervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'MessageFilterValue'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.project': {
'Meta': {'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.projectdomain': {
'Meta': {'unique_together': "(('project', 'domain'),)", 'object_name': 'ProjectDomain'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'domain_set'", 'to': "orm['sentry.Project']"})
},
'sentry.projectmember': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_project_set'", 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.view': {
'Meta': {'object_name': 'View'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'verbose_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'verbose_name_plural': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['sentry']
| bsd-3-clause |
VPAC/pytsm | pytsm/actions/__init__.py | 1 | 1692 | # Copyright 2012-2014 VPAC
#
# This file is part of pytsm.
#
# pytsm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pytsm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pytsm. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import importlib
import sys
import codecs
import locale
def load_command(name):
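# Each command lives in pytsm.actions.commands.<name> and must expose a Command class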
assert not name.startswith("_")
assert name.find(".") == -1
mod = importlib.import_module('pytsm.actions.commands.' + name)
return mod.Command
def command_line(argv=None):
if sys.version_info < (3, 0):
# for python2 we need to use correct encoding when writing to stdout
encoding = locale.getpreferredencoding()
Writer = codecs.getwriter(encoding)
sys.stdout = Writer(sys.stdout)
if argv is None:
argv = sys.argv
try:
command = argv[1]
except IndexError:
command = "help"
args = argv[2:]
try:
klass = load_command(command)
except ImportError:
print("Unknown command %s." % command, file=sys.stderr)
return 255
obj = klass()
rc = obj.execute(argv[0], command, args)
return rc
| gpl-3.0 |