| repo_name (string, 5–100 chars) | path (string, 4–375 chars) | copies (string, 991 classes) | size (string, 4–7 chars) | content (string, 666–1M chars) | license (string, 15 classes) |
|---|---|---|---|---|---|
sjohannes/exaile | tests/xl/trax/test_search.py | 2 | 16207 | # -*- coding: utf-8 -*-
from mox3 import mox
from xl.trax import search
from xl.trax import track
import pytest
def test_search_result_track_get_track():
val = 'foo'
search_result_track = search.SearchResultTrack(val)
assert search_result_track.track == val, search_result_track.track
def get_search_result_track():
tr = track.Track('file:///foo')
return search.SearchResultTrack(tr)
class TestMatcher(object):
def setup(self):
self.mox = mox.Mox()
self.strack = get_search_result_track()
self.strack.track.set_tag_raw('artist', [u'foo', u'bar'])
def teardown(self):
self.mox.UnsetStubs()
def test_match_list_true(self):
self.mox.StubOutWithMock(search._Matcher, '_matches')
search._Matcher._matches(mox.IsA(str)).AndReturn(True)
self.mox.ReplayAll()
matcher = search._Matcher('artist', u'bar', lambda x: x)
assert matcher.match(self.strack)
self.mox.VerifyAll()
def test_match_list_false(self):
self.mox.StubOutWithMock(search._Matcher, '_matches')
# ensure that both tags are checked
search._Matcher._matches(mox.IsA(str)).AndReturn(False)
search._Matcher._matches(mox.IsA(str)).AndReturn(False)
self.mox.ReplayAll()
matcher = search._Matcher('artist', u'bar', lambda x: x)
assert not matcher.match(self.strack)
self.mox.VerifyAll()
def test_match_list_none(self):
self.mox.StubOutWithMock(search._Matcher, '_matches')
search._Matcher._matches(None).AndReturn(True)
self.mox.ReplayAll()
matcher = search._Matcher('album', None, lambda x: x)
assert matcher.match(self.strack)
self.mox.VerifyAll()
def test_matches(self):
matcher = search._Matcher('album', None, lambda x: x)
with pytest.raises(NotImplementedError):
matcher._matches('foo')
class TestExactMatcher(object):
def setup(self):
self.str = get_search_result_track()
def test_exact_matcher_true(self):
matcher = search._ExactMatcher('album', 'Foo', lambda x: x)
self.str.track.set_tag_raw('album', 'Foo')
assert matcher.match(self.str)
def test_exact_matcher_false(self):
matcher = search._ExactMatcher('album', 'Foo', lambda x: x)
self.str.track.set_tag_raw('album', 'FoO')
assert not matcher.match(self.str)
class TestInMatcher(object):
def setup(self):
self.str = get_search_result_track()
def test_in_matcher_none(self):
matcher = search._InMatcher('album', 'Foo', lambda x: x)
self.str.track.set_tag_raw('album', None)
assert not matcher.match(self.str)
def test_in_matcher_true(self):
matcher = search._InMatcher('album', 'hello', lambda x: x)
self.str.track.set_tag_raw('album', 'Foohelloworld')
assert matcher.match(self.str)
def test_in_matcher_error(self):
matcher = search._InMatcher('album', 2, lambda x: x)
self.str.track.set_tag_raw('album', 'Foohelloworld')
assert not matcher.match(self.str)
def test_in_matcher_false(self):
matcher = search._InMatcher('album', 'hello', lambda x: x)
self.str.track.set_tag_raw('album', 'Fooheloworld')
assert not matcher.match(self.str)
class TestGtLtMatchers(object):
def setup(self):
self.str = get_search_result_track()
def test_gt_bitrate_matcher_true(self):
matcher = search._GtMatcher('__bitrate', 100000, lambda x: x)
self.str.track.set_tag_raw('__bitrate', 128000)
assert matcher.match(self.str)
def test_gt_bitrate_matcher_false(self):
matcher = search._GtMatcher('__bitrate', 100000, lambda x: x)
self.str.track.set_tag_raw('__bitrate', 28000)
assert not matcher.match(self.str)
def test_lt_bitrate_matcher_true(self):
matcher = search._LtMatcher('__bitrate', 100000, lambda x: x)
self.str.track.set_tag_raw('__bitrate', 28000)
assert matcher.match(self.str)
def test_lt_bitrate_matcher_false(self):
matcher = search._LtMatcher('__bitrate', 100000, lambda x: x)
self.str.track.set_tag_raw('__bitrate', 128000)
assert not matcher.match(self.str)
class TestMetaMatcherClasses(object):
class _Matcher(object):
def __init__(self, val):
self.val = val
def matches(self, val):
return self.val
def match(self, val):
return self.val
class TestNotMetaMatcher(TestMetaMatcherClasses):
def test_true(self):
matcher = self._Matcher(True)
matcher = search._NotMetaMatcher(matcher)
assert not matcher.match('foo')
def test_false(self):
matcher = self._Matcher(False)
matcher = search._NotMetaMatcher(matcher)
assert matcher.match('foo')
class TestOrMetaMatcher(TestMetaMatcherClasses):
def test_true_true(self):
matcher_1 = self._Matcher(True)
matcher_2 = self._Matcher(True)
matcher = search._OrMetaMatcher(matcher_1, matcher_2)
assert matcher.match('foo')
def test_true_false(self):
matcher_1 = self._Matcher(True)
matcher_2 = self._Matcher(False)
matcher = search._OrMetaMatcher(matcher_1, matcher_2)
assert matcher.match('foo')
def test_false_true(self):
matcher_1 = self._Matcher(False)
matcher_2 = self._Matcher(True)
matcher = search._OrMetaMatcher(matcher_1, matcher_2)
assert matcher.match('foo')
def test_false_false(self):
matcher_1 = self._Matcher(False)
matcher_2 = self._Matcher(False)
matcher = search._OrMetaMatcher(matcher_1, matcher_2)
assert not matcher.match('foo')
class TestMultiMetaMatcher(TestMetaMatcherClasses):
def test_true(self):
matcher = [self._Matcher(True)] * 10
matcher = search._MultiMetaMatcher(matcher)
assert matcher.match('foo')
def test_false(self):
matcher = [self._Matcher(True)] * 10 + [self._Matcher(False)]
matcher = search._MultiMetaMatcher(matcher)
assert not matcher.match('foo')
class TestManyMultiMetaMatcher(TestMetaMatcherClasses):
def test_true(self):
matcher = [self._Matcher(True)] * 10 + [self._Matcher(False)]
for match in matcher:
match.tag = 'artist'
matcher = search._ManyMultiMetaMatcher(matcher)
assert matcher.match('foo')
def test_false(self):
matcher = [self._Matcher(False)] * 10
for match in matcher:
match.tag = 'artist'
matcher = search._ManyMultiMetaMatcher(matcher)
assert not matcher.match('foo')
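# The TracksMatcher tests below exercise the query grammar as these cases
# imply it: "tag=value" builds a substring _InMatcher, "tag==value" an
# _ExactMatcher, "!" negates, "|" is a logical or, parentheses group, and a
# bare term is searched across all keyword_tags.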
class TestTracksMatcher(object):
def setup(self):
self.str = get_search_result_track()
def test_in_matcher(self):
matcher = search.TracksMatcher("artist=foo")
assert len(matcher.matchers) == 1
match = matcher.matchers[0]
self.match_is_type(match, search._InMatcher)
assert match.tag == 'artist'
assert match.content == 'foo'
def test_exact_matcher(self):
matcher = search.TracksMatcher("artist==foo")
assert len(matcher.matchers) == 1
match = matcher.matchers[0]
self.match_is_type(match, search._ExactMatcher)
assert match.tag == 'artist'
assert match.content == 'foo'
def match_is_type(self, match, expected):
assert isinstance(match, expected), match
def test_not_matcher(self):
matcher = search.TracksMatcher("! foo", keyword_tags=['artist'])
match = matcher
# NotMetaMatcher
assert len(match.matchers) == 1
match = matcher.matchers[0]
self.match_is_type(match, search._NotMetaMatcher)
# MultiMetaMatcher
match = match.matcher
self.match_is_type(match, search._MultiMetaMatcher)
# ManyMultiMetaMatcher
assert len(match.matchers) == 1
match = match.matchers[0]
self.match_is_type(match, search._ManyMultiMetaMatcher)
# InMatcher
assert len(match.matchers) == 1
match = match.matchers[0]
self.match_is_type(match, search._InMatcher)
assert match.tag == 'artist'
assert match.content == 'foo'
def test_or_matcher(self):
matcher = search.TracksMatcher("foo | bar", keyword_tags=['artist'])
match = matcher
# OrMetaMatcher
assert len(match.matchers) == 1
match = matcher.matchers[0]
self.match_is_type(match, search._OrMetaMatcher)
# MultiMetaMatcher
assert match.left
assert match.right
self.match_is_type(match.left, search._MultiMetaMatcher)
self.match_is_type(match.right, search._MultiMetaMatcher)
# ManyMultiMetaMatcher
assert len(match.left.matchers) == 1
assert len(match.right.matchers) == 1
match_left = match.left.matchers[0]
match_right = match.right.matchers[0]
self.match_is_type(match_left, search._ManyMultiMetaMatcher)
self.match_is_type(match_right, search._ManyMultiMetaMatcher)
# InMatcher
assert len(match_left.matchers) == 1
assert len(match_right.matchers) == 1
match_left = match_left.matchers[0]
match_right = match_right.matchers[0]
self.match_is_type(match_left, search._InMatcher)
self.match_is_type(match_right, search._InMatcher)
assert match_left.tag == 'artist'
assert match_right.tag == 'artist'
if match_left.content == 'foo':
if match_right.content != 'bar':
assert not "We lost a search term on an or"
elif match_left.content == 'bar':
if match_right.content != 'foo':
assert not "We lost a search term on an or"
else:
assert not "We lost both parts of an or"
def test_paren_matcher(self):
matcher = search.TracksMatcher("( foo | bar )", keyword_tags=['artist'])
match = matcher
# MultiMetaMatcher
assert len(match.matchers) == 1
match = matcher.matchers[0]
self.match_is_type(match, search._MultiMetaMatcher)
assert len(match.matchers) == 1
match = match.matchers[0]
self.match_is_type(match, search._OrMetaMatcher)
# This is the same code as the OrMetaMatcher
assert match.left
assert match.right
self.match_is_type(match.left, search._MultiMetaMatcher)
self.match_is_type(match.right, search._MultiMetaMatcher)
# ManyMultiMetaMatcher
assert len(match.left.matchers) == 1
assert len(match.right.matchers) == 1
match_left = match.left.matchers[0]
match_right = match.right.matchers[0]
self.match_is_type(match_left, search._ManyMultiMetaMatcher)
self.match_is_type(match_right, search._ManyMultiMetaMatcher)
# InMatcher
assert len(match_left.matchers) == 1
assert len(match_right.matchers) == 1
match_left = match_left.matchers[0]
match_right = match_right.matchers[0]
self.match_is_type(match_left, search._InMatcher)
self.match_is_type(match_right, search._InMatcher)
assert match_left.tag == 'artist'
assert match_right.tag == 'artist'
if match_left.content == 'foo':
if match_right.content != 'bar':
assert not "We lost a search term on an or"
elif match_left.content == 'bar':
if match_right.content != 'foo':
assert not "We lost a search term on an or"
else:
assert not "We lost both parts of an or"
def test_match_true(self):
matcher = search.TracksMatcher("foo", keyword_tags=['artist'])
self.str.track.set_tag_raw('artist', 'foo')
assert matcher.match(self.str)
assert self.str.on_tags == ['artist']
def test_match_true_tag(self):
matcher = search.TracksMatcher("artist=foo")
self.str.track.set_tag_raw('artist', 'foo')
assert matcher.match(self.str)
assert self.str.on_tags == ['artist']
def test_match_true_case_insensitive(self):
matcher = search.TracksMatcher("artist=FoO", case_sensitive=False)
self.str.track.set_tag_raw('artist', 'foo')
assert matcher.match(self.str)
assert self.str.on_tags == ['artist']
def test_match_true_none(self):
matcher = search.TracksMatcher("artist==__null__")
self.str.track.set_tag_raw('artist', '')
assert matcher.match(self.str)
assert self.str.on_tags == ['artist']
def test_match_false(self):
matcher = search.TracksMatcher("foo", keyword_tags=['artist'])
self.str.track.set_tag_raw('artist', 'bar')
assert not matcher.match(self.str)
class TestSearchTracks(object):
def test_search_tracks(self):
matcher = search.TracksMatcher("foo", keyword_tags=['artist'])
tracks = [track.Track(x) for x in ('foo', 'bar', 'baz', 'quux')]
tracks = [search.SearchResultTrack(tr) for tr in tracks]
tracks[0].track.set_tag_raw('artist', 'foooo')
tracks[2].track.set_tag_raw('artist', 'foooooo')
gen = search.search_tracks(tracks, [matcher])
assert next(gen) == tracks[0]
assert next(gen) == tracks[2]
with pytest.raises(StopIteration):
next(gen)
def test_take_not_srt(self):
matcher = search.TracksMatcher("foo", keyword_tags=['artist'])
tracks = [track.Track(x) for x in ('foo', 'bar', 'baz', 'quux')]
tracks[0].set_tag_raw('artist', 'foooo')
tracks[2].set_tag_raw('artist', 'foooooo')
gen = search.search_tracks(tracks, [matcher])
assert next(gen).track == tracks[0]
assert next(gen).track == tracks[2]
with pytest.raises(StopIteration):
next(gen)
def test_search_tracks_from_string(self):
tracks = [track.Track(x) for x in ('foo', 'bar', 'baz', 'quux')]
tracks[0].set_tag_raw('artist', 'foooo')
tracks[2].set_tag_raw('artist', 'foooooo')
gen = search.search_tracks_from_string(tracks, 'foo', keyword_tags=['artist'])
assert next(gen).track == tracks[0]
assert next(gen).track == tracks[2]
with pytest.raises(StopIteration):
next(gen)
@pytest.mark.parametrize("sstr", ["motley crue", u"mötley crüe", u"motley crüe"])
def test_search_tracks_ignore_diacritic_from_string(self, sstr):
'''Ensure that searching for tracks with diacritics return
appropriately normalized results'''
tracks = [track.Track(x) for x in ('foo', 'bar', 'baz', 'quux')]
tracks[0].set_tag_raw('artist', 'motley crue')
tracks[1].set_tag_raw('artist', 'rubbish')
tracks[2].set_tag_raw('artist', u'motley crüe')
gen = search.search_tracks_from_string(tracks, sstr, keyword_tags=['artist'])
assert next(gen).track == tracks[0]
assert next(gen).track == tracks[2]
with pytest.raises(StopIteration):
next(gen)
def test_search_tracks_with_unicodemark_from_string(self):
tracks = [track.Track(x) for x in ('foo', 'bar', 'baz', 'quux')]
tracks[0].set_tag_raw('artist', 'foooo')
tracks[2].set_tag_raw('artist', u'中')
# the weird character is normalized, so you can't search based on that
gen = search.search_tracks_from_string(tracks, u'中', keyword_tags=['artist'])
assert next(gen).track == tracks[2]
with pytest.raises(StopIteration):
next(gen)
def test_search_tracks_with_int_from_string(self):
# unlike mp3, mp4 will return integers for BPM.. make sure that works
tracks = [track.Track(x) for x in ('foo', 'bar', 'baz', 'quux')]
tracks[1].set_tag_raw('bpm', '2')
tracks[2].set_tag_raw('bpm', 2)
gen = search.search_tracks_from_string(tracks, '2', keyword_tags=['bpm'])
assert next(gen).track == tracks[1]
assert next(gen).track == tracks[2]
with pytest.raises(StopIteration):
next(gen)
| gpl-2.0 |
Netuitive/netuitive-diamond | src/collectors/kvm/kvm.py | 31 | 1279 | # coding=utf-8
"""
Collects /sys/kernel/debug/kvm/*
#### Dependencies
* /sys/kernel/debug/kvm
"""
import diamond.collector
import os
class KVMCollector(diamond.collector.Collector):
PROC = '/sys/kernel/debug/kvm'
def get_default_config_help(self):
config_help = super(KVMCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(KVMCollector, self).get_default_config()
config.update({
'path': 'kvm',
})
return config
def collect(self):
if not os.path.isdir(self.PROC):
self.log.error('/sys/kernel/debug/kvm is missing. Did you' +
' "mount -t debugfs debugfs /sys/kernel/debug"?')
return {}
for filename in os.listdir(self.PROC):
filepath = os.path.abspath(os.path.join(self.PROC, filename))
# Each debugfs file holds a single monotonically increasing counter;
# derivative() turns it into a per-interval rate, using 4294967295
# (2**32 - 1) as the wrap-around point for these 32-bit counters.
with open(filepath, 'r') as fh:
metric_value = self.derivative(filename,
float(fh.readline()),
4294967295)
self.publish(filename, metric_value)
| mit |
pnedunuri/scipy | scipy/sparse/linalg/isolve/iterative/test.py | 110 | 4126 | from __future__ import division, print_function, absolute_import
from scipy import *
from iterative import *
def test_fun(alpha, x, beta, y, A, n):
# compute z = alpha*A*x + beta*y
xx = x[:n]
yy = y[:n]
w = dot(A,xx)
z = alpha*w+beta*yy
y[:n] = z
return
def test_fun_t(alpha, x, beta, y, A, n):
# compute z = alpha*A*x + beta*y
xx = x[:n]
yy = y[:n]
AA = conj(transpose(A))
w = dot(AA,xx)
z = alpha*w+beta*yy
y[:n] = z
return
def test_psolve(x,b,n):
x[:n] = b[:n]
return
def test_psolve_t(x,b,n):
x[:n] = b[:n]
return
def test_psolveq(x,b,which,n):
x[:n] = b[:n]
return
def test_psolveq_t(x,b,which,n):
x[:n] = b[:n]
return
n = 5
dA = 1.0*array([[2, -1, 0, 0, 0],
[-1, 2, -1, 0, 0],
[0, -1, 2, -1, 0],
[0, 0, -1, 2, -1],
[0, 0, 0, 1, 2]])
db = 1.0*array([0,1,1,0,0])
##zA = (1.0+0j)*array([[ 2, -1+0.1j, 0, 0, 0],
## [-1+0.1j, 2, -1-0.1j, 0, 0],
## [ 0, -1-0.1j, 2, -1+0.1j, 0],
## [ 0, 0, -1+0.1j, 2, -1-0.1j],
## [ 0, 0, 0, -1, 2-0.1j]])
zA = (1.0+0j)*array([[2, -1 + 1j, 0, 0, 0],
[-1+0.1j, 2, -1-0.1j, 0, 0],
[0, -1 - 1j, 2, -1+0.1j, 0],
[0, 0, -1+0.1j, 2, -1-0.1j],
[0, 0, 0, -1, 2-0.1j]])
zb = (1.0+0j)*array([0,1,1,0,0])
dx = 0*db.copy()
zx = 0*zb.copy()
diter = 1000
dresid = 1e-6
ziter = 1000
zresid = 1e-6
drestrt = n
zrestrt = n
############### BiCG #######################
dx,diter,dresid,dinfor = dbicg(db,dx,diter,dresid,test_fun,test_fun_t,test_psolve,test_psolve_t,(dA,n),(dA,n),(n,),(n,))
zx,ziter,zresid,zinfor = zbicg(zb,zx,ziter,zresid,test_fun,test_fun_t,test_psolve,test_psolve_t,(zA,n),(zA,n),(n,),(n,))
############### BiCGSTAB ###################
#dx,diter,dresid,dinfor = dbicgstab(db,dx,diter,dresid,test_fun,test_psolve,(dA,n),(n,))
#zx,ziter,zresid,zinfor = zbicgstab(zb,zx,ziter,zresid,test_fun,test_psolve,(zA,n),(n,))
############### CG #########################
##dA = 1.0*array([[ 2, -1, 0, 0, 0],
## [-1, 2, -1, 0, 0],
## [ 0, -1, 2, -1, 0],
## [ 0, 0, -1, 2, -1],
## [ 0, 0, 0, -1, 2]])
##dx = db.copy()
##zA = (1.0+0j)*array([[ 2, -1+0.1j, 0, 0, 0],
## [-1+0.1j, 2, -1-0.1j, 0, 0],
## [ 0, -1-0.1j, 2, -1+0.1j, 0],
## [ 0, 0, -1+0.1j, 2, -1-0.1j],
## [ 0, 0, 0, -1, 2-0.1j]])
##zx = zb.copy()
##dx,diter,dresid,dinfor = dcg(db,dx,diter,dresid,test_fun,test_psolve,(dA,n),(n,))
##zx,ziter,zresid,zinfor = zcg(zb,zx,ziter,zresid,test_fun,test_psolve,(zA,n),(n,))
############### CGS ########################
#dx,diter,dresid,dinfor = dcgs(db,dx,diter,dresid,test_fun,test_psolve,(dA,n),(n,))
#zx,ziter,zresid,zinfor = zcgs(zb,zx,ziter,zresid,test_fun,test_psolve,(zA,n),(n,))
############### GMRES ######################
#dx,diter,dresid,dinfor = dgmres(db,dx,drestrt,diter,dresid,test_fun,test_psolve,(dA,n),(n,))
#zx,ziter,zresid,zinfor = zgmres(zb,zx,zrestrt,ziter,zresid,test_fun,test_psolve,(zA,n),(n,))
############### QMR ########################
#dx,diter,dresid,dinfor = dqmr(db,dx,diter,dresid,test_fun,test_fun_t,test_psolveq,test_psolveq_t,(dA,n),(dA,n),(n,),(n,))
#zx,ziter,zresid,zinfor = zqmr(zb,zx,ziter,zresid,test_fun,test_fun_t,test_psolveq,test_psolveq_t,(zA,n),(zA,n),(n,),(n,))
print()
print('**************** double *****************')
print('iter:',diter, 'resid:', dresid, 'info:',dinfor)
print('x=',dx)
print('*****************************************')
print()
print()
print('**************** complex ****************')
print('iter:',ziter, 'resid:',zresid, 'info:',zinfor)
print('x=',zx)
print('*****************************************')
print()
| bsd-3-clause |
Yashasvi-Sriram/EscapeErrands | escapeerrands/settings.py | 1 | 2950 | import os
import dj_database_url
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_APP = os.path.dirname(os.path.abspath(__file__))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'rt$g!n6fo^gs91m%osd#4ia-rc^qyhaaisj%+0=^dd05p7@=&h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ROOT_URLCONF = 'escapeerrands.urls'
WSGI_APPLICATION = 'escapeerrands.wsgi.application'
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core',
'interface',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
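# dj_database_url parses a URL-shaped environment variable; a hypothetical
# example of the expected format:
#   DATABASE_URL=postgres://USER:PASSWORD@HOST:5432/NAME
# conn_max_age=500 keeps each connection open for up to 500 seconds.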
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static files (CSS, JavaScript, Images)
STATIC_ROOT = os.path.join(PROJECT_APP, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_APP, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
| apache-2.0 |
ledsusop/airflow | airflow/contrib/operators/dataflow_operator.py | 14 | 3784 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from airflow.contrib.hooks.gcp_dataflow_hook import DataFlowHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class DataFlowJavaOperator(BaseOperator):
"""
Start a Java Cloud DataFlow batch job. The parameters of the operation
will be passed to the job.
It's a good practice to define dataflow_* parameters in the default_args of the
dag, such as the project, zone and staging location.
```
default_args = {
'dataflow_default_options': {
'project': 'my-gcp-project',
'zone': 'europe-west1-d',
'stagingLocation': 'gs://my-staging-bucket/staging/'
}
}
```
You need to pass the path to your dataflow as a file reference with the ``jar``
parameter, the jar needs to be a self executing jar. Use ``options`` to pass on
options to your job.
```
t1 = DataFlowJavaOperator(
task_id='datapflow_example',
jar='{{var.value.gcp_dataflow_base}}pipeline/build/libs/pipeline-example-1.0.jar',
options={
'autoscalingAlgorithm': 'BASIC',
'maxNumWorkers': '50',
'start': '{{ds}}',
'partitionType': 'DAY'
},
dag=my-dag)
```
Both ``jar`` and ``options`` are templated so you can use variables in them.
"""
template_fields = ['options', 'jar']
ui_color = '#0273d4'
@apply_defaults
def __init__(
self,
jar,
dataflow_default_options=None,
options=None,
gcp_conn_id='google_cloud_default',
delegate_to=None,
*args,
**kwargs):
"""
Create a new DataFlowJavaOperator.
For more detail about job submission, have a look at the reference:
https://cloud.google.com/dataflow/pipelines/specifying-exec-params
:param jar: The reference to a self executing DataFlow jar.
:type jar: string
:param dataflow_default_options: Map of default job options.
:type dataflow_default_options: dict
:param options: Map of job specific options.
:type options: dict
:param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform.
:type gcp_conn_id: string
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must have domain-wide
delegation enabled.
:type delegate_to: string
"""
super(DataFlowJavaOperator, self).__init__(*args, **kwargs)
dataflow_default_options = dataflow_default_options or {}
options = options or {}
self.gcp_conn_id = gcp_conn_id
self.delegate_to = delegate_to
self.jar = jar
self.dataflow_default_options = dataflow_default_options
self.options = options
def execute(self, context):
hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to)
dataflow_options = copy.copy(self.dataflow_default_options)
dataflow_options.update(self.options)
hook.start_java_dataflow(self.task_id, dataflow_options, self.jar)
| apache-2.0 |
saisaizhang/Food | flask/lib/python2.7/site-packages/whoosh/lang/__init__.py | 71 | 4308 | # coding=utf-8
# Copyright 2012 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
# Exceptions
class NoStemmer(Exception):
pass
class NoStopWords(Exception):
pass
# Data and functions for language names
languages = ("ar", "da", "nl", "en", "fi", "fr", "de", "hu", "it", "no", "pt",
"ro", "ru", "es", "sv", "tr")
aliases = {
# By ISO 639-1 three letter codes
"ara": "ar",
"dan": "da", "nld": "nl", "eng": "en", "fin": "fi", "fra": "fr",
"deu": "de", "hun": "hu", "ita": "it", "nor": "no", "por": "pt",
"ron": "ro", "rus": "ru", "spa": "es", "swe": "sv", "tur": "tr",
# By name in English
"arabic": "ar",
"danish": "da",
"dutch": "nl",
"english": "en",
"finnish": "fi",
"french": "fr",
"german": "de",
"hungarian": "hu",
"italian": "it",
"norwegian": "no",
"portuguese": "pt",
"romanian": "ro",
"russian": "ru",
"spanish": "es",
"swedish": "sw",
"turkish": "tr",
# By name in own language
"العربية": "ar",
"dansk": "da",
"nederlands": "nl",
"suomi": "fi",
"français": "fr",
"deutsch": "de",
"magyar": "hu",
"italiano": "it",
"norsk": "no",
"português": "pt",
"русский язык": "ru",
"español": "es",
"svenska": "sv",
"türkçe": "tr",
}
def two_letter_code(name):
if name in languages:
return name
if name in aliases:
return aliases[name]
return None
# Getter functions
def has_stemmer(lang):
try:
return bool(stemmer_for_language(lang))
except NoStemmer:
return False
def has_stopwords(lang):
try:
return bool(stopwords_for_language(lang))
except NoStopWords:
return False
def stemmer_for_language(lang):
if lang == "en_porter":
# Original porter stemming algorithm is several times faster than the
# more correct porter2 algorithm in snowball package
from .porter import stem as porter_stem
return porter_stem
tlc = two_letter_code(lang)
if tlc == "ar":
from .isri import ISRIStemmer
return ISRIStemmer().stem
from .snowball import classes as snowball_classes
if tlc in snowball_classes:
return snowball_classes[tlc]().stem
raise NoStemmer("No stemmer available for %r" % lang)
def stopwords_for_language(lang):
from .stopwords import stoplists
tlc = two_letter_code(lang)
if tlc in stoplists:
return stoplists[tlc]
raise NoStopWords("No stop-word list available for %r" % lang)
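# Usage sketch (assuming whoosh's bundled snowball and stopword data):
#   >>> two_letter_code("portuguese")
#   'pt'
#   >>> stemmer_for_language("en_porter")("running")
#   'run'
#   >>> "the" in stopwords_for_language("english")
#   True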
| bsd-3-clause |
iagapov/ocelot | utils/db.py | 2 | 6438 | import sqlite3
import os, sys
import datetime, time
class Tuning:
def __init__(self, pars):
self.time = pars[0]
self.id = pars[1]
self.wl = pars[2]
self.charge = pars[3]
self.comment = pars[4]
class ActionResult:
def __init__(self, pars):
self.tuning_id = pars[0]
self.id = pars[1]
self.sase_start = pars[2]
self.sase_end = pars[3]
class ActionParameters:
def __init__(self, pars):
self.tuning_id = pars[0]
self.action_id = pars[1]
self.par_name = pars[2]
self.start_value = pars[3]
self.end_value = pars[4]
class PerfDB:
def __init__(self, dbname="flash.db"):
print("connecting to database ... ")
self.db = sqlite3.connect(dbname)
def new_tuning(self, params):
print ('creating new tuning', params)
with self.db:
cursor = self.db.cursor()
cursor.execute("insert into TUNINGS(TIME,CHARGE,WL, COMMENT) VALUES(?,?,?,?)",
(datetime.datetime.now(), params['charge'], params['wl'], "test"))
def get_tunings(self):
cursor = self.db.cursor()
cursor.execute("select * from TUNINGS")
return [Tuning(r) for r in cursor.fetchall()]
def current_tuning_id(self):
return self.get_tunings()[-1].id # TODO: not efficient
def new_action(self, tuning_id, start_sase, end_sase):
print ('creating new action')
with self.db:
cursor = self.db.cursor()
cursor.execute("insert into ACTIONS(TUNING_ID,SASE_START,SASE_END) VALUES(?,?,?)",(tuning_id, start_sase,end_sase))
def get_actions(self, tuning_id = None):
cursor = self.db.cursor()
if tuning_id == None: tuning_id = self.current_tuning_id()
cursor.execute("select * from ACTIONS WHERE TUNING_ID=:Id", {'Id': tuning_id})
return [ActionResult(r) for r in cursor.fetchall()]
def current_action_id(self):
return self.get_actions()[-1].id # TODO: not efficient
def add_action_parameters(self, tuning_id, action_id, param_names, start_vals, end_vals):
print('updating action', tuning_id, action_id)
with self.db:
cursor = self.db.cursor()
for i in range(len(param_names)):
cursor.execute("insert into PARAMETERS(TUNING_ID,ACTION_ID, PAR_NAME,PAR_START_VALUE,PAR_END_VALUE) VALUES(?,?,?,?,?)",
(tuning_id, action_id, param_names[i], start_vals[i], end_vals[i]))
def get_action_parameters(self, tuning_id, action_id):
cursor = self.db.cursor()
cursor.execute("select * from PARAMETERS WHERE TUNING_ID=:tid and ACTION_ID = :aid", {'tid': tuning_id, 'aid': action_id})
#return cursor.fetchall()
return [ActionParameters(r) for r in cursor.fetchall()]
def add_machine_parameters(self, tuning_id, params):
print ('updating machine parameters for tuning ', tuning_id)
with self.db:
cursor = self.db.cursor()
for k in params.keys():
cursor.execute("insert into MACHINE_STATE(TUNING_ID,PAR_NAME,PAR_VALUE) VALUES(?,?,?)",
(tuning_id, k, params[k]))
def get_machine_parameters(self, tuning_id):
cursor = self.db.cursor()
cursor.execute("select * from MACHINE_STATE WHERE TUNING_ID=:tid",{'tid':tuning_id})
return cursor.fetchall()
def close(self):
self.db.close()
def create_db(dbname="flash.db"):
db = sqlite3.connect(dbname)
cursor = db.cursor()
cursor.execute("drop table if exists TUNINGS")
sql = """CREATE TABLE TUNINGS (
TIME DATETIME NOT NULL,
ID INTEGER PRIMARY KEY,
WL FLOAT,
CHARGE FLOAT,
COMMENT CHAR(20))"""
cursor.execute(sql)
cursor.execute("drop table if exists ACTIONS")
sql = """CREATE TABLE ACTIONS (
TUNING_ID INTEGER,
ACTION_ID INTEGER PRIMARY KEY,
SASE_START FLOAT,
SASE_END FLOAT)"""
cursor.execute(sql)
cursor.execute("drop table if exists PARAMETERS")
sql = """CREATE TABLE PARAMETERS (
TUNING_ID INTEGER,
ACTION_ID INTEGER,
PAR_NAME CHAR(20),
PAR_START_VALUE FLOAT,
PAR_END_VALUE FLOAT,
PRIMARY KEY(TUNING_ID, ACTION_ID, PAR_NAME) )"""
cursor.execute(sql)
cursor.execute("drop table if exists MACHINE_STATE")
sql = """CREATE TABLE MACHINE_STATE (
TUNING_ID INTEGER,
PAR_NAME CHAR(20),
PAR_VALUE FLOAT)"""
cursor.execute(sql)
db.close()
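# Schema sketch: TUNINGS holds one row per tuning session; each ACTIONS row
# belongs to a tuning via TUNING_ID and records SASE before/after the action;
# PARAMETERS stores per-action start/end values of each tuned knob;
# MACHINE_STATE snapshots named machine parameters for a tuning.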
def test_new_tunings(dbname):
db = PerfDB(dbname=dbname)
db.new_tuning({'wl':13.6, 'charge':0.1,'comment':'test tuning'}) # creates new tuning record (e.g. for each shift);
tunings = db.get_tunings()
print ('current tunings', [(t.id, t.time, t.charge, t.wl) for t in tunings])
tune_id = db.current_tuning_id()
print ('current id', tune_id)
db.add_machine_parameters(tune_id, params = {"hbar":1.0e-34, "nbunh":"ff"})
print ('current machine parameters', db.get_machine_parameters(tune_id))
def test_new_action(dbname):
db = PerfDB(dbname=dbname)
tune_id = db.current_tuning_id()
print ('new action for tune_id', tune_id)
db.new_action(tune_id, start_sase = 1.0, end_sase = 150)
print ('current actions in tuning', [(t.id, t.tuning_id, t.sase_start, t.sase_end) for t in db.get_actions()])
def test_add_action_parameters(dbname):
db = PerfDB(dbname=dbname)
tune_id = db.current_tuning_id()
action_id = db.current_action_id()
print ('updating', tune_id, action_id)
db.add_action_parameters(tune_id, action_id, param_names = ["H1","Q1", "test"], start_vals = [0.1,0.2, "test"], end_vals=[1.1, 1.3, "gh"])
print ('current actions', [(t.id, t.tuning_id, t.sase_start, t.sase_end) for t in db.get_actions()])
print ('current action parameters', [(p.tuning_id, p.action_id, p.par_name, p.start_value, p.end_value) for p in db.get_action_parameters(tune_id, action_id)])
if __name__ == "__main__":
# tests
dbname = "test2.db"
create_db(dbname)
test_new_tunings(dbname)
test_new_action(dbname)
test_add_action_parameters(dbname)
test_new_action(dbname)
test_add_action_parameters(dbname)
| gpl-3.0 |
javiergarridomellado/Empresa_django | devcodela/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util.py | 248 | 21407 | # urllib3/util.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from base64 import b64encode
from binascii import hexlify, unhexlify
from collections import namedtuple
from hashlib import md5, sha1
from socket import error as SocketError, _GLOBAL_DEFAULT_TIMEOUT
import time
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
try: # Test for SSL features
SSLContext = None
HAS_SNI = False
import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import SSLContext # Modern SSL?
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
from .packages import six
from .exceptions import LocationParseError, SSLError, TimeoutStateError
_Default = object()
# The default timeout to use for socket connections. This is the attribute used
# by httplib to define the default timeout
def current_time():
"""
Retrieve the current time, this function is mocked out in unit testing.
"""
return time.time()
class Timeout(object):
"""
Utility object for storing timeout values.
Example usage:
.. code-block:: python
timeout = urllib3.util.Timeout(connect=2.0, read=7.0)
pool = HTTPConnectionPool('www.google.com', 80, timeout=timeout)
pool.request(...) # Etc, etc
:param connect:
The maximum amount of time to wait for a connection attempt to a server
to succeed. Omitting the parameter will default the connect timeout to
the system default, probably `the global default timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout for connection attempts.
:type connect: integer, float, or None
:param read:
The maximum amount of time to wait between consecutive
read operations for a response from the server. Omitting
the parameter will default the read timeout to the system
default, probably `the global default timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout.
:type read: integer, float, or None
:param total:
This combines the connect and read timeouts into one; the read timeout
will be set to the time leftover from the connect attempt. In the
event that both a connect timeout and a total are specified, or a read
timeout and a total are specified, the shorter timeout will be applied.
Defaults to None.
:type total: integer, float, or None
.. note::
Many factors can affect the total amount of time for urllib3 to return
an HTTP response. Specifically, Python's DNS resolver does not obey the
timeout specified on the socket. Other factors that can affect total
request time include high CPU load, high swap, the program running at a
low priority level, or other behaviors. The observed running time for
urllib3 to return a response may be greater than the value passed to
`total`.
In addition, the read and total timeouts only measure the time between
read operations on the socket connecting the client and the server,
not the total amount of time for the request to return a complete
response. For most requests, the timeout is raised because the server
has not sent the first byte in the specified time. This is not always
the case; if a server streams one byte every fifteen seconds, a timeout
of 20 seconds will not ever trigger, even though the request will
take several minutes to complete.
If your goal is to cut off any request after a set amount of wall clock
time, consider having a second "watcher" thread to cut off a slow
request.
"""
#: A sentinel object representing the default timeout value
DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
def __init__(self, total=None, connect=_Default, read=_Default):
self._connect = self._validate_timeout(connect, 'connect')
self._read = self._validate_timeout(read, 'read')
self.total = self._validate_timeout(total, 'total')
self._start_connect = None
def __str__(self):
return '%s(connect=%r, read=%r, total=%r)' % (
type(self).__name__, self._connect, self._read, self.total)
@classmethod
def _validate_timeout(cls, value, name):
""" Check that a timeout attribute is valid
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is used
for clear error messages
:return: the value
:raises ValueError: if the type is not an integer or a float, or if it
is a numeric value less than zero
"""
if value is _Default:
return cls.DEFAULT_TIMEOUT
if value is None or value is cls.DEFAULT_TIMEOUT:
return value
try:
float(value)
except (TypeError, ValueError):
raise ValueError("Timeout value %s was %s, but it must be an "
"int or float." % (name, value))
try:
if value < 0:
raise ValueError("Attempted to set %s timeout to %s, but the "
"timeout cannot be set to a value less "
"than 0." % (name, value))
except TypeError: # Python 3
raise ValueError("Timeout value %s was %s, but it must be an "
"int or float." % (name, value))
return value
@classmethod
def from_float(cls, timeout):
""" Create a new Timeout from a legacy timeout value.
The timeout value used by httplib.py sets the same timeout on the
connect(), and recv() socket requests. This creates a :class:`Timeout`
object that sets the individual timeouts to the ``timeout`` value passed
to this function.
:param timeout: The legacy timeout value
:type timeout: integer, float, sentinel default object, or None
:return: a Timeout object
:rtype: :class:`Timeout`
"""
return Timeout(read=timeout, connect=timeout)
def clone(self):
""" Create a copy of the timeout object
Timeout properties are stored per-pool but each request needs a fresh
Timeout object to ensure each one has its own start/stop configured.
:return: a copy of the timeout object
:rtype: :class:`Timeout`
"""
# We can't use copy.deepcopy because that will also create a new object
# for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
# detect the user default.
return Timeout(connect=self._connect, read=self._read,
total=self.total)
def start_connect(self):
""" Start the timeout clock, used during a connect() attempt
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to start a timer that has been started already.
"""
if self._start_connect is not None:
raise TimeoutStateError("Timeout timer has already been started.")
self._start_connect = current_time()
return self._start_connect
def get_connect_duration(self):
""" Gets the time elapsed since the call to :meth:`start_connect`.
:return: the elapsed time
:rtype: float
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to get duration for a timer that hasn't been started.
"""
if self._start_connect is None:
raise TimeoutStateError("Can't get connect duration for timer "
"that has not started.")
return current_time() - self._start_connect
@property
def connect_timeout(self):
""" Get the value to use when setting a connection timeout.
This will be a positive float or integer, the value None
(never timeout), or the default system timeout.
:return: the connect timeout
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
"""
if self.total is None:
return self._connect
if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
return self.total
return min(self._connect, self.total)
@property
def read_timeout(self):
""" Get the value for the read timeout.
This assumes some time has elapsed in the connection timeout and
computes the read timeout appropriately.
If self.total is set, the read timeout is dependent on the amount of
time taken by the connect timeout. If the connection time has not been
established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
raised.
:return: the value to use for the read timeout
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
:raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
has not yet been called on this object.
"""
if (self.total is not None and
self.total is not self.DEFAULT_TIMEOUT and
self._read is not None and
self._read is not self.DEFAULT_TIMEOUT):
# in case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
return max(0, min(self.total - self.get_connect_duration(),
self._read))
elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
return max(0, self.total - self.get_connect_duration())
else:
return self._read
class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])):
"""
Datastructure for representing an HTTP URL. Used as a return value for
:func:`parse_url`.
"""
__slots__ = ()
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None):
return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment)
@property
def hostname(self):
"""For backwards-compatibility with urlparse. We're nice like that."""
return self.host
@property
def request_uri(self):
"""Absolute path including the query string."""
uri = self.path or '/'
if self.query is not None:
uri += '?' + self.query
return uri
@property
def netloc(self):
"""Network location including host and port"""
if self.port:
return '%s:%d' % (self.host, self.port)
return self.host
def split_first(s, delims):
"""
Given a string and an iterable of delimiters, split on the first found
delimiter. Return two split parts and the matched delimiter.
If not found, then the first part is the full input string.
Example: ::
>>> split_first('foo/bar?baz', '?/=')
('foo', 'bar?baz', '/')
>>> split_first('foo/bar?baz', '123')
('foo/bar?baz', '', None)
Scales linearly with number of delims. Not ideal for large number of delims.
"""
min_idx = None
min_delim = None
for d in delims:
idx = s.find(d)
if idx < 0:
continue
if min_idx is None or idx < min_idx:
min_idx = idx
min_delim = d
if min_idx is None or min_idx < 0:
return s, '', None
return s[:min_idx], s[min_idx+1:], min_delim
def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
Partly backwards-compatible with :mod:`urlparse`.
Example: ::
>>> parse_url('http://google.com/mail/')
Url(scheme='http', host='google.com', port=None, path='/', ...)
>>> parse_url('google.com:80')
Url(scheme=None, host='google.com', port=80, path=None, ...)
>>> parse_url('/foo?bar')
Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
# While this code has overlap with stdlib's urlparse, it is much
# simplified for our needs and less annoying.
# Additionally, this implementations does silly things to be optimal
# on CPython.
scheme = None
auth = None
host = None
port = None
path = None
fragment = None
query = None
# Scheme
if '://' in url:
scheme, url = url.split('://', 1)
# Find the earliest Authority Terminator
# (http://tools.ietf.org/html/rfc3986#section-3.2)
url, path_, delim = split_first(url, ['/', '?', '#'])
if delim:
# Reassemble the path
path = delim + path_
# Auth
if '@' in url:
# Last '@' denotes end of auth part
auth, url = url.rsplit('@', 1)
# IPv6
if url and url[0] == '[':
host, url = url.split(']', 1)
host += ']'
# Port
if ':' in url:
_host, port = url.split(':', 1)
if not host:
host = _host
if port:
# If given, ports must be integers.
if not port.isdigit():
raise LocationParseError("Failed to parse: %s" % url)
port = int(port)
else:
# Blank ports are cool, too. (rfc3986#section-3.2.3)
port = None
elif not host and url:
host = url
if not path:
return Url(scheme, auth, host, port, path, query, fragment)
# Fragment
if '#' in path:
path, fragment = path.split('#', 1)
# Query
if '?' in path:
path, query = path.split('?', 1)
return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
"""
p = parse_url(url)
return p.scheme or 'http', p.hostname, p.port
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
basic_auth=None, proxy_basic_auth=None):
"""
Shortcuts for generating request headers.
:param keep_alive:
If ``True``, adds 'connection: keep-alive' header.
:param accept_encoding:
Can be a boolean, list, or string.
``True`` translates to 'gzip,deflate'.
List will get joined by comma.
String will be used as provided.
:param user_agent:
String representing the user-agent you want, such as
"python-urllib3/0.6"
:param basic_auth:
Colon-separated username:password string for 'authorization: basic ...'
auth header.
:param proxy_basic_auth:
Colon-separated username:password string for 'proxy-authorization: basic ...'
auth header.
Example: ::
>>> make_headers(keep_alive=True, user_agent="Batman/1.0")
{'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
>>> make_headers(accept_encoding=True)
{'accept-encoding': 'gzip,deflate'}
"""
headers = {}
if accept_encoding:
if isinstance(accept_encoding, str):
pass
elif isinstance(accept_encoding, list):
accept_encoding = ','.join(accept_encoding)
else:
accept_encoding = 'gzip,deflate'
headers['accept-encoding'] = accept_encoding
if user_agent:
headers['user-agent'] = user_agent
if keep_alive:
headers['connection'] = 'keep-alive'
if basic_auth:
headers['authorization'] = 'Basic ' + \
b64encode(six.b(basic_auth)).decode('utf-8')
if proxy_basic_auth:
headers['proxy-authorization'] = 'Basic ' + \
b64encode(six.b(proxy_basic_auth)).decode('utf-8')
return headers
def is_connection_dropped(conn): # Platform-specific
"""
Returns True if the connection is dropped and should be closed.
:param conn:
:class:`httplib.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
"""
sock = getattr(conn, 'sock', False)
if not sock: # Platform-specific: AppEngine
return False
if not poll:
if not select: # Platform-specific: AppEngine
return False
try:
return select([sock], [], [], 0.0)[0]
except SocketError:
return True
# This version is better on platforms that support it.
p = poll()
p.register(sock, POLLIN)
for (fno, ev) in p.poll(0.0):
if fno == sock.fileno():
# Either data is buffered (bad), or the connection is dropped.
return True
def resolve_cert_reqs(candidate):
"""
Resolves the argument to a numeric constant, which can be passed to
the wrap_socket function/method from the ssl module.
Defaults to :data:`ssl.CERT_NONE`.
If given a string it is assumed to be the name of the constant in the
:mod:`ssl` module or its abbreviation.
(So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
If it's neither `None` nor a string we assume it is already the numeric
constant which can directly be passed to wrap_socket.
"""
if candidate is None:
return CERT_NONE
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, 'CERT_' + candidate)
return res
return candidate
def resolve_ssl_version(candidate):
"""
like resolve_cert_reqs
"""
if candidate is None:
return PROTOCOL_SSLv23
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, 'PROTOCOL_' + candidate)
return res
return candidate
def assert_fingerprint(cert, fingerprint):
"""
Checks if given fingerprint matches the supplied certificate.
:param cert:
Certificate as bytes object.
:param fingerprint:
Fingerprint as string of hexdigits, can be interspersed by colons.
"""
# Maps the length of a digest to a possible hash function producing
# this digest.
hashfunc_map = {
16: md5,
20: sha1
}
fingerprint = fingerprint.replace(':', '').lower()
digest_length, rest = divmod(len(fingerprint), 2)
if rest or digest_length not in hashfunc_map:
raise SSLError('Fingerprint is of invalid length.')
# We need encode() here for py32; works on py2 and p33.
fingerprint_bytes = unhexlify(fingerprint.encode())
hashfunc = hashfunc_map[digest_length]
cert_digest = hashfunc(cert).digest()
if not cert_digest == fingerprint_bytes:
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
.format(hexlify(fingerprint_bytes),
hexlify(cert_digest)))
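# Usage sketch (fingerprint value hypothetical): given the certificate bytes,
#   assert_fingerprint(cert_bytes, "4f12...9a")  # colons are optional
# a 32-hex-digit fingerprint is verified with md5, a 40-digit one with sha1;
# any other length, or any mismatch, raises SSLError.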
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
if hasattr(obj, 'fp'):
# Object is a container for another file-like object that gets released
# on exhaustion (e.g. HTTPResponse)
return obj.fp is None
return obj.closed
if SSLContext is not None: # Python 3.2+
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
"""
All arguments except `server_hostname` have the same meaning as for
:func:`ssl.wrap_socket`
:param server_hostname:
Hostname of the expected certificate
"""
context = SSLContext(ssl_version)
context.verify_mode = cert_reqs
# Disable TLS compression to mitigate CRIME attack (issue #309)
OP_NO_COMPRESSION = 0x20000
context.options |= OP_NO_COMPRESSION
if ca_certs:
try:
context.load_verify_locations(ca_certs)
# Py32 raises IOError
# Py33 raises FileNotFoundError
except Exception as e: # Reraise as SSLError
raise SSLError(e)
if certfile:
# FIXME: This block needs a test.
context.load_cert_chain(certfile, keyfile)
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
return context.wrap_socket(sock, server_hostname=server_hostname)
return context.wrap_socket(sock)
else: # Python 3.1 and earlier
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
ca_certs=ca_certs, cert_reqs=cert_reqs,
ssl_version=ssl_version)
| gpl-2.0 |
calebkleveter/Mist | node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py | 1382 | 30567 | # Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
the generator flag config_path) the path of a json file that dictates the files
and targets to search for. The following keys are supported:
files: list of paths (relative) of the files to search for.
test_targets: unqualified target names to search for. Any target in this list
that depends upon a file in |files| is output regardless of the type of target
or chain of dependencies.
additional_compile_targets: Unqualified targets to search for in addition to
test_targets. Targets in the combined list that depend upon a file in |files|
are not necessarily output. For example, if the target is of type none then the
target is not output (but one of the descendants of the target will be).
The following is output:
error: only supplied if there is an error.
compile_targets: minimal set of targets that directly or indirectly (for
targets of type none) depend on the files in |files| and is one of the
supplied targets or a target that one of the supplied targets depends on.
The expectation is this set of targets is passed into a build step. This list
always contains the output of test_targets as well.
test_targets: set of targets from the supplied |test_targets| that either
directly or indirectly depend upon a file in |files|. This list is useful
if additional processing needs to be done for certain targets after the
build, such as running tests.
status: outputs one of three values: none of the supplied files were found,
one of the include files changed so that it should be assumed everything
changed (in this case test_targets and compile_targets are not output) or at
least one file was found.
invalid_targets: list of supplied targets that were not found.
Example:
Consider a graph like the following:
A D
/ \
B C
A depends upon both B and C, A is of type none and B and C are executables.
D is an executable, has no dependencies and nothing depends on it.
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
the following is output:
|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
and the supplied target A depends upon it. A is not output as a build_target
as it is of type none with no rules and actions.
|test_targets| = ["B"] B directly depends upon the change file b.cc.
Even though the file d.cc, which D depends upon, has changed D is not output
as it was not supplied by way of |additional_compile_targets| or |test_targets|.
If the generator flag analyzer_output_path is specified, output is written
there. Otherwise output is written to stdout.
In Gyp the "all" target is shorthand for the root targets in the files passed
to gyp. For example, if file "a.gyp" contains targets "a1" and
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2".
"""
import gyp.common
import gyp.ninja_syntax as ninja_syntax
import json
import os
import posixpath
import sys
debug = False
found_dependency_string = 'Found dependency'
no_dependency_string = 'No dependencies'
# Status when it should be assumed that everything has changed.
all_changed_string = 'Found dependency (all)'
# MatchStatus is used indicate if and how a target depends upon the supplied
# sources.
# The target's sources contain one of the supplied paths.
MATCH_STATUS_MATCHES = 1
# The target has a dependency on another target that contains one of the
# supplied paths.
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
# The target's sources weren't in the supplied paths and none of the target's
# dependencies depend upon a target that matched.
MATCH_STATUS_DOESNT_MATCH = 3
# The target doesn't contain the source, but the dependent targets have not yet
# been visited to determine a more specific status yet.
MATCH_STATUS_TBD = 4
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
'LIB_DIR', 'SHARED_LIB_DIR']:
generator_default_variables[dirname] = '!!!'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = ''
def _ToGypPath(path):
"""Converts a path to the format used by gyp."""
if os.sep == '\\' and os.altsep == '/':
return path.replace('\\', '/')
return path
def _ResolveParent(path, base_path_components):
"""Resolves |path|, which starts with at least one '../'. Returns an empty
string if the path shouldn't be considered. See _AddSources() for a
description of |base_path_components|."""
depth = 0
while path.startswith('../'):
depth += 1
path = path[3:]
# Relative includes may go outside the source tree. For example, an action may
# have inputs in /usr/include, which are not in the source tree.
if depth > len(base_path_components):
return ''
if depth == len(base_path_components):
return path
return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
'/' + path
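# Examples (hypothetical paths; results follow from the logic above):
#   _ResolveParent('../b.cc', ['foo', 'bar'])       -> 'foo/b.cc'
#   _ResolveParent('../../b.cc', ['foo', 'bar'])    -> 'b.cc'
#   _ResolveParent('../../../b.cc', ['foo', 'bar']) -> '' (outside the tree)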
def _AddSources(sources, base_path, base_path_components, result):
"""Extracts valid sources from |sources| and adds them to |result|. Each
source file is relative to |base_path|, but may contain '..'. To make
resolving '..' easier |base_path_components| contains each of the
  directories in |base_path|. Additionally, each source may contain variables.
  Such sources are ignored, as it is assumed dependencies on them are
  expressed and tracked by some other means."""
# NOTE: gyp paths are always posix style.
for source in sources:
if not len(source) or source.startswith('!!!') or source.startswith('$'):
continue
# variable expansion may lead to //.
org_source = source
source = source[0] + source[1:].replace('//', '/')
if source.startswith('../'):
source = _ResolveParent(source, base_path_components)
if len(source):
result.append(source)
continue
result.append(base_path + source)
if debug:
      print 'AddSource', org_source, result[-1]
def _ExtractSourcesFromAction(action, base_path, base_path_components,
results):
if 'inputs' in action:
_AddSources(action['inputs'], base_path, base_path_components, results)
def _ToLocalPath(toplevel_dir, path):
"""Converts |path| to a path relative to |toplevel_dir|."""
if path == toplevel_dir:
return ''
if path.startswith(toplevel_dir + '/'):
return path[len(toplevel_dir) + len('/'):]
return path
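# Examples (hypothetical paths):
#   _ToLocalPath('/src', '/src/foo/a.cc') -> 'foo/a.cc'
#   _ToLocalPath('/src', '/src')          -> ''
#   _ToLocalPath('/src', '/other/a.cc')   -> '/other/a.cc' (returned as is)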
def _ExtractSources(target, target_dict, toplevel_dir):
# |target| is either absolute or relative and in the format of the OS. Gyp
# source paths are always posix. Convert |target| to a posix path relative to
  # |toplevel_dir|. This is done to make it easy to build source paths.
base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
base_path_components = base_path.split('/')
# Add a trailing '/' so that _AddSources() can easily build paths.
if len(base_path):
base_path += '/'
if debug:
print 'ExtractSources', target, base_path
results = []
if 'sources' in target_dict:
_AddSources(target_dict['sources'], base_path, base_path_components,
results)
# Include the inputs from any actions. Any changes to these affect the
# resulting output.
if 'actions' in target_dict:
for action in target_dict['actions']:
_ExtractSourcesFromAction(action, base_path, base_path_components,
results)
if 'rules' in target_dict:
for rule in target_dict['rules']:
_ExtractSourcesFromAction(rule, base_path, base_path_components, results)
return results
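# For example (hypothetical target), a target defined in foo/bar.gyp with
# sources ['a.cc', '../common/b.cc'] yields ['foo/a.cc', 'common/b.cc'],
# assuming |toplevel_dir| is the directory containing foo/.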
class Target(object):
"""Holds information about a particular target:
  deps: set of Targets this Target depends upon. This is not recursive; it
    contains only the direct dependencies.
match_status: one of the MatchStatus values.
back_deps: set of Targets that have a dependency on this Target.
visited: used during iteration to indicate whether we've visited this target.
This is used for two iterations, once in building the set of Targets and
again in _GetBuildTargets().
name: fully qualified name of the target.
requires_build: True if the target type is such that it needs to be built.
See _DoesTargetTypeRequireBuild for details.
added_to_compile_targets: used when determining if the target was added to the
set of targets that needs to be built.
in_roots: true if this target is a descendant of one of the root nodes.
is_executable: true if the type of target is executable.
is_static_library: true if the type of target is static_library.
  is_or_has_linked_ancestor: true if the target does a link (e.g. executable),
    or if there is a target in back_deps that does a link."""
def __init__(self, name):
self.deps = set()
self.match_status = MATCH_STATUS_TBD
self.back_deps = set()
self.name = name
# TODO(sky): I don't like hanging this off Target. This state is specific
# to certain functions and should be isolated there.
self.visited = False
self.requires_build = False
self.added_to_compile_targets = False
self.in_roots = False
self.is_executable = False
self.is_static_library = False
self.is_or_has_linked_ancestor = False
class Config(object):
"""Details what we're looking for
files: set of files to search for
targets: see file description for details."""
def __init__(self):
self.files = []
self.targets = set()
self.additional_compile_target_names = set()
self.test_target_names = set()
def Init(self, params):
"""Initializes Config. This is a separate method as it raises an exception
if there is a parse error."""
generator_flags = params.get('generator_flags', {})
config_path = generator_flags.get('config_path', None)
if not config_path:
return
try:
f = open(config_path, 'r')
config = json.load(f)
f.close()
except IOError:
raise Exception('Unable to open file ' + config_path)
except ValueError as e:
      raise Exception('Unable to parse config file ' + config_path + ': ' +
                      str(e))
if not isinstance(config, dict):
raise Exception('config_path must be a JSON file containing a dictionary')
self.files = config.get('files', [])
self.additional_compile_target_names = set(
config.get('additional_compile_targets', []))
self.test_target_names = set(config.get('test_targets', []))
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
"""Returns true if the build file |build_file| is either in |files| or
one of the files included by |build_file| is in |files|. |toplevel_dir| is
the root of the source tree."""
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
if debug:
print 'gyp file modified', build_file
return True
# First element of included_files is the file itself.
if len(data[build_file]['included_files']) <= 1:
return False
for include_file in data[build_file]['included_files'][1:]:
# |included_files| are relative to the directory of the |build_file|.
rel_include_file = \
_ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
if debug:
print 'included gyp file modified, gyp_file=', build_file, \
'included file=', rel_include_file
return True
return False
def _GetOrCreateTargetByName(targets, target_name):
"""Creates or returns the Target at targets[target_name]. If there is no
Target for |target_name| one is created. Returns a tuple of whether a new
Target was created and the Target."""
if target_name in targets:
return False, targets[target_name]
target = Target(target_name)
targets[target_name] = target
return True, target
def _DoesTargetTypeRequireBuild(target_dict):
"""Returns true if the target type is such that it needs to be built."""
# If a 'none' target has rules or actions we assume it requires a build.
return bool(target_dict['type'] != 'none' or
target_dict.get('actions') or target_dict.get('rules'))
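# Examples: a 'none' target with no actions or rules returns False; an
# 'executable' target returns True; a 'none' target with actions or rules
# returns True.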
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
build_files):
"""Returns a tuple of the following:
. A dictionary mapping from fully qualified name to Target.
. A list of the targets that have a source file in |files|.
. Targets that constitute the 'all' target. See description at top of file
for details on the 'all' target.
This sets the |match_status| of the targets that contain any of the source
files in |files| to MATCH_STATUS_MATCHES.
|toplevel_dir| is the root of the source tree."""
# Maps from target name to Target.
name_to_target = {}
# Targets that matched.
matching_targets = []
# Queue of targets to visit.
targets_to_visit = target_list[:]
# Maps from build file to a boolean indicating whether the build file is in
# |files|.
build_file_in_files = {}
# Root targets across all files.
roots = set()
# Set of Targets in |build_files|.
build_file_targets = set()
while len(targets_to_visit) > 0:
target_name = targets_to_visit.pop()
created_target, target = _GetOrCreateTargetByName(name_to_target,
target_name)
if created_target:
roots.add(target)
elif target.visited:
continue
target.visited = True
target.requires_build = _DoesTargetTypeRequireBuild(
target_dicts[target_name])
target_type = target_dicts[target_name]['type']
target.is_executable = target_type == 'executable'
target.is_static_library = target_type == 'static_library'
target.is_or_has_linked_ancestor = (target_type == 'executable' or
target_type == 'shared_library')
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
    if build_file not in build_file_in_files:
build_file_in_files[build_file] = \
_WasBuildFileModified(build_file, data, files, toplevel_dir)
if build_file in build_files:
build_file_targets.add(target)
# If a build file (or any of its included files) is modified we assume all
# targets in the file are modified.
if build_file_in_files[build_file]:
print 'matching target from modified build file', target_name
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
else:
sources = _ExtractSources(target_name, target_dicts[target_name],
toplevel_dir)
for source in sources:
if _ToGypPath(os.path.normpath(source)) in files:
print 'target', target_name, 'matches', source
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
break
# Add dependencies to visit as well as updating back pointers for deps.
for dep in target_dicts[target_name].get('dependencies', []):
targets_to_visit.append(dep)
created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
dep)
if not created_dep_target:
roots.discard(dep_target)
target.deps.add(dep_target)
dep_target.back_deps.add(target)
return name_to_target, matching_targets, roots & build_file_targets
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
"""Returns a tuple of the following:
. mapping (dictionary) from unqualified name to Target for all the
Targets in |to_find|.
. any target names not found. If this is empty all targets were found."""
result = {}
if not to_find:
return {}, []
to_find = set(to_find)
for target_name in all_targets.keys():
extracted = gyp.common.ParseQualifiedTarget(target_name)
if len(extracted) > 1 and extracted[1] in to_find:
to_find.remove(extracted[1])
result[extracted[1]] = all_targets[target_name]
if not to_find:
return result, []
return result, [x for x in to_find]
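# For example (hypothetical names), with |all_targets| keyed by qualified
# names of the form 'foo/a.gyp:a1#target', calling this with
# to_find=['a1', 'missing'] returns ({'a1': <Target a1>}, ['missing']).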
def _DoesTargetDependOnMatchingTargets(target):
"""Returns true if |target| or any of its dependencies is one of the
targets containing the files supplied as input to analyzer. This updates
|matches| of the Targets as it recurses.
target: the Target to look for."""
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
return False
if target.match_status == MATCH_STATUS_MATCHES or \
target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
return True
for dep in target.deps:
if _DoesTargetDependOnMatchingTargets(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
print '\t', target.name, 'matches by dep', dep.name
return True
target.match_status = MATCH_STATUS_DOESNT_MATCH
return False
def _GetTargetsDependingOnMatchingTargets(possible_targets):
"""Returns the list of Targets in |possible_targets| that depend (either
  directly or indirectly) on at least one of the targets containing the files
supplied as input to analyzer.
possible_targets: targets to search from."""
found = []
print 'Targets that matched by dependency:'
for target in possible_targets:
if _DoesTargetDependOnMatchingTargets(target):
found.append(target)
return found
def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
"""Recurses through all targets that depend on |target|, adding all targets
that need to be built (and are in |roots|) to |result|.
roots: set of root targets.
add_if_no_ancestor: If true and there are no ancestors of |target| then add
|target| to |result|. |target| must still be in |roots|.
result: targets that need to be built are added here."""
if target.visited:
return
target.visited = True
target.in_roots = target in roots
for back_dep_target in target.back_deps:
_AddCompileTargets(back_dep_target, roots, False, result)
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
target.in_roots |= back_dep_target.in_roots
target.is_or_has_linked_ancestor |= (
back_dep_target.is_or_has_linked_ancestor)
  # Always add 'executable' targets. Even though they may be built by other
  # targets that depend upon them, it makes detection of what is going to be
  # built easier.
# And always add static_libraries that have no dependencies on them from
# linkables. This is necessary as the other dependencies on them may be
# static libraries themselves, which are not compile time dependencies.
if target.in_roots and \
(target.is_executable or
(not target.added_to_compile_targets and
(add_if_no_ancestor or target.requires_build)) or
(target.is_static_library and add_if_no_ancestor and
not target.is_or_has_linked_ancestor)):
print '\t\tadding to compile targets', target.name, 'executable', \
target.is_executable, 'added_to_compile_targets', \
target.added_to_compile_targets, 'add_if_no_ancestor', \
add_if_no_ancestor, 'requires_build', target.requires_build, \
'is_static_library', target.is_static_library, \
'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
result.add(target)
target.added_to_compile_targets = True
def _GetCompileTargets(matching_targets, supplied_targets):
"""Returns the set of Targets that require a build.
matching_targets: targets that changed and need to be built.
supplied_targets: set of targets supplied to analyzer to search from."""
result = set()
for target in matching_targets:
print 'finding compile targets for match', target.name
_AddCompileTargets(target, supplied_targets, True, result)
return result
def _WriteOutput(params, **values):
"""Writes the output, either to stdout or a file is specified."""
if 'error' in values:
print 'Error:', values['error']
if 'status' in values:
print values['status']
if 'targets' in values:
values['targets'].sort()
print 'Supplied targets that depend on changed files:'
for target in values['targets']:
print '\t', target
if 'invalid_targets' in values:
values['invalid_targets'].sort()
print 'The following targets were not found:'
for target in values['invalid_targets']:
print '\t', target
if 'build_targets' in values:
values['build_targets'].sort()
print 'Targets that require a build:'
for target in values['build_targets']:
print '\t', target
if 'compile_targets' in values:
values['compile_targets'].sort()
print 'Targets that need to be built:'
for target in values['compile_targets']:
print '\t', target
if 'test_targets' in values:
values['test_targets'].sort()
print 'Test targets:'
for target in values['test_targets']:
print '\t', target
output_path = params.get('generator_flags', {}).get(
'analyzer_output_path', None)
if not output_path:
print json.dumps(values)
return
try:
f = open(output_path, 'w')
f.write(json.dumps(values) + '\n')
f.close()
except IOError as e:
print 'Error writing to output file', output_path, str(e)
def _WasGypIncludeFileModified(params, files):
"""Returns true if one of the files in |files| is in the set of included
files."""
if params['options'].includes:
for include in params['options'].includes:
if _ToGypPath(os.path.normpath(include)) in files:
print 'Include file modified, assuming all changed', include
return True
return False
def _NamesNotIn(names, mapping):
"""Returns a list of the values in |names| that are not in |mapping|."""
return [name for name in names if name not in mapping]
def _LookupTargets(names, mapping):
"""Returns a list of the mapping[name] for each value in |names| that is in
|mapping|."""
return [mapping[name] for name in names if name in mapping]
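# Example (hypothetical names): with mapping = {'a': target_a},
# _NamesNotIn(['a', 'b'], mapping) -> ['b'] and
# _LookupTargets(['a', 'b'], mapping) -> [target_a].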
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
flavor = gyp.common.GetFlavor(params)
if flavor == 'mac':
default_variables.setdefault('OS', 'mac')
elif flavor == 'win':
default_variables.setdefault('OS', 'win')
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
else:
operating_system = flavor
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
class TargetCalculator(object):
"""Calculates the matching test_targets and matching compile_targets."""
def __init__(self, files, additional_compile_target_names, test_target_names,
data, target_list, target_dicts, toplevel_dir, build_files):
self._additional_compile_target_names = set(additional_compile_target_names)
self._test_target_names = set(test_target_names)
self._name_to_target, self._changed_targets, self._root_targets = (
_GenerateTargets(data, target_list, target_dicts, toplevel_dir,
frozenset(files), build_files))
self._unqualified_mapping, self.invalid_targets = (
_GetUnqualifiedToTargetMapping(self._name_to_target,
self._supplied_target_names_no_all()))
def _supplied_target_names(self):
return self._additional_compile_target_names | self._test_target_names
def _supplied_target_names_no_all(self):
"""Returns the supplied test targets without 'all'."""
    result = self._supplied_target_names()
result.discard('all')
return result
def is_build_impacted(self):
"""Returns true if the supplied files impact the build at all."""
return self._changed_targets
def find_matching_test_target_names(self):
"""Returns the set of output test targets."""
assert self.is_build_impacted()
    # Find the test targets first. 'all' is special cased to mean all the
    # root targets. To deal with 'all', the supplied |test_targets| are
    # expanded to include the root targets during lookup. If any of the root
    # targets match, they are removed and replaced with 'all'.
test_target_names_no_all = set(self._test_target_names)
test_target_names_no_all.discard('all')
test_targets_no_all = _LookupTargets(test_target_names_no_all,
self._unqualified_mapping)
test_target_names_contains_all = 'all' in self._test_target_names
if test_target_names_contains_all:
test_targets = [x for x in (set(test_targets_no_all) |
set(self._root_targets))]
else:
test_targets = [x for x in test_targets_no_all]
print 'supplied test_targets'
for target_name in self._test_target_names:
print '\t', target_name
print 'found test_targets'
for target in test_targets:
print '\t', target.name
print 'searching for matching test targets'
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
matching_test_targets_contains_all = (test_target_names_contains_all and
set(matching_test_targets) &
set(self._root_targets))
if matching_test_targets_contains_all:
      # Remove any of the targets for 'all' that were not explicitly supplied;
      # 'all' is subsequently added to the matching names below.
matching_test_targets = [x for x in (set(matching_test_targets) &
set(test_targets_no_all))]
print 'matched test_targets'
for target in matching_test_targets:
print '\t', target.name
matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in matching_test_targets]
if matching_test_targets_contains_all:
matching_target_names.append('all')
print '\tall'
return matching_target_names
def find_matching_compile_target_names(self):
"""Returns the set of output compile targets."""
    assert self.is_build_impacted()
# Compile targets are found by searching up from changed targets.
# Reset the visited status for _GetBuildTargets.
for target in self._name_to_target.itervalues():
target.visited = False
supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
self._unqualified_mapping)
if 'all' in self._supplied_target_names():
supplied_targets = [x for x in (set(supplied_targets) |
set(self._root_targets))]
print 'Supplied test_targets & compile_targets'
for target in supplied_targets:
print '\t', target.name
print 'Finding compile targets'
compile_targets = _GetCompileTargets(self._changed_targets,
supplied_targets)
return [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in compile_targets]
def GenerateOutput(target_list, target_dicts, data, params):
"""Called by gyp as the final stage. Outputs results."""
config = Config()
try:
config.Init(params)
if not config.files:
raise Exception('Must specify files to analyze via config_path generator '
'flag')
toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
if debug:
print 'toplevel_dir', toplevel_dir
if _WasGypIncludeFileModified(params, config.files):
result_dict = { 'status': all_changed_string,
'test_targets': list(config.test_target_names),
'compile_targets': list(
config.additional_compile_target_names |
config.test_target_names) }
_WriteOutput(params, **result_dict)
return
calculator = TargetCalculator(config.files,
config.additional_compile_target_names,
config.test_target_names, data,
target_list, target_dicts, toplevel_dir,
params['build_files'])
if not calculator.is_build_impacted():
result_dict = { 'status': no_dependency_string,
'test_targets': [],
'compile_targets': [] }
if calculator.invalid_targets:
result_dict['invalid_targets'] = calculator.invalid_targets
_WriteOutput(params, **result_dict)
return
test_target_names = calculator.find_matching_test_target_names()
compile_target_names = calculator.find_matching_compile_target_names()
found_at_least_one_target = compile_target_names or test_target_names
result_dict = { 'test_targets': test_target_names,
'status': found_dependency_string if
found_at_least_one_target else no_dependency_string,
'compile_targets': list(
set(compile_target_names) |
set(test_target_names)) }
if calculator.invalid_targets:
result_dict['invalid_targets'] = calculator.invalid_targets
_WriteOutput(params, **result_dict)
except Exception as e:
_WriteOutput(params, error=str(e))
| mit |
jiachenning/odoo | addons/website_mail/models/mail_thread.py | 338 | 1454 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
# TODO for trunk, remove me
class MailThread(osv.AbstractModel):
_inherit = 'mail.thread'
_columns = {
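        # One2many over mail.message, restricted by the domain below to
        # messages of type 'comment' attached to records of this model.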
'website_message_ids': fields.one2many(
'mail.message', 'res_id',
domain=lambda self: [
'&', ('model', '=', self._name), ('type', '=', 'comment')
],
string='Website Messages',
help="Website communication history",
),
}
| agpl-3.0 |
kennedyshead/home-assistant | homeassistant/components/logentries/__init__.py | 24 | 1699 | """Support for sending data to Logentries webhook endpoint."""
import json
import logging
import requests
import voluptuous as vol
from homeassistant.const import CONF_TOKEN, EVENT_STATE_CHANGED
from homeassistant.helpers import state as state_helper
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "logentries"
DEFAULT_HOST = "https://webhook.logentries.com/noformat/logs/"
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_TOKEN): cv.string})}, extra=vol.ALLOW_EXTRA
)
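# A minimal configuration matching CONFIG_SCHEMA above would look like this in
# configuration.yaml (the token value is a placeholder):
#
#   logentries:
#     token: YOUR_LOGENTRIES_TOKEN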
def setup(hass, config):
"""Set up the Logentries component."""
conf = config[DOMAIN]
token = conf.get(CONF_TOKEN)
le_wh = f"{DEFAULT_HOST}{token}"
def logentries_event_listener(event):
"""Listen for new messages on the bus and sends them to Logentries."""
state = event.data.get("new_state")
if state is None:
return
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body = [
{
"domain": state.domain,
"entity_id": state.object_id,
"attributes": dict(state.attributes),
"time": str(event.time_fired),
"value": _state,
}
]
try:
payload = {"host": le_wh, "event": json_body}
requests.post(le_wh, data=json.dumps(payload), timeout=10)
except requests.exceptions.RequestException as error:
_LOGGER.exception("Error sending to Logentries: %s", error)
hass.bus.listen(EVENT_STATE_CHANGED, logentries_event_listener)
return True
| apache-2.0 |
scharron/chardet | chardet/gb2312freq.py | 323 | 36001 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# GB2312 most frequently used character table
#
# Char to FreqOrder table, from hz6763
# 512 --> 0.79 -- 0.79
# 1024 --> 0.92 -- 0.13
# 2048 --> 0.98 -- 0.06
# 6768 --> 1.00 -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ration = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio is about 25% of the Ideal one, still much higher
# than RDR
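# (For reference: 25% of the ideal ratio 3.79 is roughly 0.95, consistent with
# the 0.9 constant below.)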
GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
GB2312_TABLE_SIZE = 3760
GB2312CharToFreqOrder = ( \
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
#Everything below is of no interest for detection purpose
5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
| lgpl-2.1 |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/software-center/softwarecenter/ui/gtk3/widgets/unused__pathbar.py | 2 | 37728 | from gi.repository import Atk
from gi.repository import Gtk, Gdk
from gi.repository import GObject
from gi.repository import Pango
from softwarecenter.ui.gtk3.em import em
from gettext import gettext as _
import logging
LOG = logging.getLogger("softwarecenter.view.widgets.NavigationBar")
# pi constants
from math import pi
PI = pi
PI_OVER_180 = pi / 180
class Shape:
""" Base class for a Shape implementation.
Currently implements a single method <layout> which is called
to layout the shape using cairo paths. It can also store the
    'direction' of the shape, which should be one of the Gtk.TextDirection
    constants. Default 'direction' is Gtk.TextDirection.LTR.
When implementing a Shape, there are two options available.
If the Shape is direction dependent, the Shape MUST
implement <_layout_ltr> and <_layout_rtl> methods.
If the Shape is not direction dependent, then it simply can
override the <layout> method.
<layout> methods must take the following as arguments:
cr : a CairoContext
x : x coordinate
y : y coordinate
w : width value
h : height value
<layout> methods can then be passed Shape specific
keyword arguments which can be used as paint-time modifiers.
"""
def __init__(self, direction):
self.direction = direction
self.name = 'Shapeless'
self.hadjustment = 0
self._color = 1, 0, 0
return
def __eq__(self, other):
return self.name == other.name
def layout(self, cr, x, y, w, h, r, aw):
if self.direction != Gtk.TextDirection.RTL:
self._layout_ltr(cr, x, y, w, h, r, aw)
else:
self._layout_rtl(cr, x, y, w, h, r, aw)
return
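# A minimal paint-time sketch (assumes a cairo context `cr`, e.g. from a
# widget's 'draw' handler; ShapeRoundedRect is defined just below):
#
#   shape = ShapeRoundedRect(Gtk.TextDirection.LTR)
#   shape.layout(cr, 0, 0, 100, 24, 3, 12)  # x, y, w, h, radius, arrow-width
#   cr.set_source_rgb(*shape._color)
#   cr.fill()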
class ShapeRoundedRect(Shape):
"""
RoundedRect lays out a rectangle with all four corners
rounded as specified at the layout call by the keyword argument:
radius : an integer or float specifying the corner radius.
The radius must be > 0.
RoundedRectangle is not direction sensitive.
"""
def __init__(self, direction=Gtk.TextDirection.LTR):
Shape.__init__(self, direction)
self.name = 'RoundedRect'
return
def layout(self, cr, x, y, w, h, r, aw):
cr.new_sub_path()
cr.arc(r+x, r+y, r, PI, 270*PI_OVER_180)
cr.arc(x+w-r, r+y, r, 270*PI_OVER_180, 0)
cr.arc(x+w-r, y+h-r, r, 0, 90*PI_OVER_180)
cr.arc(r+x, y+h-r, r, 90*PI_OVER_180, PI)
cr.close_path()
return
class ShapeStartArrow(Shape):
def __init__(self, direction=Gtk.TextDirection.LTR):
Shape.__init__(self, direction)
self.name = 'StartArrow'
return
def _layout_ltr(self, cr, x, y, w, h, r, aw):
haw = aw/2
cr.new_sub_path()
cr.arc(r+x, r+y, r, PI, 270*PI_OVER_180)
# arrow head
cr.line_to(x+w-haw, y)
cr.line_to(x+w+haw, y+(h/2))
cr.line_to(x+w-haw, y+h)
cr.arc(r+x, y+h-r, r, 90*PI_OVER_180, PI)
cr.close_path()
return
def _layout_rtl(self, cr, x, y, w, h, r, aw):
haw = aw/2
cr.new_sub_path()
        cr.move_to(x-haw, y+(h/2))
cr.line_to(x+aw-haw, y)
cr.arc(x+w-r, r+y, r, 270*PI_OVER_180, 0)
cr.arc(x+w-r, y+h-r, r, 0, 90*PI_OVER_180)
cr.line_to(x+aw-haw, y+h)
cr.close_path()
return
class ShapeMidArrow(Shape):
def __init__(self, direction=Gtk.TextDirection.LTR):
Shape.__init__(self, direction)
#~ self.draw_xoffset = -2
self._color = 0, 1, 0
self.name = 'MidArrow'
return
def _layout_ltr(self, cr, x, y, w, h, r, aw):
self.hadjustment = haw = aw/2
cr.move_to(x-haw-1, y)
# arrow head
cr.line_to(x+w-haw, y)
cr.line_to(x+w+haw, y+(h/2))
cr.line_to(x+w-haw, y+h)
cr.line_to(x-haw-1, y+h)
cr.line_to(x+haw-1, y+(h/2))
cr.close_path()
return
def _layout_rtl(self, cr, x, y, w, h, r, aw):
self.hadjustment = haw = -aw/2
        cr.move_to(x+haw, y+(h/2))
cr.line_to(x+aw+haw, y)
cr.line_to(x+w-haw+1, y)
        cr.line_to(x+w-aw-haw+1, y+(h/2))
cr.line_to(x+w-haw+1, y+h)
cr.line_to(x+aw+haw, y+h)
cr.close_path()
return
class ShapeEndCap(Shape):
def __init__(self, direction=Gtk.TextDirection.LTR):
Shape.__init__(self, direction)
#~ self.draw_xoffset = -2
self._color = 0, 0, 1
self.name = 'EndCap'
return
def _layout_ltr(self, cr, x, y, w, h, r, aw):
self.hadjustment = haw = aw/2
cr.move_to(x-haw-1, y)
# rounded end
cr.arc(x+w-r, r+y, r, 270*PI_OVER_180, 0)
cr.arc(x+w-r, y+h-r, r, 0, 90*PI_OVER_180)
# arrow
cr.line_to(x-haw-1, y+h)
cr.line_to(x+haw-1, y+(h/2))
cr.close_path()
return
def _layout_rtl(self, cr, x, y, w, h, r, aw):
self.hadjustment = haw = -aw/2
cr.arc(r+x, r+y, r, PI, 270*PI_OVER_180)
cr.line_to(x+w-haw+1, y)
        cr.line_to(x+w-haw-aw+1, y+(h/2))
cr.line_to(x+w-haw+1, y+h)
cr.arc(r+x, y+h-r, r, 90*PI_OVER_180, PI)
cr.close_path()
return
class AnimationClock(GObject.GObject):
_1SECOND = 1000
__gsignals__ = {
"animation-frame" : (GObject.SignalFlags.RUN_LAST,
None,
(float,),),
"animation-finished" : (GObject.SignalFlags.RUN_FIRST,
None,
(bool,),),
}
def __init__(self, fps, duration):
GObject.GObject.__init__(self)
self.fps = fps
self.in_progress = False
self.set_duration(duration)
self._clock = None
self._progress = 0 # progress as an msec offset
return
    def _get_timestep(self):
d = self.duration
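        # e.g. with the defaults used below (fps=50, duration=150.0ms):
        # 150 / ((150 / 1000) * 50) = 150 / 7.5 = 20, so a frame is
        # scheduled every 20ms (and never more often than every 10ms)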
return max(10, int(d / ((d / AnimationClock._1SECOND) * self.fps)))
def _schedule_animation_frame(self):
if self._progress > self.duration:
self._clock = None
self.in_progress = False
self.emit('animation-finished', False)
return False
self._progress += self._timestep
self.emit('animation-frame', self.progress)
return True
@property
def progress(self):
return min(1.0, self._progress / self.duration)
def set_duration(self, duration):
self.duration = float(duration)
        self._timestep = self._get_timestep()
return
def stop(self, who_called='?'):
if self._clock:
#~ print who_called+'.Stop'
GObject.source_remove(self._clock)
self.emit('animation-finished', True)
self._clock = None
self._progress = 0
self.in_progress = False
return
def start(self):
self.stop(who_called='start')
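        # NOTE: `sequence` is not defined by AnimationClock itself; it is
        # expected to be supplied by a subclass (see PathBarAnimator below)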
if not self.sequence: return
self._clock = GObject.timeout_add(self._timestep,
self._schedule_animation_frame,
priority=100)
self.in_progress = True
return
class PathBarAnimator(AnimationClock):
# animation display constants
FPS = 50
DURATION = 150 # spec says 150ms
# animation modes
NONE = 'animation-none'
OUT = 'animation-out'
IN = 'animation-in'
WIDTH_CHANGE = 'animation-width-change'
def __init__(self, pathbar):
AnimationClock.__init__(self, self.FPS, self.DURATION)
self.pathbar = pathbar
self.sequence = []
self.connect('animation-frame', self._on_animation_frame)
self.connect('animation-finished', self._on_animation_finished)
return
def _animate_out(self, part, progress, kwargs):
real_alloc = part.get_allocation()
xo = real_alloc.width - int(real_alloc.width * progress)
if self.pathbar.get_direction() == Gtk.TextDirection.RTL:
xo *= -1
anim_alloc = Gdk.Rectangle()
anim_alloc.x = real_alloc.x-xo
anim_alloc.y = real_alloc.y
anim_alloc.width = real_alloc.width
anim_alloc.height = real_alloc.height
part.new_frame(anim_alloc)
return
def _animate_in(self, part, progress, kwargs):
real_alloc = part.get_allocation()
xo = int(real_alloc.width * progress)
if self.pathbar.get_direction() == Gtk.TextDirection.RTL:
xo *= -1
anim_alloc = Gdk.Rectangle()
anim_alloc.x = real_alloc.x-xo
anim_alloc.y = real_alloc.y
anim_alloc.width = real_alloc.width
anim_alloc.height = real_alloc.height
part.new_frame(anim_alloc)
return
def _animate_width_change(self, part, progress, kwargs):
start_w = kwargs['start_width']
end_w = kwargs['end_width']
width = int(round(start_w + (end_w - start_w) * progress))
part.set_size_request(width, part.get_height_request())
return
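    # e.g. start_width=100, end_width=60, progress=0.25 gives
    # int(round(100 + (60 - 100) * 0.25)) = 90 pixels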
def _on_animation_frame(self, clock, progress):
if not self.sequence: return
for actor, animation, kwargs in self.sequence:
if animation == PathBarAnimator.NONE: continue
if animation == PathBarAnimator.OUT:
self._animate_out(actor, progress, kwargs)
elif animation == PathBarAnimator.IN:
self._animate_in(actor, progress, kwargs)
elif animation == PathBarAnimator.WIDTH_CHANGE:
self._animate_width_change(actor, progress, kwargs)
return
def _on_animation_finished(self, clock, interrupted):
for actor, animation, kwargs in self.sequence:
actor.animation_finished()
self.sequence = []
self.pathbar.psuedo_parts = []
self.pathbar.queue_draw()
return
def append_animation(self, actor, animation, **kwargs):
self.sequence.append((actor, animation, kwargs))
return
def reset(self, who_called='?'):
AnimationClock.stop(self, who_called=who_called+'.reset')
self.sequence = []
return
class PathBar(Gtk.HBox):
MIN_PART_WIDTH = 25 # pixels
def __init__(self):
GObject.GObject.__init__(self)
self.set_redraw_on_allocate(False)
self.set_size_request(-1, em(1.75))
self._allocation = None
# Accessibility info
atk_desc = self.get_accessible()
atk_desc.set_name(_("You are here:"))
atk_desc.set_role(Atk.Role.PANEL)
self.use_animations = True
self.animator = PathBarAnimator(self)
self.out_of_width = False
self.psuedo_parts = []
# used for certain button press logic
self._press_origin = None
# tracks the id of the revealer timeout
self._revealer = None
# values derived from the gtk settings
s = Gtk.Settings.get_default()
# time to wait before revealing a part on enter event in ms
self._timeout_reveal = s.get_property("gtk-tooltip-timeout")
# time to wait until emitting click event in ms
self._timeout_initial = s.get_property("gtk-timeout-initial")
        # the signals!
self.connect('size-allocate', self._on_allocate)
self.connect('draw', self._on_draw)
return
# sugar
def __len__(self):
return len(self.get_children())
def __getitem__(self, index):
return self.get_children()[index]
# signal handlers
def _on_allocate(self, widget, _):
allocation = self.get_allocation()
if self._allocation == allocation:
return True
        # prevent vertical bobbing when the searchentry is shown/hidden
if allocation.height > self.get_property('height-request'):
self.set_property('height-request', allocation.height)
if not self._allocation:
self._allocation = allocation
self.queue_draw()
return True
pthbr_width = allocation.width
parts_width = self.get_parts_width()
#~ print parts_width, pthbr_width
#~ self.animator.reset('on_allocate')
self.set_use_animations(True)
if pthbr_width > parts_width and self.out_of_width:
dw = pthbr_width - parts_width
self._grow_parts(dw)
elif pthbr_width < parts_width:
overhang = parts_width - pthbr_width
if overhang > 0:
self.set_use_animations(False)
self._shrink_parts(overhang)
self._allocation = allocation
if self.use_animations and self.animator.sequence and not \
self.animator.in_progress:
self.animator.start()
else:
self.queue_draw()
return
def _on_draw(self, widget, cr):
        # always paint pseudo parts first
a = self.get_allocation()
context = self.get_style_context()
context.save()
context.add_class("button")
self._paint_psuedo_parts(cr, context, a.x, a.y)
# paint a frame around the entire pathbar
width = self.get_parts_width()
Gtk.render_background(context, cr, 1, 1, width-2, a.height-2)
self._paint_widget_parts(cr, context, a.x, a.y)
Gtk.render_frame(context, cr, 0, 0, width, a.height)
context.restore()
return True
# private methods
def _paint_widget_parts(self, cr, context, xo, yo):
parts = self.get_children()
# paint in reverse order, so we get correct overlapping during
# animation
parts.reverse()
for part in parts:
part.paint(cr,
part.animation_allocation or part.get_allocation(),
context,
xo, yo)
return
def _paint_psuedo_parts(self, cr, context, xo, yo):
        # a special case: pseudo parts are painted first,
        # i.e. those parts animating 'in' on their removal
for part in self.psuedo_parts:
part.paint(cr,
part.animation_allocation or part.get_allocation(),
context,
xo, yo)
return
def _shrink_parts(self, overhang):
self.out_of_width = True
for part in self:
old_width = part.get_width_request()
new_width = max(self.MIN_PART_WIDTH, old_width - overhang)
            if False:  # self.use_animations:
self.animator.append_animation(part,
PathBarAnimator.WIDTH_CHANGE,
start_width=old_width,
end_width=new_width)
else:
part.set_size_request(new_width,
part.get_height_request())
overhang -= old_width - new_width
if overhang <= 0: break
return
def _grow_parts(self, claim):
children = self.get_children()
children.reverse()
for part in children:
if part.get_allocation().width == part.get_natural_width():
continue
growth = min(claim, (part.get_natural_width() - part.width))
if growth <= 0: break
claim -= growth
if self.use_animations:
self.animator.append_animation(part,
PathBarAnimator.WIDTH_CHANGE,
start_width=part.width,
end_width=part.width + growth)
else:
part.set_size_request(part.width + growth,
part.get_height_request())
return
def _make_space(self, part):
children = self.get_children()
if not children: return
cur_width = self.get_parts_width()
        incoming_width = cur_width + part.get_width_request()
        overhang = incoming_width - self.get_allocation().width
if overhang > 0:
print 'shrink parts by:', overhang
self._shrink_parts(overhang)
return
def _reclaim_space(self, part):
if not self.out_of_width: return
claim = part.get_width_request()
self._grow_parts(claim)
return
def _append_compose_parts(self, new_part):
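        # a lone part keeps the plain RoundedRect shape; once there are
        # several parts the first becomes a StartArrow, the last an EndCap
        # and anything in between a MidArrow:
        #   [StartArrow > MidArrow > ... > EndCap]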
d = self.get_direction()
children = self.get_children()
n_parts = len(children)
if n_parts > 0:
new_part.set_shape(ShapeEndCap(d))
first_part = children[0]
first_part.set_shape(ShapeStartArrow(d))
else:
new_part.set_shape(ShapeRoundedRect(d))
if not n_parts > 1: return
new_mid = children[-1]
new_mid.set_shape(ShapeMidArrow(d))
return
def _remove_compose_parts(self):
d = self.get_direction()
children = self.get_children()
n_parts = len(children)
if n_parts == 0:
return
elif n_parts == 1:
children[0].set_shape(ShapeRoundedRect(d))
return
last = children[-1]
last.set_shape(ShapeEndCap(d))
self.queue_draw()
return
def _cleanup_revealer(self):
if not self._revealer: return
GObject.source_remove(self._revealer)
self._revealer = None
return
def _theme(self, part):
#~ part.set_padding(self.theme['xpad'], self.theme['ypad'])
part.set_padding(12, 4)
return
# public methods
@property
def first_part(self):
children = self.get_children()
if not children: return None
return children[0]
@property
def last_part(self):
children = self.get_children()
if not children: return None
return children[-1]
def reveal_part(self, part, animate=True):
        # do not reset the animator here:
#~ self.animator.reset(who_called='reveal_animation')
self.set_use_animations(animate)
part_old_width = part.get_width_request()
part_new_width = part.get_natural_width()
if part_new_width == part_old_width: return
change_amount = part_new_width - part_old_width
for p in self.get_children():
if p == part:
old_width = part_old_width
new_width = part_new_width
else:
if change_amount <= 0: continue
old_width = p.get_width_request()
new_width = max(self.MIN_PART_WIDTH, old_width - change_amount)
change_amount -= old_width - new_width
if self.use_animations:
self.animator.append_animation(p,
PathBarAnimator.WIDTH_CHANGE,
start_width=old_width,
end_width=new_width)
else:
p.set_size_request(new_width,
p.get_height_request())
self.animator.start()
return
def queue_reveal_part(self, part):
def reveal_part_cb(part):
self.reveal_part(part)
return
self._cleanup_revealer()
self._revealer = GObject.timeout_add(self._timeout_reveal,
reveal_part_cb,
part)
return
def get_parts_width(self):
last = self.last_part
if not last: return 0
if self.get_direction() != Gtk.TextDirection.RTL:
return last.x + last.width - self.first_part.x
first = self.first_part
return first.x + first.width - last.x
def get_visual_width(self):
last = self.last_part
first = self.first_part
if not last: return 0
la = last.animation_allocation or last.get_allocation()
fa = first.animation_allocation or first.get_allocation()
if self.get_direction() != Gtk.TextDirection.RTL:
return la.x + la.width - fa.x
return fa.x + fa.width - la.x
def set_use_animations(self, use_animations):
self.use_animations = use_animations
if not use_animations and self.animator.in_progress:
self.animator.reset()
return
def append(self, part):
print 'append', part
print
part.set_nopaint(True)
self.animator.reset('append')
self._theme(part)
self._append_compose_parts(part)
self._make_space(part)
self.pack_start(part, False, False, 0)
part.show()
if self.use_animations:
# XXX: please note that animations also get queued up
# within _shrink_parts()
self.animator.append_animation(part, PathBarAnimator.OUT)
#~ print self.animator.sequence
self.animator.start()
else:
part.set_nopaint(False)
part.queue_draw()
return
def pop(self):
children = self.get_children()
if not children: return
self.animator.reset('pop')
last = children[-1]
if self.use_animations:
# because we remove the real part immediately we need to
            # replicate just enough attributes to perform the slide-in
# animation
part = PsuedoPathPart(self, last)
self.psuedo_parts.append(part)
self.remove(last)
self._remove_compose_parts()
self._reclaim_space(last)
last.destroy()
if not self.use_animations: return
self.animator.append_animation(part, PathBarAnimator.IN)
self.animator.start()
return
def navigate_up(self):
""" just another name for pop() """
self.pop()
class PathPartCommon:
def __init__(self):
self.animation_in_progress = False
self.animation_allocation = None
return
@property
def x(self):
return self.get_allocation().x
@property
def y(self):
return self.get_allocation().y
@property
def width(self):
return self.get_allocation().width
@property
def height(self):
return self.get_allocation().height
def new_frame(self, allocation):
if self.is_nopaint:
self.is_nopaint = False
if not self.animation_in_progress:
self.animation_in_progress = True
self.animation_allocation = allocation
self.queue_draw()
return
def animation_finished(self):
self.animation_in_progress = False
self.animation_allocation = None
if self.get_parent(): self.get_parent().queue_draw()
return
def paint(self, cr, a, context, xo, yo):
if self.is_nopaint: return
cr.save()
x, y = 0, 0
w, h = a.width, a.height
        arrow_width = 12  # theme['arrow-width']
if isinstance(self, PathPart):
_a = self.get_allocation()
self.shape.layout(cr,
_a.x-xo+1, _a.y-yo,
w, h, 3, arrow_width)
cr.clip()
else:
Gtk.render_background(context, cr,
a.x-xo-10, a.y-yo,
a.width+10, a.height)
cr.translate(a.x-xo, a.y-yo)
if self.shape.name.find('Arrow') != -1:
# draw arrow head
cr.move_to(w-arrow_width/2, 2)
cr.line_to(w+5, h/2)
cr.line_to(w-arrow_width/2, h-2)
# fetch the line color and stroke
rgba = context.get_border_color(Gtk.StateFlags.NORMAL)
cr.set_source_rgb(rgba.red, rgba.green, rgba.blue)
cr.set_line_width(1)
cr.stroke()
# render the layout
e = self.layout.get_pixel_extents()[1]
lw, lh = e.width, e.height
pw, ph = a.width, a.height
x = min(self.xpadding, (pw-lw)/2)
y = (ph-lh)/2
# layout area
Gtk.render_layout(context,
cr,
int(x),
int(y),
self.layout)
# paint the focus frame if need be
if isinstance(self, PathPart) and self.has_focus():
# layout area
x, w, h = x-2, lw+4, lh+1
Gtk.render_focus(context, cr, x, y, w, h)
cr.restore()
return
class PsuedoPathPart(PathPartCommon):
def __init__(self, pathbar, real_part):
PathPartCommon.__init__(self)
self.parent = pathbar
self.style = pathbar.get_style()
self.state = real_part.get_state()
self.allocation = real_part.get_allocation()
self.size_request = real_part.get_size_request()
self.xpadding = real_part.xpadding
self.ypadding = real_part.ypadding
# PsuedoPathParts are only used during the remove animation
# sequence, so the shape is always a ShapeEndCap
self.shape = ShapeEndCap(pathbar.get_direction())
self.label = real_part.label
self.layout = real_part.create_pango_layout(self.label)
self.is_nopaint = False
return
def get_allocation(self):
return self.allocation
def get_state(self):
return self.state
def get_width_request(self):
return self.size_request[0]
def get_height_request(self):
return self.size_request[1]
def animation_finished(self):
return
def queue_draw(self):
a = self.allocation
aw = 12
self.parent.queue_draw_area(a.x-aw/2, a.y,
a.width+aw, a.height)
return
class PathPart(Gtk.EventBox, PathPartCommon):
__gsignals__ = {
"clicked" : (GObject.SignalFlags.RUN_LAST,
None,
(),),
}
def __init__(self, label):
Gtk.EventBox.__init__(self)
PathPartCommon.__init__(self)
self.set_visible_window(False)
self.atk = self.get_accessible()
self.atk.set_role(Atk.Role.PUSH_BUTTON)
self.layout = self.create_pango_layout(label)
self.layout.set_ellipsize(Pango.EllipsizeMode.END)
self.xpadding = 6
self.ypadding = 3
self.shape = ShapeRoundedRect(self.get_direction())
self.is_nopaint = False
self.set_label(label)
self._init_event_handling()
return
def __repr__(self):
return "PathPart: '%s'" % self.label
def __str__(self):
return "PathPart: '%s'" % self.label
# signal handlers
def _on_enter_notify(self, part, event):
self.pathbar.queue_reveal_part(self)
if self.pathbar._press_origin == part:
part.set_state(Gtk.StateFlags.ACTIVE)
else:
part.set_state(Gtk.StateFlags.PRELIGHT)
self.queue_draw()
return
def _on_leave_notify(self, part, event):
self.pathbar.queue_reveal_part(self.pathbar.last_part)
part.set_state(Gtk.StateFlags.NORMAL)
self.queue_draw()
return
def _on_button_press(self, part, event):
if event.button != 1: return
self.pathbar._press_origin = part
part.set_state(Gtk.StateFlags.ACTIVE)
self.queue_draw()
return
def _on_button_release(self, part, event):
if event.button != 1: return
if self.pathbar._press_origin != part:
self.pathbar._press_origin = None
return
self.pathbar._press_origin = None
state = part.get_state()
if state == Gtk.StateFlags.ACTIVE:
part.set_state(Gtk.StateFlags.PRELIGHT)
GObject.timeout_add(self.pathbar._timeout_initial,
self.emit, 'clicked')
self.queue_draw()
return
def _on_key_press(self, part, event):
if event.keyval in (Gdk.KEY_space, Gdk.KEY_Return, Gdk.KEY_KP_Enter):
part.set_state(Gtk.StateFlags.ACTIVE)
self.queue_draw()
return
def _on_key_release(self, part, event):
if event.keyval in (Gdk.KEY_space, Gdk.KEY_Return, Gdk.KEY_KP_Enter):
part.set_state(Gtk.StateFlags.NORMAL)
GObject.timeout_add(self.pathbar._timeout_initial,
self.emit, 'clicked')
self.queue_draw()
return
def _on_focus_in(self, part, event):
self.pathbar.reveal_part(self)
return
def _on_focus_out(self, part, event):
self.queue_draw()
return
# private methods
def _init_event_handling(self):
self.set_property("can-focus", True)
self.set_events(Gdk.EventMask.BUTTON_PRESS_MASK|
Gdk.EventMask.BUTTON_RELEASE_MASK|
Gdk.EventMask.KEY_RELEASE_MASK|
Gdk.EventMask.KEY_PRESS_MASK|
Gdk.EventMask.ENTER_NOTIFY_MASK|
Gdk.EventMask.LEAVE_NOTIFY_MASK)
self.connect("enter-notify-event", self._on_enter_notify)
self.connect("leave-notify-event", self._on_leave_notify)
self.connect("button-press-event", self._on_button_press)
self.connect("button-release-event", self._on_button_release)
self.connect("key-press-event", self._on_key_press)
self.connect("key-release-event", self._on_key_release)
self.connect("focus-in-event", self._on_focus_in)
self.connect("focus-out-event", self._on_focus_out)
return
def _calc_natural_size(self, who_called='?'):
ne = self.natural_extents
nw, nh = ne.width, ne.height
nw += self.shape.hadjustment + 2*self.xpadding
nh += 2*self.ypadding
self.natural_size = nw, nh
self.set_size_request(nw, nh)
return
# public methods
@property
def pathbar(self):
return self.get_parent()
def set_padding(self, xpadding, ypadding):
self.xpadding = xpadding
self.ypadding = ypadding
self._calc_natural_size()
return
def set_size_request(self, width, height):
width = max(2*self.xpadding+1, width)
height = max(2*self.ypadding+1, height)
self.layout.set_width(Pango.SCALE * (width - 2*self.xpadding))
Gtk.Widget.set_size_request(self, width, height)
return
def set_nopaint(self, is_nopaint):
self.is_nopaint = is_nopaint
self.queue_draw()
return
def set_shape(self, shape):
if shape == self.shape: return
self.shape = shape
self._calc_natural_size()
self.queue_draw()
return
def set_label(self, label):
self.label = label
self.atk.set_name(label)
self.atk.set_description(_('Navigates to the %s page.') % label)
self.layout.set_markup(label, -1)
self.layout.set_width(-1)
self.natural_extents = self.layout.get_pixel_extents()[1]
self._calc_natural_size()
self.queue_draw()
return
def get_natural_size(self):
return self.natural_size
def get_natural_width(self):
return self.natural_size[0]
def get_natural_height(self):
return self.natural_size[1]
def get_width_request(self):
return self.get_property("width-request")
def get_height_request(self):
return self.get_property("height-request")
def queue_draw(self):
a = self.get_allocation()
parent = self.get_parent()
if parent:
aw = 12
else:
aw = 0
self.queue_draw_area(a.x-aw/2, a.y,
a.width+aw, a.height)
return
class NavigationBar(PathBar):
def __init__(self, group=None):
PathBar.__init__(self)
self.id_to_part = {}
self._callback_id = None
return
def _on_part_clicked(self, part):
part.callback(self, part)
return
def add_with_id(self, label, callback, id, do_callback=True, animate=True):
"""
Add a new button with the given label/callback
If there is the same id already, replace the existing one
with the new one
"""
LOG.debug("add_with_id label='%s' callback='%s' id='%s' "
"do_callback=%s animate=%s" % (label, callback, id,
do_callback, animate))
label = GObject.markup_escape_text(label)
if not self.id_to_part:
self.set_use_animations(False)
else:
self.set_use_animations(animate)
# check if we have the button of that id or need a new one
if id in self.id_to_part:
part = self.id_to_part[id]
if part.label == label:
return
part.set_label(label)
else:
part = PathPart(label)
part.connect('clicked', self._on_part_clicked)
part.set_name(id)
self.id_to_part[id] = part
part.callback = callback
if do_callback:
            # cleanup any superseded idle callback
if self._callback_id:
GObject.source_remove(self._callback_id)
self._callback_id = None
            # if we do not call the callback in an idle,
            # all hell breaks loose
self._callback_id = GObject.idle_add(callback,
self, # pathbar
part)
self.append(part)
return
def remove_ids(self, *ids, **kwargs):
parts = self.get_parts()
print 'remove ids', ids
# it would seem parts can become stale within the id_to_part dict,
# so we clean these up ...
cleanup_ids = []
# the index of the first part to be clipped
index = len(parts)
for id, part in self.id_to_part.iteritems():
if id not in ids: continue
if part not in parts:
cleanup_ids.append(id)
part.destroy()
else:
index = min(index, parts.index(part))
if index == len(parts): return
# cleanup any stale id:part pairs in the id_to_part dict
for id in cleanup_ids:
del self.id_to_part[id]
# remove id:part pairs from the id_to_part dict, for whom removal
# has been requested
for id in ids:
if id in self.id_to_part:
del self.id_to_part[id]
#~ print index, self.id_to_part.keys()
# the index is used to remove all parts after the index but we
# keep one part around to animate its removal
for part in parts[index+1:]:
part.destroy()
animate = True
if 'animate' in kwargs:
animate = kwargs['animate']
# animate the removal of the final part, or not
self.set_use_animations(animate)
self.pop()
# check if we should call the new tail parts callback
if 'do_callback' in kwargs and kwargs['do_callback']:
part = self[-1]
part.callback(self, part)
return
def remove_all(self, **kwargs):
if len(self) <= 1: return
ids = filter(lambda k: k != 'category',
self.id_to_part.keys())
self.remove_ids(*ids, **kwargs)
return
def has_id(self, id):
return self.id_to_part.has_key(id)
def get_parts(self):
return self.get_children()
def get_active(self):
parts = self.get_parts()
if not parts: return None
return parts[-1]
def get_button_from_id(self, id):
"""
return the button for the given id (or None)
"""
if not id in self.id_to_part:
return None
return self.id_to_part[id]
def set_active_no_callback(self, part):
return
class TestIt:
def __init__(self):
def append(button, entry, pathbar):
t = entry.get_text() or 'no label %s' % len(pathbar)
part = PathPart(t)
pathbar.append(part)
return
def remove(button, entry, pathbar):
pathbar.pop()
return
win = Gtk.Window()
win.set_border_width(30)
win.set_size_request(600, 300)
vb = Gtk.VBox(spacing=6)
win.add(vb)
pb = PathBar()
pb.set_size_request(-1, 30)
vb.pack_start(pb, False, False, 0)
part = PathPart('Get Software')
pb.append(part)
entry = Gtk.Entry()
vb.pack_start(entry, False, False, 0)
b = Gtk.Button('Append')
vb.pack_start(b, True, True, 0)
b.connect('clicked', append, entry, pb)
b = Gtk.Button('Remove')
vb.pack_start(b, True, True, 0)
b.connect('clicked', remove, entry, pb)
win.show_all()
win.connect('destroy', Gtk.main_quit)
self.win = win
self.win.pb = pb
def get_test_pathbar_window():
t = TestIt()
return t.win
if __name__ == '__main__':
win = get_test_pathbar_window()
Gtk.main()
| gpl-3.0 |
nju520/django | tests/model_meta/models.py | 192 | 5039 | from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Relation(models.Model):
pass
class AbstractPerson(models.Model):
# DATA fields
data_abstract = models.CharField(max_length=10)
fk_abstract = models.ForeignKey(Relation, models.CASCADE, related_name='fk_abstract_rel')
# M2M fields
m2m_abstract = models.ManyToManyField(Relation, related_name='m2m_abstract_rel')
friends_abstract = models.ManyToManyField('self', related_name='friends_abstract', symmetrical=True)
following_abstract = models.ManyToManyField('self', related_name='followers_abstract', symmetrical=False)
# VIRTUAL fields
data_not_concrete_abstract = models.ForeignObject(
Relation,
on_delete=models.CASCADE,
from_fields=['abstract_non_concrete_id'],
to_fields=['id'],
related_name='fo_abstract_rel',
)
# GFK fields
content_type_abstract = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
object_id_abstract = models.PositiveIntegerField()
content_object_abstract = GenericForeignKey('content_type_abstract', 'object_id_abstract')
# GR fields
generic_relation_abstract = GenericRelation(Relation)
class Meta:
abstract = True
class BasePerson(AbstractPerson):
# DATA fields
data_base = models.CharField(max_length=10)
fk_base = models.ForeignKey(Relation, models.CASCADE, related_name='fk_base_rel')
# M2M fields
m2m_base = models.ManyToManyField(Relation, related_name='m2m_base_rel')
friends_base = models.ManyToManyField('self', related_name='friends_base', symmetrical=True)
following_base = models.ManyToManyField('self', related_name='followers_base', symmetrical=False)
# VIRTUAL fields
data_not_concrete_base = models.ForeignObject(
Relation,
on_delete=models.CASCADE,
from_fields=['base_non_concrete_id'],
to_fields=['id'],
related_name='fo_base_rel',
)
# GFK fields
content_type_base = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
object_id_base = models.PositiveIntegerField()
content_object_base = GenericForeignKey('content_type_base', 'object_id_base')
# GR fields
generic_relation_base = GenericRelation(Relation)
class Person(BasePerson):
# DATA fields
data_inherited = models.CharField(max_length=10)
fk_inherited = models.ForeignKey(Relation, models.CASCADE, related_name='fk_concrete_rel')
# M2M Fields
m2m_inherited = models.ManyToManyField(Relation, related_name='m2m_concrete_rel')
friends_inherited = models.ManyToManyField('self', related_name='friends_concrete', symmetrical=True)
following_inherited = models.ManyToManyField('self', related_name='followers_concrete', symmetrical=False)
# VIRTUAL fields
data_not_concrete_inherited = models.ForeignObject(
Relation,
on_delete=models.CASCADE,
from_fields=['model_non_concrete_id'],
to_fields=['id'],
related_name='fo_concrete_rel',
)
# GFK fields
content_type_concrete = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
object_id_concrete = models.PositiveIntegerField()
content_object_concrete = GenericForeignKey('content_type_concrete', 'object_id_concrete')
# GR fields
generic_relation_concrete = GenericRelation(Relation)
class ProxyPerson(Person):
class Meta:
proxy = True
class Relating(models.Model):
# ForeignKey to BasePerson
baseperson = models.ForeignKey(BasePerson, models.CASCADE, related_name='relating_baseperson')
baseperson_hidden = models.ForeignKey(BasePerson, models.CASCADE, related_name='+')
# ForeignKey to Person
person = models.ForeignKey(Person, models.CASCADE, related_name='relating_person')
person_hidden = models.ForeignKey(Person, models.CASCADE, related_name='+')
# ForeignKey to ProxyPerson
proxyperson = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson')
proxyperson_hidden = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='+')
# ManyToManyField to BasePerson
basepeople = models.ManyToManyField(BasePerson, related_name='relating_basepeople')
basepeople_hidden = models.ManyToManyField(BasePerson, related_name='+')
# ManyToManyField to Person
people = models.ManyToManyField(Person, related_name='relating_people')
people_hidden = models.ManyToManyField(Person, related_name='+')
# ParentListTests models
class CommonAncestor(models.Model):
pass
class FirstParent(CommonAncestor):
first_ancestor = models.OneToOneField(CommonAncestor, models.SET_NULL, primary_key=True, parent_link=True)
class SecondParent(CommonAncestor):
second_ancestor = models.OneToOneField(CommonAncestor, models.SET_NULL, primary_key=True, parent_link=True)
class Child(FirstParent, SecondParent):
pass
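# A minimal sketch of the Options API these fixtures exercise (field and
# model names are the ones defined above):
#
#   Person._meta.get_field('data_abstract')  # concrete field inherited
#                                            # from AbstractPerson
#   Person._meta.get_fields()                # data, m2m, GFK and relation
#                                            # fields in one list
#   Child._meta.get_parent_list()            # ancestors of the diamond
#                                            # inheritance above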
| bsd-3-clause |
chaluemwut/fbserver | venv/lib/python2.7/site-packages/scipy/interpolate/tests/test_polyint.py | 9 | 18758 | from __future__ import division, print_function, absolute_import
import warnings
from numpy.testing import (assert_almost_equal, assert_array_equal,
TestCase, run_module_suite, assert_allclose, assert_equal, assert_)
from scipy.interpolate import (KroghInterpolator, krogh_interpolate,
BarycentricInterpolator, barycentric_interpolate,
PiecewisePolynomial, piecewise_polynomial_interpolate,
approximate_taylor_polynomial, pchip, PchipInterpolator)
from scipy.lib.six import xrange
import scipy
import numpy as np
from scipy.interpolate import splrep, splev
def check_shape(interpolator_cls, x_shape, y_shape, deriv_shape=None, axis=0):
np.random.seed(1234)
x = [-1, 0, 1]
s = list(range(1, len(y_shape)+1))
s.insert(axis % (len(y_shape)+1), 0)
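    # lay the 3 sample points along `axis`: generate shape (3,) + y_shape,
    # then transpose the leading axis into position `axis`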
y = np.random.rand(*((3,) + y_shape)).transpose(s)
# Cython code chokes on y.shape = (0, 3) etc, skip them
if y.size == 0:
return
xi = np.zeros(x_shape)
yi = interpolator_cls(x, y, axis=axis)(xi)
target_shape = ((deriv_shape or ()) + y.shape[:axis]
+ x_shape + y.shape[axis:][1:])
assert_equal(yi.shape, target_shape)
# check it works also with lists
if x_shape and y.size > 0:
interpolator_cls(list(x), list(y), axis=axis)(list(xi))
# check also values
if xi.size > 0 and deriv_shape is None:
bs_shape = (y.shape[:axis] + ((1,)*len(x_shape)) + y.shape[axis:][1:])
yv = y[((slice(None,None,None),)*(axis % y.ndim))+(1,)].reshape(bs_shape)
yi, y = np.broadcast_arrays(yi, yv)
assert_allclose(yi, y)
SHAPES = [(), (0,), (1,), (3,2,5)]
def test_shapes():
for ip in [KroghInterpolator, BarycentricInterpolator, pchip]:
for s1 in SHAPES:
for s2 in SHAPES:
for axis in range(-len(s2), len(s2)):
yield check_shape, ip, s1, s2, None, axis
def test_derivs_shapes():
def krogh_derivs(x, y, axis=0):
return KroghInterpolator(x, y, axis).derivatives
for s1 in SHAPES:
for s2 in SHAPES:
for axis in range(-len(s2), len(s2)):
yield check_shape, krogh_derivs, s1, s2, (3,), axis
def test_deriv_shapes():
def krogh_deriv(x, y, axis=0):
return KroghInterpolator(x, y, axis).derivative
def pchip_deriv(x, y, axis=0):
return pchip(x, y, axis).derivative()
def pchip_deriv2(x, y, axis=0):
return pchip(x, y, axis).derivative(2)
def pchip_deriv_inplace(x, y, axis=0):
class P(PchipInterpolator):
def __call__(self, x):
return PchipInterpolator.__call__(self, x, 1)
pass
return P(x, y, axis)
for ip in [krogh_deriv, pchip_deriv, pchip_deriv2, pchip_deriv_inplace]:
for s1 in SHAPES:
for s2 in SHAPES:
for axis in range(-len(s2), len(s2)):
yield check_shape, ip, s1, s2, (), axis
def _check_complex(ip):
x = [1, 2, 3, 4]
y = [1, 2, 1j, 3]
p = ip(x, y)
assert_allclose(y, p(x))
def test_complex():
for ip in [KroghInterpolator, BarycentricInterpolator, pchip]:
yield _check_complex, ip
class CheckKrogh(TestCase):
def setUp(self):
self.true_poly = scipy.poly1d([-2,3,1,5,-4])
self.test_xs = np.linspace(-1,1,100)
self.xs = np.linspace(-1,1,5)
self.ys = self.true_poly(self.xs)
def test_lagrange(self):
P = KroghInterpolator(self.xs,self.ys)
assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
def test_scalar(self):
P = KroghInterpolator(self.xs,self.ys)
assert_almost_equal(self.true_poly(7),P(7))
assert_almost_equal(self.true_poly(np.array(7)), P(np.array(7)))
def test_derivatives(self):
P = KroghInterpolator(self.xs,self.ys)
D = P.derivatives(self.test_xs)
for i in xrange(D.shape[0]):
assert_almost_equal(self.true_poly.deriv(i)(self.test_xs),
D[i])
def test_low_derivatives(self):
P = KroghInterpolator(self.xs,self.ys)
D = P.derivatives(self.test_xs,len(self.xs)+2)
for i in xrange(D.shape[0]):
assert_almost_equal(self.true_poly.deriv(i)(self.test_xs),
D[i])
def test_derivative(self):
P = KroghInterpolator(self.xs,self.ys)
m = 10
r = P.derivatives(self.test_xs,m)
for i in xrange(m):
assert_almost_equal(P.derivative(self.test_xs,i),r[i])
def test_high_derivative(self):
P = KroghInterpolator(self.xs,self.ys)
for i in xrange(len(self.xs),2*len(self.xs)):
assert_almost_equal(P.derivative(self.test_xs,i),
np.zeros(len(self.test_xs)))
def test_hermite(self):
xs = [0,0,0,1,1,1,2]
ys = [self.true_poly(0),
self.true_poly.deriv(1)(0),
self.true_poly.deriv(2)(0),
self.true_poly(1),
self.true_poly.deriv(1)(1),
self.true_poly.deriv(2)(1),
self.true_poly(2)]
        P = KroghInterpolator(xs, ys)
assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
def test_vector(self):
xs = [0, 1, 2]
ys = np.array([[0,1],[1,0],[2,1]])
P = KroghInterpolator(xs,ys)
Pi = [KroghInterpolator(xs,ys[:,i]) for i in xrange(ys.shape[1])]
test_xs = np.linspace(-1,3,100)
assert_almost_equal(P(test_xs),
np.rollaxis(np.asarray([p(test_xs) for p in Pi]),-1))
assert_almost_equal(P.derivatives(test_xs),
np.transpose(np.asarray([p.derivatives(test_xs) for p in Pi]),
(1,2,0)))
def test_empty(self):
P = KroghInterpolator(self.xs,self.ys)
assert_array_equal(P([]), [])
def test_shapes_scalarvalue(self):
P = KroghInterpolator(self.xs,self.ys)
assert_array_equal(np.shape(P(0)), ())
assert_array_equal(np.shape(P(np.array(0))), ())
assert_array_equal(np.shape(P([0])), (1,))
assert_array_equal(np.shape(P([0,1])), (2,))
def test_shapes_scalarvalue_derivative(self):
P = KroghInterpolator(self.xs,self.ys)
n = P.n
assert_array_equal(np.shape(P.derivatives(0)), (n,))
assert_array_equal(np.shape(P.derivatives(np.array(0))), (n,))
assert_array_equal(np.shape(P.derivatives([0])), (n,1))
assert_array_equal(np.shape(P.derivatives([0,1])), (n,2))
def test_shapes_vectorvalue(self):
P = KroghInterpolator(self.xs,np.outer(self.ys,np.arange(3)))
assert_array_equal(np.shape(P(0)), (3,))
assert_array_equal(np.shape(P([0])), (1,3))
assert_array_equal(np.shape(P([0,1])), (2,3))
def test_shapes_1d_vectorvalue(self):
P = KroghInterpolator(self.xs,np.outer(self.ys,[1]))
assert_array_equal(np.shape(P(0)), (1,))
assert_array_equal(np.shape(P([0])), (1,1))
assert_array_equal(np.shape(P([0,1])), (2,1))
def test_shapes_vectorvalue_derivative(self):
P = KroghInterpolator(self.xs,np.outer(self.ys,np.arange(3)))
n = P.n
assert_array_equal(np.shape(P.derivatives(0)), (n,3))
assert_array_equal(np.shape(P.derivatives([0])), (n,1,3))
assert_array_equal(np.shape(P.derivatives([0,1])), (n,2,3))
def test_wrapper(self):
P = KroghInterpolator(self.xs,self.ys)
assert_almost_equal(P(self.test_xs),krogh_interpolate(self.xs,self.ys,self.test_xs))
assert_almost_equal(P.derivative(self.test_xs,2),krogh_interpolate(self.xs,self.ys,self.test_xs,der=2))
assert_almost_equal(P.derivatives(self.test_xs,2),krogh_interpolate(self.xs,self.ys,self.test_xs,der=[0,1]))
def test_int_inputs(self):
# Check input args are cast correctly to floats, gh-3669
x = [0, 234,468,702,936,1170,1404,2340,3744,6084,8424,13104,60000]
offset_cdf = np.array([-0.95, -0.86114777, -0.8147762, -0.64072425, -0.48002351,
-0.34925329, -0.26503107, -0.13148093, -0.12988833, -0.12979296,
-0.12973574, -0.08582937, 0.05])
f = KroghInterpolator(x, offset_cdf)
assert_allclose(abs((f(x) - offset_cdf) / f.derivative(x, 1)), 0, atol=1e-10)
class CheckTaylor(TestCase):
def test_exponential(self):
degree = 5
p = approximate_taylor_polynomial(np.exp, 0, degree, 1, 15)
for i in xrange(degree+1):
assert_almost_equal(p(0),1)
p = p.deriv()
assert_almost_equal(p(0),0)
class CheckBarycentric(TestCase):
def setUp(self):
self.true_poly = scipy.poly1d([-2,3,1,5,-4])
self.test_xs = np.linspace(-1,1,100)
self.xs = np.linspace(-1,1,5)
self.ys = self.true_poly(self.xs)
def test_lagrange(self):
P = BarycentricInterpolator(self.xs,self.ys)
assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
def test_scalar(self):
P = BarycentricInterpolator(self.xs,self.ys)
assert_almost_equal(self.true_poly(7),P(7))
assert_almost_equal(self.true_poly(np.array(7)),P(np.array(7)))
def test_delayed(self):
P = BarycentricInterpolator(self.xs)
P.set_yi(self.ys)
assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
def test_append(self):
P = BarycentricInterpolator(self.xs[:3],self.ys[:3])
P.add_xi(self.xs[3:],self.ys[3:])
assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
def test_vector(self):
xs = [0, 1, 2]
ys = np.array([[0,1],[1,0],[2,1]])
P = BarycentricInterpolator(xs,ys)
Pi = [BarycentricInterpolator(xs,ys[:,i]) for i in xrange(ys.shape[1])]
test_xs = np.linspace(-1,3,100)
assert_almost_equal(P(test_xs),
np.rollaxis(np.asarray([p(test_xs) for p in Pi]),-1))
def test_shapes_scalarvalue(self):
P = BarycentricInterpolator(self.xs,self.ys)
assert_array_equal(np.shape(P(0)), ())
assert_array_equal(np.shape(P(np.array(0))), ())
assert_array_equal(np.shape(P([0])), (1,))
assert_array_equal(np.shape(P([0,1])), (2,))
def test_shapes_vectorvalue(self):
P = BarycentricInterpolator(self.xs,np.outer(self.ys,np.arange(3)))
assert_array_equal(np.shape(P(0)), (3,))
assert_array_equal(np.shape(P([0])), (1,3))
assert_array_equal(np.shape(P([0,1])), (2,3))
def test_shapes_1d_vectorvalue(self):
P = BarycentricInterpolator(self.xs,np.outer(self.ys,[1]))
assert_array_equal(np.shape(P(0)), (1,))
assert_array_equal(np.shape(P([0])), (1,1))
assert_array_equal(np.shape(P([0,1])), (2,1))
def test_wrapper(self):
P = BarycentricInterpolator(self.xs,self.ys)
assert_almost_equal(P(self.test_xs),barycentric_interpolate(self.xs,self.ys,self.test_xs))
class CheckPiecewise(TestCase):
def setUp(self):
self.tck = splrep([0,1,2,3,4,5], [0,10,-1,3,7,2], s=0)
self.test_xs = np.linspace(-1,6,100)
self.spline_ys = splev(self.test_xs, self.tck)
self.spline_yps = splev(self.test_xs, self.tck, der=1)
self.xi = np.unique(self.tck[0])
self.yi = [[splev(x, self.tck, der=j) for j in xrange(3)] for x in self.xi]
def test_construction(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi, self.yi, 3)
assert_almost_equal(P(self.test_xs), self.spline_ys)
def test_scalar(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,self.yi,3)
assert_almost_equal(P(self.test_xs[0]),self.spline_ys[0])
assert_almost_equal(P.derivative(self.test_xs[0],1),self.spline_yps[0])
assert_almost_equal(P(np.array(self.test_xs[0])),self.spline_ys[0])
assert_almost_equal(P.derivative(np.array(self.test_xs[0]),1),
self.spline_yps[0])
def test_derivative(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,self.yi,3)
assert_almost_equal(P.derivative(self.test_xs,1),self.spline_yps)
def test_derivatives(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,self.yi,3)
m = 4
r = P.derivatives(self.test_xs,m)
#print r.shape, r
for i in xrange(m):
assert_almost_equal(P.derivative(self.test_xs,i),r[i])
def test_vector(self):
xs = [0, 1, 2]
ys = [[[0,1]],[[1,0],[-1,-1]],[[2,1]]]
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(xs,ys)
Pi = [PiecewisePolynomial(xs,[[yd[i] for yd in y] for y in ys])
for i in xrange(len(ys[0][0]))]
test_xs = np.linspace(-1,3,100)
assert_almost_equal(P(test_xs),
np.rollaxis(np.asarray([p(test_xs) for p in Pi]),-1))
assert_almost_equal(P.derivative(test_xs,1),
np.transpose(np.asarray([p.derivative(test_xs,1) for p in Pi]),
(1,0)))
def test_incremental(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial([self.xi[0]], [self.yi[0]], 3)
for i in xrange(1,len(self.xi)):
P.append(self.xi[i],self.yi[i],3)
assert_almost_equal(P(self.test_xs),self.spline_ys)
def test_shapes_scalarvalue(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,self.yi,4)
assert_array_equal(np.shape(P(0)), ())
assert_array_equal(np.shape(P(np.array(0))), ())
assert_array_equal(np.shape(P([0])), (1,))
assert_array_equal(np.shape(P([0,1])), (2,))
def test_shapes_scalarvalue_derivative(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,self.yi,4)
n = 4
assert_array_equal(np.shape(P.derivative(0,1)), ())
assert_array_equal(np.shape(P.derivative(np.array(0),1)), ())
assert_array_equal(np.shape(P.derivative([0],1)), (1,))
assert_array_equal(np.shape(P.derivative([0,1],1)), (2,))
def test_shapes_vectorvalue(self):
yi = np.multiply.outer(np.asarray(self.yi),np.arange(3))
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,yi,4)
assert_array_equal(np.shape(P(0)), (3,))
assert_array_equal(np.shape(P([0])), (1,3))
assert_array_equal(np.shape(P([0,1])), (2,3))
def test_shapes_vectorvalue_1d(self):
yi = np.multiply.outer(np.asarray(self.yi),np.arange(1))
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,yi,4)
assert_array_equal(np.shape(P(0)), (1,))
assert_array_equal(np.shape(P([0])), (1,1))
assert_array_equal(np.shape(P([0,1])), (2,1))
def test_shapes_vectorvalue_derivative(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi, np.multiply.outer(self.yi,
np.arange(3)),4)
n = 4
assert_array_equal(np.shape(P.derivative(0,1)), (3,))
assert_array_equal(np.shape(P.derivative([0],1)), (1,3))
assert_array_equal(np.shape(P.derivative([0,1],1)), (2,3))
def test_wrapper(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
P = PiecewisePolynomial(self.xi,self.yi)
assert_almost_equal(P(self.test_xs),
piecewise_polynomial_interpolate(self.xi, self.yi,
self.test_xs))
assert_almost_equal(P.derivative(self.test_xs,2),
piecewise_polynomial_interpolate(self.xi,
self.yi,
self.test_xs,
der=2))
assert_almost_equal(P.derivatives(self.test_xs,2),
piecewise_polynomial_interpolate(self.xi,
self.yi,
self.test_xs,
der=[0,1]))
class TestPCHIP(TestCase):
def _make_random(self, npts=20):
np.random.seed(1234)
xi = np.sort(np.random.random(npts))
yi = np.random.random(npts)
return pchip(xi, yi), xi, yi
def test_overshoot(self):
# PCHIP should not overshoot
p, xi, yi = self._make_random()
for i in range(len(xi)-1):
x1, x2 = xi[i], xi[i+1]
y1, y2 = yi[i], yi[i+1]
if y1 > y2:
y1, y2 = y2, y1
xp = np.linspace(x1, x2, 10)
yp = p(xp)
assert_(((y1 <= yp) & (yp <= y2)).all())
def test_monotone(self):
        # PCHIP should preserve monotonicity
p, xi, yi = self._make_random()
for i in range(len(xi)-1):
x1, x2 = xi[i], xi[i+1]
y1, y2 = yi[i], yi[i+1]
xp = np.linspace(x1, x2, 10)
yp = p(xp)
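            # successive samples must move in the same direction as (y1, y2)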
            assert_(((y2-y1) * (yp[1:] - yp[:-1]) > 0).all())
def test_cast(self):
# regression test for integer input data, see gh-3453
data = np.array([[0, 4, 12, 27, 47, 60, 79, 87, 99, 100],
[-33, -33, -19, -2, 12, 26, 38, 45, 53, 55]])
xx = np.arange(100)
curve = pchip(data[0], data[1])(xx)
data1 = data * 1.0
curve1 = pchip(data1[0], data1[1])(xx)
assert_allclose(curve, curve1, atol=1e-14, rtol=1e-14)
if __name__ == '__main__':
run_module_suite()
| apache-2.0 |
tombstone/models | official/nlp/transformer/beam_search_v1_test.py | 2 | 3370 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test beam search helper methods."""
import tensorflow.compat.v1 as tf
from official.nlp.transformer import beam_search_v1 as beam_search
class BeamSearchHelperTests(tf.test.TestCase):
def setUp(self):
super(BeamSearchHelperTests, self).setUp()
tf.compat.v1.disable_eager_execution()
def test_expand_to_beam_size(self):
x = tf.ones([7, 4, 2, 5])
x = beam_search._expand_to_beam_size(x, 3)
with self.session() as sess:
shape = sess.run(tf.shape(x))
self.assertAllEqual([7, 3, 4, 2, 5], shape)
def test_shape_list(self):
y = tf.compat.v1.placeholder(dtype=tf.int32, shape=[])
x = tf.ones([7, y, 2, 5])
shape = beam_search._shape_list(x)
self.assertIsInstance(shape[0], int)
self.assertIsInstance(shape[1], tf.Tensor)
self.assertIsInstance(shape[2], int)
self.assertIsInstance(shape[3], int)
def test_get_shape_keep_last_dim(self):
y = tf.constant(4.0)
x = tf.ones([7, tf.cast(tf.sqrt(y), tf.int32), 2, 5])
shape = beam_search._get_shape_keep_last_dim(x)
self.assertAllEqual([None, None, None, 5],
shape.as_list())
def test_flatten_beam_dim(self):
x = tf.ones([7, 4, 2, 5])
x = beam_search._flatten_beam_dim(x)
with self.session() as sess:
shape = sess.run(tf.shape(x))
self.assertAllEqual([28, 2, 5], shape)
def test_unflatten_beam_dim(self):
x = tf.ones([28, 2, 5])
x = beam_search._unflatten_beam_dim(x, 7, 4)
with self.session() as sess:
shape = sess.run(tf.shape(x))
self.assertAllEqual([7, 4, 2, 5], shape)
def test_gather_beams(self):
x = tf.reshape(tf.range(24), [2, 3, 4])
# x looks like: [[[ 0 1 2 3]
# [ 4 5 6 7]
# [ 8 9 10 11]]
#
# [[12 13 14 15]
# [16 17 18 19]
# [20 21 22 23]]]
y = beam_search._gather_beams(x, [[1, 2], [0, 2]], 2, 2)
with self.session() as sess:
y = sess.run(y)
self.assertAllEqual([[[4, 5, 6, 7],
[8, 9, 10, 11]],
[[12, 13, 14, 15],
[20, 21, 22, 23]]],
y)
def test_gather_topk_beams(self):
x = tf.reshape(tf.range(24), [2, 3, 4])
x_scores = [[0, 1, 1], [1, 0, 1]]
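    # the top-2 scores select beams 1 and 2 from the first batch entry and
    # beams 0 and 2 from the second, matching the expected gather below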
y = beam_search._gather_topk_beams(x, x_scores, 2, 2)
with self.session() as sess:
y = sess.run(y)
self.assertAllEqual([[[4, 5, 6, 7],
[8, 9, 10, 11]],
[[12, 13, 14, 15],
[20, 21, 22, 23]]],
y)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
karanisverma/flasktest | lib/flask/__init__.py | 345 | 1672 | # -*- coding: utf-8 -*-
"""
flask
~~~~~
A microframework based on Werkzeug. It's extensively documented
and follows best practice patterns.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.10'
# utilities we import from Werkzeug and Jinja2 that are unused
# in the module but are exported as part of the public interface.
from werkzeug.exceptions import abort
from werkzeug.utils import redirect
from jinja2 import Markup, escape
from .app import Flask, Request, Response
from .config import Config
from .helpers import url_for, flash, send_file, send_from_directory, \
get_flashed_messages, get_template_attribute, make_response, safe_join, \
stream_with_context
from .globals import current_app, g, request, session, _request_ctx_stack, \
_app_ctx_stack
from .ctx import has_request_context, has_app_context, \
after_this_request, copy_current_request_context
from .module import Module
from .blueprints import Blueprint
from .templating import render_template, render_template_string
# the signals
from .signals import signals_available, template_rendered, request_started, \
request_finished, got_request_exception, request_tearing_down, \
appcontext_tearing_down, appcontext_pushed, \
appcontext_popped, message_flashed
# We're not exposing the actual json module but a convenient wrapper around
# it.
from . import json
# This was the only thing that flask used to export at one point and it had
# a more generic name.
jsonify = json.jsonify
# backwards compat, goes away in 1.0
from .sessions import SecureCookieSession as Session
json_available = True
| apache-2.0 |
linovia/django-rest-framework | rest_framework/mixins.py | 20 | 3013 | """
Basic building blocks for generic class based views.
We don't bind behaviour to http method handlers yet,
which allows mixin classes to be composed in interesting ways.
"""
from __future__ import unicode_literals
from rest_framework import status
from rest_framework.response import Response
from rest_framework.settings import api_settings
class CreateModelMixin(object):
"""
Create a model instance.
"""
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
def perform_create(self, serializer):
serializer.save()
def get_success_headers(self, data):
try:
return {'Location': data[api_settings.URL_FIELD_NAME]}
except (TypeError, KeyError):
return {}
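    # For illustration (not part of the original module): when the serialized
    # data includes a 'url' field (the default URL_FIELD_NAME), the
    # 201 Created response carries it back as a Location header.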
class ListModelMixin(object):
"""
List a queryset.
"""
def list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
class RetrieveModelMixin(object):
"""
Retrieve a model instance.
"""
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance)
return Response(serializer.data)
class UpdateModelMixin(object):
"""
Update a model instance.
"""
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
instance = self.get_object()
serializer = self.get_serializer(instance, data=request.data, partial=partial)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
if getattr(instance, '_prefetched_objects_cache', None):
# If 'prefetch_related' has been applied to a queryset, we need to
# forcibly invalidate the prefetch cache on the instance.
instance._prefetched_objects_cache = {}
return Response(serializer.data)
def perform_update(self, serializer):
serializer.save()
def partial_update(self, request, *args, **kwargs):
kwargs['partial'] = True
return self.update(request, *args, **kwargs)
class DestroyModelMixin(object):
"""
Destroy a model instance.
"""
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
self.perform_destroy(instance)
return Response(status=status.HTTP_204_NO_CONTENT)
def perform_destroy(self, instance):
instance.delete()
| bsd-2-clause |
idlead/scikit-learn | examples/decomposition/plot_pca_iris.py | 29 | 1484 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=========================================================
PCA example with Iris Data-set
=========================================================
Principal Component Analysis applied to the Iris dataset.
See `here <http://en.wikipedia.org/wiki/Iris_flower_data_set>`_ for more
information on this dataset.
"""
print(__doc__)
# Code source: Gaël Varoquaux
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import decomposition
from sklearn import datasets
np.random.seed(5)
centers = [[1, 1], [-1, -1], [1, -1]]
iris = datasets.load_iris()
X = iris.data
y = iris.target
fig = plt.figure(1, figsize=(4, 3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
plt.cla()
pca = decomposition.PCA(n_components=3)
pca.fit(X)
X = pca.transform(X)
for name, label in [('Setosa', 0), ('Versicolour', 1), ('Virginica', 2)]:
ax.text3D(X[y == label, 0].mean(),
X[y == label, 1].mean() + 1.5,
X[y == label, 2].mean(), name,
horizontalalignment='center',
bbox=dict(alpha=.5, edgecolor='w', facecolor='w'))
# Reorder the labels to have colors matching the cluster results
y = np.choose(y, [1, 2, 0]).astype(np.float)
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral)
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
plt.show()
| bsd-3-clause |
hugsy/sstoper | misc/SSTP_Fuzzer.py | 1 | 2599 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# little fuzzer to test the Windows SSTP server
#
from binascii import hexlify
from socket import socket
from ssl import wrap_socket
from sys import argv
from random import randint
from struct import pack
if len(argv) != 2:
print("usage: python %s <target_ip_addr>" % argv[0])
exit(1)
HOST = argv[1]
PORT = 443 # SSTP default port
http_neg = """
SSTP_DUPLEX_POST /sra_{BA195980-CD49-458b-9E23-C84EE0ADCD75}/ HTTP/1.1\r
Host: %s\r
SSTPCORRELATIONID: {62DFA5C0-E2E0-FD50-D286B00}\r
Content-Length: 18446744073709551615\r
\r
""" % HOST
sock = socket()
sock.connect((HOST, PORT))
ssl_sock = wrap_socket(sock)
active = False
if ssl_sock is None:
print ("[!] Failed to create socket")
exit(1)
ssl_sock.write(http_neg)
data = ssl_sock.read()
if not "HTTP/1.1 200" in data:
print("[-] No SSTP service detected")
ssl_sock.close()
exit(0)
print ("[+] SSTP service found on %s:%d, starting dumb fuzzing..." % (HOST, PORT))
class SSTP_Packet :
def __init__(self):
self.fields = [ ("version", 1, 0),
("reserved_ctrl", 1, 0),
("pktlen", 2, 0) ]
    def fuzz(self, field):
        for i, (name, size, value) in enumerate(self.fields):
            if name == field:
                # print ("Fuzzing field '%s' on %d bits" % (name, size*8))
                # write the fuzzed value back so send() actually uses it,
                # drawing from the full size*8-bit range of the field
                self.fields[i] = (name, size, randint(0, 2**(size*8) - 1))
    def send(self, sock):
        payload = ""
        for i, (name, size, value) in enumerate(self.fields):
            self.fuzz(name)
            name, size, value = self.fields[i]  # re-read the freshly fuzzed value
            if size == 1: l = "B"
            elif size == 2: l = "H"
            elif size == 4: l = "I"
            else:
                continue
            payload += pack(">%c" % l, value)
        sock.write(payload)
# print ("Fuzzed packet sent")
class SSTP_Control_Packet(SSTP_Packet):
def __init__(self):
SSTP_Packet.__init__(self)
self.fields += [ ("msg_type", 2, 0),
("num_attr", 2, 0) ]
def add_attribute(self):
self.fields += [ ("attr_reserved", 1, 0),
("attr_id", 1, 0),
("attr_size", 2, 0),
("attr_value", 4, 0)]
try:
while True:
sstp = SSTP_Control_Packet()
sstp.send(ssl_sock)
res = ssl_sock.read()
if len(res) :
print hexlify(res)
except KeyboardInterrupt, ki:
print ("Stopping")
except Exception, e:
print e
    # with open("crash.0", "a+") as f:
# f.write(sstp)
exit(0)
| gpl-2.0 |
sundresh/organize_photos | helpers/find_processed_photos.py | 1 | 1081 | #!/usr/bin/env python
from collections import defaultdict
import exifread, os, PIL.Image, sys
DATE_TIME = 'Image DateTime'
DATE_TIME_ORIGINAL = 'EXIF DateTimeOriginal'
SOFTWARE = 'Image Software'
num_bytes = 0
for (dirpath, dirnames, filenames) in os.walk(sys.argv[1]):
for filename in filenames:
filepath = os.path.join(dirpath, filename)
upper = filename.upper()
extension = upper[upper.rindex('.')+1:] if ('.' in upper) else ''
if extension in ['JPG', 'JPEG', 'AVI', 'MOV', 'MP4', 'PNG', 'GIF', 'TIF', 'TIFF']:
f = open(filepath, 'rb')
tags = exifread.process_file(f, details=False)#, stop_tag='DateTimeOriginal')
f.close()
if DATE_TIME in tags and DATE_TIME_ORIGINAL in tags and SOFTWARE in tags:
date_time_original = str(tags[DATE_TIME_ORIGINAL])
date_time = str(tags[DATE_TIME])
software = str(tags[SOFTWARE])
if date_time_original != date_time and software == 'QuickTime 7.5':
print '%s : %s vs. %s' % (filepath, date_time_original, date_time)
num_bytes += os.stat(os.path.join(dirpath, filename)).st_size
print num_bytes
| agpl-3.0 |
HKUST-SING/tensorflow | tensorflow/python/util/keyword_args_test.py | 163 | 1804 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keyword args tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.platform import test
from tensorflow.python.util import keyword_args
class KeywordArgsTest(test.TestCase):
def test_keyword_args_only(self):
def func_without_decorator(a, b):
return a + b
@keyword_args.keyword_args_only
def func_with_decorator(a, b):
return func_without_decorator(a, b)
self.assertEqual(3, func_without_decorator(1, 2))
self.assertEqual(3, func_without_decorator(a=1, b=2))
self.assertEqual(3, func_with_decorator(a=1, b=2))
# Providing non-keyword args should fail.
with self.assertRaisesRegexp(
ValueError, "Must use keyword args to call func_with_decorator."):
self.assertEqual(3, func_with_decorator(1, 2))
# Partially providing keyword args should fail.
with self.assertRaisesRegexp(
ValueError, "Must use keyword args to call func_with_decorator."):
self.assertEqual(3, func_with_decorator(1, b=2))
if __name__ == "__main__":
test.main()
| apache-2.0 |
treedledee/flask | examples/minitwit/minitwit.py | 156 | 8492 | # -*- coding: utf-8 -*-
"""
MiniTwit
~~~~~~~~
A microblogging application written with Flask and sqlite3.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import time
from sqlite3 import dbapi2 as sqlite3
from hashlib import md5
from datetime import datetime
from flask import Flask, request, session, url_for, redirect, \
render_template, abort, g, flash, _app_ctx_stack
from werkzeug import check_password_hash, generate_password_hash
# configuration
DATABASE = '/tmp/minitwit.db'
PER_PAGE = 30
DEBUG = True
SECRET_KEY = 'development key'
# create our little application :)
app = Flask(__name__)
app.config.from_object(__name__)
app.config.from_envvar('MINITWIT_SETTINGS', silent=True)
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
top = _app_ctx_stack.top
if not hasattr(top, 'sqlite_db'):
top.sqlite_db = sqlite3.connect(app.config['DATABASE'])
top.sqlite_db.row_factory = sqlite3.Row
return top.sqlite_db
@app.teardown_appcontext
def close_database(exception):
"""Closes the database again at the end of the request."""
top = _app_ctx_stack.top
if hasattr(top, 'sqlite_db'):
top.sqlite_db.close()
def init_db():
"""Initializes the database."""
db = get_db()
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
@app.cli.command('initdb')
def initdb_command():
"""Creates the database tables."""
init_db()
print('Initialized the database.')
def query_db(query, args=(), one=False):
"""Queries the database and returns a list of dictionaries."""
cur = get_db().execute(query, args)
rv = cur.fetchall()
return (rv[0] if rv else None) if one else rv
def get_user_id(username):
"""Convenience method to look up the id for a username."""
rv = query_db('select user_id from user where username = ?',
[username], one=True)
return rv[0] if rv else None
def format_datetime(timestamp):
"""Format a timestamp for display."""
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d @ %H:%M')
def gravatar_url(email, size=80):
"""Return the gravatar image for the given email address."""
return 'http://www.gravatar.com/avatar/%s?d=identicon&s=%d' % \
(md5(email.strip().lower().encode('utf-8')).hexdigest(), size)
@app.before_request
def before_request():
g.user = None
if 'user_id' in session:
g.user = query_db('select * from user where user_id = ?',
[session['user_id']], one=True)
@app.route('/')
def timeline():
"""Shows a users timeline or if no user is logged in it will
redirect to the public timeline. This timeline shows the user's
messages as well as all the messages of followed users.
"""
if not g.user:
return redirect(url_for('public_timeline'))
return render_template('timeline.html', messages=query_db('''
select message.*, user.* from message, user
where message.author_id = user.user_id and (
user.user_id = ? or
user.user_id in (select whom_id from follower
where who_id = ?))
order by message.pub_date desc limit ?''',
[session['user_id'], session['user_id'], PER_PAGE]))
@app.route('/public')
def public_timeline():
"""Displays the latest messages of all users."""
return render_template('timeline.html', messages=query_db('''
select message.*, user.* from message, user
where message.author_id = user.user_id
order by message.pub_date desc limit ?''', [PER_PAGE]))
@app.route('/<username>')
def user_timeline(username):
"""Display's a users tweets."""
profile_user = query_db('select * from user where username = ?',
[username], one=True)
if profile_user is None:
abort(404)
followed = False
if g.user:
followed = query_db('''select 1 from follower where
follower.who_id = ? and follower.whom_id = ?''',
[session['user_id'], profile_user['user_id']],
one=True) is not None
return render_template('timeline.html', messages=query_db('''
select message.*, user.* from message, user where
user.user_id = message.author_id and user.user_id = ?
order by message.pub_date desc limit ?''',
[profile_user['user_id'], PER_PAGE]), followed=followed,
profile_user=profile_user)
@app.route('/<username>/follow')
def follow_user(username):
"""Adds the current user as follower of the given user."""
if not g.user:
abort(401)
whom_id = get_user_id(username)
if whom_id is None:
abort(404)
db = get_db()
db.execute('insert into follower (who_id, whom_id) values (?, ?)',
[session['user_id'], whom_id])
db.commit()
flash('You are now following "%s"' % username)
return redirect(url_for('user_timeline', username=username))
@app.route('/<username>/unfollow')
def unfollow_user(username):
"""Removes the current user as follower of the given user."""
if not g.user:
abort(401)
whom_id = get_user_id(username)
if whom_id is None:
abort(404)
db = get_db()
db.execute('delete from follower where who_id=? and whom_id=?',
[session['user_id'], whom_id])
db.commit()
flash('You are no longer following "%s"' % username)
return redirect(url_for('user_timeline', username=username))
@app.route('/add_message', methods=['POST'])
def add_message():
"""Registers a new message for the user."""
if 'user_id' not in session:
abort(401)
if request.form['text']:
db = get_db()
db.execute('''insert into message (author_id, text, pub_date)
values (?, ?, ?)''', (session['user_id'], request.form['text'],
int(time.time())))
db.commit()
flash('Your message was recorded')
return redirect(url_for('timeline'))
@app.route('/login', methods=['GET', 'POST'])
def login():
"""Logs the user in."""
if g.user:
return redirect(url_for('timeline'))
error = None
if request.method == 'POST':
user = query_db('''select * from user where
username = ?''', [request.form['username']], one=True)
if user is None:
error = 'Invalid username'
elif not check_password_hash(user['pw_hash'],
request.form['password']):
error = 'Invalid password'
else:
flash('You were logged in')
session['user_id'] = user['user_id']
return redirect(url_for('timeline'))
return render_template('login.html', error=error)
@app.route('/register', methods=['GET', 'POST'])
def register():
"""Registers the user."""
if g.user:
return redirect(url_for('timeline'))
error = None
if request.method == 'POST':
if not request.form['username']:
error = 'You have to enter a username'
elif not request.form['email'] or \
'@' not in request.form['email']:
error = 'You have to enter a valid email address'
elif not request.form['password']:
error = 'You have to enter a password'
elif request.form['password'] != request.form['password2']:
error = 'The two passwords do not match'
elif get_user_id(request.form['username']) is not None:
error = 'The username is already taken'
else:
db = get_db()
db.execute('''insert into user (
username, email, pw_hash) values (?, ?, ?)''',
[request.form['username'], request.form['email'],
generate_password_hash(request.form['password'])])
db.commit()
flash('You were successfully registered and can login now')
return redirect(url_for('login'))
return render_template('register.html', error=error)
@app.route('/logout')
def logout():
"""Logs the user out."""
flash('You were logged out')
session.pop('user_id', None)
return redirect(url_for('public_timeline'))
# add some filters to jinja
app.jinja_env.filters['datetimeformat'] = format_datetime
app.jinja_env.filters['gravatar'] = gravatar_url
| bsd-3-clause |
burntcustard/DeskBot-Zero | src/Python/infrared.py | 1 | 2009 | #! /usr/bin/env python
# coding: utf8
"""
Test code for 4tronix Picon Zero to work with an
analog Infrared Distance Sensor (e.g. GP2Y0A21).
Currently just prints the signal from an analog pin.
#-----------------------------------------------------------------
# GP2Y0A21 info:
# Datasheet: http://www.robot-electronics.co.uk/files/gp2y0a21.pdf
# PiconZero input is 0-5v, 0 - 1023 readings.
# Sensor is 0-3.3v, actually reads ~10 (far) to ~690 at 10cm.
#-----------------------------------------------------------------
"""
import time
import sys
sys.path.insert(1, "../../lib/PiconZero/Python")
import piconzero as pz
# Ratio between 0-100% indicating reflectivity of the observed surface
# TODO: Change from constant to a variable based off camera input.
REFLECTIVE_RATIO = 100
IR_PIN = 3 # The pin number used to connect the infrared sensor
def init():
"""Initializes the infrared sensor on the Picon Zero and and print a reading."""
# pz.init()
pz.setInputConfig(IR_PIN, 1) # Set input pin to analog
time.sleep(0.5) # Wait ½ a second to ensure pin is set correctly
print "Initialized irDigital, initial reading:", analogRead()
def analogRead():
    """Return the raw 0-1023 analog reading from the IR pin."""
    return pz.readInput(IR_PIN)
def read():
    """Alias for analogRead()."""
    return analogRead()
def continuousRead():
"""Print infrared reading once a second until exited."""
try:
while True:
ir = pz.readInput(IR_PIN)
print ir
time.sleep(1)
except KeyboardInterrupt:
print()
finally:
pz.cleanup()
def digitalRead():
"""Returns true if the ir output is higher than threshold value."""
# indicating ~80% reflectivity ~30cm away)
return pz.readInput(IR_PIN) > 300
def readWithDelay(delayTime = 0.01):
"""Get ir reading with a delay just before and after to ensure accuracy."""
time.sleep(delayTime)
value = read()
time.sleep(delayTime)
return value
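# Illustrative usage of the helpers above (the threshold and delay are this
# module's own assumptions, not calibrated values):
#   init()                      # configure the analog pin and print a reading
#   if digitalRead():           # reading above ~300, i.e. something is close
#       print "obstacle within ~30cm"
#   print readWithDelay(0.05)   # settled 0-1023 reading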
def cleanup():
"""Clears infrared and other Pizon Zero setups."""
pz.cleanup()
| mit |
paolodoz/timesheet | core/api/crud.py | 1 | 3803 | try:
from pymongo import MongoClient as Connection
except ImportError as e:
from pymongo import Connection
from core.validation.validation import TSValidationError, validate_request, recursive_merge, update_password_salt_user_list, validate_json_list, sanitize_objectify_json, stringify_objectid_cursor, stringify_objectid_list
from core.validation.permissions import check_get_permissions, check_upsert_permissions, check_remove_permissions
from bson.objectid import ObjectId
from core.config import collections, conf_mongodb, conf_auth, conf_auth_db
import string, hashlib, random, types, cherrypy, logging
connection = Connection(conf_mongodb['hostname'], conf_mongodb['port'])
db = connection[conf_mongodb['db']]
db_log_severity = logging.INFO
def get(collection, criteria_projection_order):
"""Get selected records from collection, and return it as json
Called by GET /<collection>/"""
# Check request format
validate_request('get', criteria_projection_order)
# Check permissions
check_get_permissions(collection, criteria_projection_order[0], criteria_projection_order[1], criteria_projection_order[2])
# Sanify criteria (to match with sanified documents)
sanified_criteria = sanitize_objectify_json(criteria_projection_order[0])
cherrypy.log('%s' % (criteria_projection_order), context = 'TS.GET.%s.criteria_projection_order' % collection, severity = db_log_severity)
# Request
return stringify_objectid_cursor(db[collection].find( { '$query' : sanified_criteria, '$orderby' : criteria_projection_order[2] }, criteria_projection_order[1]))
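# A hypothetical criteria/projection/order triple for get() (the collection and
# field names are illustrative, not from the project's schema):
#   get('users', [{'active': True}, {'name': 1, '_id': 1}, {'name': 1}])
# filters active users, projects name and _id, and sorts by name ascending.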
def remove(collection, criterias = []):
"""Remove selected records from collection
Called by POST /remove/<collection>"""
# Check request format
validate_request('remove', criterias)
# Check permissions before requests
for criteria in criterias:
check_remove_permissions(collection, criteria)
# Sanify criteria (to match with sanified documents)
sanified_criterias = sanitize_objectify_json(criterias)
cherrypy.log('%s' % (criterias), context = 'TS.REMOVE.%s.criteria' % collection, severity = db_log_severity)
# Requests
for criteria in sanified_criterias:
db[collection].remove(criteria)
def add(collection, documents_list):
"""Insert new record list to collection
Called by POST /add/<collection>/"""
# Check request format
validate_request('add', documents_list)
validate_json_list(collection, documents_list)
for document in documents_list:
check_upsert_permissions('add', collection, document)
# Sanify documents
sanified_documents_list = sanitize_objectify_json(documents_list)
# Eventually rewrite password and salt
update_password_salt_user_list(collection, sanified_documents_list)
cherrypy.log('%s' % (sanified_documents_list), context = 'TS.ADD.%s.documents' % collection, severity = db_log_severity)
# Request
return stringify_objectid_list(db[collection].insert(sanified_documents_list))
def update(collection, document):
"""Update an inserted record
Called by POST /update/<collection>/"""
# Check request format
validate_request('update', document)
validate_json_list(collection, [ document ])
check_upsert_permissions('update', collection, document)
sanified_document = sanitize_objectify_json(document)
cherrypy.log('%s' % (sanified_document), context = 'TS.UPDATE.%s.document' % collection, severity = db_log_severity)
db_collection = db[collection].find_one({ '_id' : sanified_document['_id'] })
db[collection].update({ '_id' : sanified_document['_id'] }, recursive_merge(db_collection, sanified_document))
| gpl-2.0 |
Scapogo/zipline | zipline/utils/run_algo.py | 1 | 13108 | import os
import re
from runpy import run_path
import sys
import warnings
from functools import partial
import pandas as pd
import click
try:
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter
PYGMENTS = True
except ImportError:
PYGMENTS = False
from toolz import valfilter, concatv
from zipline.algorithm import TradingAlgorithm
from zipline.algorithm_live import LiveTradingAlgorithm
from zipline.data.bundles.core import load
from zipline.data.data_portal import DataPortal
from zipline.data.data_portal_live import DataPortalLive
from zipline.finance.trading import TradingEnvironment
from zipline.pipeline.data import USEquityPricing
from zipline.pipeline.loaders import USEquityPricingLoader
from zipline.utils.calendars import get_calendar
from zipline.utils.factory import create_simulation_parameters
import zipline.utils.paths as pth
class _RunAlgoError(click.ClickException, ValueError):
"""Signal an error that should have a different message if invoked from
the cli.
Parameters
----------
pyfunc_msg : str
The message that will be shown when called as a python function.
cmdline_msg : str
The message that will be shown on the command line.
"""
exit_code = 1
def __init__(self, pyfunc_msg, cmdline_msg):
super(_RunAlgoError, self).__init__(cmdline_msg)
self.pyfunc_msg = pyfunc_msg
def __str__(self):
return self.pyfunc_msg
def _run(handle_data,
initialize,
before_trading_start,
analyze,
algofile,
algotext,
defines,
data_frequency,
capital_base,
data,
bundle,
bundle_timestamp,
start,
end,
output,
print_algo,
local_namespace,
environ,
broker,
state_filename,
realtime_bar_target):
"""Run a backtest for the given algorithm.
This is shared between the cli and :func:`zipline.run_algo`.
"""
if algotext is not None:
if local_namespace:
ip = get_ipython() # noqa
namespace = ip.user_ns
else:
namespace = {}
for assign in defines:
try:
name, value = assign.split('=', 2)
except ValueError:
raise ValueError(
'invalid define %r, should be of the form name=value' %
assign,
)
try:
# evaluate in the same namespace so names may refer to
                # each other
namespace[name] = eval(value, namespace)
except Exception as e:
raise ValueError(
'failed to execute definition for name %r: %s' % (name, e),
)
elif defines:
raise _RunAlgoError(
'cannot pass define without `algotext`',
"cannot pass '-D' / '--define' without '-t' / '--algotext'",
)
else:
namespace = {}
if algofile is not None:
algotext = algofile.read()
if print_algo:
if PYGMENTS:
highlight(
algotext,
PythonLexer(),
TerminalFormatter(),
outfile=sys.stdout,
)
else:
click.echo(algotext)
if bundle is not None:
bundle_data = load(
bundle,
environ,
bundle_timestamp,
)
prefix, connstr = re.split(
r'sqlite:///',
str(bundle_data.asset_finder.engine.url),
maxsplit=1,
)
if prefix:
raise ValueError(
"invalid url %r, must begin with 'sqlite:///'" %
str(bundle_data.asset_finder.engine.url),
)
env = TradingEnvironment(asset_db_path=connstr, environ=environ)
first_trading_day =\
bundle_data.equity_minute_bar_reader.first_trading_day
DataPortalClass = (partial(DataPortalLive, broker)
if broker
else DataPortal)
data = DataPortalClass(
env.asset_finder, get_calendar("NYSE"),
first_trading_day=first_trading_day,
equity_minute_reader=bundle_data.equity_minute_bar_reader,
equity_daily_reader=bundle_data.equity_daily_bar_reader,
adjustment_reader=bundle_data.adjustment_reader
)
pipeline_loader = USEquityPricingLoader(
bundle_data.equity_daily_bar_reader,
bundle_data.adjustment_reader,
)
def choose_loader(column):
if column in USEquityPricing.columns:
return pipeline_loader
raise ValueError(
"No PipelineLoader registered for column %s." % column
)
else:
env = TradingEnvironment(environ=environ)
choose_loader = None
emission_rate = 'daily'
if broker:
emission_rate = 'minute'
start = pd.Timestamp.utcnow()
end = start + pd.Timedelta('2 day')
TradingAlgorithmClass = (partial(LiveTradingAlgorithm,
broker=broker,
state_filename=state_filename,
realtime_bar_target=realtime_bar_target)
if broker else TradingAlgorithm)
perf = TradingAlgorithmClass(
namespace=namespace,
env=env,
get_pipeline_loader=choose_loader,
sim_params=create_simulation_parameters(
start=start,
end=end,
capital_base=capital_base,
emission_rate=emission_rate,
data_frequency=data_frequency,
),
**{
'initialize': initialize,
'handle_data': handle_data,
'before_trading_start': before_trading_start,
'analyze': analyze,
} if algotext is None else {
'algo_filename': getattr(algofile, 'name', '<algorithm>'),
'script': algotext,
}
).run(
data,
overwrite_sim_params=False,
)
if output == '-':
click.echo(str(perf))
elif output != os.devnull: # make the zipline magic not write any data
perf.to_pickle(output)
return perf
# All of the loaded extensions. We don't want to load an extension twice.
_loaded_extensions = set()
def load_extensions(default, extensions, strict, environ, reload=False):
"""Load all of the given extensions. This should be called by run_algo
or the cli.
Parameters
----------
default : bool
        Load the default extension (~/.zipline/extension.py)?
    extensions : iterable[str]
The paths to the extensions to load. If the path ends in ``.py`` it is
treated as a script and executed. If it does not end in ``.py`` it is
treated as a module to be imported.
strict : bool
Should failure to load an extension raise. If this is false it will
still warn.
environ : mapping
The environment to use to find the default extension path.
reload : bool, optional
Reload any extensions that have already been loaded.
"""
if default:
default_extension_path = pth.default_extension(environ=environ)
pth.ensure_file(default_extension_path)
# put the default extension first so other extensions can depend on
# the order they are loaded
extensions = concatv([default_extension_path], extensions)
for ext in extensions:
if ext in _loaded_extensions and not reload:
continue
try:
            # load all of the zipline extensions
if ext.endswith('.py'):
run_path(ext, run_name='<extension>')
else:
__import__(ext)
except Exception as e:
if strict:
# if `strict` we should raise the actual exception and fail
raise
# without `strict` we should just log the failure
warnings.warn(
'Failed to load extension: %r\n%s' % (ext, e),
stacklevel=2
)
else:
_loaded_extensions.add(ext)
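# For example (the paths are illustrative), this loads the default extension
# plus one script and one importable module, warning rather than raising on a
# failed load:
#   load_extensions(True, ['my_bundles.py', 'myquant.ext'], False, os.environ)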
def run_algorithm(start,
end,
initialize,
capital_base,
handle_data=None,
before_trading_start=None,
analyze=None,
data_frequency='daily',
data=None,
bundle=None,
bundle_timestamp=None,
default_extension=True,
extensions=(),
strict_extensions=True,
environ=os.environ,
live_trading=False,
tws_uri=None):
"""Run a trading algorithm.
Parameters
----------
start : datetime
The start date of the backtest.
end : datetime
        The end date of the backtest.
initialize : callable[context -> None]
The initialize function to use for the algorithm. This is called once
        at the very beginning of the backtest and should be used to set up
any state needed by the algorithm.
capital_base : float
The starting capital for the backtest.
handle_data : callable[(context, BarData) -> None], optional
The handle_data function to use for the algorithm. This is called
every minute when ``data_frequency == 'minute'`` or every day
when ``data_frequency == 'daily'``.
before_trading_start : callable[(context, BarData) -> None], optional
The before_trading_start function for the algorithm. This is called
once before each trading day (after initialize on the first day).
analyze : callable[(context, pd.DataFrame) -> None], optional
The analyze function to use for the algorithm. This function is called
once at the end of the backtest and is passed the context and the
performance data.
data_frequency : {'daily', 'minute'}, optional
The data frequency to run the algorithm at.
data : pd.DataFrame, pd.Panel, or DataPortal, optional
The ohlcv data to run the backtest with.
This argument is mutually exclusive with:
``bundle``
``bundle_timestamp``
bundle : str, optional
The name of the data bundle to use to load the data to run the backtest
with. This defaults to 'quantopian-quandl'.
This argument is mutually exclusive with ``data``.
bundle_timestamp : datetime, optional
The datetime to lookup the bundle data for. This defaults to the
current time.
This argument is mutually exclusive with ``data``.
default_extension : bool, optional
Should the default zipline extension be loaded. This is found at
``$ZIPLINE_ROOT/extension.py``
extensions : iterable[str], optional
The names of any other extensions to load. Each element may either be
a dotted module path like ``a.b.c`` or a path to a python file ending
in ``.py`` like ``a/b/c.py``.
strict_extensions : bool, optional
Should the run fail if any extensions fail to load. If this is false,
a warning will be raised instead.
environ : mapping[str -> str], optional
The os environment to use. Many extensions use this to get parameters.
This defaults to ``os.environ``.
Returns
-------
perf : pd.DataFrame
The daily performance of the algorithm.
See Also
--------
zipline.data.bundles.bundles : The available data bundles.
"""
load_extensions(default_extension, extensions, strict_extensions, environ)
non_none_data = valfilter(bool, {
'data': data is not None,
'bundle': bundle is not None,
})
if not non_none_data:
# if neither data nor bundle are passed use 'quantopian-quandl'
bundle = 'quantopian-quandl'
elif len(non_none_data) != 1:
raise ValueError(
'must specify one of `data`, `data_portal`, or `bundle`,'
' got: %r' % non_none_data,
)
elif 'bundle' not in non_none_data and bundle_timestamp is not None:
raise ValueError(
'cannot specify `bundle_timestamp` without passing `bundle`',
)
return _run(
handle_data=handle_data,
initialize=initialize,
before_trading_start=before_trading_start,
analyze=analyze,
algofile=None,
algotext=None,
defines=(),
data_frequency=data_frequency,
capital_base=capital_base,
data=data,
bundle=bundle,
bundle_timestamp=bundle_timestamp,
start=start,
end=end,
output=os.devnull,
print_algo=False,
local_namespace=False,
environ=environ,
broker=None,
state_filename=None,
realtime_bar_target=None
)
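# A minimal usage sketch (assumes an ingested 'quantopian-quandl' bundle; the
# algorithm body is illustrative, not part of this module):
#
#   import pandas as pd
#   from zipline.api import order_target_percent, symbol
#
#   def initialize(context):
#       context.asset = symbol('AAPL')
#
#   def handle_data(context, data):
#       order_target_percent(context.asset, 1.0)
#
#   perf = run_algorithm(start=pd.Timestamp('2014-01-02', tz='utc'),
#                        end=pd.Timestamp('2014-12-31', tz='utc'),
#                        initialize=initialize,
#                        handle_data=handle_data,
#                        capital_base=100000)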
| apache-2.0 |
hkchenhongyi/django | django/forms/utils.py | 16 | 5976 | from __future__ import unicode_literals
import json
import sys
from django.conf import settings
from django.core.exceptions import ValidationError # backwards compatibility
from django.utils import six, timezone
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, format_html, format_html_join, html_safe
from django.utils.translation import ugettext_lazy as _
try:
from collections import UserList
except ImportError: # Python 2
from UserList import UserList
def flatatt(attrs):
"""
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. In the case of a boolean value, the key will appear
without a value. It is assumed that the keys do not need to be
XML-escaped. If the passed dictionary is empty, then return an empty
string.
The result is passed through 'mark_safe' (by way of 'format_html_join').
"""
key_value_attrs = []
boolean_attrs = []
for attr, value in attrs.items():
if isinstance(value, bool):
if value:
boolean_attrs.append((attr,))
else:
key_value_attrs.append((attr, value))
return (
format_html_join('', ' {}="{}"', sorted(key_value_attrs)) +
format_html_join('', ' {}', sorted(boolean_attrs))
)
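# For illustration (not in the original module), flatatt renders boolean
# attributes without a value and sorts each group:
#   flatatt({'id': 'main', 'disabled': True}) == ' id="main" disabled'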
@html_safe
@python_2_unicode_compatible
class ErrorDict(dict):
"""
A collection of errors that knows how to display itself in various formats.
The dictionary keys are the field names, and the values are the errors.
"""
def as_data(self):
return {f: e.as_data() for f, e in self.items()}
def as_json(self, escape_html=False):
return json.dumps({f: e.get_json_data(escape_html) for f, e in self.items()})
def as_ul(self):
if not self:
return ''
return format_html(
'<ul class="errorlist">{}</ul>',
format_html_join('', '<li>{}{}</li>', ((k, force_text(v)) for k, v in self.items()))
)
def as_text(self):
output = []
for field, errors in self.items():
output.append('* %s' % field)
output.append('\n'.join(' * %s' % e for e in errors))
return '\n'.join(output)
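    # Example shape of as_text() output (field and message are illustrative):
    #   * email
    #     * Enter a valid email address.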
def __str__(self):
return self.as_ul()
@html_safe
@python_2_unicode_compatible
class ErrorList(UserList, list):
"""
A collection of errors that knows how to display itself in various formats.
"""
def __init__(self, initlist=None, error_class=None):
super(ErrorList, self).__init__(initlist)
if error_class is None:
self.error_class = 'errorlist'
else:
self.error_class = 'errorlist {}'.format(error_class)
def as_data(self):
return ValidationError(self.data).error_list
def get_json_data(self, escape_html=False):
errors = []
for error in self.as_data():
message = list(error)[0]
errors.append({
'message': escape(message) if escape_html else message,
'code': error.code or '',
})
return errors
def as_json(self, escape_html=False):
return json.dumps(self.get_json_data(escape_html))
def as_ul(self):
if not self.data:
return ''
return format_html(
'<ul class="{}">{}</ul>',
self.error_class,
format_html_join('', '<li>{}</li>', ((force_text(e),) for e in self))
)
def as_text(self):
return '\n'.join('* %s' % e for e in self)
def __str__(self):
return self.as_ul()
def __repr__(self):
return repr(list(self))
def __contains__(self, item):
return item in list(self)
def __eq__(self, other):
return list(self) == other
def __ne__(self, other):
return list(self) != other
def __getitem__(self, i):
error = self.data[i]
if isinstance(error, ValidationError):
return list(error)[0]
return force_text(error)
def __reduce_ex__(self, *args, **kwargs):
# The `list` reduce function returns an iterator as the fourth element
# that is normally used for repopulating. Since we only inherit from
# `list` for `isinstance` backward compatibility (Refs #17413) we
# nullify this iterator as it would otherwise result in duplicate
# entries. (Refs #23594)
info = super(UserList, self).__reduce_ex__(*args, **kwargs)
return info[:3] + (None, None)
# Utilities for time zone support in DateTimeField et al.
def from_current_timezone(value):
"""
When time zone support is enabled, convert naive datetimes
entered in the current time zone to aware datetimes.
"""
if settings.USE_TZ and value is not None and timezone.is_naive(value):
current_timezone = timezone.get_current_timezone()
try:
return timezone.make_aware(value, current_timezone)
except Exception:
message = _(
'%(datetime)s couldn\'t be interpreted '
'in time zone %(current_timezone)s; it '
'may be ambiguous or it may not exist.'
)
params = {'datetime': value, 'current_timezone': current_timezone}
six.reraise(ValidationError, ValidationError(
message,
code='ambiguous_timezone',
params=params,
), sys.exc_info()[2])
return value
def to_current_timezone(value):
"""
When time zone support is enabled, convert aware datetimes
to naive datetimes in the current time zone for display.
"""
if settings.USE_TZ and value is not None and timezone.is_aware(value):
current_timezone = timezone.get_current_timezone()
return timezone.make_naive(value, current_timezone)
return value
| bsd-3-clause |
40223149/2015springfinal | static/Brython3.1.0-20150301-090019/Lib/unittest/test/test_runner.py | 785 | 10718 | import io
import os
import sys
import pickle
import subprocess
import unittest
from .support import LoggingResult, ResultWithNoStartTestRunStopTestRun
class TestCleanUp(unittest.TestCase):
def testCleanUp(self):
class TestableTest(unittest.TestCase):
def testNothing(self):
pass
test = TestableTest('testNothing')
self.assertEqual(test._cleanups, [])
cleanups = []
def cleanup1(*args, **kwargs):
cleanups.append((1, args, kwargs))
def cleanup2(*args, **kwargs):
cleanups.append((2, args, kwargs))
test.addCleanup(cleanup1, 1, 2, 3, four='hello', five='goodbye')
test.addCleanup(cleanup2)
self.assertEqual(test._cleanups,
[(cleanup1, (1, 2, 3), dict(four='hello', five='goodbye')),
(cleanup2, (), {})])
self.assertTrue(test.doCleanups())
self.assertEqual(cleanups, [(2, (), {}), (1, (1, 2, 3), dict(four='hello', five='goodbye'))])
def testCleanUpWithErrors(self):
class TestableTest(unittest.TestCase):
def testNothing(self):
pass
class MockOutcome(object):
success = True
errors = []
test = TestableTest('testNothing')
test._outcomeForDoCleanups = MockOutcome
exc1 = Exception('foo')
exc2 = Exception('bar')
def cleanup1():
raise exc1
def cleanup2():
raise exc2
test.addCleanup(cleanup1)
test.addCleanup(cleanup2)
self.assertFalse(test.doCleanups())
self.assertFalse(MockOutcome.success)
(Type1, instance1, _), (Type2, instance2, _) = reversed(MockOutcome.errors)
self.assertEqual((Type1, instance1), (Exception, exc1))
self.assertEqual((Type2, instance2), (Exception, exc2))
def testCleanupInRun(self):
blowUp = False
ordering = []
class TestableTest(unittest.TestCase):
def setUp(self):
ordering.append('setUp')
if blowUp:
raise Exception('foo')
def testNothing(self):
ordering.append('test')
def tearDown(self):
ordering.append('tearDown')
test = TestableTest('testNothing')
def cleanup1():
ordering.append('cleanup1')
def cleanup2():
ordering.append('cleanup2')
test.addCleanup(cleanup1)
test.addCleanup(cleanup2)
def success(some_test):
self.assertEqual(some_test, test)
ordering.append('success')
result = unittest.TestResult()
result.addSuccess = success
test.run(result)
self.assertEqual(ordering, ['setUp', 'test', 'tearDown',
'cleanup2', 'cleanup1', 'success'])
blowUp = True
ordering = []
test = TestableTest('testNothing')
test.addCleanup(cleanup1)
test.run(result)
self.assertEqual(ordering, ['setUp', 'cleanup1'])
def testTestCaseDebugExecutesCleanups(self):
ordering = []
class TestableTest(unittest.TestCase):
def setUp(self):
ordering.append('setUp')
self.addCleanup(cleanup1)
def testNothing(self):
ordering.append('test')
def tearDown(self):
ordering.append('tearDown')
test = TestableTest('testNothing')
def cleanup1():
ordering.append('cleanup1')
test.addCleanup(cleanup2)
def cleanup2():
ordering.append('cleanup2')
test.debug()
self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup1', 'cleanup2'])
class Test_TextTestRunner(unittest.TestCase):
"""Tests for TextTestRunner."""
def test_init(self):
runner = unittest.TextTestRunner()
self.assertFalse(runner.failfast)
self.assertFalse(runner.buffer)
self.assertEqual(runner.verbosity, 1)
self.assertEqual(runner.warnings, None)
self.assertTrue(runner.descriptions)
self.assertEqual(runner.resultclass, unittest.TextTestResult)
def testBufferAndFailfast(self):
class Test(unittest.TestCase):
def testFoo(self):
pass
result = unittest.TestResult()
runner = unittest.TextTestRunner(stream=io.StringIO(), failfast=True,
buffer=True)
# Use our result object
runner._makeResult = lambda: result
runner.run(Test('testFoo'))
self.assertTrue(result.failfast)
self.assertTrue(result.buffer)
def testRunnerRegistersResult(self):
class Test(unittest.TestCase):
def testFoo(self):
pass
originalRegisterResult = unittest.runner.registerResult
def cleanup():
unittest.runner.registerResult = originalRegisterResult
self.addCleanup(cleanup)
result = unittest.TestResult()
runner = unittest.TextTestRunner(stream=io.StringIO())
# Use our result object
runner._makeResult = lambda: result
self.wasRegistered = 0
def fakeRegisterResult(thisResult):
self.wasRegistered += 1
self.assertEqual(thisResult, result)
unittest.runner.registerResult = fakeRegisterResult
runner.run(unittest.TestSuite())
self.assertEqual(self.wasRegistered, 1)
def test_works_with_result_without_startTestRun_stopTestRun(self):
class OldTextResult(ResultWithNoStartTestRunStopTestRun):
separator2 = ''
def printErrors(self):
pass
class Runner(unittest.TextTestRunner):
def __init__(self):
super(Runner, self).__init__(io.StringIO())
def _makeResult(self):
return OldTextResult()
runner = Runner()
runner.run(unittest.TestSuite())
def test_startTestRun_stopTestRun_called(self):
class LoggingTextResult(LoggingResult):
separator2 = ''
def printErrors(self):
pass
class LoggingRunner(unittest.TextTestRunner):
def __init__(self, events):
super(LoggingRunner, self).__init__(io.StringIO())
self._events = events
def _makeResult(self):
return LoggingTextResult(self._events)
events = []
runner = LoggingRunner(events)
runner.run(unittest.TestSuite())
expected = ['startTestRun', 'stopTestRun']
self.assertEqual(events, expected)
def test_pickle_unpickle(self):
# Issue #7197: a TextTestRunner should be (un)pickleable. This is
# required by test_multiprocessing under Windows (in verbose mode).
stream = io.StringIO("foo")
runner = unittest.TextTestRunner(stream)
for protocol in range(2, pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(runner, protocol)
obj = pickle.loads(s)
# StringIO objects never compare equal, a cheap test instead.
self.assertEqual(obj.stream.getvalue(), stream.getvalue())
def test_resultclass(self):
def MockResultClass(*args):
return args
STREAM = object()
DESCRIPTIONS = object()
VERBOSITY = object()
runner = unittest.TextTestRunner(STREAM, DESCRIPTIONS, VERBOSITY,
resultclass=MockResultClass)
self.assertEqual(runner.resultclass, MockResultClass)
expectedresult = (runner.stream, DESCRIPTIONS, VERBOSITY)
self.assertEqual(runner._makeResult(), expectedresult)
def test_warnings(self):
"""
Check that warnings argument of TextTestRunner correctly affects the
behavior of the warnings.
"""
# see #10535 and the _test_warnings file for more information
def get_parse_out_err(p):
return [b.splitlines() for b in p.communicate()]
opts = dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=os.path.dirname(__file__))
ae_msg = b'Please use assertEqual instead.'
at_msg = b'Please use assertTrue instead.'
# no args -> all the warnings are printed, unittest warnings only once
p = subprocess.Popen([sys.executable, '_test_warnings.py'], **opts)
out, err = get_parse_out_err(p)
self.assertIn(b'OK', err)
# check that the total number of warnings in the output is correct
self.assertEqual(len(out), 12)
# check that the numbers of the different kind of warnings is correct
for msg in [b'dw', b'iw', b'uw']:
self.assertEqual(out.count(msg), 3)
for msg in [ae_msg, at_msg, b'rw']:
self.assertEqual(out.count(msg), 1)
args_list = (
# passing 'ignore' as warnings arg -> no warnings
[sys.executable, '_test_warnings.py', 'ignore'],
# -W doesn't affect the result if the arg is passed
[sys.executable, '-Wa', '_test_warnings.py', 'ignore'],
# -W affects the result if the arg is not passed
[sys.executable, '-Wi', '_test_warnings.py']
)
# in all these cases no warnings are printed
for args in args_list:
p = subprocess.Popen(args, **opts)
out, err = get_parse_out_err(p)
self.assertIn(b'OK', err)
self.assertEqual(len(out), 0)
# passing 'always' as warnings arg -> all the warnings printed,
# unittest warnings only once
p = subprocess.Popen([sys.executable, '_test_warnings.py', 'always'],
**opts)
out, err = get_parse_out_err(p)
self.assertIn(b'OK', err)
self.assertEqual(len(out), 14)
for msg in [b'dw', b'iw', b'uw', b'rw']:
self.assertEqual(out.count(msg), 3)
for msg in [ae_msg, at_msg]:
self.assertEqual(out.count(msg), 1)
def testStdErrLookedUpAtInstantiationTime(self):
# see issue 10786
old_stderr = sys.stderr
f = io.StringIO()
sys.stderr = f
try:
runner = unittest.TextTestRunner()
self.assertTrue(runner.stream.stream is f)
finally:
sys.stderr = old_stderr
def testSpecifiedStreamUsed(self):
# see issue 10786
f = io.StringIO()
runner = unittest.TextTestRunner(f)
self.assertTrue(runner.stream.stream is f)
| gpl-3.0 |
BaxterStockman/ansible-modules-core | network/dellos9/dellos9_facts.py | 19 | 17799 | #!/usr/bin/python
#
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
#
# Copyright (c) 2016 Dell Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = """
---
module: dellos9_facts
version_added: "2.2"
author: "Dhivya P (@dhivyap)"
short_description: Collect facts from remote devices running Dell OS9
description:
- Collects a base set of device facts from a remote device that
is running Dell OS9. This module prepends all of the
base network fact keys with C(ansible_net_<fact>). The facts
module will always collect a base set of facts from the device
and can enable or disable collection of additional facts.
extends_documentation_fragment: dellos9
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts collected
to a given subset. Possible values for this argument include
all, hardware, config, and interfaces. Can specify a list of
values to include a larger subset. Values can also be used
        with an initial C(!) to specify that a specific subset should
not be collected.
required: false
default: '!config'
notes:
- This module requires Dell OS9 version 9.10.0.1P13 or above.
- This module requires to increase the ssh connection rate limit.
Use the following command I(ip ssh connection-rate-limit 60)
to configure the same. This can be done via M(dnos_config) module
as well.
"""
EXAMPLES = """
# Collect all facts from the device
- dellos9_facts:
gather_subset: all
# Collect only the config and default facts
- dellos9_facts:
gather_subset:
- config
# Do not collect hardware facts
- dellos9_facts:
gather_subset:
- "!hardware"
"""
RETURN = """
ansible_net_gather_subset:
description: The list of fact subsets collected from the device
returned: always
type: list
# default
ansible_net_model:
description: The model name returned from the device
returned: always
type: str
ansible_net_serialnum:
description: The serial number of the remote device
returned: always
type: str
ansible_net_version:
description: The operating system version running on the remote device
returned: always
type: str
ansible_net_hostname:
description: The configured hostname of the device
returned: always
type: string
ansible_net_image:
description: The image file the device is running
returned: always
type: string
# hardware
ansible_net_filesystems:
description: All file system names available on the device
returned: when hardware is configured
type: list
ansible_net_memfree_mb:
description: The available free memory on the remote device in Mb
returned: when hardware is configured
type: int
ansible_net_memtotal_mb:
description: The total memory on the remote device in Mb
returned: when hardware is configured
type: int
# config
ansible_net_config:
description: The current active config from the device
returned: when config is configured
type: str
# interfaces
ansible_net_all_ipv4_addresses:
description: All IPv4 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_all_ipv6_addresses:
description: All IPv6 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_interfaces:
description: A hash of all interfaces running on the system
returned: when interfaces is configured
type: dict
ansible_net_neighbors:
description: The list of LLDP neighbors from the remote device
returned: when interfaces is configured
type: dict
"""
import re
import itertools
from ansible.module_utils.netcli import CommandRunner
from ansible.module_utils.network import NetworkModule
import ansible.module_utils.dellos9
class FactsBase(object):
def __init__(self, runner):
self.runner = runner
self.facts = dict()
self.commands()
class Default(FactsBase):
def commands(self):
self.runner.add_command('show version')
self.runner.add_command('show inventory')
self.runner.add_command('show running-config | grep hostname')
def populate(self):
data = self.runner.get_command('show version')
self.facts['version'] = self.parse_version(data)
self.facts['model'] = self.parse_model(data)
self.facts['image'] = self.parse_image(data)
data = self.runner.get_command('show inventory')
self.facts['serialnum'] = self.parse_serialnum(data)
data = self.runner.get_command('show running-config | grep hostname')
self.facts['hostname'] = self.parse_hostname(data)
def parse_version(self, data):
match = re.search(r'Software Version:\s*(.+)', data)
if match:
return match.group(1)
def parse_hostname(self, data):
match = re.search(r'^hostname (.+)', data, re.M)
if match:
return match.group(1)
def parse_model(self, data):
match = re.search(r'^System Type:\s*(.+)', data, re.M)
if match:
return match.group(1)
def parse_image(self, data):
match = re.search(r'image file is "(.+)"', data)
if match:
return match.group(1)
def parse_serialnum(self, data):
for line in data.split('\n'):
if line.startswith('*'):
match = re.search(
r'\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line, re.M)
if match:
return match.group(3)
class Hardware(FactsBase):
def commands(self):
self.runner.add_command('show file-systems')
self.runner.add_command('show memory | except Processor')
def populate(self):
data = self.runner.get_command('show file-systems')
self.facts['filesystems'] = self.parse_filesystems(data)
data = self.runner.get_command('show memory | except Processor')
match = re.findall('\s(\d+)\s', data)
if match:
self.facts['memtotal_mb'] = int(match[0]) / 1024
self.facts['memfree_mb'] = int(match[2]) / 1024
def parse_filesystems(self, data):
return re.findall(r'\s(\S+):$', data, re.M)
class Config(FactsBase):
def commands(self):
self.runner.add_command('show running-config')
def populate(self):
self.facts['config'] = self.runner.get_command('show running-config')
class Interfaces(FactsBase):
def commands(self):
self.runner.add_command('show interfaces')
self.runner.add_command('show ipv6 interface')
self.runner.add_command('show lldp neighbors detail')
def populate(self):
self.facts['all_ipv4_addresses'] = list()
self.facts['all_ipv6_addresses'] = list()
data = self.runner.get_command('show interfaces')
interfaces = self.parse_interfaces(data)
for key in interfaces.keys():
if "ManagementEthernet" in key:
temp_parsed = interfaces[key]
del interfaces[key]
interfaces.update(self.parse_mgmt_interfaces(temp_parsed))
for key in interfaces.keys():
if "Vlan" in key:
temp_parsed = interfaces[key]
del interfaces[key]
interfaces.update(self.parse_vlan_interfaces(temp_parsed))
self.facts['interfaces'] = self.populate_interfaces(interfaces)
data = self.runner.get_command('show ipv6 interface')
if len(data) > 0:
data = self.parse_ipv6_interfaces(data)
self.populate_ipv6_interfaces(data)
data = self.runner.get_command('show inventory')
if 'LLDP' in self.get_protocol_list(data):
neighbors = self.runner.get_command('show lldp neighbors detail')
self.facts['neighbors'] = self.parse_neighbors(neighbors)
def get_protocol_list(self, data):
start = False
protocol_list = list()
for line in data.split('\n'):
match = re.search(r'Software Protocol Configured\s*', line)
if match:
start = True
continue
if start:
line = line.strip()
if line.isalnum():
protocol_list.append(line)
return protocol_list
def populate_interfaces(self, interfaces):
facts = dict()
for key, value in interfaces.iteritems():
intf = dict()
intf['description'] = self.parse_description(value)
intf['macaddress'] = self.parse_macaddress(value)
ipv4 = self.parse_ipv4(value)
intf['ipv4'] = self.parse_ipv4(value)
if ipv4:
self.add_ip_address(ipv4['address'], 'ipv4')
intf['mtu'] = self.parse_mtu(value)
intf['bandwidth'] = self.parse_bandwidth(value)
intf['mediatype'] = self.parse_mediatype(value)
intf['duplex'] = self.parse_duplex(value)
intf['lineprotocol'] = self.parse_lineprotocol(value)
intf['operstatus'] = self.parse_operstatus(value)
intf['type'] = self.parse_type(value)
facts[key] = intf
return facts
def populate_ipv6_interfaces(self, data):
for key, value in data.iteritems():
self.facts['interfaces'][key]['ipv6'] = list()
addresses = re.findall(r'\s+(.+), subnet', value, re.M)
subnets = re.findall(r', subnet is (\S+)', value, re.M)
for addr, subnet in itertools.izip(addresses, subnets):
ipv6 = dict(address=addr.strip(), subnet=subnet.strip())
self.add_ip_address(addr.strip(), 'ipv6')
self.facts['interfaces'][key]['ipv6'].append(ipv6)
def add_ip_address(self, address, family):
if family == 'ipv4':
self.facts['all_ipv4_addresses'].append(address)
else:
self.facts['all_ipv6_addresses'].append(address)
def parse_neighbors(self, neighbors):
facts = dict()
for entry in neighbors.split(
'========================================================================'):
if entry == '':
continue
intf = self.parse_lldp_intf(entry)
if intf not in facts:
facts[intf] = list()
fact = dict()
fact['host'] = self.parse_lldp_host(entry)
fact['port'] = self.parse_lldp_port(entry)
facts[intf].append(fact)
return facts
def parse_interfaces(self, data):
parsed = dict()
newline_count = 0
interface_start = True
for line in data.split('\n'):
if interface_start:
newline_count = 0
if len(line) == 0:
newline_count += 1
if newline_count == 2:
interface_start = True
continue
else:
match = re.match(r'^(\S+) (\S+)', line)
if match and interface_start:
interface_start = False
key = match.group(0)
parsed[key] = line
else:
parsed[key] += '\n%s' % line
return parsed
def parse_mgmt_interfaces(self, data):
parsed = dict()
interface_start = True
for line in data.split('\n'):
match = re.match(r'^(\S+) (\S+)', line)
if "Time since" in line:
interface_start = True
parsed[key] += '\n%s' % line
continue
elif match and interface_start:
interface_start = False
key = match.group(0)
parsed[key] = line
else:
parsed[key] += '\n%s' % line
return parsed
def parse_vlan_interfaces(self, data):
parsed = dict()
interface_start = True
line_before_end = False
for line in data.split('\n'):
match = re.match(r'^(\S+) (\S+)', line)
match_endline = re.match(r'^\s*\d+ packets, \d+ bytes$', line)
if "Output Statistics" in line:
line_before_end = True
parsed[key] += '\n%s' % line
elif match_endline and line_before_end:
line_before_end = False
interface_start = True
parsed[key] += '\n%s' % line
elif match and interface_start:
interface_start = False
key = match.group(0)
parsed[key] = line
else:
parsed[key] += '\n%s' % line
return parsed
def parse_ipv6_interfaces(self, data):
parsed = dict()
for line in data.split('\n'):
if len(line) == 0:
continue
elif line[0] == ' ':
parsed[key] += '\n%s' % line
else:
match = re.match(r'^(\S+) (\S+)', line)
if match:
key = match.group(0)
parsed[key] = line
return parsed
def parse_description(self, data):
match = re.search(r'Description: (.+)$', data, re.M)
if match:
return match.group(1)
def parse_macaddress(self, data):
match = re.search(r'address is (\S+)', data)
if match:
if match.group(1) != "not":
return match.group(1)
def parse_ipv4(self, data):
match = re.search(r'Internet address is (\S+)', data)
if match:
if match.group(1) != "not":
addr, masklen = match.group(1).split('/')
return dict(address=addr, masklen=int(masklen))
def parse_mtu(self, data):
match = re.search(r'MTU (\d+)', data)
if match:
return int(match.group(1))
def parse_bandwidth(self, data):
match = re.search(r'LineSpeed (\d+)', data)
if match:
return int(match.group(1))
def parse_duplex(self, data):
match = re.search(r'(\w+) duplex', data, re.M)
if match:
return match.group(1)
def parse_mediatype(self, data):
media = re.search(r'(.+) media present, (.+)', data, re.M)
if media:
match = re.search(r'type is (.+)$', media.group(0), re.M)
return match.group(1)
def parse_type(self, data):
match = re.search(r'Hardware is (.+),', data, re.M)
if match:
return match.group(1)
def parse_lineprotocol(self, data):
match = re.search(r'line protocol is (\w+[ ]?\w*)\(?.*\)?$', data, re.M)
if match:
return match.group(1)
def parse_operstatus(self, data):
match = re.search(r'^(?:.+) is (.+),', data, re.M)
if match:
return match.group(1)
def parse_lldp_intf(self, data):
match = re.search(r'^\sLocal Interface (\S+\s\S+)', data, re.M)
if match:
return match.group(1)
def parse_lldp_host(self, data):
match = re.search(r'Remote System Name: (.+)$', data, re.M)
if match:
return match.group(1)
def parse_lldp_port(self, data):
match = re.search(r'Remote Port ID: (.+)$', data, re.M)
if match:
return match.group(1)
FACT_SUBSETS = dict(
default=Default,
hardware=Hardware,
interfaces=Interfaces,
config=Config,
)
VALID_SUBSETS = frozenset(FACT_SUBSETS.keys())
def main():
spec = dict(
gather_subset=dict(default=['!config'], type='list')
)
module = NetworkModule(argument_spec=spec, supports_check_mode=True)
gather_subset = module.params['gather_subset']
runable_subsets = set()
exclude_subsets = set()
for subset in gather_subset:
if subset == 'all':
runable_subsets.update(VALID_SUBSETS)
continue
if subset.startswith('!'):
subset = subset[1:]
if subset == 'all':
exclude_subsets.update(VALID_SUBSETS)
continue
exclude = True
else:
exclude = False
if subset not in VALID_SUBSETS:
module.fail_json(msg='Bad subset')
if exclude:
exclude_subsets.add(subset)
else:
runable_subsets.add(subset)
if not runable_subsets:
runable_subsets.update(VALID_SUBSETS)
runable_subsets.difference_update(exclude_subsets)
runable_subsets.add('default')
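    # Worked example (hypothetical input): gather_subset=['!config'] leaves
    # runable_subsets empty above, so it is refilled with VALID_SUBSETS,
    # 'config' is removed, and 'default' is re-added, giving
    # {'default', 'hardware', 'interfaces'}.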
facts = dict()
facts['gather_subset'] = list(runable_subsets)
runner = CommandRunner(module)
instances = list()
for key in runable_subsets:
runs = FACT_SUBSETS[key](runner)
instances.append(runs)
runner.run()
try:
for inst in instances:
inst.populate()
facts.update(inst.facts)
except Exception:
module.exit_json(out=module.from_json(runner.items))
ansible_facts = dict()
for key, value in facts.iteritems():
key = 'ansible_net_%s' % key
ansible_facts[key] = value
module.exit_json(ansible_facts=ansible_facts)
if __name__ == '__main__':
main()
| gpl-3.0 |
abloomston/sympy | sympy/core/tests/test_diff.py | 115 | 2793 | from sympy import Symbol, Rational, cos, sin, tan, cot, exp, log, Function, \
Derivative, Expr, symbols, pi, I, S
from sympy.utilities.pytest import raises
def test_diff():
x, y = symbols('x, y')
assert Rational(1, 3).diff(x) is S.Zero
assert I.diff(x) is S.Zero
assert pi.diff(x) is S.Zero
assert x.diff(x, 0) == x
assert (x**2).diff(x, 2, x) == 0
assert (x**2).diff(x, y, 0) == 2*x
assert (x**2).diff(x, y) == 0
raises(ValueError, lambda: x.diff(1, x))
a = Symbol("a")
b = Symbol("b")
c = Symbol("c")
p = Rational(5)
e = a*b + b**p
assert e.diff(a) == b
assert e.diff(b) == a + 5*b**4
assert e.diff(b).diff(a) == Rational(1)
e = a*(b + c)
assert e.diff(a) == b + c
assert e.diff(b) == a
assert e.diff(b).diff(a) == Rational(1)
e = c**p
assert e.diff(c, 6) == Rational(0)
assert e.diff(c, 5) == Rational(120)
e = c**Rational(2)
assert e.diff(c) == 2*c
e = a*b*c
assert e.diff(c) == a*b
def test_diff2():
n3 = Rational(3)
n2 = Rational(2)
n6 = Rational(6)
x, c = map(Symbol, 'xc')
e = n3*(-n2 + x**n2)*cos(x) + x*(-n6 + x**n2)*sin(x)
assert e == 3*(-2 + x**2)*cos(x) + x*(-6 + x**2)*sin(x)
assert e.diff(x).expand() == x**3*cos(x)
e = (x + 1)**3
assert e.diff(x) == 3*(x + 1)**2
e = x*(x + 1)**3
assert e.diff(x) == (x + 1)**3 + 3*x*(x + 1)**2
e = 2*exp(x*x)*x
assert e.diff(x) == 2*exp(x**2) + 4*x**2*exp(x**2)
def test_diff3():
a, b, c = map(Symbol, 'abc')
p = Rational(5)
e = a*b + sin(b**p)
assert e == a*b + sin(b**5)
assert e.diff(a) == b
assert e.diff(b) == a + 5*b**4*cos(b**5)
e = tan(c)
assert e == tan(c)
assert e.diff(c) in [cos(c)**(-2), 1 + sin(c)**2/cos(c)**2, 1 + tan(c)**2]
e = c*log(c) - c
assert e == -c + c*log(c)
assert e.diff(c) == log(c)
e = log(sin(c))
assert e == log(sin(c))
assert e.diff(c) in [sin(c)**(-1)*cos(c), cot(c)]
e = (Rational(2)**a/log(Rational(2)))
assert e == 2**a*log(Rational(2))**(-1)
assert e.diff(a) == 2**a
def test_diff_no_eval_derivative():
class My(Expr):
def __new__(cls, x):
return Expr.__new__(cls, x)
x, y = symbols('x y')
# My doesn't have its own _eval_derivative method
assert My(x).diff(x).func is Derivative
# it doesn't have y so it shouldn't need a method for this case
assert My(x).diff(y) == 0
def test_speed():
# this should return in 0.0s. If it takes forever, it's wrong.
x = Symbol("x")
assert x.diff(x, 10**8) == 0
def test_deriv_noncommutative():
A = Symbol("A", commutative=False)
f = Function("f")
x = Symbol("x")
assert A*f(x)*A == f(x)*A**2
assert A*f(x).diff(x)*A == f(x).diff(x) * A**2
| bsd-3-clause |
Dangetsu/vnr | Frameworks/Sakura/py/libs/restful/online.py | 1 | 3628 | # coding: utf8
# online.py
# 8/12/2013 jichi
#
# See: http://ymotongpoo.hatenablog.com/entry/20081123/1227430671
__all__ = 'DataParser', 'FileParser', 'JsonFileParser'
if __name__ == '__main__': # DEBUG
import sys
sys.path.append("..")
import json, urllib2
from sakurakit import sknetio
from sakurakit.skdebug import dwarn
class ParserBase(object):
ENCODING = 'utf8'
class DataParser(ParserBase):
session = None # requests.Session or None
def _makereq(self, *args, **kwargs):
"""
@param kw
@return kw
"""
return {'url':self._makeurl(*args, **kwargs)}
def _makeurl(self, *args, **kwargs):
"""
@return str
"""
if args:
return args[0]
if kwargs:
return kwargs.itervalues().next()
def _fetch(self, url):
"""
@param url str
@return str
"""
return sknetio.getdata(url, gzip=True, session=self.session)
def query(self, *args, **kwargs):
"""
@return {kw} or None
"""
req = self._makereq(*args, **kwargs)
h = self._fetch(**req)
if h:
h = h.decode(self.ENCODING, errors='ignore')
if h:
return self._parse(h)
def _parse(self, h):
"""
@param h unicode html
@return {kw}
"""
return h
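# A minimal subclass sketch (hypothetical endpoint and tag; assumes `re` is
# imported) showing the DataParser flow -- _makeurl builds the request,
# _fetch downloads the page, and _parse extracts data from the decoded HTML:
#
#   class TitleParser(DataParser):
#     def _makeurl(self, key):
#       return 'http://example.com/%s' % key
#     def _parse(self, h):
#       m = re.search(r'<title>(.*?)</title>', h)
#       return {'title': m.group(1)} if m else None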
# The API is stateless.
# This class exists so that _fetch can be overridden.
class FileParser(ParserBase):
#METHOD = 'GET'
URL = '' # str
def query(self, *args, **kwargs):
"""
@param text unicode
@param type str
@return list or None
"""
try: return self._dispatch(*args, **kwargs)
except Exception, e: dwarn(e)
def _parse(self, fp):
"""
@param fp file pointer
@return string
@raise
"""
return fp.read()
def _makeparams(self, **kwargs):
"""
@param kw
@return kw
"""
return kwargs
def _dispatch(self, *args, **kwargs):
"""
@return list or None
@raise
"""
params = self._makeparams(*args, **kwargs)
req = self._makereq(**params)
r = self._fetch(**req)
return self._parse(r)
def _fetch(self, url):
"""
@param url str
@return file object
@raise
"""
req = urllib2.Request(url)
#handler = urllib2.HTTPHandler(debuglevel=self.debug)
handler = urllib2.HTTPHandler()
opener = urllib2.build_opener(handler)
return opener.open(req)
def _makereq(self, **kw):
"""
@param kw
@return kw
"""
return {'url':self._makeurl(**kw)}
def _makeurl(self, **params):
"""
@param params request params
@return str
See: http://ymotongpoo.hatenablog.com/entry/20081123/1227430671
See: http://ketsuage.seesaa.net/article/263754550.html
"""
    # Expand the params dict into encoded key=value pairs
request = ["%s=%s" % (k, urllib2.quote(self._encodeparam(v)))
for k,v in sorted(params.iteritems())]
#urllib.encodeparams
# GET
ret = self.URL + "?" + "&".join(request)
#if self.debug:
# dprint(ret)
return ret
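  # For illustration (hypothetical values): with URL = 'http://example.com/api'
  # and params = {'b': u'x', 'a': 1}, this returns
  # 'http://example.com/api?a=1&b=x' -- keys are sorted, and each value is
  # encoded by _encodeparam and URL-quoted.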
def _encodeparam(self, v):
"""
@param v any
@return str
"""
if isinstance(v, str):
return v
elif isinstance(v, unicode):
return v.encode(self.ENCODING, errors='ignore')
elif v is None:
return ''
elif v is bool:
return 'true' if v else 'false'
else:
return str(v) # May throw
class JsonFileParser(FileParser):
def _parse(self, fp):
"""@reimp
@param fp file pointer
@return string
@raise
"""
return self._parsejson(json.load(fp))
def _parsejson(self, data):
"""
@param data object
@return object
@raise
"""
return data
# EOF
| gpl-3.0 |
toolforger/sympy | sympy/matrices/expressions/matexpr.py | 41 | 13161 | from __future__ import print_function, division
from functools import wraps
from sympy.core import S, Symbol, Tuple, Integer, Basic, Expr
from sympy.core.decorators import call_highest_priority
from sympy.core.compatibility import range
from sympy.core.sympify import SympifyError, sympify
from sympy.functions import conjugate, adjoint
from sympy.matrices import ShapeError
from sympy.simplify import simplify
def _sympifyit(arg, retval=None):
# This version of _sympifyit sympifies MutableMatrix objects
def deco(func):
@wraps(func)
def __sympifyit_wrapper(a, b):
try:
b = sympify(b, strict=True)
return func(a, b)
except SympifyError:
return retval
return __sympifyit_wrapper
return deco
class MatrixExpr(Basic):
""" Superclass for Matrix Expressions
MatrixExprs represent abstract matrices, linear transformations represented
within a particular basis.
Examples
========
>>> from sympy import MatrixSymbol
>>> A = MatrixSymbol('A', 3, 3)
>>> y = MatrixSymbol('y', 3, 1)
>>> x = (A.T*A).I * A * y
See Also
========
MatrixSymbol
MatAdd
MatMul
Transpose
Inverse
"""
_op_priority = 11.0
is_Matrix = True
is_MatrixExpr = True
is_Identity = None
is_Inverse = False
is_Transpose = False
is_ZeroMatrix = False
is_MatAdd = False
is_MatMul = False
is_commutative = False
def __new__(cls, *args, **kwargs):
args = map(sympify, args)
return Basic.__new__(cls, *args, **kwargs)
# The following is adapted from the core Expr object
def __neg__(self):
return MatMul(S.NegativeOne, self).doit()
def __abs__(self):
raise NotImplementedError
@_sympifyit('other', NotImplemented)
@call_highest_priority('__radd__')
def __add__(self, other):
return MatAdd(self, other).doit()
@_sympifyit('other', NotImplemented)
@call_highest_priority('__add__')
def __radd__(self, other):
return MatAdd(other, self).doit()
@_sympifyit('other', NotImplemented)
@call_highest_priority('__rsub__')
def __sub__(self, other):
return MatAdd(self, -other).doit()
@_sympifyit('other', NotImplemented)
@call_highest_priority('__sub__')
def __rsub__(self, other):
return MatAdd(other, -self).doit()
@_sympifyit('other', NotImplemented)
@call_highest_priority('__rmul__')
def __mul__(self, other):
return MatMul(self, other).doit()
@_sympifyit('other', NotImplemented)
@call_highest_priority('__mul__')
def __rmul__(self, other):
return MatMul(other, self).doit()
@_sympifyit('other', NotImplemented)
@call_highest_priority('__rpow__')
def __pow__(self, other):
if not self.is_square:
raise ShapeError("Power of non-square matrix %s" % self)
if other is S.NegativeOne:
return Inverse(self)
elif other is S.Zero:
return Identity(self.rows)
elif other is S.One:
return self
return MatPow(self, other)
@_sympifyit('other', NotImplemented)
@call_highest_priority('__pow__')
def __rpow__(self, other):
raise NotImplementedError("Matrix Power not defined")
@_sympifyit('other', NotImplemented)
@call_highest_priority('__rdiv__')
def __div__(self, other):
return self * other**S.NegativeOne
@_sympifyit('other', NotImplemented)
@call_highest_priority('__div__')
def __rdiv__(self, other):
raise NotImplementedError()
#return MatMul(other, Pow(self, S.NegativeOne))
__truediv__ = __div__
__rtruediv__ = __rdiv__
@property
def rows(self):
return self.shape[0]
@property
def cols(self):
return self.shape[1]
@property
def is_square(self):
return self.rows == self.cols
def _eval_conjugate(self):
from sympy.matrices.expressions.adjoint import Adjoint
from sympy.matrices.expressions.transpose import Transpose
return Adjoint(Transpose(self))
def _eval_inverse(self):
from sympy.matrices.expressions.inverse import Inverse
return Inverse(self)
def _eval_transpose(self):
return Transpose(self)
def _eval_power(self, exp):
return MatPow(self, exp)
def _eval_simplify(self, **kwargs):
if self.is_Atom:
return self
else:
return self.__class__(*[simplify(x, **kwargs) for x in self.args])
def _eval_adjoint(self):
from sympy.matrices.expressions.adjoint import Adjoint
return Adjoint(self)
def _entry(self, i, j):
raise NotImplementedError(
"Indexing not implemented for %s" % self.__class__.__name__)
def adjoint(self):
return adjoint(self)
def conjugate(self):
return conjugate(self)
def transpose(self):
from sympy.matrices.expressions.transpose import transpose
return transpose(self)
T = property(transpose, None, None, 'Matrix transposition.')
def inverse(self):
return self._eval_inverse()
@property
def I(self):
return self.inverse()
def valid_index(self, i, j):
def is_valid(idx):
return isinstance(idx, (int, Integer, Symbol, Expr))
return (is_valid(i) and is_valid(j) and
(0 <= i) != False and (i < self.rows) != False and
(0 <= j) != False and (j < self.cols) != False)
def __getitem__(self, key):
if not isinstance(key, tuple) and isinstance(key, slice):
from sympy.matrices.expressions.slice import MatrixSlice
return MatrixSlice(self, key, (0, None, 1))
if isinstance(key, tuple) and len(key) == 2:
i, j = key
if isinstance(i, slice) or isinstance(j, slice):
from sympy.matrices.expressions.slice import MatrixSlice
return MatrixSlice(self, i, j)
i, j = sympify(i), sympify(j)
if self.valid_index(i, j) != False:
return self._entry(i, j)
else:
raise IndexError("Invalid indices (%s, %s)" % (i, j))
elif isinstance(key, (int, Integer)):
# row-wise decomposition of matrix
rows, cols = self.shape
if not (isinstance(rows, Integer) and isinstance(cols, Integer)):
raise IndexError("Single index only supported for "
"non-symbolic matrix shapes.")
key = sympify(key)
i = key // cols
j = key % cols
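            # e.g. for a 3x4 matrix, key=7 gives i=7//4=1, j=7%4=3 (row-major)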
if self.valid_index(i, j) != False:
return self._entry(i, j)
else:
raise IndexError("Invalid index %s" % key)
elif isinstance(key, (Symbol, Expr)):
raise IndexError("Single index only supported for "
"non-symbolic indices.")
raise IndexError("Invalid index, wanted %s[i,j]" % self)
def as_explicit(self):
"""
Returns a dense Matrix with elements represented explicitly
Returns an object of type ImmutableMatrix.
Examples
========
>>> from sympy import Identity
>>> I = Identity(3)
>>> I
I
>>> I.as_explicit()
Matrix([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
See Also
========
as_mutable: returns mutable Matrix type
"""
from sympy.matrices.immutable import ImmutableMatrix
return ImmutableMatrix([[ self[i, j]
for j in range(self.cols)]
for i in range(self.rows)])
def as_mutable(self):
"""
Returns a dense, mutable matrix with elements represented explicitly
Examples
========
>>> from sympy import Identity
>>> I = Identity(3)
>>> I
I
>>> I.shape
(3, 3)
>>> I.as_mutable()
Matrix([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
See Also
========
as_explicit: returns ImmutableMatrix
"""
return self.as_explicit().as_mutable()
def __array__(self):
from numpy import empty
a = empty(self.shape, dtype=object)
for i in range(self.rows):
for j in range(self.cols):
a[i, j] = self[i, j]
return a
def equals(self, other):
"""
Test elementwise equality between matrices, potentially of different
types
>>> from sympy import Identity, eye
>>> Identity(3).equals(eye(3))
True
"""
return self.as_explicit().equals(other)
def canonicalize(self):
return self
def as_coeff_mmul(self):
return 1, MatMul(self)
class MatrixElement(Expr):
parent = property(lambda self: self.args[0])
i = property(lambda self: self.args[1])
j = property(lambda self: self.args[2])
_diff_wrt = True
def doit(self, **kwargs):
deep = kwargs.get('deep', True)
if deep:
args = [arg.doit(**kwargs) for arg in self.args]
else:
args = self.args
return args[0][args[1], args[2]]
class MatrixSymbol(MatrixExpr):
"""Symbolic representation of a Matrix object
Creates a SymPy Symbol to represent a Matrix. This matrix has a shape and
can be included in Matrix Expressions
>>> from sympy import MatrixSymbol, Identity
>>> A = MatrixSymbol('A', 3, 4) # A 3 by 4 Matrix
>>> B = MatrixSymbol('B', 4, 3) # A 4 by 3 Matrix
>>> A.shape
(3, 4)
>>> 2*A*B + Identity(3)
I + 2*A*B
"""
is_commutative = False
def __new__(cls, name, n, m):
n, m = sympify(n), sympify(m)
obj = Basic.__new__(cls, name, n, m)
return obj
def _hashable_content(self):
return(self.name, self.shape)
@property
def shape(self):
return self.args[1:3]
@property
def name(self):
return self.args[0]
def _eval_subs(self, old, new):
# only do substitutions in shape
shape = Tuple(*self.shape)._subs(old, new)
return MatrixSymbol(self.name, *shape)
def __call__(self, *args):
raise TypeError( "%s object is not callable" % self.__class__ )
def _entry(self, i, j):
return MatrixElement(self, i, j)
@property
def free_symbols(self):
return set((self,))
def doit(self, **hints):
if hints.get('deep', True):
return type(self)(self.name, self.args[1].doit(**hints),
self.args[2].doit(**hints))
else:
return self
def _eval_simplify(self, **kwargs):
return self
class Identity(MatrixExpr):
"""The Matrix Identity I - multiplicative identity
>>> from sympy.matrices import Identity, MatrixSymbol
>>> A = MatrixSymbol('A', 3, 5)
>>> I = Identity(3)
>>> I*A
A
"""
is_Identity = True
def __new__(cls, n):
return super(Identity, cls).__new__(cls, sympify(n))
@property
def rows(self):
return self.args[0]
@property
def cols(self):
return self.args[0]
@property
def shape(self):
return (self.args[0], self.args[0])
def _eval_transpose(self):
return self
def _eval_trace(self):
return self.rows
def _eval_inverse(self):
return self
def conjugate(self):
return self
def _entry(self, i, j):
if i == j:
return S.One
else:
return S.Zero
def _eval_determinant(self):
return S.One
class ZeroMatrix(MatrixExpr):
"""The Matrix Zero 0 - additive identity
>>> from sympy import MatrixSymbol, ZeroMatrix
>>> A = MatrixSymbol('A', 3, 5)
>>> Z = ZeroMatrix(3, 5)
>>> A+Z
A
>>> Z*A.T
0
"""
is_ZeroMatrix = True
def __new__(cls, m, n):
return super(ZeroMatrix, cls).__new__(cls, m, n)
@property
def shape(self):
return (self.args[0], self.args[1])
@_sympifyit('other', NotImplemented)
@call_highest_priority('__rpow__')
def __pow__(self, other):
if other != 1 and not self.is_square:
raise ShapeError("Power of non-square matrix %s" % self)
if other == 0:
return Identity(self.rows)
return self
def _eval_transpose(self):
return ZeroMatrix(self.cols, self.rows)
def _eval_trace(self):
return S.Zero
def _eval_determinant(self):
return S.Zero
def conjugate(self):
return self
def _entry(self, i, j):
return S.Zero
def __nonzero__(self):
return False
__bool__ = __nonzero__
def matrix_symbols(expr):
return [sym for sym in expr.free_symbols if sym.is_Matrix]
from .matmul import MatMul
from .matadd import MatAdd
from .matpow import MatPow
from .transpose import Transpose
from .inverse import Inverse
| bsd-3-clause |
rev112/playterminal | games/utils/stepic_client.py | 1 | 1714 | import requests
STEPIC_URL = 'https://stepic.org/'
STEPIC_API_URL = STEPIC_URL + 'api/'
STEPIC_OAUTH_TOKEN_URL = STEPIC_URL + 'oauth2/token/'
STEPIC_API_ATTEMPTS_URL = STEPIC_API_URL + 'attempts'
STEPIC_API_ATTEMPT_URL = STEPIC_API_ATTEMPTS_URL + '/{id}'
class LoginError(Exception):
"""An exception raised when login failed."""
class StepicError(Exception):
"""An error occurred on the Stepic side."""
class StepicClient(object):
def __init__(self, client_id, client_secret):
# Get an OAuth token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
response = requests.post(STEPIC_OAUTH_TOKEN_URL,
data={'grant_type': 'client_credentials'},
auth=auth)
self.token = response.json()['access_token']
auth_headers = {'Authorization': 'Bearer ' + self.token}
self.session = requests.Session()
self.session.headers.update(auth_headers)
def get_attempt(self, attempt_id):
api_url = STEPIC_API_ATTEMPT_URL.format(id=attempt_id)
response = self.session.get(api_url)
if response.status_code == 404:
return None
response.raise_for_status()
response_data = response.json()
attempt = response_data['attempts'][0]
return attempt
def create_attempt(self, step_id):
data = {'attempt': {'step': step_id}}
response = self.session.post(STEPIC_API_ATTEMPTS_URL, json=data)
response.raise_for_status()
resp_json = response.json()
if not resp_json['attempts']:
raise StepicError("Stepic didn't return an attempt")
return resp_json['attempts'][0]
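# Usage sketch (hypothetical credentials and step id):
#
#     client = StepicClient('my-client-id', 'my-client-secret')
#     attempt = client.create_attempt(step_id=42)
#     fetched = client.get_attempt(attempt['id'])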
| apache-2.0 |
brython-dev/brython | www/src/Lib/test/test_peepholer.py | 1 | 20531 | import dis
import unittest
from test.support.bytecode_helper import BytecodeTestCase
def count_instr_recursively(f, opname):
count = 0
for instr in dis.get_instructions(f):
if instr.opname == opname:
count += 1
if hasattr(f, '__code__'):
f = f.__code__
for c in f.co_consts:
if hasattr(c, 'co_code'):
count += count_instr_recursively(c, opname)
return count
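# For example, on interpreters where comprehensions compile to nested code
# objects, the recursion above reaches into co_consts:
#
#     def f():
#         return [x for x in range(3)]
#     count_instr_recursively(f, 'FOR_ITER')  # 1, found in the listcomp code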
class TestTranforms(BytecodeTestCase):
def check_jump_targets(self, code):
instructions = list(dis.get_instructions(code))
targets = {instr.offset: instr for instr in instructions}
for instr in instructions:
if 'JUMP_' not in instr.opname:
continue
tgt = targets[instr.argval]
# jump to unconditional jump
if tgt.opname in ('JUMP_ABSOLUTE', 'JUMP_FORWARD'):
self.fail(f'{instr.opname} at {instr.offset} '
f'jumps to {tgt.opname} at {tgt.offset}')
# unconditional jump to RETURN_VALUE
if (instr.opname in ('JUMP_ABSOLUTE', 'JUMP_FORWARD') and
tgt.opname == 'RETURN_VALUE'):
self.fail(f'{instr.opname} at {instr.offset} '
f'jumps to {tgt.opname} at {tgt.offset}')
# JUMP_IF_*_OR_POP jump to conditional jump
if '_OR_POP' in instr.opname and 'JUMP_IF_' in tgt.opname:
self.fail(f'{instr.opname} at {instr.offset} '
f'jumps to {tgt.opname} at {tgt.offset}')
def check_lnotab(self, code):
"Check that the lnotab byte offsets are sensible."
code = dis._get_code_object(code)
lnotab = list(dis.findlinestarts(code))
# Don't bother checking if the line info is sensible, because
# most of the line info we can get at comes from lnotab.
min_bytecode = min(t[0] for t in lnotab)
max_bytecode = max(t[0] for t in lnotab)
self.assertGreaterEqual(min_bytecode, 0)
self.assertLess(max_bytecode, len(code.co_code))
# This could conceivably test more (and probably should, as there
# aren't very many tests of lnotab), if peepholer wasn't scheduled
# to be replaced anyway.
def test_unot(self):
# UNARY_NOT POP_JUMP_IF_FALSE --> POP_JUMP_IF_TRUE'
def unot(x):
if not x == 2:
del x
self.assertNotInBytecode(unot, 'UNARY_NOT')
self.assertNotInBytecode(unot, 'POP_JUMP_IF_FALSE')
self.assertInBytecode(unot, 'POP_JUMP_IF_TRUE')
self.check_lnotab(unot)
def test_elim_inversion_of_is_or_in(self):
for line, cmp_op, invert in (
('not a is b', 'IS_OP', 1,),
('not a is not b', 'IS_OP', 0,),
('not a in b', 'CONTAINS_OP', 1,),
('not a not in b', 'CONTAINS_OP', 0,),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, cmp_op, invert)
self.check_lnotab(code)
def test_global_as_constant(self):
# LOAD_GLOBAL None/True/False --> LOAD_CONST None/True/False
def f():
x = None
x = None
return x
def g():
x = True
return x
def h():
x = False
return x
for func, elem in ((f, None), (g, True), (h, False)):
self.assertNotInBytecode(func, 'LOAD_GLOBAL')
self.assertInBytecode(func, 'LOAD_CONST', elem)
self.check_lnotab(func)
def f():
'Adding a docstring made this test fail in Py2.5.0'
return None
self.assertNotInBytecode(f, 'LOAD_GLOBAL')
self.assertInBytecode(f, 'LOAD_CONST', None)
self.check_lnotab(f)
def test_while_one(self):
# Skip over: LOAD_CONST trueconst POP_JUMP_IF_FALSE xx
def f():
while 1:
pass
return list
for elem in ('LOAD_CONST', 'POP_JUMP_IF_FALSE'):
self.assertNotInBytecode(f, elem)
for elem in ('JUMP_ABSOLUTE',):
self.assertInBytecode(f, elem)
self.check_lnotab(f)
def test_pack_unpack(self):
for line, elem in (
('a, = a,', 'LOAD_CONST',),
('a, b = a, b', 'ROT_TWO',),
('a, b, c = a, b, c', 'ROT_THREE',),
):
code = compile(line,'','single')
self.assertInBytecode(code, elem)
self.assertNotInBytecode(code, 'BUILD_TUPLE')
self.assertNotInBytecode(code, 'UNPACK_TUPLE')
self.check_lnotab(code)
def test_folding_of_tuples_of_constants(self):
for line, elem in (
('a = 1,2,3', (1, 2, 3)),
('("a","b","c")', ('a', 'b', 'c')),
('a,b,c = 1,2,3', (1, 2, 3)),
('(None, 1, None)', (None, 1, None)),
('((1, 2), 3, 4)', ((1, 2), 3, 4)),
):
code = compile(line,'','single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertNotInBytecode(code, 'BUILD_TUPLE')
self.check_lnotab(code)
# Long tuples should be folded too.
code = compile(repr(tuple(range(10000))),'','single')
self.assertNotInBytecode(code, 'BUILD_TUPLE')
# One LOAD_CONST for the tuple, one for the None return value
load_consts = [instr for instr in dis.get_instructions(code)
if instr.opname == 'LOAD_CONST']
self.assertEqual(len(load_consts), 2)
self.check_lnotab(code)
# Bug 1053819: Tuple of constants misidentified when presented with:
# . . . opcode_with_arg 100 unary_opcode BUILD_TUPLE 1 . . .
# The following would segfault upon compilation
def crater():
(~[
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
],)
self.check_lnotab(crater)
def test_folding_of_lists_of_constants(self):
for line, elem in (
# in/not in constants with BUILD_LIST should be folded to a tuple:
('a in [1,2,3]', (1, 2, 3)),
('a not in ["a","b","c"]', ('a', 'b', 'c')),
('a in [None, 1, None]', (None, 1, None)),
('a not in [(1, 2), 3, 4]', ((1, 2), 3, 4)),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertNotInBytecode(code, 'BUILD_LIST')
self.check_lnotab(code)
def test_folding_of_sets_of_constants(self):
for line, elem in (
# in/not in constants with BUILD_SET should be folded to a frozenset:
('a in {1,2,3}', frozenset({1, 2, 3})),
('a not in {"a","b","c"}', frozenset({'a', 'c', 'b'})),
('a in {None, 1, None}', frozenset({1, None})),
('a not in {(1, 2), 3, 4}', frozenset({(1, 2), 3, 4})),
('a in {1, 2, 3, 3, 2, 1}', frozenset({1, 2, 3})),
):
code = compile(line, '', 'single')
self.assertNotInBytecode(code, 'BUILD_SET')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.check_lnotab(code)
# Ensure that the resulting code actually works:
def f(a):
return a in {1, 2, 3}
def g(a):
return a not in {1, 2, 3}
self.assertTrue(f(3))
self.assertTrue(not f(4))
self.check_lnotab(f)
self.assertTrue(not g(3))
self.assertTrue(g(4))
self.check_lnotab(g)
def test_folding_of_binops_on_constants(self):
for line, elem in (
('a = 2+3+4', 9), # chained fold
('"@"*4', '@@@@'), # check string ops
('a="abc" + "def"', 'abcdef'), # check string ops
('a = 3**4', 81), # binary power
('a = 3*4', 12), # binary multiply
('a = 13//4', 3), # binary floor divide
('a = 14%4', 2), # binary modulo
('a = 2+3', 5), # binary add
('a = 13-4', 9), # binary subtract
('a = (12,13)[1]', 13), # binary subscr
('a = 13 << 2', 52), # binary lshift
('a = 13 >> 2', 3), # binary rshift
('a = 13 & 7', 5), # binary and
('a = 13 ^ 7', 10), # binary xor
('a = 13 | 7', 15), # binary or
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('BINARY_'))
self.check_lnotab(code)
# Verify that unfoldables are skipped
code = compile('a=2+"b"', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 2)
self.assertInBytecode(code, 'LOAD_CONST', 'b')
self.check_lnotab(code)
# Verify that large sequences do not result from folding
code = compile('a="x"*10000', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 10000)
self.assertNotIn("x"*10000, code.co_consts)
self.check_lnotab(code)
code = compile('a=1<<1000', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 1000)
self.assertNotIn(1<<1000, code.co_consts)
self.check_lnotab(code)
code = compile('a=2**1000', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 1000)
self.assertNotIn(2**1000, code.co_consts)
self.check_lnotab(code)
def test_binary_subscr_on_unicode(self):
# valid code get optimized
code = compile('"foo"[0]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 'f')
self.assertNotInBytecode(code, 'BINARY_SUBSCR')
self.check_lnotab(code)
code = compile('"\u0061\uffff"[1]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', '\uffff')
self.assertNotInBytecode(code,'BINARY_SUBSCR')
self.check_lnotab(code)
# With PEP 393, non-BMP char get optimized
code = compile('"\U00012345"[0]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', '\U00012345')
self.assertNotInBytecode(code, 'BINARY_SUBSCR')
self.check_lnotab(code)
# invalid code doesn't get optimized
# out of range
code = compile('"fuu"[10]', '', 'single')
self.assertInBytecode(code, 'BINARY_SUBSCR')
self.check_lnotab(code)
def test_folding_of_unaryops_on_constants(self):
for line, elem in (
('-0.5', -0.5), # unary negative
('-0.0', -0.0), # -0.0
('-(1.0-1.0)', -0.0), # -0.0 after folding
('-0', 0), # -0
('~-2', 1), # unary invert
('+1', 1), # unary positive
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
self.check_lnotab(code)
# Check that -0.0 works after marshaling
def negzero():
return -(1.0-1.0)
for instr in dis.get_instructions(negzero):
self.assertFalse(instr.opname.startswith('UNARY_'))
self.check_lnotab(negzero)
# Verify that unfoldables are skipped
for line, elem, opname in (
('-"abc"', 'abc', 'UNARY_NEGATIVE'),
('~"abc"', 'abc', 'UNARY_INVERT'),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertInBytecode(code, opname)
self.check_lnotab(code)
def test_elim_extra_return(self):
# RETURN LOAD_CONST None RETURN --> RETURN
def f(x):
return x
self.assertNotInBytecode(f, 'LOAD_CONST', None)
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 1)
self.check_lnotab(f)
def test_elim_jump_to_return(self):
# JUMP_FORWARD to RETURN --> RETURN
def f(cond, true_value, false_value):
# Intentionally use two-line expression to test issue37213.
return (true_value if cond
else false_value)
self.check_jump_targets(f)
self.assertNotInBytecode(f, 'JUMP_FORWARD')
self.assertNotInBytecode(f, 'JUMP_ABSOLUTE')
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 2)
self.check_lnotab(f)
def test_elim_jump_to_uncond_jump(self):
# POP_JUMP_IF_FALSE to JUMP_FORWARD --> POP_JUMP_IF_FALSE to non-jump
def f():
if a:
# Intentionally use two-line expression to test issue37213.
if (c
or d):
foo()
else:
baz()
self.check_jump_targets(f)
self.check_lnotab(f)
def test_elim_jump_to_uncond_jump2(self):
# POP_JUMP_IF_FALSE to JUMP_ABSOLUTE --> POP_JUMP_IF_FALSE to non-jump
def f():
while a:
# Intentionally use two-line expression to test issue37213.
if (c
or d):
a = foo()
self.check_jump_targets(f)
self.check_lnotab(f)
def test_elim_jump_to_uncond_jump3(self):
# Intentionally use two-line expressions to test issue37213.
# JUMP_IF_FALSE_OR_POP to JUMP_IF_FALSE_OR_POP --> JUMP_IF_FALSE_OR_POP to non-jump
def f(a, b, c):
return ((a and b)
and c)
self.check_jump_targets(f)
self.check_lnotab(f)
self.assertEqual(count_instr_recursively(f, 'JUMP_IF_FALSE_OR_POP'), 2)
# JUMP_IF_TRUE_OR_POP to JUMP_IF_TRUE_OR_POP --> JUMP_IF_TRUE_OR_POP to non-jump
def f(a, b, c):
return ((a or b)
or c)
self.check_jump_targets(f)
self.check_lnotab(f)
self.assertEqual(count_instr_recursively(f, 'JUMP_IF_TRUE_OR_POP'), 2)
# JUMP_IF_FALSE_OR_POP to JUMP_IF_TRUE_OR_POP --> POP_JUMP_IF_FALSE to non-jump
def f(a, b, c):
return ((a and b)
or c)
self.check_jump_targets(f)
self.check_lnotab(f)
self.assertNotInBytecode(f, 'JUMP_IF_FALSE_OR_POP')
self.assertInBytecode(f, 'JUMP_IF_TRUE_OR_POP')
self.assertInBytecode(f, 'POP_JUMP_IF_FALSE')
# JUMP_IF_TRUE_OR_POP to JUMP_IF_FALSE_OR_POP --> POP_JUMP_IF_TRUE to non-jump
def f(a, b, c):
return ((a or b)
and c)
self.check_jump_targets(f)
self.check_lnotab(f)
self.assertNotInBytecode(f, 'JUMP_IF_TRUE_OR_POP')
self.assertInBytecode(f, 'JUMP_IF_FALSE_OR_POP')
self.assertInBytecode(f, 'POP_JUMP_IF_TRUE')
def test_elim_jump_after_return1(self):
# Eliminate dead code: jumps immediately after returns can't be reached
def f(cond1, cond2):
if cond1: return 1
if cond2: return 2
while 1:
return 3
while 1:
if cond1: return 4
return 5
return 6
self.assertNotInBytecode(f, 'JUMP_FORWARD')
self.assertNotInBytecode(f, 'JUMP_ABSOLUTE')
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertLessEqual(len(returns), 6)
self.check_lnotab(f)
def test_elim_jump_after_return2(self):
# Eliminate dead code: jumps immediately after returns can't be reached
def f(cond1, cond2):
while 1:
if cond1: return 4
self.assertNotInBytecode(f, 'JUMP_FORWARD')
# There should be one jump for the while loop.
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'JUMP_ABSOLUTE']
self.assertEqual(len(returns), 1)
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertLessEqual(len(returns), 2)
self.check_lnotab(f)
def test_make_function_doesnt_bail(self):
def f():
def g()->1+1:
pass
return g
self.assertNotInBytecode(f, 'BINARY_ADD')
self.check_lnotab(f)
def test_constant_folding(self):
# Issue #11244: aggressive constant folding.
exprs = [
'3 * -5',
'-3 * 5',
'2 * (3 * 4)',
'(2 * 3) * 4',
'(-1, 2, 3)',
'(1, -2, 3)',
'(1, 2, -3)',
'(1, 2, -3) * 6',
'lambda x: x in {(3 * -5) + (-1 - 6), (1, -2, 3) * 2, None}',
]
for e in exprs:
code = compile(e, '', 'single')
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
self.assertFalse(instr.opname.startswith('BINARY_'))
self.assertFalse(instr.opname.startswith('BUILD_'))
self.check_lnotab(code)
def test_in_literal_list(self):
def containtest():
return x in [a, b]
self.assertEqual(count_instr_recursively(containtest, 'BUILD_LIST'), 0)
self.check_lnotab(containtest)
def test_iterate_literal_list(self):
def forloop():
for x in [a, b]:
pass
self.assertEqual(count_instr_recursively(forloop, 'BUILD_LIST'), 0)
self.check_lnotab(forloop)
def test_condition_with_binop_with_bools(self):
def f():
if True or False:
return 1
return 0
self.assertEqual(f(), 1)
self.check_lnotab(f)
def test_if_with_if_expression(self):
# Check bpo-37289
def f(x):
if (True if x else False):
return True
return False
self.assertTrue(f(True))
self.check_lnotab(f)
def test_trailing_nops(self):
# Check the lnotab of a function that even after trivial
# optimization has trailing nops, which the lnotab adjustment has to
# handle properly (bpo-38115).
def f(x):
while 1:
return 3
while 1:
return 5
return 6
self.check_lnotab(f)
def test_assignment_idiom_in_comprehensions(self):
def listcomp():
return [y for x in a for y in [f(x)]]
self.assertEqual(count_instr_recursively(listcomp, 'FOR_ITER'), 1)
def setcomp():
return {y for x in a for y in [f(x)]}
self.assertEqual(count_instr_recursively(setcomp, 'FOR_ITER'), 1)
def dictcomp():
return {y: y for x in a for y in [f(x)]}
self.assertEqual(count_instr_recursively(dictcomp, 'FOR_ITER'), 1)
def genexpr():
return (y for x in a for y in [f(x)])
self.assertEqual(count_instr_recursively(genexpr, 'FOR_ITER'), 1)
class TestBuglets(unittest.TestCase):
def test_bug_11510(self):
# folded constant set optimization was commingled with the tuple
# unpacking optimization which would fail if the set had duplicate
# elements so that the set length was unexpected
def f():
x, y = {1, 1}
return x, y
with self.assertRaises(ValueError):
f()
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
saisai/phantomjs | src/breakpad/src/tools/gyp/test/additional-targets/gyptest-additional.py | 137 | 1617 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using an explicit build target of 'all'.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('all.gyp', chdir='src')
test.relocate('src', 'relocate/src')
# Build all.
test.build('all.gyp', chdir='relocate/src')
if test.format == 'xcode':
chdir = 'relocate/src/dir1'
else:
chdir = 'relocate/src'
# Output is as expected.
file_content = 'Hello from emit.py\n'
test.built_file_must_match('out2.txt', file_content, chdir=chdir)
test.built_file_must_not_exist('out.txt', chdir='relocate/src')
test.built_file_must_not_exist('lib1.dll', chdir='relocate/src')
# TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'?
if test.format == 'make':
  chdir = 'relocate/src'
else:
  chdir = 'relocate/src/dir1'
# Build the action explicitly.
test.build('actions.gyp', 'action1_target', chdir=chdir)
# Check that things got run.
file_content = 'Hello from emit.py\n'
test.built_file_must_exist('out.txt', chdir=chdir)
# Build the shared library explicitly.
test.build('actions.gyp', 'foolib1', chdir=chdir)
if test.format == 'make':
# TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'?
test.must_exist('relocate/src/out/Default/lib.target/dir1/'
+ test.dll_ + 'foolib1' + test._dll)
else:
test.built_file_must_exist('foolib1',
type=test.SHARED_LIB,
chdir=chdir)
test.pass_test()
| bsd-3-clause |
yanheven/console | horizon/test/tests/forms.py | 10 | 3590 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from horizon import forms
from horizon.test import helpers as test
class FormMixinTests(test.TestCase):
def _prepare_view(self, cls, request_headers, *args, **kwargs):
req = self.factory.get('/my_url/', **request_headers)
req.user = self.user
view = cls()
view.request = req
view.args = args
view.kwargs = kwargs
view.template_name = 'test_template'
return view
def test_modal_form_mixin_hide_true_if_ajax(self):
view = self._prepare_view(forms.views.ModalFormView,
dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest'))
context = view.get_context_data()
self.assertTrue(context['hide'])
def test_modal_form_mixin_add_to_field_header_set(self):
return self._test_form_mixin_add_to_field_header(add_field=True)
def test_modal_form_mixin_add_to_field_header_not_set(self):
return self._test_form_mixin_add_to_field_header(add_field=False)
def _test_form_mixin_add_to_field_header(self, add_field=False):
options = dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
if add_field:
options[forms.views.ADD_TO_FIELD_HEADER] = "keepme"
view = self._prepare_view(forms.views.ModalFormView, options)
context = view.get_context_data()
if add_field:
self.assertEqual(context['add_to_field'], "keepme")
else:
self.assertNotIn('add_to_field', context)
def test_template_name_change_based_on_ajax_request(self):
view = self._prepare_view(forms.views.ModalFormView,
dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest'))
self.assertEqual(view.get_template_names(),
'_' + view.template_name)
view = self._prepare_view(forms.views.ModalFormView, {})
self.assertEqual(view.get_template_names(), view.template_name)
class TestForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255")
def handle(self, request, data):
return True
class FormErrorTests(test.TestCase):
template = 'horizon/common/_form_fields.html'
def setUp(self):
super(FormErrorTests, self).setUp()
self.form = TestForm(self.request)
def _render_form(self):
return shortcuts.render(self.request, self.template,
{'form': self.form})
def test_set_warning(self):
warning_text = 'WARNING 29380'
self.form.set_warning(warning_text)
self.assertEqual([warning_text], self.form.warnings)
resp = self._render_form()
self.assertIn(warning_text, resp.content)
def test_api_error(self):
error_text = 'ERROR 12938'
self.form.full_clean()
self.form.api_error(error_text)
self.assertEqual([error_text], self.form.non_field_errors())
resp = self._render_form()
self.assertIn(error_text, resp.content)
| apache-2.0 |
Innovahn/cybex | addons/l10n_th/__init__.py | 893 | 1045 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
plumps/google-python-exercises | basic/string1.py | 1 | 3485 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
# +++your code here+++
return
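# One possible solution, left commented out so the exercise stub above
# stays intact:
#
# def donuts(count):
#   if count >= 10:
#     return 'Number of donuts: many'
#   return 'Number of donuts: %d' % count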
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
# +++your code here+++
return
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
# +++your code here+++
return
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
# +++your code here+++
return
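# One possible solution (commented out to preserve the exercise):
#
# def mix_up(a, b):
#   return b[:2] + a[2:] + ' ' + a[:2] + b[2:]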
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print('%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print('donuts')
# Each line calls donuts, compares its result to the expected for that call
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print()
print('both_ends')
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print()
print('fix_start')
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print()
print('mix_up')
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| apache-2.0 |
davidovitch/f90wrap | f90wrap/fortrantype.py | 1 | 2892 | import weakref
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
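# With the Singleton metaclass, instantiating the same subclass twice yields
# the same object -- a sketch with a hypothetical subclass:
#
#     class MyModule(FortranModule):
#         pass
#
#     assert MyModule() is MyModule()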
class FortranModule(object):
"""
Baseclass for Fortran modules
Metaclass is set to Singleton, so only one instance of each subclass of
FortranModule can be created.
"""
_dt_array_initialisers = []
__metaclass__ = Singleton
def __init__(self):
self._arrays = {}
self._objs = {}
# initialise any derived type arrays
for init_array in self._dt_array_initialisers:
init_array(self)
class FortranDerivedType(object):
"""
Base class for Fortran derived types
"""
_dt_array_initialisers = []
def __init__(self):
self._handle = None
self._arrays = {}
self._objs = {}
self._alloc = True
# initialise any derived type arrays
for init_array in self._dt_array_initialisers:
init_array(self)
@classmethod
def from_handle(cls, handle):
self = cls.__new__(cls)
FortranDerivedType.__init__(self) # always call the base constructor only
self._handle = handle
self._alloc = False
return self
class FortranDerivedTypeArray(object):
def __init__(self, parent, getfunc, setfunc, lenfunc, doc, arraytype):
self.parent = weakref.ref(parent)
self.getfunc = getfunc
self.setfunc = setfunc
self.lenfunc = lenfunc
self.doc = doc
self.arraytype = arraytype
def iterindices(self):
return iter(range(len(self)))
indices = property(iterindices)
def items(self):
for idx in self.indices:
yield self[idx]
def __iter__(self):
return self.items()
def __len__(self):
parent = self.parent()
if parent is None:
raise RuntimeError("Array's parent has gone out of scope")
return self.lenfunc(parent._handle)
def __getitem__(self, i):
parent = self.parent()
if parent is None:
raise RuntimeError("Array's parent has gone out of scope")
i += 1 # convert from 0-based (Python) to 1-based indices (Fortran)
element_handle = self.getfunc(parent._handle, i)
try:
obj = parent._objs[tuple(element_handle)]
except KeyError:
obj = parent._objs[tuple(element_handle)] = self.arraytype.from_handle(element_handle)
return obj
def __setitem__(self, i, value):
parent = self.parent()
if parent is None:
raise RuntimeError("Array's parent has gone out of scope")
i += 1 # convert from 0-based (Python) to 1-based indices (Fortran)
self.setfunc(parent._handle, i, value._handle)
| gpl-2.0 |
rishikksh20/scikit-learn | sklearn/gaussian_process/tests/test_kernels.py | 51 | 12799 | """Testing for kernels for Gaussian processes."""
# Author: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# License: BSD 3 clause
from sklearn.externals.funcsigs import signature
import numpy as np
from sklearn.gaussian_process.kernels import _approx_fprime
from sklearn.metrics.pairwise \
import PAIRWISE_KERNEL_FUNCTIONS, euclidean_distances, pairwise_kernels
from sklearn.gaussian_process.kernels \
import (RBF, Matern, RationalQuadratic, ExpSineSquared, DotProduct,
ConstantKernel, WhiteKernel, PairwiseKernel, KernelOperator,
Exponentiation)
from sklearn.base import clone
from sklearn.utils.testing import (assert_equal, assert_almost_equal,
assert_not_equal, assert_array_equal,
assert_array_almost_equal)
X = np.random.RandomState(0).normal(0, 1, (5, 2))
Y = np.random.RandomState(0).normal(0, 1, (6, 2))
kernel_white = RBF(length_scale=2.0) + WhiteKernel(noise_level=3.0)
kernels = [RBF(length_scale=2.0), RBF(length_scale_bounds=(0.5, 2.0)),
ConstantKernel(constant_value=10.0),
2.0 * RBF(length_scale=0.33, length_scale_bounds="fixed"),
2.0 * RBF(length_scale=0.5), kernel_white,
2.0 * RBF(length_scale=[0.5, 2.0]),
2.0 * Matern(length_scale=0.33, length_scale_bounds="fixed"),
2.0 * Matern(length_scale=0.5, nu=0.5),
2.0 * Matern(length_scale=1.5, nu=1.5),
2.0 * Matern(length_scale=2.5, nu=2.5),
2.0 * Matern(length_scale=[0.5, 2.0], nu=0.5),
3.0 * Matern(length_scale=[2.0, 0.5], nu=1.5),
4.0 * Matern(length_scale=[0.5, 0.5], nu=2.5),
RationalQuadratic(length_scale=0.5, alpha=1.5),
ExpSineSquared(length_scale=0.5, periodicity=1.5),
DotProduct(sigma_0=2.0), DotProduct(sigma_0=2.0) ** 2,
RBF(length_scale=[2.0]), Matern(length_scale=[2.0])]
for metric in PAIRWISE_KERNEL_FUNCTIONS:
if metric in ["additive_chi2", "chi2"]:
continue
kernels.append(PairwiseKernel(gamma=1.0, metric=metric))
def test_kernel_gradient():
# Compare analytic and numeric gradient of kernels.
for kernel in kernels:
K, K_gradient = kernel(X, eval_gradient=True)
assert_equal(K_gradient.shape[0], X.shape[0])
assert_equal(K_gradient.shape[1], X.shape[0])
assert_equal(K_gradient.shape[2], kernel.theta.shape[0])
def eval_kernel_for_theta(theta):
kernel_clone = kernel.clone_with_theta(theta)
K = kernel_clone(X, eval_gradient=False)
return K
K_gradient_approx = \
_approx_fprime(kernel.theta, eval_kernel_for_theta, 1e-10)
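        # _approx_fprime uses a forward-difference approximation: each
        # analytic slice K_gradient[..., i] is checked against
        # (K(theta + eps*e_i) - K(theta)) / eps with eps = 1e-10.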
assert_almost_equal(K_gradient, K_gradient_approx, 4)
def test_kernel_theta():
# Check that parameter vector theta of kernel is set correctly.
for kernel in kernels:
if isinstance(kernel, KernelOperator) \
or isinstance(kernel, Exponentiation): # skip non-basic kernels
continue
theta = kernel.theta
_, K_gradient = kernel(X, eval_gradient=True)
# Determine kernel parameters that contribute to theta
init_sign = signature(kernel.__class__.__init__).parameters.values()
args = [p.name for p in init_sign if p.name != 'self']
theta_vars = map(lambda s: s[0:-len("_bounds")],
filter(lambda s: s.endswith("_bounds"), args))
assert_equal(
set(hyperparameter.name
for hyperparameter in kernel.hyperparameters),
set(theta_vars))
# Check that values returned in theta are consistent with
# hyperparameter values (being their logarithms)
for i, hyperparameter in enumerate(kernel.hyperparameters):
assert_equal(theta[i],
np.log(getattr(kernel, hyperparameter.name)))
# Fixed kernel parameters must be excluded from theta and gradient.
for i, hyperparameter in enumerate(kernel.hyperparameters):
# create copy with certain hyperparameter fixed
params = kernel.get_params()
params[hyperparameter.name + "_bounds"] = "fixed"
kernel_class = kernel.__class__
new_kernel = kernel_class(**params)
# Check that theta and K_gradient are identical with the fixed
# dimension left out
_, K_gradient_new = new_kernel(X, eval_gradient=True)
assert_equal(theta.shape[0], new_kernel.theta.shape[0] + 1)
assert_equal(K_gradient.shape[2], K_gradient_new.shape[2] + 1)
if i > 0:
assert_equal(theta[:i], new_kernel.theta[:i])
assert_array_equal(K_gradient[..., :i],
K_gradient_new[..., :i])
if i + 1 < len(kernel.hyperparameters):
assert_equal(theta[i + 1:], new_kernel.theta[i:])
assert_array_equal(K_gradient[..., i + 1:],
K_gradient_new[..., i:])
# Check that values of theta are modified correctly
for i, hyperparameter in enumerate(kernel.hyperparameters):
theta[i] = np.log(42)
kernel.theta = theta
assert_almost_equal(getattr(kernel, hyperparameter.name), 42)
setattr(kernel, hyperparameter.name, 43)
assert_almost_equal(kernel.theta[i], np.log(43))
def test_auto_vs_cross():
# Auto-correlation and cross-correlation should be consistent.
for kernel in kernels:
if kernel == kernel_white:
continue # Identity is not satisfied on diagonal
K_auto = kernel(X)
K_cross = kernel(X, X)
assert_almost_equal(K_auto, K_cross, 5)
def test_kernel_diag():
# Test that diag method of kernel returns consistent results.
for kernel in kernels:
K_call_diag = np.diag(kernel(X))
K_diag = kernel.diag(X)
assert_almost_equal(K_call_diag, K_diag, 5)
def test_kernel_operator_commutative():
# Adding kernels and multiplying kernels should be commutative.
# Check addition
assert_almost_equal((RBF(2.0) + 1.0)(X),
(1.0 + RBF(2.0))(X))
# Check multiplication
assert_almost_equal((3.0 * RBF(2.0))(X),
(RBF(2.0) * 3.0)(X))
def test_kernel_anisotropic():
# Anisotropic kernel should be consistent with isotropic kernels.
kernel = 3.0 * RBF([0.5, 2.0])
K = kernel(X)
X1 = np.array(X)
X1[:, 0] *= 4
K1 = 3.0 * RBF(2.0)(X1)
assert_almost_equal(K, K1)
X2 = np.array(X)
X2[:, 1] /= 4
K2 = 3.0 * RBF(0.5)(X2)
assert_almost_equal(K, K2)
# Check getting and setting via theta
kernel.theta = kernel.theta + np.log(2)
assert_array_equal(kernel.theta, np.log([6.0, 1.0, 4.0]))
assert_array_equal(kernel.k2.length_scale, [1.0, 4.0])
def test_kernel_stationary():
# Test stationarity of kernels.
for kernel in kernels:
if not kernel.is_stationary():
continue
K = kernel(X, X + 1)
assert_almost_equal(K[0, 0], np.diag(K))
def check_hyperparameters_equal(kernel1, kernel2):
# Check that hyperparameters of two kernels are equal
for attr in set(dir(kernel1) + dir(kernel2)):
if attr.startswith("hyperparameter_"):
attr_value1 = getattr(kernel1, attr)
attr_value2 = getattr(kernel2, attr)
assert_equal(attr_value1, attr_value2)
def test_kernel_clone():
# Test that sklearn's clone works correctly on kernels.
for kernel in kernels:
kernel_cloned = clone(kernel)
# XXX: Should this be fixed?
# This differs from the sklearn's estimators equality check.
assert_equal(kernel, kernel_cloned)
assert_not_equal(id(kernel), id(kernel_cloned))
# Check that all constructor parameters are equal.
assert_equal(kernel.get_params(), kernel_cloned.get_params())
# Check that all hyperparameters are equal.
yield check_hyperparameters_equal, kernel, kernel_cloned
def test_kernel_clone_after_set_params():
# This test is to verify that using set_params does not
# break clone on kernels.
# This used to break because in kernels such as the RBF, non-trivial
# logic that modified the length scale used to be in the constructor
# See https://github.com/scikit-learn/scikit-learn/issues/6961
# for more details.
bounds = (1e-5, 1e5)
for kernel in kernels:
kernel_cloned = clone(kernel)
params = kernel.get_params()
# RationalQuadratic kernel is isotropic.
isotropic_kernels = (ExpSineSquared, RationalQuadratic)
if 'length_scale' in params and not isinstance(kernel,
isotropic_kernels):
length_scale = params['length_scale']
if np.iterable(length_scale):
params['length_scale'] = length_scale[0]
params['length_scale_bounds'] = bounds
else:
params['length_scale'] = [length_scale] * 2
params['length_scale_bounds'] = bounds * 2
kernel_cloned.set_params(**params)
kernel_cloned_clone = clone(kernel_cloned)
assert_equal(kernel_cloned_clone.get_params(),
kernel_cloned.get_params())
assert_not_equal(id(kernel_cloned_clone), id(kernel_cloned))
yield (check_hyperparameters_equal, kernel_cloned,
kernel_cloned_clone)
def test_matern_kernel():
# Test consistency of Matern kernel for special values of nu.
K = Matern(nu=1.5, length_scale=1.0)(X)
# the diagonal elements of a matern kernel are 1
assert_array_almost_equal(np.diag(K), np.ones(X.shape[0]))
# matern kernel for coef0==0.5 is equal to absolute exponential kernel
K_absexp = np.exp(-euclidean_distances(X, X, squared=False))
K = Matern(nu=0.5, length_scale=1.0)(X)
assert_array_almost_equal(K, K_absexp)
# test that special cases of matern kernel (coef0 in [0.5, 1.5, 2.5])
# result in nearly identical results as the general case for coef0 in
# [0.5 + tiny, 1.5 + tiny, 2.5 + tiny]
tiny = 1e-10
for nu in [0.5, 1.5, 2.5]:
K1 = Matern(nu=nu, length_scale=1.0)(X)
K2 = Matern(nu=nu + tiny, length_scale=1.0)(X)
assert_array_almost_equal(K1, K2)
def test_kernel_versus_pairwise():
# Check that GP kernels can also be used as pairwise kernels.
for kernel in kernels:
# Test auto-kernel
if kernel != kernel_white:
# For WhiteKernel: k(X) != k(X,X). This is assumed by
# pairwise_kernels
K1 = kernel(X)
K2 = pairwise_kernels(X, metric=kernel)
assert_array_almost_equal(K1, K2)
# Test cross-kernel
K1 = kernel(X, Y)
K2 = pairwise_kernels(X, Y, metric=kernel)
assert_array_almost_equal(K1, K2)
def test_set_get_params():
# Check that set_params()/get_params() is consistent with kernel.theta.
for kernel in kernels:
# Test get_params()
index = 0
params = kernel.get_params()
for hyperparameter in kernel.hyperparameters:
if isinstance("string", type(hyperparameter.bounds)):
if hyperparameter.bounds == "fixed":
continue
size = hyperparameter.n_elements
if size > 1: # anisotropic kernels
assert_almost_equal(np.exp(kernel.theta[index:index + size]),
params[hyperparameter.name])
index += size
else:
assert_almost_equal(np.exp(kernel.theta[index]),
params[hyperparameter.name])
index += 1
# Test set_params()
index = 0
value = 10 # arbitrary value
for hyperparameter in kernel.hyperparameters:
if isinstance("string", type(hyperparameter.bounds)):
if hyperparameter.bounds == "fixed":
continue
size = hyperparameter.n_elements
if size > 1: # anisotropic kernels
kernel.set_params(**{hyperparameter.name: [value] * size})
assert_almost_equal(np.exp(kernel.theta[index:index + size]),
[value] * size)
index += size
else:
kernel.set_params(**{hyperparameter.name: value})
assert_almost_equal(np.exp(kernel.theta[index]), value)
index += 1
def test_repr_kernels():
# Smoke-test for repr in kernels.
for kernel in kernels:
repr(kernel)
| bsd-3-clause |
asimshankar/tensorflow | tensorflow/python/autograph/operators/logical.py | 8 | 3349 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Logical operators, including comparison and bool operators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_math_ops
def not_(a):
"""Functional form of "not"."""
if tensor_util.is_tensor(a):
return gen_math_ops.logical_not(a)
return not a
def and_(a, b):
"""Functional form of "and". Uses lazy evaluation semantics."""
a_val = a()
if tensor_util.is_tensor(a_val):
return _tf_lazy_and(a_val, b)
return _py_lazy_and(a_val, b)
def _tf_lazy_and(cond, b):
"""Lazy-eval equivalent of "and" for Tensors."""
# TODO(mdan): Enforce cond is scalar here?
return control_flow_ops.cond(cond, b, lambda: cond)
def _py_lazy_and(cond, b):
"""Lazy-eval equivalent of "and" in Python."""
return cond and b()
def or_(a, b):
"""Functional form of "or". Uses lazy evaluation semantics."""
a_val = a()
if tensor_util.is_tensor(a_val):
return _tf_lazy_or(a_val, b)
return _py_lazy_or(a_val, b)
def _tf_lazy_or(cond, b):
"""Lazy-eval equivalent of "or" for Tensors."""
# TODO(mdan): Enforce cond is scalar here?
return control_flow_ops.cond(cond, lambda: cond, b)
def _py_lazy_or(cond, b):
"""Lazy-eval equivalent of "or" in Python."""
return cond or b()
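# Illustrative sketch (not part of the original module): for plain Python
# values, and_/or_ above short-circuit exactly like the `and`/`or` keywords;
# both operands are passed as zero-argument callables so the right-hand side
# is evaluated only when it can affect the result, e.g.:
#   and_(lambda: False, lambda: 1 / 0)  # -> False; the division never runs
#   or_(lambda: True, lambda: 1 / 0)    # -> True; the division never runs
# For Tensors, the same deferral is achieved via control_flow_ops.cond.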
def eq(a, b):
"""Functional form of "equal"."""
if tensor_util.is_tensor(a) or tensor_util.is_tensor(b):
return _tf_equal(a, b)
return _py_equal(a, b)
def _tf_equal(a, b):
"""Overload of "equal" for Tensors."""
return gen_math_ops.equal(a, b)
def _py_equal(a, b):
"""Overload of "equal" that falls back to Python's default implementation."""
return a == b
def not_eq(a, b):
"""Functional form of "not-equal"."""
return not_(eq(a, b))
# Default implementations for the remaining operators.
def gt(a, b):
  """Functional form of "greater-than"."""
  return a > b
def gt_e(a, b):
  """Functional form of "greater-than-or-equal"."""
  return a >= b
def is_(a, b):
  """Functional form of "is"."""
  return a is b
def is_not(a, b):
  """Functional form of "is-not"."""
  return a is not b
def in_(a, b):
  """Functional form of "in"."""
  # TODO(mdan): in and not_in should probably be convertible for some types.
  return a in b
def lt(a, b):
  """Functional form of "less-than"."""
  return a < b
def lt_e(a, b):
  """Functional form of "less-than-or-equal"."""
  return a <= b
def not_in(a, b):
  """Functional form of "not-in"."""
  return a not in b
def u_sub(a):
"""Functional form of "unary-sub"."""
return -a
| apache-2.0 |
barnabytprowe/great3-public | validation/test_offsets.py | 2 | 6031 | # Copyright (c) 2014, the GREAT3 executive committee (http://www.great3challenge.info/?q=contacts)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to
# endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""@file test_offsets.py
Module containing some basic routines to check that the offsets assumed by Melanie's code match the
offsets contained in truth catalogues.
"""
import os
import sys
import numpy as np
sys.path.append("..")
import pipelines
sys.path.append(os.path.join("..", ".."))
import great3sims.mapper
sys.path.append(os.path.join("..", "..", "public-scripts"))
import branch_id
sys.path.append(os.path.join("..", "..", "server", "great3"))
import evaluate
TRUTH_DIR = "/Users/browe/great3/truth"
NFIELDS = 10
NSUBFIELDS = 200
def melanie_identifier(subfield_index):
"""Get the 9-digit identifier integer according to Melanie's rules in presubmission.py.
"""
    # Placeholder: the 9-digit identifier construction is not yet implemented.
    return
def print_offsets_times_70(experiment, obs_type, truth_dir=TRUTH_DIR):
"""Prints the offsets multiplied by 70 for comparison with branch_id x_offset and y_offset
values as per Melanie's suggestion on Issue #16.
"""
subfield_index, x_offset_deg, y_offset_deg = evaluate.get_generate_variable_offsets(
experiment, obs_type, truth_dir=truth_dir)
print "70. * x_offset ="
print x_offset_deg * 70.
print "70. * y_offset ="
print y_offset_deg * 70.
return
if __name__ == "__main__":
experiment = "control"
# First test the offsets in branch_id match those in the offset_truth files
for obs_type in ("ground", "space"):
subfield_index_truth, x_offset_truth, y_offset_truth = \
evaluate.get_generate_variable_offsets(experiment, obs_type, truth_dir=TRUTH_DIR)
x_offset_presub = branch_id.x_offset[experiment+"-"+obs_type+"-variable"]
y_offset_presub = branch_id.y_offset[experiment+"-"+obs_type+"-variable"]
# Assert that the truth * 70 = presub version
np.testing.assert_array_equal(
(70. * x_offset_truth).astype(int), np.asarray(x_offset_presub),
err_msg="Truth x_offsets do not match those in public-scripts.branch_id")
np.testing.assert_array_equal(
(70. * y_offset_truth).astype(int), np.asarray(y_offset_presub),
err_msg="Truth x_offsets do not match those in public-scripts.branch_id")
# Then try testing the whole x-y using the (no longer) hacked presubmission
pipelines.build_submission(
"im3shape-1", experiment, obs_type, "variable",
submission_dir=os.path.join("..", "submissions"),
presubmission_exec=os.path.join("..", "..", "public-scripts", "presubmission.py"))
# Then build the truth catalogues saving the x and y
field_index, theta, map_E, map_B, maperr = evaluate.get_generate_variable_truth(
experiment, obs_type, truth_dir=TRUTH_DIR,
corr2_params=os.path.join("..", "..", "server", "great3", "corr2.params"),
make_plots=False, output_xy_prefix="./cats/truth_xy_"+experiment+"-"+obs_type)
try:
xysubfile = "./cats/test_xy_"+experiment+"-"+obs_type+"-variable"+(
"-sub%03d" % 0)+".asc"
np.loadtxt(xysubfile)
except IOError as err:
            # Now the hacked presubmission has moved all of these catalogues to ".", and we want to
            # move them to ./cats, so move both the test and truth catalogues there
            print "Moving cats to ./cats"
            import subprocess
            import glob
            call_list = (["mv",]+glob.glob("test*.asc")+["./cats"])
            print call_list
            retcode = subprocess.call(call_list)
            retcode = subprocess.call(["mv",]+glob.glob("truth*.asc")+["./cats"])
# Then check each subfield
for isub in range(NSUBFIELDS):
xysubfile = "./cats/test_xy_"+experiment+"-"+obs_type+"-variable"+(
"-sub%03d" % isub)+".asc"
xytruthfile = "./cats/truth_xy_"+experiment+"-"+obs_type+("-sub%03d" % isub)+".asc"
print "Comparing "+xysubfile+" to "+xytruthfile
subdata = np.loadtxt(xysubfile)
truthdata = np.loadtxt(xytruthfile)
try:
np.testing.assert_array_almost_equal(
subdata, truthdata, decimal=4,
err_msg="Barney's truth and presubmission-generated x,y arrays do not match "+
"at 3 d.p. for subfield = "+str(isub))
except AssertionError as err:
print str(err)
pass
#print_offsets_times_70("control", "ground")
#print_offsets_times_70("control", "space")
#print "All tests pass so far..."
| bsd-3-clause |
AlanZatarain/cortex-vfx | test/IECore/ops/maths/multiply/multiply-1.py | 17 | 2435 | ##########################################################################
#
# Copyright (c) 2007-2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from IECore import *
class multiply( Op ) :
def __init__( self ) :
Op.__init__( self,
"multiplies two numbers together.",
IntParameter(
name = "result",
description = "a multiplied by b",
defaultValue = 2,
)
)
self.parameters().addParameter(
IntParameter(
name = "a",
description = "first operand",
defaultValue = 1,
)
)
self.parameters().addParameter(
IntParameter(
name = "b",
description = "second operand",
defaultValue = 2,
)
)
def doOperation( self, operands ) :
return IntData( operands["a"].value * operands["b"].value )
registerRunTimeTyped( multiply )
| bsd-3-clause |
sunlightlabs/bulgogi | foreign/urls.py | 1 | 2559 | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'foreign.views.map', name='home'),
url(r'^about', 'foreign.views.about', name='about'),
url(r'^methodology', 'foreign.views.methodology', name='methodology'),
url(r'^incoming-fara', 'foreign.views.incoming_fara', name='incoming-fara'),
# proposed arms form
url(r'^form-profile/(\d+)', 'foreign.views.form_profile', name='form-profile'),
url(r'^incoming-arms', 'foreign.views.incoming_arms', name='incoming-arms'),
url(r'^arms-profile/(\d+)', 'foreign.views.arms_profile', name='arms-profile'),
url(r'^client-profile/(\d+)', 'foreign.views.client_profile', name='client-profile'),
url(r'^reg-profile/(\d+)', 'foreign.views.reg_profile', name='reg-profile'),
url(r'^location-profile/(\d+)', 'foreign.views.location_profile', name='location-profile'),
url(r'^recipient-profile/(\d+)', 'foreign.views.recipient_profile', name='recipient-profile'),
url(r'^contact-table', 'foreign.views.contact_table', name='contact-table'),
url(r'^payment-table', 'foreign.views.payment_table', name='payment-table'),
url(r'^disbursement-table', 'foreign.views.disbursement_table', name='disbursement-table'),
url(r'^contribution-table', 'foreign.views.contribution_table', name='contribution-table'),
url(r'^registrants', 'foreign.views.reg_totals', name='registrants'),
url(r'^clients', 'foreign.views.clients', name='clients'),
url(r'^lobby-clients2013','foreign.views.lobby_clients', name='lobby-clients2013' ),
url(r'^lobby-location2013', 'foreign.views.location13', name='location2013'),
url(r'^search', 'foreign.views.search', name='search'),
url(r'^more-search', 'foreign.views.search_more', name='more-search'),
url(r'^archived-data', 'foreign.views.archived_data', name='archived-data'),
# to check status
url(r'^test', 'foreign.views.test', name='test'),
url(r'^map', 'foreign.views.map', name='map'),
url(r'^generate_csv/(?P<model_str>(contact|contribution|payment|disbursement|client))$','foreign.views.generate_csv', name='generate-csv'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
handler404 = 'foreign.views.not_found'
handler500 = 'foreign.views.error'
| mit |
tmrowco/electricitymap | parsers/statnett.py | 2 | 7190 | #!/usr/bin/env python3
# The arrow library is used to handle datetimes
import arrow
import logging
# The request library is used to fetch content through HTTP
import requests
exchanges_mapping = {
'BY->LT': [
'BY->LT'
],
'DE->DK-DK1': [
'DE->DK1',
],
'DE->DK-DK2': [
'DE->DK2',
],
'DE->SE': [
'DE->SE4'
],
'DE->SE-SE4': [
'DE->SE4'
],
'DK-DK1->NO': [
'DK1->NO2'
],
'DK-DK1->NO-NO2': [
'DK1->NO2'
],
'DK-DK1->SE': [
'DK1->SE3'
],
'DK-DK1->SE-SE3': [
'DK1->SE3'
],
'DK-DK2->SE': [
'DK2->SE4'
],
'DK-DK2->SE-SE4': [
'DK2->SE4'
],
'EE->RU': [
'EE->RU'
],
'EE->RU-1':[
'EE->RU'
],
'EE->LV': [
'EE->LV'
],
'EE->FI': [
'EE->FI'
],
'FI->NO': [
'FI->NO4'
],
'FI->NO-NO4': [
'FI->NO4'
],
'FI->RU': [
'FI->RU'
],
'FI->RU-1': [
'FI->RU'
],
'FI->SE': [
'FI->SE1',
'FI->SE3'
],
'FI->SE-SE1': [
'FI->SE1',
],
'FI->SE-SE3': [
'FI->SE3'
],
'LT->LV': [
'LT->LV'
],
'LT->SE': [
'LT->SE4'
],
'LT->SE-SE4': [
'LT->SE4'
],
'LT->PL': [
'LT->PL'
],
'LT->RU-KGD': [
'LT->RU'
],
'LV->RU': [
'LV->RU'
],
'LV->RU-1': [
'LV->RU'
],
'NL->NO': [
'NL->NO2'
],
'NL->NO-NO2': [
'NL->NO2'
],
'NO->SE': [
'NO1->SE3',
'NO3->SE2',
'NO4->SE1',
'NO4->SE2'
],
'NO-NO1->NO-NO2': [
'NO1->NO2'
],
'NO-NO1->NO-NO3': [
'NO1->NO3'
],
'NO-NO1->NO-NO5': [
'NO1->NO5'
],
'NO-NO1->SE': [
'NO1->SE3'
],
'NO-NO2->NO-NO5': [
'NO2->NO5'
],
'NO-NO3->NO-NO4': [
'NO3->NO4'
],
'NO-NO3->NO-NO5': [
'NO3->NO5'
],
'NO-NO3->SE': [
'NO3->SE2'
],
'NO-NO3->SE-SE2': [
'NO3->SE2'
],
'NO->RU': [
'NO4->RU'
],
'NO->RU-1': [
'NO4->RU'
],
'NO-NO4->RU': [
'NO4->RU'
],
'NO-NO4->RU-1': [
'NO4->RU'
],
'NO-NO4->SE': [
'NO4->SE1',
'NO4->SE2'
],
'PL->SE': [
'PL->SE4',
],
'PL->SE-SE4': [
'PL->SE4'
],
'SE-SE1->SE-SE2': [
'SE1->SE2'
],
    'SE-SE2->SE-SE3': [
        'SE2->SE3'
    ],
    'SE-SE3->SE-SE4': [
        'SE3->SE4'
    ],
}
# The exchanges_mapping above translates country-level exchange keys into
# bidding-zone-level flows.
def fetch_production(zone_key='SE', session=None, target_datetime=None, logger=logging.getLogger(__name__)):
r = session or requests.session()
timestamp = (target_datetime.timestamp() if target_datetime else arrow.now().timestamp) * 1000
url = 'http://driftsdata.statnett.no/restapi/ProductionConsumption/GetLatestDetailedOverview?timestamp=%d' % timestamp
response = r.get(url)
obj = response.json()
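    # Values in the payload are strings that use non-breaking spaces (u'\xa0')
    # as thousands separators, hence the .replace() calls below.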
data = {
'zoneKey': zone_key,
'production': {
'nuclear': float(list(filter(
lambda x: x['titleTranslationId'] == 'ProductionConsumption.%s%sDesc' % (
'Nuclear', zone_key),
obj['NuclearData']))[0]['value'].replace(u'\xa0', '')),
'hydro': float(list(filter(
lambda x: x['titleTranslationId'] == 'ProductionConsumption.%s%sDesc' % (
'Hydro', zone_key),
obj['HydroData']))[0]['value'].replace(u'\xa0', '')),
'wind': float(list(filter(
lambda x: x['titleTranslationId'] == 'ProductionConsumption.%s%sDesc' % (
'Wind', zone_key),
obj['WindData']))[0]['value'].replace(u'\xa0', '')),
'unknown':
float(list(filter(
lambda x: x['titleTranslationId'] == 'ProductionConsumption.%s%sDesc' % (
'Thermal', zone_key),
obj['ThermalData']))[0]['value'].replace(u'\xa0', '')) +
float(list(filter(
lambda x: x['titleTranslationId'] == 'ProductionConsumption.%s%sDesc' % (
'NotSpecified', zone_key),
obj['NotSpecifiedData']))[0]['value'].replace(u'\xa0', '')),
},
'storage': {},
'source': 'driftsdata.stattnet.no',
}
data['datetime'] = arrow.get(obj['MeasuredAt'] / 1000).datetime
return data
def fetch_exchange_by_bidding_zone(bidding_zone1='DK1', bidding_zone2='NO2', session=None,
target_datetime=None, logger=logging.getLogger(__name__)):
# Convert bidding zone names into statnett zones
bidding_zone_1_trimmed, bidding_zone_2_trimmed = [ x.split('-')[-1] for x in [bidding_zone1, bidding_zone2] ]
bidding_zone_a, bidding_zone_b = sorted([bidding_zone_1_trimmed, bidding_zone_2_trimmed])
r = session or requests.session()
timestamp = (target_datetime.timestamp() if target_datetime else arrow.now().timestamp) * 1000
url = 'http://driftsdata.statnett.no/restapi/PhysicalFlowMap/GetFlow?Ticks=%d' % timestamp
response = r.get(url)
obj = response.json()
exchange = list(filter(
lambda x: set([x['OutAreaElspotId'], x['InAreaElspotId']]) == set(
[bidding_zone_a, bidding_zone_b]),
obj))[0]
return {
'sortedZoneKeys': '->'.join(sorted([bidding_zone1, bidding_zone2])),
'netFlow': exchange['Value'] if bidding_zone_a == exchange['OutAreaElspotId'] else -1 * exchange['Value'],
'datetime': arrow.get(obj[0]['MeasureDate'] / 1000).datetime,
'source': 'driftsdata.stattnet.no',
}
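# Note (inferred from the code above, not documented upstream): 'netFlow' is
# positive when power flows from the alphabetically first zone in
# 'sortedZoneKeys' towards the second one.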
def _fetch_exchanges_from_sorted_bidding_zones(sorted_bidding_zones, session=None, target_datetime=None):
zones = sorted_bidding_zones.split('->')
return fetch_exchange_by_bidding_zone(zones[0], zones[1], session, target_datetime)
def _sum_of_exchanges(exchanges):
exchange_list = list(exchanges)
return {
'netFlow': sum(e['netFlow'] for e in exchange_list),
'datetime': exchange_list[0]['datetime'],
'source': exchange_list[0]['source']
}
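# Note: _sum_of_exchanges assumes all sub-exchanges share the same measurement
# timestamp and source, so it reports the first entry's values for the total.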
def fetch_exchange(zone_key1='DK', zone_key2='NO', session=None, target_datetime=None, logger=logging.getLogger(__name__)):
r = session or requests.session()
sorted_exchange = '->'.join(sorted([zone_key1, zone_key2]))
data = _sum_of_exchanges(map(lambda e: _fetch_exchanges_from_sorted_bidding_zones(e, r, target_datetime),
exchanges_mapping[sorted_exchange]))
data['sortedZoneKeys'] = '->'.join(sorted([zone_key1, zone_key2]))
return data
if __name__ == '__main__':
"""Main method, never used by the Electricity Map backend, but handy for testing."""
print('fetch_production(SE) ->')
print(fetch_production('SE'))
print('fetch_exchange(NO, SE) ->')
print(fetch_exchange('NO', 'SE'))
print('fetch_exchange(NO-NO4, RU-1) ->')
print(fetch_exchange('NO-NO4', 'RU-1'))
print('fetch_exchange(EE, RU-1) ->')
print(fetch_exchange('EE', 'RU-1'))
| gpl-3.0 |
impowski/servo | tests/wpt/web-platform-tests/tools/py/py/_code/_assertionold.py | 218 | 17863 | import py
import sys, inspect
from compiler import parse, ast, pycodegen
from py._code.assertion import BuiltinAssertionError, _format_explanation
passthroughex = py.builtin._sysex
class Failure:
def __init__(self, node):
self.exc, self.value, self.tb = sys.exc_info()
self.node = node
class View(object):
"""View base class.
If C is a subclass of View, then C(x) creates a proxy object around
the object x. The actual class of the proxy is not C in general,
but a *subclass* of C determined by the rules below. To avoid confusion
we call view class the class of the proxy (a subclass of C, so of View)
and object class the class of x.
Attributes and methods not found in the proxy are automatically read on x.
Other operations like setting attributes are performed on the proxy, as
determined by its view class. The object x is available from the proxy
as its __obj__ attribute.
The view class selection is determined by the __view__ tuples and the
optional __viewkey__ method. By default, the selected view class is the
most specific subclass of C whose __view__ mentions the class of x.
If no such subclass is found, the search proceeds with the parent
object classes. For example, C(True) will first look for a subclass
of C with __view__ = (..., bool, ...) and only if it doesn't find any
look for one with __view__ = (..., int, ...), and then ..., object,...
If everything fails the class C itself is considered to be the default.
Alternatively, the view class selection can be driven by another aspect
of the object x, instead of the class of x, by overriding __viewkey__.
See last example at the end of this module.
"""
_viewcache = {}
__view__ = ()
def __new__(rootclass, obj, *args, **kwds):
self = object.__new__(rootclass)
self.__obj__ = obj
self.__rootclass__ = rootclass
key = self.__viewkey__()
try:
self.__class__ = self._viewcache[key]
except KeyError:
self.__class__ = self._selectsubclass(key)
return self
def __getattr__(self, attr):
# attributes not found in the normal hierarchy rooted on View
# are looked up in the object's real class
return getattr(self.__obj__, attr)
def __viewkey__(self):
return self.__obj__.__class__
def __matchkey__(self, key, subclasses):
if inspect.isclass(key):
keys = inspect.getmro(key)
else:
keys = [key]
for key in keys:
result = [C for C in subclasses if key in C.__view__]
if result:
return result
return []
def _selectsubclass(self, key):
subclasses = list(enumsubclasses(self.__rootclass__))
for C in subclasses:
if not isinstance(C.__view__, tuple):
C.__view__ = (C.__view__,)
choices = self.__matchkey__(key, subclasses)
if not choices:
return self.__rootclass__
elif len(choices) == 1:
return choices[0]
else:
# combine the multiple choices
return type('?', tuple(choices), {})
def __repr__(self):
return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
def enumsubclasses(cls):
for subcls in cls.__subclasses__():
for subsubclass in enumsubclasses(subcls):
yield subsubclass
yield cls
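# Illustrative sketch (not part of the original module): a minimal use of the
# view-class dispatch described in View's docstring. The class names below are
# invented for the example.
#
#     class NumView(View):
#         __view__ = (int, float)
#     class BoolView(NumView):
#         __view__ = bool
#
#     NumView(True).__class__ is BoolView  # bool is more specific than int
#     NumView(3).__class__ is NumView      # falls back to the int match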
class Interpretable(View):
"""A parse tree node with a few extra methods."""
explanation = None
def is_builtin(self, frame):
return False
def eval(self, frame):
# fall-back for unknown expression nodes
try:
expr = ast.Expression(self.__obj__)
expr.filename = '<eval>'
self.__obj__.filename = '<eval>'
co = pycodegen.ExpressionCodeGenerator(expr).getCode()
result = frame.eval(co)
except passthroughex:
raise
except:
raise Failure(self)
self.result = result
self.explanation = self.explanation or frame.repr(self.result)
def run(self, frame):
# fall-back for unknown statement nodes
try:
expr = ast.Module(None, ast.Stmt([self.__obj__]))
expr.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(expr).getCode()
frame.exec_(co)
except passthroughex:
raise
except:
raise Failure(self)
def nice_explanation(self):
return _format_explanation(self.explanation)
class Name(Interpretable):
__view__ = ast.Name
def is_local(self, frame):
source = '%r in locals() is not globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_global(self, frame):
source = '%r in globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_builtin(self, frame):
source = '%r not in locals() and %r not in globals()' % (
self.name, self.name)
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def eval(self, frame):
super(Name, self).eval(frame)
if not self.is_local(frame):
self.explanation = self.name
class Compare(Interpretable):
__view__ = ast.Compare
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
for operation, expr2 in self.ops:
if hasattr(self, 'result'):
# shortcutting in chained expressions
if not frame.is_true(self.result):
break
expr2 = Interpretable(expr2)
expr2.eval(frame)
self.explanation = "%s %s %s" % (
expr.explanation, operation, expr2.explanation)
source = "__exprinfo_left %s __exprinfo_right" % operation
try:
self.result = frame.eval(source,
__exprinfo_left=expr.result,
__exprinfo_right=expr2.result)
except passthroughex:
raise
except:
raise Failure(self)
expr = expr2
class And(Interpretable):
__view__ = ast.And
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if not frame.is_true(expr.result):
break
self.explanation = '(' + ' and '.join(explanations) + ')'
class Or(Interpretable):
__view__ = ast.Or
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if frame.is_true(expr.result):
break
self.explanation = '(' + ' or '.join(explanations) + ')'
# == Unary operations ==
keepalive = []
for astclass, astpattern in {
ast.Not : 'not __exprinfo_expr',
ast.Invert : '(~__exprinfo_expr)',
}.items():
class UnaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
expr = Interpretable(self.expr)
expr.eval(frame)
self.explanation = astpattern.replace('__exprinfo_expr',
expr.explanation)
try:
self.result = frame.eval(astpattern,
__exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(UnaryArith)
# == Binary operations ==
for astclass, astpattern in {
ast.Add : '(__exprinfo_left + __exprinfo_right)',
ast.Sub : '(__exprinfo_left - __exprinfo_right)',
ast.Mul : '(__exprinfo_left * __exprinfo_right)',
ast.Div : '(__exprinfo_left / __exprinfo_right)',
ast.Mod : '(__exprinfo_left % __exprinfo_right)',
ast.Power : '(__exprinfo_left ** __exprinfo_right)',
}.items():
class BinaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
left = Interpretable(self.left)
left.eval(frame)
right = Interpretable(self.right)
right.eval(frame)
self.explanation = (astpattern
.replace('__exprinfo_left', left .explanation)
.replace('__exprinfo_right', right.explanation))
try:
self.result = frame.eval(astpattern,
__exprinfo_left=left.result,
__exprinfo_right=right.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(BinaryArith)
class CallFunc(Interpretable):
__view__ = ast.CallFunc
def is_bool(self, frame):
source = 'isinstance(__exprinfo_value, bool)'
try:
return frame.is_true(frame.eval(source,
__exprinfo_value=self.result))
except passthroughex:
raise
except:
return False
def eval(self, frame):
node = Interpretable(self.node)
node.eval(frame)
explanations = []
vars = {'__exprinfo_fn': node.result}
source = '__exprinfo_fn('
for a in self.args:
if isinstance(a, ast.Keyword):
keyword = a.name
a = a.expr
else:
keyword = None
a = Interpretable(a)
a.eval(frame)
argname = '__exprinfo_%d' % len(vars)
vars[argname] = a.result
if keyword is None:
source += argname + ','
explanations.append(a.explanation)
else:
source += '%s=%s,' % (keyword, argname)
explanations.append('%s=%s' % (keyword, a.explanation))
if self.star_args:
star_args = Interpretable(self.star_args)
star_args.eval(frame)
argname = '__exprinfo_star'
vars[argname] = star_args.result
source += '*' + argname + ','
explanations.append('*' + star_args.explanation)
if self.dstar_args:
dstar_args = Interpretable(self.dstar_args)
dstar_args.eval(frame)
argname = '__exprinfo_kwds'
vars[argname] = dstar_args.result
source += '**' + argname + ','
explanations.append('**' + dstar_args.explanation)
self.explanation = "%s(%s)" % (
node.explanation, ', '.join(explanations))
if source.endswith(','):
source = source[:-1]
source += ')'
try:
self.result = frame.eval(source, **vars)
except passthroughex:
raise
except:
raise Failure(self)
if not node.is_builtin(frame) or not self.is_bool(frame):
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
class Getattr(Interpretable):
__view__ = ast.Getattr
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
source = '__exprinfo_expr.%s' % self.attrname
try:
self.result = frame.eval(source, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
self.explanation = '%s.%s' % (expr.explanation, self.attrname)
# if the attribute comes from the instance, its value is interesting
source = ('hasattr(__exprinfo_expr, "__dict__") and '
'%r in __exprinfo_expr.__dict__' % self.attrname)
try:
from_instance = frame.is_true(
frame.eval(source, __exprinfo_expr=expr.result))
except passthroughex:
raise
except:
from_instance = True
if from_instance:
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
# == Re-interpretation of full statements ==
class Assert(Interpretable):
__view__ = ast.Assert
def run(self, frame):
test = Interpretable(self.test)
test.eval(frame)
# simplify 'assert False where False = ...'
if (test.explanation.startswith('False\n{False = ') and
test.explanation.endswith('\n}')):
test.explanation = test.explanation[15:-2]
# print the result as 'assert <explanation>'
self.result = test.result
self.explanation = 'assert ' + test.explanation
if not frame.is_true(test.result):
try:
raise BuiltinAssertionError
except passthroughex:
raise
except:
raise Failure(self)
class Assign(Interpretable):
__view__ = ast.Assign
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = '... = ' + expr.explanation
# fall-back-run the rest of the assignment
ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
mod = ast.Module(None, ast.Stmt([ass]))
mod.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(mod).getCode()
try:
frame.exec_(co, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
class Discard(Interpretable):
__view__ = ast.Discard
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = expr.explanation
class Stmt(Interpretable):
__view__ = ast.Stmt
def run(self, frame):
for stmt in self.nodes:
stmt = Interpretable(stmt)
stmt.run(frame)
def report_failure(e):
explanation = e.node.nice_explanation()
if explanation:
explanation = ", in: " + explanation
else:
explanation = ""
sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
def check(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
expr = parse(s, 'eval')
assert isinstance(expr, ast.Expression)
node = Interpretable(expr.node)
try:
node.eval(frame)
except passthroughex:
raise
except Failure:
e = sys.exc_info()[1]
report_failure(e)
else:
if not frame.is_true(node.result):
sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
###########################################################
# API / Entry points
###########################################################
def interpret(source, frame, should_fail=False):
module = Interpretable(parse(source, 'exec').node)
#print "got module", module
if isinstance(frame, py.std.types.FrameType):
frame = py.code.Frame(frame)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
return getfailure(e)
except passthroughex:
raise
except:
import traceback
traceback.print_exc()
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
"compute assert expression before the assert or use --nomagic)")
else:
return None
def getmsg(excinfo):
if isinstance(excinfo, tuple):
excinfo = py.code.ExceptionInfo(excinfo)
#frame, line = gettbline(tb)
#frame = py.code.Frame(frame)
#return interpret(line, frame)
tb = excinfo.traceback[-1]
source = str(tb.statement).strip()
x = interpret(source, tb.frame, should_fail=True)
if not isinstance(x, str):
raise TypeError("interpret returned non-string %r" % (x,))
return x
def getfailure(e):
explanation = e.node.nice_explanation()
if str(e.value):
lines = explanation.split('\n')
lines[0] += " << %s" % (e.value,)
explanation = '\n'.join(lines)
text = "%s: %s" % (e.exc.__name__, explanation)
if text.startswith('AssertionError: assert '):
text = text[16:]
return text
def run(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
module = Interpretable(parse(s, 'exec').node)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
report_failure(e)
if __name__ == '__main__':
# example:
def f():
return 5
def g():
return 3
def h(x):
return 'never'
check("f() * g() == 5")
check("not f()")
check("not (f() and g() or 0)")
check("f() == g()")
i = 4
check("i == f()")
check("len(f()) == 0")
check("isinstance(2+3+4, float)")
run("x = i")
check("x == 5")
run("assert not f(), 'oops'")
run("a, b, c = 1, 2")
run("a, b, c = f()")
check("max([f(),g()]) == 4")
check("'hello'[g()] == 'h'")
run("'guk%d' % h(f())")
| mpl-2.0 |
DataDog/integrations-core | exchange_server/tests/common.py | 1 | 7880 | # (C) Datadog, Inc. 2010-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
MINIMAL_INSTANCE = {'host': '.'}
CHECK_NAME = 'exchange_server'
DATABASE_INSTANCES = [
'Information Store/_Total',
'Information Store - Mailbox Database 1266275882/_Total',
'edgetransport/_Total',
'edgetransport/Transport Mail Database',
'edgetransport/IP Filtering Database',
]
EXCHANGE_PROCESSES = [
'MSExchangeHMHost MSExchangeHM 2212',
'Microsoft.Exchange.Directory.TopologyService',
'umservice',
'MSExchangeFrontendTransport',
'MSExchangeTransportLogSearch LogSearchSvc 4932',
'Microsoft.Exchange.Store.Service',
'ForefrontActiveDirectoryConnector',
'MSExchangeMailboxAssistants',
'MSExchangeMailboxReplication MSExchMbxRepl 3832',
'MSExchangeDelivery',
'msexchangerepl',
'Microsoft.Exchange.RpcClientAccess.Service',
'Microsoft.Exchange.ServiceHost EMS 4360',
'MSExchangeSubmission',
'MSExchangeThrottling',
'MSExchangeHMWorker ExHMWorker 4668',
'Microsoft.Exchange.UM.CallRouter',
'noderunner noderunner 3876',
'noderunner noderunner 3376',
'noderunner noderunner 3736',
'noderunner noderunner 3956',
'MSExchangeTransport',
'EdgeTransport Transport 5732',
'w3wp EWS 1656',
'w3wp',
'w3wp ECP 7404',
'w3wp AirSync 7704',
'w3wp OWA 7648',
'w3wp',
'w3wp',
'w3wp RemotePS 8932',
'w3wp',
'Microsoft.Exchange.EdgeSyncSvc',
'Microsoft.Exchange.Store.Worker',
'w3wp UNKNOWN 9332',
'powershell EMS 9000',
'umservice',
'UMWorkerProcess UM 4304',
'Microsoft.Exchange.Search.Service',
'MSExchangeHMHost MSExchangeHM _Total',
'MSExchangeTransportLogSearch LogSearchSvc _Total',
'MSExchangeMailboxReplication MSExchMbxRepl _Total',
'Microsoft.Exchange.ServiceHost EMS _Total',
'MSExchangeHMWorker ExHMWorker _Total',
'noderunner noderunner _Total',
'EdgeTransport Transport _Total',
'w3wp EWS _Total',
'w3wp ECP _Total',
'w3wp AirSync _Total',
'w3wp OWA _Total',
'w3wp RemotePS _Total',
'w3wp UNKNOWN _Total',
'powershell EMS _Total',
'UMWorkerProcess UM _Total',
]
PROXY_INSTANCES = [
'remoteps',
'ews',
'ecp',
'oab',
'autodiscover',
'eas',
'owa',
'unknown',
'win-k2olfvr52p5',
'rpchttp',
]
WEB_SITE_INSTANCES = ['_Total', 'Default Web Site', 'Exchange Back End']
WORKLOAD_INSTANCES = [
'msexchangemailboxreplication_mailboxreplicationservicehighpriority',
'msexchangemailboxreplication_mailboxreplicationservice',
'msexchangemailboxassistants_sitemailboxassistant_site mailbox assistant',
'msexchangemailboxassistants_peoplerelevanceassistant',
'msexchangemailboxassistants_oabgeneratorassistant',
'msexchangemailboxassistants_publicfolderassistant',
'msexchangemailboxassistants_directoryprocessorassistant',
'msexchangemailboxassistants_storemaintenanceassistant_storedsmaintenanceassistant',
'msexchangemailboxassistants_storemaintenanceassistant',
'msexchangemailboxassistants_umreportingassistant',
'msexchangemailboxassistants_calendarsyncassistant',
'msexchangemailboxassistants_topnassistant_topnwordsassistant',
'msexchangemailboxassistants_sharingpolicyassistant',
'msexchangemailboxassistants_calendarrepairassistant',
'msexchangemailboxassistants_junkemailoptionscommitterassistant',
'msexchangemailboxassistants_elcassistant',
]
CLIENT_TYPE_INSTANCES = [
'ediscoverysearch',
'publicfoldersystem',
'simplemigration',
'loadgen',
'storeactivemonitoring',
'teammailbox',
'sms',
'inference',
'maintenance',
'ha',
'transportsync',
'migration',
'momt',
'timebasedassistants',
'approvalapi',
'webservices',
'unifiedmessaging',
'monitoring',
'management',
'elc',
'availabilityservice',
'contentindexing',
'rpchttp',
'popimap',
'owa',
'eventbasedassistants',
'airsync',
'transport',
'user',
'administrator',
'system',
'_total',
]
METRIC_INSTANCES = {
'exchange.adaccess_domain_controllers.ldap_read': ['win-k2olfvr52p5.croissant.datad0g.com'],
'exchange.adaccess_domain_controllers.ldap_search': ['win-k2olfvr52p5.croissant.datad0g.com'],
'exchange.adaccess_processes.ldap_read': EXCHANGE_PROCESSES,
'exchange.adaccess_processes.ldap_search': EXCHANGE_PROCESSES,
'exchange.processor.cpu_time': None,
'exchange.processor.cpu_user': None,
'exchange.processor.cpu_privileged': None,
'exchange.processor.queue_length': None,
'exchange.memory.available': None,
'exchange.memory.committed': None,
'exchange.network.outbound_errors': ['AWS PV Network Device', 'isatap.{C7BAFAFE-DBF4-4C76-B406-8A25283E4CF9}'],
'exchange.network.tcpv6.connection_failures': None,
    'exchange.network.tcpv4.conns_reset': None,
'exchange.netlogon.semaphore_waiters': ['_Total'],
'exchange.netlogon.semaphore_holders': ['_Total'],
'exchange.netlogon.semaphore_acquires': ['_Total'],
'exchange.netlogon.semaphore_timeouts': ['_Total'],
'exchange.netlogon.semaphore_hold_time': ['_Total'],
# Database counters
'exchange.database.io_reads_avg_latency': DATABASE_INSTANCES,
'exchange.database.io_writes_avg_latency': DATABASE_INSTANCES,
'exchange.database.io_log_writes_avg_latency': DATABASE_INSTANCES,
'exchange.database.io_db_reads_recovery_avg_latency': DATABASE_INSTANCES,
'exchange.database.io_db_writes_recovery_avg_latency': DATABASE_INSTANCES,
'exchange.database.io_db_reads_attached_persec': DATABASE_INSTANCES,
'exchange.database.io_db_writes_attached_persec': DATABASE_INSTANCES,
'exchange.database.io_log_writes_persec': DATABASE_INSTANCES,
'exchange.activemanager.database_mounted': None,
# RPC Client Access Counters
'exchange.rpc.averaged_latency': None,
'exchange.rpc.requests': None,
'exchange.rpc.active_user_count': None,
'exchange.rpc.conn_count': None,
'exchange.rpc.ops_persec': None,
'exchange.rpc.user_count': None,
# HTTP Proxy Counters
'exchange.httpproxy.server_locator_latency': PROXY_INSTANCES,
'exchange.httpproxy.avg_auth_latency': PROXY_INSTANCES,
'exchange.httpproxy.clientaccess_processing_latency': PROXY_INSTANCES,
'exchange.httpproxy.mailbox_proxy_failure_rate': PROXY_INSTANCES,
'exchange.httpproxy.outstanding_requests': PROXY_INSTANCES,
'exchange.httpproxy.proxy_requests_persec': PROXY_INSTANCES,
'exchange.httpproxy.requests_persec': PROXY_INSTANCES,
# Information Store Counters
'exchange.is.store.rpc_requests': ['mailbox database 1266275882', '_total'],
'exchange.is.clienttype.rpc_latency': CLIENT_TYPE_INSTANCES,
'exchange.is.store.rpc_latency': ['mailbox database 1266275882', '_total'],
'exchange.is.store.rpc_ops_persec': ['mailbox database 1266275882', '_total'],
'exchange.is.clienttype.rpc_ops_persec': CLIENT_TYPE_INSTANCES,
# Client Access Server Counters
'exchange.activesync.requests_persec': None,
'exchange.activesync.ping_pending': None,
'exchange.activesync.sync_persec': None,
'exchange.owa.unique_users': None,
'exchange.owa.requests_persec': None,
'exchange.autodiscover.requests_persec': None,
'exchange.ws.requests_persec': None,
'exchange.ws.current_connections_total': None,
'exchange.ws.current_connections_default_website': WEB_SITE_INSTANCES,
'exchange.ws.connection_attempts': None,
'exchange.ws.other_attempts': None,
# Workload Management Counters
'exchange.workload_management.active_tasks': WORKLOAD_INSTANCES,
'exchange.workload_management.completed_tasks': WORKLOAD_INSTANCES,
'exchange.workload_management.queued_tasks': WORKLOAD_INSTANCES,
}
| bsd-3-clause |
mcanthony/oiio | src/python/iconvert.py | 9 | 9390 | #!/usr/bin/env python
# Parse the options the user provided from the command line
def option_parser():
parser = OptionParser()
parser.add_option("-v", action="store_true", dest="verbose", default=False)
parser.add_option("--inplace", action="store_true", dest="inplace", default=False)
parser.add_option("-d", dest="data_format_name", default="")
parser.add_option("--sRGB", action="store_true", dest="sRGB", default=False)
parser.add_option("--tile", nargs=3, dest="tile")
parser.add_option("--scanline", action="store_true", dest="scanline", default=False)
parser.add_option("--separate", action="store_true", dest="separate", default=False)
parser.add_option("--contig", action="store_true", dest="contig", default=False)
parser.add_option("--compression", dest="compression")
parser.add_option("--quality", type="int", dest="quality", default = -1)
parser.add_option("--no-copy-image", action="store_true", dest="no_copy", default=False)
parser.add_option("--adjust-time", action="store_true", dest="adjust_time", default=False)
parser.add_option("--caption", dest="caption", default=None)
parser.add_option("-k", "--keyword", action="append", dest="keywords")
parser.add_option("--clear-keywords", action="store_true", default=False)
parser.add_option("--attrib", nargs=2, action="append", dest="attribs")
parser.add_option("--orientation", type="int", dest="orientation", default = 0)
parser.add_option("--rotcw", action="store_true", dest="rotcw", default=False)
parser.add_option("--rotccw", action="store_true", dest="rotccw", default=False)
parser.add_option("--rot180", action="store_true", dest="rot180", default=False)
parser.add_option("--plugin-path", dest="path", default="")
# FIXME: I suppose there should be a way to enter input/output files without
# having to specify an option, like "python iconvert.py -g 0.9 input.jpg output.jpg"
# However, I could not find it in the docs, so I've made it so that the user
# has to put -i and -o before input/output.
parser.add_option("-i", action="append", dest="input_files", default=[])
parser.add_option("-o", action="append", dest="output_files", default=[])
(options, args) = parser.parse_args()
    if len(options.input_files) > len(options.output_files) and not options.inplace:
        sys.exit("Must have both an input and output filename specified")
    if len(options.input_files) == 0 and options.inplace:
        sys.exit("Must have at least one filename specified")
    if (int(options.rotcw) + int(options.rotccw) + int(options.rot180) + \
        (options.orientation>0)) > 1:
        sys.exit("iconvert: more than one of --rotcw, --rotccw, --rot180, --orientation")
if options.path == "":
print "OIIO plugin path not provided, assuming \"\""
return parser.parse_args()
def convert_files(in_file, out_file):
nocopy = options.no_copy
tempname = out_file
# Check whether the conversion is inplace.
if tempname == in_file:
try:
ext = out_file.rfind(".")
tempname += ".tmp" + out_file[ext:]
except:
print "Error: Output file does not have an extension"
# image input
inp = oiio.ImageInput.create(in_file, options.path)
if not inp:
msg = "Could not crete ImageInput for " + in_file
sys.exit(msg)
inspec = oiio.ImageSpec()
inp.open(in_file, inspec)
# image output
out = oiio.ImageOutput.create(tempname, options.path)
if not out:
msg = "Unable to create ImageOutput for " + out_file
sys.exit(msg)
# adjust spec
outspec = inspec
nocopy = adjust_spec(inp, inspec, outspec)
out.open(tempname, outspec, oiio.ImageOutputOpenMode.Create)
# convert
if nocopy == False:
ok = out.copy_image(inp)
if not ok:
print "Error"
else:
arr = array.array("B", "\0" * inspec.image_bytes())
ok = inp.read_image(outspec.format, arr)
if not ok:
print "Error reading"
else:
ok = out.write_image(outspec.format, arr)
if not ok:
print "Error writing"
out.close()
inp.close()
    # if the conversion was --inplace, tempname differs from out_file and this is True
if out_file != tempname:
if ok:
# since it was inplace, in_file == out_file
# so we need to replace the original file with tempfile
os.remove(out_file)
os.rename(tempname, out_file)
else:
os.remove(tempname)
def adjust_spec(inp, inspec, outspec):
nocopy = options.no_copy
# the following line is from the original iconvert, but I'm not sure
# it is needed. It's already outspec = inspec, right?
#outspec.set_format(inspec.format)
if options.data_format_name != "":
        if options.data_format_name == "uint8":
            outspec.set_format(oiio.BASETYPE.UINT8)
        elif options.data_format_name == "int8":
            outspec.set_format(oiio.BASETYPE.INT8)
        elif options.data_format_name == "uint16":
            outspec.set_format(oiio.BASETYPE.UINT16)
        elif options.data_format_name == "int16":
            outspec.set_format(oiio.BASETYPE.INT16)
        elif options.data_format_name == "half":
            outspec.set_format(oiio.BASETYPE.HALF)
        elif options.data_format_name == "float":
            outspec.set_format(oiio.BASETYPE.FLOAT)
        elif options.data_format_name == "double":
            outspec.set_format(oiio.BASETYPE.DOUBLE)
if outspec.format != inspec.format:
nocopy = True
if options.sRGB:
outspec.linearity = oiio.sRGB
#ImageSpec.find_attribute() is not exposed to Python
#if inp.format_name() != "jpeg" or outspec.find_attribute("Exif:ColorSpace"):
#outspec.attribute("Exif:ColorSpace", 1)
# handling tiles is not exposed to Python
if options.tile:
outspec.tile_width = options.tile[0]
outspec.tile_height = options.tile[1]
outspec.tile_depth = options.tile[2]
if options.scanline:
outspec.tile_width = 0
outspec.tile_height = 0
outspec.tile_depth = 0
if outspec.tile_width != inspec.tile_width or \
outspec.tile_height != inspec.tile_height or \
outspec.tile_depth != inspec.tile_depth:
nocopy = True
if options.compression:
outspec.attribute("compression", options.compression)
# 2nd argument should be exposed as default
if options.compression != inspec.get_string_attribute("compression", ""):
nocopy = True
# FIXME: If quality is provided, the resultig image is larger than the
# input image, and it is always the same no matter what quality (1-100).
# (I suppose it uses the maximum possible value)
# Should a --compression method be provided if --quality is used?
if options.quality > 0:
outspec.attribute("CompressionQuality", options.quality)
# the 2nd argument should be exposed as default (in ImageSpec wrapper)
# FIXME: The default arg is supposed to be 0, and get_int_attribute always
# returns whatever is provided as the 2nd argument - 0 in this case.
# I can't find out what's wrong in the binding.
if options.quality != inspec.get_int_attribute("CompressionQuality", 0):
nocopy = True
if options.contig:
outspec.attribute("planarconfig", "contig")
if options.separate:
outspec.attribute("planarconfig", "separate")
if options.orientation >= 1:
outspec.attribute("Orientation", options.orientation)
else:
orientation = outspec.get_int_attribute("Orientation", 1)
if orientation >= 1 and orientation <= 8:
cw = [0, 6, 7, 8, 5, 2, 3, 4, 1]
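            # cw maps each EXIF orientation value (1-8) to the orientation
            # produced by one further 90-degree clockwise rotation; index 0 is unused.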
if options.rotcw or options.rotccw or options.rot180:
orientation = cw[orientation]
if options.rotcw or options.rot180:
orientation = cw[orientation]
if options.rotccw:
orientation = cw[orientation]
outspec.attribute("Orientation", orientation)
if options.caption != None:
outspec.attribute("ImageDescription", options.caption)
if options.clear_keywords == True:
outspec.attribute("Keywords", "")
# this looks a lot simpler than in c++ :)
if options.keywords != None:
oldkw = outspec.get_string_attribute("Keywords", "")
newkw = oldkw
for keyword in options.keywords:
newkw += "; " + keyword
outspec.attribute("Keywords", newkw)
if options.attribs:
for i in options.attribs:
outspec.attribute(i[0], i[1])
return nocopy
# main
import OpenImageIO as oiio
import array
from optparse import OptionParser
import os
import sys
(options, args) = option_parser()
if options.inplace:
for image in options.input_files:
if convert_files(image, image) == False:
sys.exit("Conversion failed")
else:
for i in range(len(options.input_files)):
if convert_files(options.input_files[i], options.output_files[i]) == False:
sys.exit("Conversion failed")
| bsd-3-clause |
UnrememberMe/pants | tests/python/pants_test/source/test_filespec.py | 6 | 4202 | # coding=utf-8
# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import re
import unittest
from pants.source.filespec import glob_to_regex
class GlobToRegexTest(unittest.TestCase):
def assert_rule_match(self, glob, expected_matches, negate=False):
if negate:
asserter, match_state = self.assertIsNone, 'erroneously matches'
else:
asserter, match_state = self.assertIsNotNone, "doesn't match"
regex = glob_to_regex(glob)
for expected in expected_matches:
asserter(re.match(regex, expected), 'glob_to_regex(`{}`) -> `{}` {} path `{}`'
.format(glob, regex, match_state, expected))
def test_glob_to_regex_single_star_0(self):
self.assert_rule_match('a/b/*/f.py', ('a/b/c/f.py', 'a/b/q/f.py'))
def test_glob_to_regex_single_star_0_neg(self):
self.assert_rule_match('a/b/*/f.py', ('a/b/c/d/f.py','a/b/f.py'), negate=True)
def test_glob_to_regex_single_star_1(self):
self.assert_rule_match('foo/bar/*', ('foo/bar/baz', 'foo/bar/bar'))
def test_glob_to_regex_single_star_2(self):
self.assert_rule_match('*/bar/b*', ('foo/bar/baz', 'foo/bar/bar'))
def test_glob_to_regex_single_star_2_neg(self):
self.assert_rule_match('*/bar/b*', ('foo/koo/bar/baz', 'foo/bar/bar/zoo'), negate=True)
def test_glob_to_regex_single_star_3(self):
self.assert_rule_match('/*/[be]*/b*', ('/foo/bar/baz', '/foo/bar/bar'))
def test_glob_to_regex_single_star_4(self):
self.assert_rule_match('/foo*/bar', ('/foofighters/bar', '/foofighters.venv/bar'))
def test_glob_to_regex_single_star_4_neg(self):
self.assert_rule_match('/foo*/bar', ('/foofighters/baz/bar',), negate=True)
def test_glob_to_regex_double_star_0(self):
self.assert_rule_match('**', ('a/b/c', 'a'))
def test_glob_to_regex_double_star_1(self):
self.assert_rule_match('a/**/f', ('a/f', 'a/b/c/d/e/f'))
def test_glob_to_regex_double_star_2(self):
self.assert_rule_match('a/b/**', ('a/b/c', 'a/b/c/d/e/f'))
def test_glob_to_regex_double_star_2_neg(self):
    self.assert_rule_match('a/b/**', ('a/b',), negate=True)
def test_glob_to_regex_leading_slash_0(self):
self.assert_rule_match('/a/*', ('/a/a', '/a/b.py'))
def test_glob_to_regex_leading_slash_0_neg(self):
self.assert_rule_match('/a/*', ('a/a', 'a/b.py'), negate=True)
def test_glob_to_regex_leading_slash_1(self):
self.assert_rule_match('/*', ('/a', '/a.py'))
def test_glob_to_regex_leading_slash_1_neg(self):
self.assert_rule_match('/*', ('a', 'a.py'), negate=True)
def test_glob_to_regex_leading_slash_2(self):
self.assert_rule_match('/**', ('/a', '/a/b/c/d/e/f'))
def test_glob_to_regex_leading_slash_2_neg(self):
self.assert_rule_match('/**', ('a', 'a/b/c/d/e/f'), negate=True)
def test_glob_to_regex_dots(self):
self.assert_rule_match('.*', ('.pants.d', '.', '..', '.pids'))
def test_glob_to_regex_dots_neg(self):
self.assert_rule_match(
'.*',
('a', 'a/non/dot/dir/file.py', 'dist', 'all/nested/.dot', '.some/hidden/nested/dir/file.py'),
negate=True
)
def test_glob_to_regex_dirs(self):
self.assert_rule_match('dist/', ('dist',))
def test_glob_to_regex_dirs_neg(self):
self.assert_rule_match('dist/', ('not_dist', 'cdist', 'dist.py', 'dist/dist'), negate=True)
def test_glob_to_regex_dirs_dots(self):
self.assert_rule_match(
'build-support/*.venv/',
('build-support/*.venv',
'build-support/rbt.venv')
)
def test_glob_to_regex_dirs_dots_neg(self):
self.assert_rule_match('build-support/*.venv/',
('build-support/rbt.venv.but_actually_a_file',),
negate=True)
def test_glob_to_regex_literals(self):
self.assert_rule_match('a', ('a',))
def test_glob_to_regex_literal_dir(self):
self.assert_rule_match('a/b/c', ('a/b/c',))
def test_glob_to_regex_literal_file(self):
self.assert_rule_match('a/b/c.py', ('a/b/c.py',))
| apache-2.0 |
fengbaicanhe/intellij-community | python/lib/Lib/site-packages/django/contrib/localflavor/id/id_choices.py | 439 | 3217 | import warnings
from django.utils.translation import ugettext_lazy as _
# Reference: http://id.wikipedia.org/wiki/Daftar_provinsi_Indonesia
# Indonesia does not have an official Province code standard.
# I decided to use unambiguous and consistent (some are common) 3-letter codes.
warnings.warn(
'There have been recent changes to the ID localflavor. See the release notes for details',
RuntimeWarning
)
PROVINCE_CHOICES = (
('ACE', _('Aceh')),
('BLI', _('Bali')),
('BTN', _('Banten')),
('BKL', _('Bengkulu')),
('DIY', _('Yogyakarta')),
('JKT', _('Jakarta')),
('GOR', _('Gorontalo')),
('JMB', _('Jambi')),
('JBR', _('Jawa Barat')),
('JTG', _('Jawa Tengah')),
('JTM', _('Jawa Timur')),
('KBR', _('Kalimantan Barat')),
('KSL', _('Kalimantan Selatan')),
('KTG', _('Kalimantan Tengah')),
('KTM', _('Kalimantan Timur')),
('BBL', _('Kepulauan Bangka-Belitung')),
('KRI', _('Kepulauan Riau')),
('LPG', _('Lampung')),
('MLK', _('Maluku')),
('MUT', _('Maluku Utara')),
('NTB', _('Nusa Tenggara Barat')),
('NTT', _('Nusa Tenggara Timur')),
('PPA', _('Papua')),
('PPB', _('Papua Barat')),
('RIU', _('Riau')),
('SLB', _('Sulawesi Barat')),
('SLS', _('Sulawesi Selatan')),
('SLT', _('Sulawesi Tengah')),
('SLR', _('Sulawesi Tenggara')),
('SLU', _('Sulawesi Utara')),
('SMB', _('Sumatera Barat')),
('SMS', _('Sumatera Selatan')),
('SMU', _('Sumatera Utara')),
)
LICENSE_PLATE_PREFIX_CHOICES = (
('A', _('Banten')),
('AA', _('Magelang')),
('AB', _('Yogyakarta')),
('AD', _('Surakarta - Solo')),
('AE', _('Madiun')),
('AG', _('Kediri')),
('B', _('Jakarta')),
('BA', _('Sumatera Barat')),
('BB', _('Tapanuli')),
('BD', _('Bengkulu')),
('BE', _('Lampung')),
('BG', _('Sumatera Selatan')),
('BH', _('Jambi')),
('BK', _('Sumatera Utara')),
('BL', _('Nanggroe Aceh Darussalam')),
('BM', _('Riau')),
('BN', _('Kepulauan Bangka Belitung')),
('BP', _('Kepulauan Riau')),
('CC', _('Corps Consulate')),
('CD', _('Corps Diplomatic')),
('D', _('Bandung')),
('DA', _('Kalimantan Selatan')),
('DB', _('Sulawesi Utara Daratan')),
('DC', _('Sulawesi Barat')),
('DD', _('Sulawesi Selatan')),
('DE', _('Maluku')),
('DG', _('Maluku Utara')),
('DH', _('NTT - Timor')),
('DK', _('Bali')),
('DL', _('Sulawesi Utara Kepulauan')),
('DM', _('Gorontalo')),
('DN', _('Sulawesi Tengah')),
('DR', _('NTB - Lombok')),
('DS', _('Papua dan Papua Barat')),
('DT', _('Sulawesi Tenggara')),
('E', _('Cirebon')),
('EA', _('NTB - Sumbawa')),
('EB', _('NTT - Flores')),
('ED', _('NTT - Sumba')),
('F', _('Bogor')),
('G', _('Pekalongan')),
('H', _('Semarang')),
('K', _('Pati')),
('KB', _('Kalimantan Barat')),
('KH', _('Kalimantan Tengah')),
('KT', _('Kalimantan Timur')),
('L', _('Surabaya')),
('M', _('Madura')),
('N', _('Malang')),
('P', _('Jember')),
('R', _('Banyumas')),
('RI', _('Federal Government')),
('S', _('Bojonegoro')),
('T', _('Purwakarta')),
('W', _('Sidoarjo')),
('Z', _('Garut')),
)
| apache-2.0 |
jnovinger/django | tests/backends/tests.py | 59 | 47291 | # -*- coding: utf-8 -*-
# Unit and doctests for specific database backends.
from __future__ import unicode_literals
import copy
import datetime
import re
import threading
import unittest
import warnings
from decimal import Decimal, Rounded
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
reset_queries, transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.postgresql import version as pg_version
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper, format_number
from django.db.models import Avg, StdDev, Sum, Variance
from django.db.models.sql.constants import CURSOR
from django.db.utils import ConnectionHandler
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, mock, override_settings,
skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import str_prefix
from django.utils import six
from django.utils.six.moves import range
from . import models
class DummyBackendTest(SimpleTestCase):
def test_no_databases(self):
"""
        Test that an empty DATABASES setting defaults to the dummy backend.
"""
DATABASES = {}
conns = ConnectionHandler(DATABASES)
self.assertEqual(conns[DEFAULT_DB_ALIAS].settings_dict['ENGINE'],
'django.db.backends.dummy')
with self.assertRaises(ImproperlyConfigured):
conns[DEFAULT_DB_ALIAS].ensure_connection()
@unittest.skipUnless(connection.vendor == 'oracle', "Test only for Oracle")
class OracleTests(unittest.TestCase):
def test_quote_name(self):
# Check that '%' chars are escaped for query execution.
name = '"SOME%NAME"'
quoted_name = connection.ops.quote_name(name)
self.assertEqual(quoted_name % (), name)
def test_dbms_session(self):
# If the backend is Oracle, test that we can call a standard
# stored procedure through our cursor wrapper.
from django.db.backends.oracle.base import convert_unicode
with connection.cursor() as cursor:
cursor.callproc(convert_unicode('DBMS_SESSION.SET_IDENTIFIER'),
[convert_unicode('_django_testing!')])
def test_cursor_var(self):
# If the backend is Oracle, test that we can pass cursor variables
# as query parameters.
from django.db.backends.oracle.base import Database
with connection.cursor() as cursor:
var = cursor.var(Database.STRING)
cursor.execute("BEGIN %s := 'X'; END; ", [var])
self.assertEqual(var.getvalue(), 'X')
def test_long_string(self):
# If the backend is Oracle, test that we can save a text longer
# than 4000 chars and read it properly
with connection.cursor() as cursor:
cursor.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
long_str = ''.join(six.text_type(x) for x in range(4000))
cursor.execute('INSERT INTO ltext VALUES (%s)', [long_str])
cursor.execute('SELECT text FROM ltext')
row = cursor.fetchone()
self.assertEqual(long_str, row[0].read())
cursor.execute('DROP TABLE ltext')
def test_client_encoding(self):
# If the backend is Oracle, test that the client encoding is set
# correctly. This was broken under Cygwin prior to r14781.
connection.ensure_connection()
self.assertEqual(connection.connection.encoding, "UTF-8")
self.assertEqual(connection.connection.nencoding, "UTF-8")
def test_order_of_nls_parameters(self):
        # An 'almost right' datetime should work with configured
# NLS parameters as per #18465.
with connection.cursor() as cursor:
query = "select 1 from dual where '1936-12-29 00:00' < sysdate"
# Test that the query succeeds without errors - pre #18465 this
# wasn't the case.
cursor.execute(query)
self.assertEqual(cursor.fetchone()[0], 1)
@unittest.skipUnless(connection.vendor == 'sqlite', "Test only for SQLite")
class SQLiteTests(TestCase):
longMessage = True
def test_autoincrement(self):
"""
Check that auto_increment fields are created with the AUTOINCREMENT
keyword in order to be monotonically increasing. Refs #10164.
"""
with connection.schema_editor(collect_sql=True) as editor:
editor.create_model(models.Square)
statements = editor.collected_sql
match = re.search('"id" ([^,]+),', statements[0])
self.assertIsNotNone(match)
self.assertEqual('integer NOT NULL PRIMARY KEY AUTOINCREMENT',
match.group(1), "Wrong SQL used to create an auto-increment "
"column on SQLite")
def test_aggregation(self):
"""
#19360: Raise NotImplementedError when aggregating on date/time fields.
"""
for aggregate in (Sum, Avg, Variance, StdDev):
self.assertRaises(
NotImplementedError,
models.Item.objects.all().aggregate, aggregate('time'))
self.assertRaises(
NotImplementedError,
models.Item.objects.all().aggregate, aggregate('date'))
self.assertRaises(
NotImplementedError,
models.Item.objects.all().aggregate, aggregate('last_modified'))
self.assertRaises(
NotImplementedError,
models.Item.objects.all().aggregate,
**{'complex': aggregate('last_modified') + aggregate('last_modified')})
@unittest.skipUnless(connection.vendor == 'postgresql', "Test only for PostgreSQL")
class PostgreSQLTests(TestCase):
def assert_parses(self, version_string, version):
self.assertEqual(pg_version._parse_version(version_string), version)
def test_parsing(self):
"""Test PostgreSQL version parsing from `SELECT version()` output"""
self.assert_parses("PostgreSQL 9.3 beta4", 90300)
self.assert_parses("PostgreSQL 9.3", 90300)
self.assert_parses("EnterpriseDB 9.3", 90300)
self.assert_parses("PostgreSQL 9.3.6", 90306)
self.assert_parses("PostgreSQL 9.4beta1", 90400)
self.assert_parses("PostgreSQL 9.3.1 on i386-apple-darwin9.2.2, compiled by GCC i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 (Apple Inc. build 5478)", 90301)
def test_nodb_connection(self):
"""
        Test that the _nodb_connection property falls back to the default
        connection's database when access to the 'postgres' database is not granted.
"""
def mocked_connect(self):
if self.settings_dict['NAME'] is None:
raise DatabaseError()
return ''
nodb_conn = connection._nodb_connection
self.assertIsNone(nodb_conn.settings_dict['NAME'])
# Now assume the 'postgres' db isn't available
del connection._nodb_connection
with warnings.catch_warnings(record=True) as w:
with mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.connect',
side_effect=mocked_connect, autospec=True):
nodb_conn = connection._nodb_connection
del connection._nodb_connection
self.assertIsNotNone(nodb_conn.settings_dict['NAME'])
self.assertEqual(nodb_conn.settings_dict['NAME'], settings.DATABASES[DEFAULT_DB_ALIAS]['NAME'])
        # Check that a RuntimeWarning has been emitted
self.assertEqual(len(w), 1)
self.assertEqual(w[0].message.__class__, RuntimeWarning)
def test_version_detection(self):
"""Test PostgreSQL version detection"""
# Helper mocks
class CursorMock(object):
"Very simple mock of DB-API cursor"
def execute(self, arg):
pass
def fetchone(self):
return ["PostgreSQL 9.3"]
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
pass
class OlderConnectionMock(object):
"Mock of psycopg2 (< 2.0.12) connection"
def cursor(self):
return CursorMock()
# psycopg2 < 2.0.12 code path
conn = OlderConnectionMock()
self.assertEqual(pg_version.get_version(conn), 90300)
def test_connect_and_rollback(self):
"""
PostgreSQL shouldn't roll back SET TIME ZONE, even if the first
transaction is rolled back (#17062).
"""
databases = copy.deepcopy(settings.DATABASES)
new_connections = ConnectionHandler(databases)
new_connection = new_connections[DEFAULT_DB_ALIAS]
try:
            # Ensure the database default time zone is different from
# the time zone in new_connection.settings_dict. We can
# get the default time zone by reset & show.
cursor = new_connection.cursor()
cursor.execute("RESET TIMEZONE")
cursor.execute("SHOW TIMEZONE")
db_default_tz = cursor.fetchone()[0]
new_tz = 'Europe/Paris' if db_default_tz == 'UTC' else 'UTC'
new_connection.close()
# Invalidate timezone name cache, because the setting_changed
# handler cannot know about new_connection.
del new_connection.timezone_name
# Fetch a new connection with the new_tz as default
# time zone, run a query and rollback.
with self.settings(TIME_ZONE=new_tz):
new_connection.set_autocommit(False)
cursor = new_connection.cursor()
new_connection.rollback()
# Now let's see if the rollback rolled back the SET TIME ZONE.
cursor.execute("SHOW TIMEZONE")
tz = cursor.fetchone()[0]
self.assertEqual(new_tz, tz)
finally:
new_connection.close()
def test_connect_non_autocommit(self):
"""
The connection wrapper shouldn't believe that autocommit is enabled
after setting the time zone when AUTOCOMMIT is False (#21452).
"""
databases = copy.deepcopy(settings.DATABASES)
databases[DEFAULT_DB_ALIAS]['AUTOCOMMIT'] = False
new_connections = ConnectionHandler(databases)
new_connection = new_connections[DEFAULT_DB_ALIAS]
try:
# Open a database connection.
new_connection.cursor()
self.assertFalse(new_connection.get_autocommit())
finally:
new_connection.close()
def test_connect_isolation_level(self):
"""
Regression test for #18130 and #24318.
"""
from psycopg2.extensions import (
ISOLATION_LEVEL_READ_COMMITTED as read_committed,
ISOLATION_LEVEL_SERIALIZABLE as serializable,
)
# Since this is a django.test.TestCase, a transaction is in progress
# and the isolation level isn't reported as 0. This test assumes that
# PostgreSQL is configured with the default isolation level.
# Check the level on the psycopg2 connection, not the Django wrapper.
self.assertEqual(connection.connection.isolation_level, read_committed)
databases = copy.deepcopy(settings.DATABASES)
databases[DEFAULT_DB_ALIAS]['OPTIONS']['isolation_level'] = serializable
new_connections = ConnectionHandler(databases)
new_connection = new_connections[DEFAULT_DB_ALIAS]
try:
# Start a transaction so the isolation level isn't reported as 0.
new_connection.set_autocommit(False)
# Check the level on the psycopg2 connection, not the Django wrapper.
self.assertEqual(new_connection.connection.isolation_level, serializable)
finally:
new_connection.close()
def _select(self, val):
with connection.cursor() as cursor:
cursor.execute("SELECT %s", (val,))
return cursor.fetchone()[0]
def test_select_ascii_array(self):
a = ["awef"]
b = self._select(a)
self.assertEqual(a[0], b[0])
def test_select_unicode_array(self):
a = ["ᄲawef"]
b = self._select(a)
self.assertEqual(a[0], b[0])
def test_lookup_cast(self):
from django.db.backends.postgresql.operations import DatabaseOperations
do = DatabaseOperations(connection=None)
for lookup in ('iexact', 'contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
self.assertIn('::text', do.lookup_cast(lookup))
def test_correct_extraction_psycopg2_version(self):
from django.db.backends.postgresql.base import psycopg2_version
version_path = 'django.db.backends.postgresql.base.Database.__version__'
with mock.patch(version_path, '2.6.9'):
self.assertEqual(psycopg2_version(), (2, 6, 9))
with mock.patch(version_path, '2.5.dev0'):
self.assertEqual(psycopg2_version(), (2, 5))
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
        Test the custom ``django_date_trunc`` method, in particular against
fields which clash with strings passed to it (e.g. 'year') - see
#12818__.
__: http://code.djangoproject.com/ticket/12818
"""
updated = datetime.datetime(2010, 2, 20)
models.SchoolClass.objects.create(year=2009, last_updated=updated)
years = models.SchoolClass.objects.dates('last_updated', 'year')
self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
def test_django_date_extract(self):
"""
        Test the custom ``django_date_extract`` method, in particular against fields
which clash with strings passed to it (e.g. 'day') - see #12818__.
__: http://code.djangoproject.com/ticket/12818
"""
updated = datetime.datetime(2010, 2, 20)
models.SchoolClass.objects.create(year=2009, last_updated=updated)
classes = models.SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_last_executed_query(self):
"""
last_executed_query should not raise an exception even if no previous
query has been run.
"""
cursor = connection.cursor()
connection.ops.last_executed_query(cursor, '', ())
def test_debug_sql(self):
list(models.Reporter.objects.filter(first_name="test"))
sql = connection.queries[-1]['sql'].lower()
self.assertIn("select", sql)
self.assertIn(models.Reporter._meta.db_table, sql)
def test_query_encoding(self):
"""
        Test that last_executed_query() returns a Unicode string
"""
data = models.RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
sql, params = data.query.sql_with_params()
cursor = data.query.get_compiler('default').execute_sql(CURSOR)
last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
self.assertIsInstance(last_sql, six.text_type)
@unittest.skipUnless(connection.vendor == 'sqlite',
"This test is specific to SQLite.")
def test_no_interpolation_on_sqlite(self):
# Regression for #17158
# This shouldn't raise an exception
query = "SELECT strftime('%Y', 'now');"
connection.cursor().execute(query)
self.assertEqual(connection.queries[-1]['sql'],
str_prefix("QUERY = %(_)s\"SELECT strftime('%%Y', 'now');\" - PARAMS = ()"))
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
cursor = connection.cursor()
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.table_name_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
self.assertRaises(Exception, cursor.executemany, query, [(1, 2, 3)])
self.assertRaises(Exception, cursor.executemany, query, [(1,)])
# Unfortunately, the following test would be a good one to run on all
# backends, but it breaks MySQL hard. Until #13711 is fixed, it can't be run
# everywhere (although it would be an effective test of #13711).
class LongNameTest(TransactionTestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
    check that it does. Refs #8901.
"""
available_apps = ['backends']
def test_sequence_name_length_limits_create(self):
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
# A full flush is expensive to the full test, so we dig into the
# internals to generate the likely offending SQL and run it manually
# Some convenience aliases
VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
sequences = [
{
'column': VLM._meta.pk.column,
'table': VLM._meta.db_table
},
]
cursor = connection.cursor()
for statement in connection.ops.sql_flush(no_style(), tables, sequences):
cursor.execute(statement)
class SequenceResetTest(TestCase):
def test_generic_relation(self):
"Sequence names are correct when resetting generic relations (Ref #13941)"
# Create an object with a manually specified PK
models.Post.objects.create(id=10, name='1st post', text='hello world')
# Reset the sequences for the database
cursor = connection.cursor()
commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [models.Post])
for sql in commands:
cursor.execute(sql)
# If we create a new object now, it should have a PK greater
# than the PK we specified manually.
obj = models.Post.objects.create(name='New post', text='goodbye world')
self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
available_apps = []
# Unfortunately with sqlite3 the in-memory test database cannot be closed,
# and so it cannot be re-opened during testing.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_signal(self):
data = {}
def receiver(sender, connection, **kwargs):
data["connection"] = connection
connection_created.connect(receiver)
connection.close()
connection.cursor()
self.assertIs(data["connection"].connection, connection.connection)
connection_created.disconnect(receiver)
data.clear()
connection.cursor()
self.assertEqual(data, {})
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
bare_select_suffix = connection.features.bare_select_suffix
def test_paramless_no_escaping(self):
cursor = connection.cursor()
cursor.execute("SELECT '%s'" + self.bare_select_suffix)
self.assertEqual(cursor.fetchall()[0][0], '%s')
def test_parameter_escaping(self):
cursor = connection.cursor()
cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
@unittest.skipUnless(connection.vendor == 'sqlite',
"This is an sqlite-specific issue")
def test_sqlite_parameter_escaping(self):
# '%s' escaping support for sqlite3 #13648
cursor = connection.cursor()
cursor.execute("select strftime('%s', date('now'))")
response = cursor.fetchall()[0][0]
        # response should be a non-zero integer
self.assertTrue(int(response))
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
class BackendTestCase(TransactionTestCase):
available_apps = ['backends']
def create_squares_with_executemany(self, args):
self.create_squares(args, 'format', True)
def create_squares(self, args, paramstyle, multiple):
cursor = connection.cursor()
opts = models.Square._meta
tbl = connection.introspection.table_name_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
if paramstyle == 'format':
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
elif paramstyle == 'pyformat':
query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
else:
raise ValueError("unsupported paramstyle in test")
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
# Test cursor.executemany #4896
args = [(i, i ** 2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
def test_cursor_executemany_with_empty_params_list(self):
# Test executemany with params=[] does nothing #4765
args = []
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
# Test executemany accepts iterators #10320
args = iter((i, i ** 2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 5)
args = iter((i, i ** 2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 9)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(models.Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat_iterator(self):
args = iter({'root': i, 'square': i ** 2} for i in range(-3, 2))
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 5)
args = iter({'root': i, 'square': i ** 2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 9)
def test_unicode_fetches(self):
# fetchone, fetchmany, fetchall return strings as unicode objects #6254
qn = connection.ops.quote_name
models.Person(first_name="John", last_name="Doe").save()
models.Person(first_name="Jane", last_name="Doe").save()
models.Person(first_name="Mary", last_name="Agnelline").save()
models.Person(first_name="Peter", last_name="Parker").save()
models.Person(first_name="Clark", last_name="Kent").save()
opts2 = models.Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
% (qn(f3.column), qn(f4.column), connection.introspection.table_name_converter(opts2.db_table),
qn(f3.column)))
cursor = connection.cursor()
cursor.execute(query2)
self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])
def test_unicode_password(self):
old_password = connection.settings_dict['PASSWORD']
connection.settings_dict['PASSWORD'] = "françois"
try:
connection.cursor()
except DatabaseError:
# As password is probably wrong, a database exception is expected
pass
except Exception as e:
self.fail("Unexpected error raised with unicode password: %s" % e)
finally:
connection.settings_dict['PASSWORD'] = old_password
def test_database_operations_helper_class(self):
# Ticket #13630
self.assertTrue(hasattr(connection, 'ops'))
self.assertTrue(hasattr(connection.ops, 'connection'))
self.assertEqual(connection, connection.ops.connection)
def test_database_operations_init(self):
"""
Test that DatabaseOperations initialization doesn't query the database.
See #17656.
"""
with self.assertNumQueries(0):
connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.supports_stddev, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
""" Test that creating an existing table returns a DatabaseError """
cursor = connection.cursor()
query = 'CREATE TABLE %s (id INTEGER);' % models.Article._meta.db_table
with self.assertRaises(DatabaseError):
cursor.execute(query)
def test_cursor_contextmanager(self):
"""
Test that cursors can be used as a context manager
"""
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
        # Both InterfaceError and ProgrammingError seem to be used when
        # accessing a closed cursor (psycopg2 has InterfaceError, the rest
        # seem to use ProgrammingError).
with self.assertRaises(connection.features.closed_cursor_error_class):
# cursor should be closed, so no queries should be possible.
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
@unittest.skipUnless(connection.vendor == 'postgresql',
"Psycopg2 specific cursor.closed attribute needed")
def test_cursor_contextmanager_closing(self):
        # There isn't a generic way to test that cursors are closed, but
        # psycopg2 offers us a way to check that via its `closed` attribute,
        # so run this test only on psycopg2.
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
self.assertTrue(cursor.closed)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_is_usable_after_database_disconnects(self):
"""
Test that is_usable() doesn't crash when the database disconnects.
Regression for #21553.
"""
# Open a connection to the database.
with connection.cursor():
pass
# Emulate a connection close by the database.
connection._close()
# Even then is_usable() should not raise an exception.
try:
self.assertFalse(connection.is_usable())
finally:
# Clean up the mess created by connection._close(). Since the
# connection is already closed, this crashes on some backends.
try:
connection.close()
except Exception:
pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
with connection.cursor() as cursor:
reset_queries()
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
six.assertCountEqual(self, connection.queries[0].keys(), ['sql', 'time'])
reset_queries()
self.assertEqual(0, len(connection.queries))
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
@override_settings(DEBUG=True)
def test_queries_limit(self):
"""
Test that the backend doesn't store an unlimited number of queries.
Regression for #12581.
"""
old_queries_limit = BaseDatabaseWrapper.queries_limit
BaseDatabaseWrapper.queries_limit = 3
new_connections = ConnectionHandler(settings.DATABASES)
new_connection = new_connections[DEFAULT_DB_ALIAS]
# Initialize the connection and clear initialization statements.
with new_connection.cursor():
pass
new_connection.queries_log.clear()
try:
with new_connection.cursor() as cursor:
cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(2, len(new_connection.queries))
self.assertEqual(0, len(w))
with new_connection.cursor() as cursor:
cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(3, len(new_connection.queries))
self.assertEqual(1, len(w))
self.assertEqual(str(w[0].message), "Limit for query logging "
"exceeded, only the last 3 queries will be returned.")
finally:
BaseDatabaseWrapper.queries_limit = old_queries_limit
new_connection.close()
# We don't make these tests conditional because that means we would need to
# check and differentiate between:
# * MySQL+InnoDB, MySQL+MYISAM (something we currently can't do).
# * if sqlite3 (if/once we get #14204 fixed) has referential integrity turned
# on or not, something that would be controlled by runtime support and user
# preference.
# Instead, when a constraint violation occurs, these tests verify that the
# exception raised is django.db.IntegrityError.
class FkConstraintsTests(TransactionTestCase):
available_apps = ['backends']
def setUp(self):
# Create a Reporter.
self.r = models.Reporter.objects.create(first_name='John', last_name='Smith')
def test_integrity_checks_on_creation(self):
"""
Try to create a model instance that violates a FK constraint. If it
fails it should fail with IntegrityError.
"""
a1 = models.Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
# Now that we know this backend supports integrity checks we make sure
# constraints are also enforced for proxy models. Refs #17519
a2 = models.Article(headline='This is another test', reporter=self.r,
pub_date=datetime.datetime(2012, 8, 3),
reporter_proxy_id=30)
self.assertRaises(IntegrityError, a2.save)
def test_integrity_checks_on_update(self):
"""
Try to update a model instance introducing a FK constraint violation.
If it fails it should fail with IntegrityError.
"""
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a1 = models.Article.objects.get(headline="Test article")
a1.reporter_id = 30
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
# Now that we know this backend supports integrity checks we make sure
# constraints are also enforced for proxy models. Refs #17519
# Create another article
r_proxy = models.ReporterProxy.objects.get(pk=self.r.pk)
models.Article.objects.create(headline='Another article',
pub_date=datetime.datetime(1988, 5, 15),
reporter=self.r, reporter_proxy=r_proxy)
# Retrieve the second article from the DB
a2 = models.Article.objects.get(headline='Another article')
a2.reporter_proxy_id = 30
self.assertRaises(IntegrityError, a2.save)
def test_disable_constraint_checks_manually(self):
"""
        When constraint checks are disabled, we should be able to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
connection.disable_constraint_checking()
a.save()
connection.enable_constraint_checking()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
"""
        When constraint checks are disabled (using the context manager), we should be able to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
with connection.constraint_checks_disabled():
a.save()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_check_constraints(self):
"""
Constraint checks should raise an IntegrityError when bad data is in the DB.
"""
with transaction.atomic():
# Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30
with connection.constraint_checks_disabled():
a.save()
with self.assertRaises(IntegrityError):
connection.check_constraints()
transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
available_apps = ['backends']
def test_default_connection_thread_local(self):
"""
Ensure that the default connection (i.e. django.db.connection) is
different for each thread.
Refs #17258.
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
connection.cursor()
connections_dict[id(connection)] = connection
def runner():
# Passing django.db.connection between threads doesn't work while
# connections[DEFAULT_DB_ALIAS] does.
from django.db import connections
connection = connections[DEFAULT_DB_ALIAS]
# Allow thread sharing so the connection can be closed by the
# main thread.
connection.allow_thread_sharing = True
connection.cursor()
connections_dict[id(connection)] = connection
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
        # Check that each created connection got a different inner connection.
self.assertEqual(
len(set(conn.connection for conn in connections_dict.values())),
3)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_dict.values():
if conn is not connection:
conn.close()
def test_connections_thread_local(self):
"""
Ensure that the connections are different for each thread.
Refs #17258.
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
for conn in connections.all():
connections_dict[id(conn)] = conn
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.allow_thread_sharing = True
connections_dict[id(conn)] = conn
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEqual(len(connections_dict), 6)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_dict.values():
if conn is not connection:
conn.close()
def test_pass_connection_between_threads(self):
"""
Ensure that a connection can be passed from one thread to the other.
Refs #17258.
"""
models.Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections['default'] = main_thread_connection
try:
models.Person.objects.get(first_name="John", last_name="Doe")
except Exception as e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections['default']])
t.start()
t.join()
# Without touching allow_thread_sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
# If explicitly setting allow_thread_sharing to False
connections['default'].allow_thread_sharing = False
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
# If explicitly setting allow_thread_sharing to True
connections['default'].allow_thread_sharing = True
exceptions = []
do_thread()
# All good
self.assertEqual(exceptions, [])
def test_closing_non_shared_connections(self):
"""
Ensure that a connection that is not explicitly shareable cannot be
closed by another thread.
Refs #17258.
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
# Enable thread sharing
connections['default'].allow_thread_sharing = True
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
class MySQLPKZeroTests(TestCase):
"""
    Zero as the id for an AutoField should raise an exception in MySQL,
    because MySQL does not allow zero as an autoincrement primary key.
"""
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
with self.assertRaises(ValueError):
models.Square.objects.create(id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
def test_can_reference_existent(self):
obj = models.Object.objects.create()
ref = models.ObjectReference.objects.create(obj=obj)
self.assertEqual(ref.obj, obj)
ref = models.ObjectReference.objects.get(obj=obj)
self.assertEqual(ref.obj, obj)
def test_can_reference_non_existent(self):
self.assertFalse(models.Object.objects.filter(id=12345).exists())
ref = models.ObjectReference.objects.create(obj_id=12345)
ref_new = models.ObjectReference.objects.get(obj_id=12345)
self.assertEqual(ref, ref_new)
with self.assertRaises(models.Object.DoesNotExist):
ref.obj
def test_many_to_many(self):
obj = models.Object.objects.create()
obj.related_objects.create()
self.assertEqual(models.Object.objects.count(), 2)
self.assertEqual(obj.related_objects.count(), 1)
intermediary_model = models.Object._meta.get_field("related_objects").remote_field.through
intermediary_model.objects.create(from_object_id=obj.id, to_object_id=12345)
self.assertEqual(obj.related_objects.count(), 1)
self.assertEqual(intermediary_model.objects.count(), 2)
class BackendUtilTests(SimpleTestCase):
def test_format_number(self):
"""
Test the format_number converter utility
"""
def equal(value, max_d, places, result):
self.assertEqual(format_number(Decimal(value), max_d, places), result)
equal('0', 12, 3,
'0.000')
equal('0', 12, 8,
'0.00000000')
equal('1', 12, 9,
'1.000000000')
equal('0.00000000', 12, 8,
'0.00000000')
equal('0.000000004', 12, 8,
'0.00000000')
equal('0.000000008', 12, 8,
'0.00000001')
equal('0.000000000000000000999', 10, 8,
'0.00000000')
equal('0.1234567890', 12, 10,
'0.1234567890')
equal('0.1234567890', 12, 9,
'0.123456789')
equal('0.1234567890', 12, 8,
'0.12345679')
equal('0.1234567890', 12, 5,
'0.12346')
equal('0.1234567890', 12, 3,
'0.123')
equal('0.1234567890', 12, 1,
'0.1')
equal('0.1234567890', 12, 0,
'0')
equal('0.1234567890', None, 0,
'0')
equal('1234567890.1234567890', None, 0,
'1234567890')
equal('1234567890.1234567890', None, 2,
'1234567890.12')
equal('0.1234', 5, None,
'0.1234')
equal('123.12', 5, None,
'123.12')
with self.assertRaises(Rounded):
equal('0.1234567890', 5, None,
'0.12346')
with self.assertRaises(Rounded):
equal('1234567890.1234', 5, None,
'1234600000')
@unittest.skipUnless(connection.vendor == 'sqlite', 'SQLite specific test.')
@skipUnlessDBFeature('can_share_in_memory_db')
class TestSqliteThreadSharing(TransactionTestCase):
available_apps = ['backends']
def test_database_sharing_in_threads(self):
def create_object():
models.Object.objects.create()
create_object()
thread = threading.Thread(target=create_object)
thread.start()
thread.join()
self.assertEqual(models.Object.objects.count(), 2)
| bsd-3-clause |
gnuhub/intellij-community | python/helpers/profiler/thrift/TSCons.py | 237 | 1267 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from os import path
from SCons.Builder import Builder
def scons_env(env, add=''):
opath = path.dirname(path.abspath('$TARGET'))
lstr = 'thrift --gen cpp -o ' + opath + ' ' + add + ' $SOURCE'
cppbuild = Builder(action=lstr)
env.Append(BUILDERS={'ThriftCpp': cppbuild})
def gen_cpp(env, dir, file):
scons_env(env)
suffixes = ['_types.h', '_types.cpp']
targets = map(lambda s: 'gen-cpp/' + file + s, suffixes)
return env.ThriftCpp(targets, dir + file + '.thrift')
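# A hypothetical usage sketch from an SConstruct file; the directory and
# interface names below are illustrative, not part of this module:
#
#   env = Environment()
#   # gen_cpp registers the ThriftCpp builder itself, then declares
#   # gen-cpp/calc_types.h and gen-cpp/calc_types.cpp as targets built
#   # from interfaces/calc.thrift.
#   sources = gen_cpp(env, 'interfaces/', 'calc')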
| apache-2.0 |
maximusmai/sqlalchemy-migrate | migrate/changeset/databases/oracle.py | 140 | 3655 | """
Oracle database specific implementations of changeset classes.
"""
import sqlalchemy as sa
from sqlalchemy.databases import oracle as sa_base
from migrate import exceptions
from migrate.changeset import ansisql
OracleSchemaGenerator = sa_base.OracleDDLCompiler
class OracleColumnGenerator(OracleSchemaGenerator, ansisql.ANSIColumnGenerator):
pass
class OracleColumnDropper(ansisql.ANSIColumnDropper):
pass
class OracleSchemaChanger(OracleSchemaGenerator, ansisql.ANSISchemaChanger):
def get_column_specification(self, column, **kwargs):
# Ignore the NOT NULL generated
override_nullable = kwargs.pop('override_nullable', None)
if override_nullable:
orig = column.nullable
column.nullable = True
ret = super(OracleSchemaChanger, self).get_column_specification(
column, **kwargs)
if override_nullable:
column.nullable = orig
return ret
def visit_column(self, delta):
keys = delta.keys()
if 'name' in keys:
self._run_subvisit(delta,
self._visit_column_name,
start_alter=False)
if len(set(('type', 'nullable', 'server_default')).intersection(keys)):
self._run_subvisit(delta,
self._visit_column_change,
start_alter=False)
def _visit_column_change(self, table, column, delta):
        # Oracle cannot drop a default once created, but it can set it
        # to null. We'll do that if default=None.
# http://forums.oracle.com/forums/message.jspa?messageID=1273234#1273234
dropdefault_hack = (column.server_default is None \
and 'server_default' in delta.keys())
# Oracle apparently doesn't like it when we say "not null" if
# the column's already not null. Fudge it, so we don't need a
# new function
notnull_hack = ((not column.nullable) \
and ('nullable' not in delta.keys()))
# We need to specify NULL if we're removing a NOT NULL
# constraint
null_hack = (column.nullable and ('nullable' in delta.keys()))
if dropdefault_hack:
column.server_default = sa.PassiveDefault(sa.sql.null())
if notnull_hack:
column.nullable = True
colspec = self.get_column_specification(column,
override_nullable=null_hack)
if null_hack:
colspec += ' NULL'
if notnull_hack:
column.nullable = False
if dropdefault_hack:
column.server_default = None
self.start_alter_table(table)
self.append("MODIFY (")
self.append(colspec)
self.append(")")
class OracleConstraintCommon(object):
def get_constraint_name(self, cons):
# Oracle constraints can't guess their name like other DBs
if not cons.name:
raise exceptions.NotSupportedError(
"Oracle constraint names must be explicitly stated")
return cons.name
class OracleConstraintGenerator(OracleConstraintCommon,
ansisql.ANSIConstraintGenerator):
pass
class OracleConstraintDropper(OracleConstraintCommon,
ansisql.ANSIConstraintDropper):
pass
class OracleDialect(ansisql.ANSIDialect):
columngenerator = OracleColumnGenerator
columndropper = OracleColumnDropper
schemachanger = OracleSchemaChanger
constraintgenerator = OracleConstraintGenerator
constraintdropper = OracleConstraintDropper
| mit |
Yhgenomics/dcos-cli | tests/test_jsonitem.py | 1 | 4943 | from dcos import jsonitem
from dcos.errors import DCOSException
import pytest
@pytest.fixture(params=range(6))
def bad_object(request):
return [
'{"key":value}',
'this is a string',
'4.5',
'4',
'true',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(4))
def bad_number(request):
return [
'this is a string',
'true',
'{"key":"value"}',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(5))
def bad_integer(request):
return [
'this is a string',
'true',
'{"key":"value"}',
'45.0',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(5))
def bad_boolean(request):
return [
'this is a string',
'45',
'{"key":"value"}',
'45.0',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(6))
def bad_array(request):
return [
'this is a string',
'45',
'{"key":"value"}',
'45.0',
'true',
'[1,2,3',
][request.param]
@pytest.fixture(params=range(12))
def jsonitem_tuple(request):
return [
('string', 'this is a string', 'this is a string'),
('string', 'null', None),
('object', '{"key":"value"}', {'key': 'value'}),
('object', 'null', None),
('number', '4.2', 4.2),
('number', 'null', None),
('integer', '42', 42),
('integer', 'null', None),
('boolean', 'true', True),
('boolean', 'null', None),
('array', '[1,2,3]', [1, 2, 3]),
('array', 'null', None),
][request.param]
@pytest.fixture(params=range(13))
def parse_tuple(request):
return [
('string=null', ('"string"', None)),
('string="this is a string with ="',
('"string"', 'this is a string with =')),
("string='this is a string with ='",
('"string"', 'this is a string with =')),
('object=null', ('"object"', None)),
("""object='{"key":"value"}'""", ('"object"', {'key': 'value'})),
('number=null', ('"number"', None)),
('number=4.2', ('"number"', 4.2)),
('integer=null', ('"integer"', None)),
('integer=42', ('"integer"', 42)),
('boolean=null', ('"boolean"', None)),
('boolean=true', ('"boolean"', True)),
('array=null', ('"array"', None)),
("array='[1,2,3]'", ('"array"', [1, 2, 3])),
][request.param]
@pytest.fixture(params=range(6))
def bad_parse(request):
return [
"====",
"no equals",
"object=[]",
"something=cool",
"integer=",
"integer=45.0",
][request.param]
@pytest.fixture
def schema():
return {
'type': 'object',
'properties': {
'integer': {
'type': 'integer'
},
'number': {
'type': 'number'
},
'string': {
'type': 'string'
},
'object': {
'type': 'object'
},
'array': {
'type': 'array'
},
'boolean': {
'type': 'boolean'
}
}
}
def test_parse_string():
string = 'this is a string "'
assert jsonitem._parse_string(string) == string
def test_parse_object():
assert jsonitem._parse_object('{"key": "value"}') == {'key': 'value'}
def test_parse_invalid_objects(bad_object):
with pytest.raises(DCOSException):
jsonitem._parse_object(bad_object)
def test_parse_number():
assert jsonitem._parse_number('45') == 45
assert jsonitem._parse_number('45.0') == 45.0
def test_parse_invalid_numbers(bad_number):
with pytest.raises(DCOSException):
jsonitem._parse_number(bad_number)
def test_parse_integer():
assert jsonitem._parse_integer('45') == 45
def test_parse_invalid_integers(bad_integer):
with pytest.raises(DCOSException):
jsonitem._parse_integer(bad_integer)
def test_parse_boolean():
assert jsonitem._parse_boolean('true') is True
assert jsonitem._parse_boolean('false') is False
def test_parse_invalid_booleans(bad_boolean):
with pytest.raises(DCOSException):
jsonitem._parse_boolean(bad_boolean)
def test_parse_array():
assert jsonitem._parse_array('[1,2,3]') == [1, 2, 3]
def test_parse_invalid_arrays(bad_array):
with pytest.raises(DCOSException):
jsonitem._parse_array(bad_array)
def test_find_parser(schema, jsonitem_tuple):
key, string_value, value = jsonitem_tuple
assert jsonitem.find_parser(key, schema)(string_value) == value
def test_parse_json_item(schema, parse_tuple):
arg, result = parse_tuple
assert jsonitem.parse_json_item(arg, schema) == result
def test_parse_bad_json_item(schema, bad_parse):
with pytest.raises(DCOSException):
jsonitem.parse_json_item(bad_parse, schema)
| apache-2.0 |
lz1988/company-site | django/contrib/comments/admin.py | 96 | 3680 | from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.comments.models import Comment
from django.utils.translation import ugettext_lazy as _, ungettext
from django.contrib.comments import get_model
from django.contrib.comments.views.moderation import perform_flag, perform_approve, perform_delete
class UsernameSearch(object):
"""The User object may not be auth.User, so we need to provide
a mechanism for issuing the equivalent of a .filter(user__username=...)
search in CommentAdmin.
"""
def __str__(self):
return 'user__%s' % get_user_model().USERNAME_FIELD
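# For example (hypothetical custom user model): if USERNAME_FIELD is 'email',
# str(UsernameSearch()) evaluates to 'user__email', so listing the instance in
# CommentsAdmin.search_fields below makes the admin perform the equivalent of
# a .filter(user__email__icontains=...) lookup when searching.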
class CommentsAdmin(admin.ModelAdmin):
fieldsets = (
(None,
{'fields': ('content_type', 'object_pk', 'site')}
),
(_('Content'),
{'fields': ('user', 'user_name', 'user_email', 'user_url', 'comment')}
),
(_('Metadata'),
{'fields': ('submit_date', 'ip_address', 'is_public', 'is_removed')}
),
)
list_display = ('name', 'content_type', 'object_pk', 'ip_address', 'submit_date', 'is_public', 'is_removed')
list_filter = ('submit_date', 'site', 'is_public', 'is_removed')
date_hierarchy = 'submit_date'
ordering = ('-submit_date',)
raw_id_fields = ('user',)
search_fields = ('comment', UsernameSearch(), 'user_name', 'user_email', 'user_url', 'ip_address')
actions = ["flag_comments", "approve_comments", "remove_comments"]
def get_actions(self, request):
actions = super(CommentsAdmin, self).get_actions(request)
# Only superusers should be able to delete the comments from the DB.
if not request.user.is_superuser and 'delete_selected' in actions:
actions.pop('delete_selected')
if not request.user.has_perm('comments.can_moderate'):
if 'approve_comments' in actions:
actions.pop('approve_comments')
if 'remove_comments' in actions:
actions.pop('remove_comments')
return actions
def flag_comments(self, request, queryset):
self._bulk_flag(request, queryset, perform_flag,
lambda n: ungettext('flagged', 'flagged', n))
flag_comments.short_description = _("Flag selected comments")
def approve_comments(self, request, queryset):
self._bulk_flag(request, queryset, perform_approve,
lambda n: ungettext('approved', 'approved', n))
approve_comments.short_description = _("Approve selected comments")
def remove_comments(self, request, queryset):
self._bulk_flag(request, queryset, perform_delete,
lambda n: ungettext('removed', 'removed', n))
remove_comments.short_description = _("Remove selected comments")
def _bulk_flag(self, request, queryset, action, done_message):
"""
Flag, approve, or remove some comments from an admin action. Actually
calls the `action` argument to perform the heavy lifting.
"""
n_comments = 0
for comment in queryset:
action(request, comment)
n_comments += 1
msg = ungettext('1 comment was successfully %(action)s.',
'%(count)s comments were successfully %(action)s.',
n_comments)
self.message_user(request, msg % {'count': n_comments, 'action': done_message(n_comments)})
# Only register the default admin if the model is the built-in comment model
# (this won't be true if there's a custom comment app).
if get_model() is Comment:
admin.site.register(Comment, CommentsAdmin)
| bsd-3-clause |
minhphung171093/GreenERP_V8 | openerp/addons/fleet/__openerp__.py | 267 | 2245 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Fleet Management',
'version' : '0.1',
'author' : 'OpenERP S.A.',
'sequence': 110,
'category': 'Managing vehicles and contracts',
'website' : 'https://www.odoo.com/page/fleet',
'summary' : 'Vehicle, leasing, insurances, costs',
'description' : """
Vehicle, leasing, insurances, costs
===================================
With this module, Odoo helps you manage all your vehicles, the
contracts associated with those vehicles, as well as services, fuel log
entries, costs and many other features necessary for the management
of your fleet of vehicles.
Main Features
-------------
* Add vehicles to your fleet
* Manage contracts for vehicles
* Reminder when a contract reach its expiration date
* Add services, fuel log entry, odometer values for all vehicles
* Show all costs associated to a vehicle or to a type of service
* Analysis graph for costs
""",
'depends' : [
'base',
'mail',
'board'
],
'data' : [
'security/fleet_security.xml',
'security/ir.model.access.csv',
'fleet_view.xml',
'fleet_cars.xml',
'fleet_data.xml',
'fleet_board_view.xml',
],
'demo': ['fleet_demo.xml'],
'installable' : True,
'application' : True,
}
| agpl-3.0 |
chriscate/python-sharepoint | sharepoint/site.py | 2 | 1848 | import functools
from lxml import etree
from six.moves.urllib.request import Request
from six.moves.urllib.parse import urljoin
from .lists import SharePointLists
from .users import SharePointUsers
from .xml import soap_body, namespaces, OUT
class SharePointSite(object):
def __init__(self, url, opener, timeout=None):
if not url.endswith('/'):
url += '/'
self.opener = opener
self.opener.base_url = url
self.opener.post_soap = self.post_soap
self.opener.relative = functools.partial(urljoin, url)
self.timeout = timeout
def post_soap(self, url, xml, soapaction=None):
url = self.opener.relative(url)
request = Request(url, etree.tostring(soap_body(xml)))
request.add_header('Content-type', 'text/xml; charset=utf-8')
if soapaction:
request.add_header('Soapaction', soapaction)
response = self.opener.open(request, timeout=self.timeout)
return etree.parse(response).xpath('/soap:Envelope/soap:Body/*', namespaces=namespaces)[0]
@property
def lists(self):
if not hasattr(self, '_lists'):
self._lists = SharePointLists(self.opener)
return self._lists
@property
def users(self):
if not hasattr(self, '_users'):
self._users = SharePointUsers(self.opener)
return self._users
def as_xml(self, include_lists=False, include_users=False, **kwargs):
xml = OUT.site(url=self.opener.base_url)
if include_lists or kwargs.get('list_names'):
xml.append(self.lists.as_xml(**kwargs))
if include_users:
if 'user_ids' not in kwargs:
kwargs['user_ids'] = set(xml.xpath('.//sharepoint:user/@id', namespaces=namespaces))
xml.append(self.users.as_xml(**kwargs))
return xml
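# A minimal usage sketch; the URL is illustrative and the opener is assumed to
# be a urllib-style opener prepared elsewhere (e.g. with the authentication
# handler appropriate for the SharePoint instance):
#
#   site = SharePointSite('http://sharepoint.example.org/sites/demo/', opener)
#   xml = site.as_xml(include_lists=True, include_users=True)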
| bsd-3-clause |
axinging/sky_engine | mojo/python/tests/bindings_serialization_deserialization_unittest.py | 9 | 3005 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import math
import mojo_unittest
# pylint: disable=E0611,F0401
import mojo_bindings.serialization as serialization
import mojo_system
# Generated files
# pylint: disable=F0401
import sample_import_mojom
import sample_import2_mojom
import sample_service_mojom
def _NewHandle():
return mojo_system.MessagePipe().handle0
def _NewBar():
bar_instance = sample_service_mojom.Bar()
bar_instance.alpha = 22
bar_instance.beta = 87
bar_instance.gamma = 122
bar_instance.type = sample_service_mojom.Bar.Type.BOTH
return bar_instance
def _NewFoo():
foo_instance = sample_service_mojom.Foo()
foo_instance.name = "Foo.name"
foo_instance.x = 23
foo_instance.y = -23
foo_instance.a = False
foo_instance.b = True
foo_instance.c = True
foo_instance.bar = _NewBar()
foo_instance.extra_bars = [
_NewBar(),
_NewBar(),
]
foo_instance.data = 'Hello world'
foo_instance.source = _NewHandle()
foo_instance.input_streams = [ _NewHandle() ]
foo_instance.output_streams = [ _NewHandle(), _NewHandle() ]
foo_instance.array_of_array_of_bools = [ [ True, False ], [] ]
foo_instance.multi_array_of_strings = [
[
[ "1", "2" ],
[],
[ "3", "4" ],
],
[],
]
foo_instance.array_of_bools = [ True, 0, 1, 2, 0, 0, 0, 0, 0, True ]
return foo_instance
class SerializationDeserializationTest(mojo_unittest.MojoTestCase):
def testFooSerialization(self):
(data, _) = _NewFoo().Serialize()
self.assertTrue(len(data))
self.assertEquals(len(data) % 8, 0)
def testFooDeserialization(self):
(data, handles) = _NewFoo().Serialize()
context = serialization.RootDeserializationContext(data, handles)
self.assertTrue(
sample_service_mojom.Foo.Deserialize(context))
def testFooSerializationDeserialization(self):
foo1 = _NewFoo()
(data, handles) = foo1.Serialize()
context = serialization.RootDeserializationContext(data, handles)
foo2 = sample_service_mojom.Foo.Deserialize(context)
self.assertEquals(foo1, foo2)
def testDefaultsTestSerializationDeserialization(self):
v1 = sample_service_mojom.DefaultsTest()
v1.a18 = []
v1.a19 = ""
v1.a21 = sample_import_mojom.Point()
v1.a22.location = sample_import_mojom.Point()
v1.a22.size = sample_import2_mojom.Size()
(data, handles) = v1.Serialize()
context = serialization.RootDeserializationContext(data, handles)
v2 = sample_service_mojom.DefaultsTest.Deserialize(context)
# NaN needs to be a special case.
self.assertNotEquals(v1, v2)
self.assertTrue(math.isnan(v2.a28))
self.assertTrue(math.isnan(v2.a31))
v1.a28 = v2.a28 = v1.a31 = v2.a31 = 0
self.assertEquals(v1, v2)
def testFooDeserializationError(self):
with self.assertRaises(Exception):
sample_service_mojom.Foo.Deserialize("", [])
| bsd-3-clause |
Zen-CODE/kivy | kivy/input/providers/mouse.py | 17 | 9957 | '''
Mouse provider implementation
=============================
On linux systems, the mouse provider can be annoying when used with another
multitouch provider (hidinput or mtdev). The Mouse can conflict with them: a
single touch can generate one event from the mouse provider and another
from the multitouch provider.
To avoid this behavior, you can activate the "disable_on_activity" token in
the mouse configuration. Then, if there are any touches activated by another
provider, the mouse event will be discarded. Add this to your configuration::
[input]
mouse = mouse,disable_on_activity
Using multitouch interaction with the mouse
-------------------------------------------
.. versionadded:: 1.3.0
By default, the middle and right mouse buttons, as well as a combination of
ctrl + left mouse button are used for multitouch emulation.
If you want to use them for other purposes, you can disable this behavior by
activating the "disable_multitouch" token::
[input]
mouse = mouse,disable_multitouch
.. versionchanged:: 1.9.0
You can now selectively control whether a click initiated as described above
will emulate multi-touch. If the touch has been initiated in the above manner
(e.g. right mouse button), multitouch_sim will be added to the touch's
profile, and a `multitouch_sim` property to the touch. By default
`multitouch_sim` is True and multitouch will be emulated for that touch.
However, if
`multitouch_on_demand` is added to the config::
[input]
mouse = mouse,multitouch_on_demand
then `multitouch_sim` defaults to `False`. In that case, if before mouse
release (e.g. in on_touch_down/move) `multitouch_sim`
is set to True, the touch will simulate multi-touch. For example::
if 'multitouch_sim' in touch.profile:
touch.multitouch_sim = True
Following is a list of the supported profiles for :class:`MouseMotionEvent`.
=================== ==========================================================
Profile name Description
------------------- ----------------------------------------------------------
button Mouse button (left, right, middle, scrollup, scrolldown)
Use property `button`
pos                 2D position. Use properties `x`, `y` or `pos`
multitouch_sim If multitouch is simulated. Use property `multitouch_sim`.
                    See documentation above.
=================== ==========================================================
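
For example, a handler might branch on the reported button; the handler
below is an illustrative sketch, not part of this module::

    def on_touch_down(self, touch):
        if 'button' in touch.profile and touch.button.startswith('scroll'):
            return True  # swallow mouse-wheel events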
'''
__all__ = ('MouseMotionEventProvider', )
from kivy.base import EventLoop
from collections import deque
from kivy.logger import Logger
from kivy.input.provider import MotionEventProvider
from kivy.input.factory import MotionEventFactory
from kivy.input.motionevent import MotionEvent
# late binding
Color = Ellipse = None
class MouseMotionEvent(MotionEvent):
def depack(self, args):
profile = self.profile
# don't overwrite previous profile
if not profile:
profile.extend(('pos', 'button'))
self.is_touch = True
self.sx, self.sy = args[:2]
if len(args) >= 3:
self.button = args[2]
if len(args) == 4:
self.multitouch_sim = args[3]
profile.append('multitouch_sim')
super(MouseMotionEvent, self).depack(args)
    #
    # Automatically create a touch indicator on the surface.
    #
def update_graphics(self, win, create=False):
global Color, Ellipse
de = self.ud.get('_drawelement', None)
if de is None and create:
if Color is None:
from kivy.graphics import Color, Ellipse
with win.canvas.after:
de = (
Color(.8, .2, .2, .7),
Ellipse(size=(20, 20), segments=15))
self.ud._drawelement = de
if de is not None:
self.push()
self.scale_for_screen(
win.system_size[0],
win.system_size[1],
rotation=win.rotation)
de[1].pos = self.x - 10, self.y - 10
self.pop()
def clear_graphics(self, win):
de = self.ud.pop('_drawelement', None)
if de is not None:
win.canvas.after.remove(de[0])
win.canvas.after.remove(de[1])
class MouseMotionEventProvider(MotionEventProvider):
__handlers__ = {}
def __init__(self, device, args):
super(MouseMotionEventProvider, self).__init__(device, args)
self.waiting_event = deque()
self.touches = {}
self.counter = 0
self.current_drag = None
self.alt_touch = None
self.disable_on_activity = False
self.disable_multitouch = False
        self.multitouch_on_demand = False
# split arguments
args = args.split(',')
for arg in args:
arg = arg.strip()
if arg == '':
continue
elif arg == 'disable_on_activity':
self.disable_on_activity = True
elif arg == 'disable_multitouch':
self.disable_multitouch = True
elif arg == 'multitouch_on_demand':
                self.multitouch_on_demand = True
else:
Logger.error('Mouse: unknown parameter <%s>' % arg)
def start(self):
'''Start the mouse provider'''
if not EventLoop.window:
return
EventLoop.window.bind(
on_mouse_move=self.on_mouse_motion,
on_mouse_down=self.on_mouse_press,
on_mouse_up=self.on_mouse_release)
def stop(self):
'''Stop the mouse provider'''
if not EventLoop.window:
return
EventLoop.window.unbind(
on_mouse_move=self.on_mouse_motion,
on_mouse_down=self.on_mouse_press,
on_mouse_up=self.on_mouse_release)
def test_activity(self):
if not self.disable_on_activity:
return False
        # check whether any currently active touch was generated by a
        # provider other than this one; kinetic touches are ignored
touches = EventLoop.touches
for touch in touches:
# discard all kinetic touch
if touch.__class__.__name__ == 'KineticMotionEvent':
continue
# not our instance, stop mouse
if touch.__class__ != MouseMotionEvent:
return True
return False
def find_touch(self, x, y):
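        # convert a 10-pixel grab radius into normalized (0-1) window
        # coordinates so an existing touch near (x, y) can be re-dragged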
factor = 10. / EventLoop.window.system_size[0]
for t in self.touches.values():
if abs(x - t.sx) < factor and abs(y - t.sy) < factor:
return t
return False
def create_touch(self, rx, ry, is_double_tap, do_graphics, button):
self.counter += 1
id = 'mouse' + str(self.counter)
args = [rx, ry, button]
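        # the optional 4th argument becomes touch.multitouch_sim; it
        # defaults to True unless the multitouch_on_demand token is set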
if do_graphics:
            args += [not self.multitouch_on_demand]
self.current_drag = cur = MouseMotionEvent(self.device, id=id,
args=args)
cur.is_double_tap = is_double_tap
self.touches[id] = cur
if do_graphics:
cur.update_graphics(EventLoop.window, True)
self.waiting_event.append(('begin', cur))
return cur
def remove_touch(self, cur):
if cur.id not in self.touches:
return
del self.touches[cur.id]
cur.update_time_end()
self.waiting_event.append(('end', cur))
cur.clear_graphics(EventLoop.window)
def on_mouse_motion(self, win, x, y, modifiers):
width, height = EventLoop.window.system_size
rx = x / float(width)
ry = 1. - y / float(height)
if self.current_drag:
cur = self.current_drag
cur.move([rx, ry])
cur.update_graphics(win)
self.waiting_event.append(('update', cur))
elif self.alt_touch is not None and 'alt' not in modifiers:
            # alt was just released: recreate the touch. The button is not
            # tracked at this point, so 'left' is assumed here (alt
            # emulation starts from a left click).
            is_double_tap = 'shift' in modifiers
            cur = self.create_touch(rx, ry, is_double_tap, True, 'left')
return True
def on_mouse_press(self, win, x, y, button, modifiers):
if self.test_activity():
return
width, height = EventLoop.window.system_size
rx = x / float(width)
ry = 1. - y / float(height)
new_me = self.find_touch(rx, ry)
if new_me:
self.current_drag = new_me
else:
is_double_tap = 'shift' in modifiers
do_graphics = (not self.disable_multitouch) and (
button != 'left' or 'ctrl' in modifiers)
cur = self.create_touch(rx, ry, is_double_tap, do_graphics, button)
if 'alt' in modifiers:
self.alt_touch = cur
self.current_drag = None
return True
def on_mouse_release(self, win, x, y, button, modifiers):
        # special case: if button is 'all', remove every current mouse touch.
if button == 'all':
for cur in list(self.touches.values())[:]:
self.remove_touch(cur)
self.current_drag = None
cur = self.current_drag
if (cur and (self.disable_multitouch or 'multitouch_sim' not in
cur.profile or not cur.multitouch_sim)) or\
(button in ('left', 'scrollup', 'scrolldown', 'scrollleft',
'scrollright') and cur and not ('ctrl' in modifiers)):
self.remove_touch(cur)
self.current_drag = None
if self.alt_touch:
self.remove_touch(self.alt_touch)
self.alt_touch = None
return True
def update(self, dispatch_fn):
'''Update the mouse provider (pop event from the queue)'''
try:
while True:
event = self.waiting_event.popleft()
dispatch_fn(*event)
except IndexError:
pass
# registers
MotionEventFactory.register('mouse', MouseMotionEventProvider)
| mit |
fangxingli/hue | desktop/core/ext-py/tablib-0.10.0/test_tablib.py | 29 | 20578 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for Tablib."""
import json
import unittest
import sys
import os
import tablib
from tablib.compat import markup, unicode, is_py3
class TablibTestCase(unittest.TestCase):
"""Tablib test cases."""
def setUp(self):
"""Create simple data set with headers."""
global data, book
data = tablib.Dataset()
book = tablib.Databook()
self.headers = ('first_name', 'last_name', 'gpa')
self.john = ('John', 'Adams', 90)
self.george = ('George', 'Washington', 67)
self.tom = ('Thomas', 'Jefferson', 50)
self.founders = tablib.Dataset(headers=self.headers, title='Founders')
self.founders.append(self.john)
self.founders.append(self.george)
self.founders.append(self.tom)
def tearDown(self):
"""Teardown."""
pass
def test_empty_append(self):
"""Verify append() correctly adds tuple with no headers."""
new_row = (1, 2, 3)
data.append(new_row)
# Verify width/data
self.assertTrue(data.width == len(new_row))
self.assertTrue(data[0] == new_row)
def test_empty_append_with_headers(self):
"""Verify append() correctly detects mismatch of number of
headers and data.
"""
data.headers = ['first', 'second']
new_row = (1, 2, 3, 4)
self.assertRaises(tablib.InvalidDimensions, data.append, new_row)
def test_set_headers_with_incorrect_dimension(self):
"""Verify headers correctly detects mismatch of number of
headers and data.
"""
data.append(self.john)
def set_header_callable():
data.headers = ['first_name']
self.assertRaises(tablib.InvalidDimensions, set_header_callable)
def test_add_column(self):
"""Verify adding column works with/without headers."""
data.append(['kenneth'])
data.append(['bessie'])
new_col = ['reitz', 'monke']
data.append_col(new_col)
self.assertEqual(data[0], ('kenneth', 'reitz'))
self.assertEqual(data.width, 2)
# With Headers
data.headers = ('fname', 'lname')
new_col = [21, 22]
data.append_col(new_col, header='age')
self.assertEqual(data['age'], new_col)
def test_add_column_no_data_no_headers(self):
"""Verify adding new column with no headers."""
new_col = ('reitz', 'monke')
data.append_col(new_col)
self.assertEqual(data[0], tuple([new_col[0]]))
self.assertEqual(data.width, 1)
self.assertEqual(data.height, len(new_col))
def test_add_column_with_header_ignored(self):
"""Verify append_col() ignores the header if data.headers has
not previously been set
"""
new_col = ('reitz', 'monke')
data.append_col(new_col, header='first_name')
self.assertEqual(data[0], tuple([new_col[0]]))
self.assertEqual(data.width, 1)
self.assertEqual(data.height, len(new_col))
self.assertEqual(data.headers, None)
def test_add_column_with_header_and_headers_only_exist(self):
"""Verify append_col() with header correctly detects mismatch when
headers exist but there is no existing row data
"""
data.headers = ['first_name']
        # no data
        new_col = ('allen',)
def append_col_callable():
data.append_col(new_col, header='middle_name')
self.assertRaises(tablib.InvalidDimensions, append_col_callable)
def test_add_column_with_header_and_data_exists(self):
"""Verify append_col() works when headers and rows exists"""
data.headers = self.headers
data.append(self.john)
        new_col = [10]
data.append_col(new_col, header='age')
self.assertEqual(data.height, 1)
self.assertEqual(data.width, len(self.john) + 1)
self.assertEqual(data['age'], new_col)
self.assertEqual(len(data.headers), len(self.headers) + 1)
def test_add_callable_column(self):
"""Verify adding column with values specified as callable."""
new_col = lambda x: x[0]
self.founders.append_col(new_col, header='first_again')
def test_header_slicing(self):
"""Verify slicing by headers."""
self.assertEqual(self.founders['first_name'],
[self.john[0], self.george[0], self.tom[0]])
self.assertEqual(self.founders['last_name'],
[self.john[1], self.george[1], self.tom[1]])
self.assertEqual(self.founders['gpa'],
[self.john[2], self.george[2], self.tom[2]])
def test_get_col(self):
"""Verify getting columns by index"""
self.assertEqual(
self.founders.get_col(list(self.headers).index('first_name')),
[self.john[0], self.george[0], self.tom[0]])
self.assertEqual(
self.founders.get_col(list(self.headers).index('last_name')),
[self.john[1], self.george[1], self.tom[1]])
self.assertEqual(
self.founders.get_col(list(self.headers).index('gpa')),
[self.john[2], self.george[2], self.tom[2]])
def test_data_slicing(self):
"""Verify slicing by data."""
# Slice individual rows
self.assertEqual(self.founders[0], self.john)
self.assertEqual(self.founders[:1], [self.john])
self.assertEqual(self.founders[1:2], [self.george])
self.assertEqual(self.founders[-1], self.tom)
self.assertEqual(self.founders[3:], [])
# Slice multiple rows
self.assertEqual(self.founders[:], [self.john, self.george, self.tom])
self.assertEqual(self.founders[0:2], [self.john, self.george])
self.assertEqual(self.founders[1:3], [self.george, self.tom])
self.assertEqual(self.founders[2:], [self.tom])
def test_delete(self):
"""Verify deleting from dataset works."""
# Delete from front of object
del self.founders[0]
self.assertEqual(self.founders[:], [self.george, self.tom])
# Verify dimensions, width should NOT change
self.assertEqual(self.founders.height, 2)
self.assertEqual(self.founders.width, 3)
# Delete from back of object
del self.founders[1]
self.assertEqual(self.founders[:], [self.george])
# Verify dimensions, width should NOT change
self.assertEqual(self.founders.height, 1)
self.assertEqual(self.founders.width, 3)
# Delete from invalid index
self.assertRaises(IndexError, self.founders.__delitem__, 3)
def test_csv_export(self):
"""Verify exporting dataset object as CSV."""
# Build up the csv string with headers first, followed by each row
csv = ''
for col in self.headers:
csv += col + ','
csv = csv.strip(',') + '\r\n'
for founder in self.founders:
for col in founder:
csv += str(col) + ','
csv = csv.strip(',') + '\r\n'
self.assertEqual(csv, self.founders.csv)
def test_tsv_export(self):
"""Verify exporting dataset object as TSV."""
# Build up the tsv string with headers first, followed by each row
tsv = ''
for col in self.headers:
tsv += col + '\t'
tsv = tsv.strip('\t') + '\r\n'
for founder in self.founders:
for col in founder:
tsv += str(col) + '\t'
tsv = tsv.strip('\t') + '\r\n'
self.assertEqual(tsv, self.founders.tsv)
def test_html_export(self):
"""HTML export"""
html = markup.page()
html.table.open()
html.thead.open()
html.tr(markup.oneliner.th(self.founders.headers))
html.thead.close()
for founder in self.founders:
html.tr(markup.oneliner.td(founder))
html.table.close()
html = str(html)
self.assertEqual(html, self.founders.html)
def test_html_export_none_value(self):
"""HTML export"""
html = markup.page()
html.table.open()
html.thead.open()
html.tr(markup.oneliner.th(['foo','', 'bar']))
html.thead.close()
html.tr(markup.oneliner.td(['foo','', 'bar']))
html.table.close()
html = str(html)
        headers = ['foo', None, 'bar']
d = tablib.Dataset(['foo', None, 'bar'], headers=headers)
self.assertEqual(html, d.html)
def test_unicode_append(self):
"""Passes in a single unicode character and exports."""
if is_py3:
new_row = ('å', 'é')
else:
exec("new_row = (u'å', u'é')")
data.append(new_row)
data.json
data.yaml
data.csv
data.tsv
data.xls
data.xlsx
data.ods
data.html
def test_book_export_no_exceptions(self):
"""Test that various exports don't error out."""
book = tablib.Databook()
book.add_sheet(data)
book.json
book.yaml
book.xls
book.xlsx
book.ods
def test_json_import_set(self):
"""Generate and import JSON set serialization."""
data.append(self.john)
data.append(self.george)
data.headers = self.headers
_json = data.json
data.json = _json
self.assertEqual(json.loads(_json), json.loads(data.json))
def test_json_import_book(self):
"""Generate and import JSON book serialization."""
data.append(self.john)
data.append(self.george)
data.headers = self.headers
book.add_sheet(data)
_json = book.json
book.json = _json
self.assertEqual(json.loads(_json), json.loads(book.json))
def test_yaml_import_set(self):
"""Generate and import YAML set serialization."""
data.append(self.john)
data.append(self.george)
data.headers = self.headers
_yaml = data.yaml
data.yaml = _yaml
self.assertEqual(_yaml, data.yaml)
def test_yaml_import_book(self):
"""Generate and import YAML book serialization."""
data.append(self.john)
data.append(self.george)
data.headers = self.headers
book.add_sheet(data)
_yaml = book.yaml
book.yaml = _yaml
self.assertEqual(_yaml, book.yaml)
def test_csv_import_set(self):
"""Generate and import CSV set serialization."""
data.append(self.john)
data.append(self.george)
data.headers = self.headers
_csv = data.csv
data.csv = _csv
self.assertEqual(_csv, data.csv)
def test_csv_import_set_with_spaces(self):
"""Generate and import CSV set serialization when row values have
spaces."""
data.append(('Bill Gates', 'Microsoft'))
data.append(('Steve Jobs', 'Apple'))
data.headers = ('Name', 'Company')
_csv = data.csv
data.csv = _csv
self.assertEqual(_csv, data.csv)
def test_csv_import_set_with_newlines(self):
"""Generate and import CSV set serialization when row values have
newlines."""
data.append(('Markdown\n=======',
'A cool language\n\nwith paragraphs'))
data.append(('reStructedText\n==============',
'Another cool language\n\nwith paragraphs'))
data.headers = ('title', 'body')
_csv = data.csv
data.csv = _csv
self.assertEqual(_csv, data.csv)
def test_tsv_import_set(self):
"""Generate and import TSV set serialization."""
data.append(self.john)
data.append(self.george)
data.headers = self.headers
_tsv = data.tsv
data.tsv = _tsv
self.assertEqual(_tsv, data.tsv)
def test_csv_format_detect(self):
"""Test CSV format detection."""
_csv = (
'1,2,3\n'
'4,5,6\n'
'7,8,9\n'
)
_bunk = (
'¡¡¡¡¡¡¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
)
self.assertTrue(tablib.formats.csv.detect(_csv))
self.assertFalse(tablib.formats.csv.detect(_bunk))
def test_tsv_format_detect(self):
"""Test TSV format detection."""
_tsv = (
'1\t2\t3\n'
'4\t5\t6\n'
'7\t8\t9\n'
)
_bunk = (
'¡¡¡¡¡¡¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
)
self.assertTrue(tablib.formats.tsv.detect(_tsv))
self.assertFalse(tablib.formats.tsv.detect(_bunk))
def test_json_format_detect(self):
"""Test JSON format detection."""
_json = '[{"last_name": "Adams","age": 90,"first_name": "John"}]'
_bunk = (
'¡¡¡¡¡¡¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
)
self.assertTrue(tablib.formats.json.detect(_json))
self.assertFalse(tablib.formats.json.detect(_bunk))
def test_yaml_format_detect(self):
"""Test YAML format detection."""
_yaml = '- {age: 90, first_name: John, last_name: Adams}'
_tsv = 'foo\tbar'
_bunk = (
'¡¡¡¡¡¡---///\n\n\n¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
)
self.assertTrue(tablib.formats.yaml.detect(_yaml))
self.assertFalse(tablib.formats.yaml.detect(_bunk))
self.assertFalse(tablib.formats.yaml.detect(_tsv))
def test_auto_format_detect(self):
"""Test auto format detection."""
_yaml = '- {age: 90, first_name: John, last_name: Adams}'
_json = '[{"last_name": "Adams","age": 90,"first_name": "John"}]'
_csv = '1,2,3\n4,5,6\n7,8,9\n'
_tsv = '1\t2\t3\n4\t5\t6\n7\t8\t9\n'
_bunk = '¡¡¡¡¡¡---///\n\n\n¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
self.assertEqual(tablib.detect(_yaml)[0], tablib.formats.yaml)
self.assertEqual(tablib.detect(_csv)[0], tablib.formats.csv)
self.assertEqual(tablib.detect(_tsv)[0], tablib.formats.tsv)
self.assertEqual(tablib.detect(_json)[0], tablib.formats.json)
self.assertEqual(tablib.detect(_bunk)[0], None)
def test_transpose(self):
"""Transpose a dataset."""
transposed_founders = self.founders.transpose()
first_row = transposed_founders[0]
second_row = transposed_founders[1]
self.assertEqual(transposed_founders.headers,
["first_name","John", "George", "Thomas"])
self.assertEqual(first_row,
("last_name","Adams", "Washington", "Jefferson"))
self.assertEqual(second_row,
("gpa",90, 67, 50))
def test_row_stacking(self):
"""Row stacking."""
to_join = tablib.Dataset(headers=self.founders.headers)
for row in self.founders:
to_join.append(row=row)
row_stacked = self.founders.stack(to_join)
for column in row_stacked.headers:
original_data = self.founders[column]
expected_data = original_data + original_data
self.assertEqual(row_stacked[column], expected_data)
def test_column_stacking(self):
"""Column stacking"""
to_join = tablib.Dataset(headers=self.founders.headers)
for row in self.founders:
to_join.append(row=row)
column_stacked = self.founders.stack_cols(to_join)
for index, row in enumerate(column_stacked):
original_data = self.founders[index]
expected_data = original_data + original_data
self.assertEqual(row, expected_data)
self.assertEqual(column_stacked[0],
("John", "Adams", 90, "John", "Adams", 90))
def test_sorting(self):
"""Sort columns."""
sorted_data = self.founders.sort(col="first_name")
self.assertEqual(sorted_data.title, 'Founders')
first_row = sorted_data[0]
second_row = sorted_data[2]
third_row = sorted_data[1]
expected_first = self.founders[1]
expected_second = self.founders[2]
expected_third = self.founders[0]
self.assertEqual(first_row, expected_first)
self.assertEqual(second_row, expected_second)
self.assertEqual(third_row, expected_third)
def test_wipe(self):
"""Purge a dataset."""
new_row = (1, 2, 3)
data.append(new_row)
# Verify width/data
self.assertTrue(data.width == len(new_row))
self.assertTrue(data[0] == new_row)
data.wipe()
new_row = (1, 2, 3, 4)
data.append(new_row)
self.assertTrue(data.width == len(new_row))
self.assertTrue(data[0] == new_row)
def test_formatters(self):
"""Confirm formatters are being triggered."""
def _formatter(cell_value):
return str(cell_value).upper()
self.founders.add_formatter('last_name', _formatter)
for name in [r['last_name'] for r in self.founders.dict]:
self.assertTrue(name.isupper())
def test_unicode_csv(self):
"""Check if unicode in csv export doesn't raise."""
data = tablib.Dataset()
if sys.version_info[0] > 2:
data.append(['\xfc', '\xfd'])
else:
exec("data.append([u'\xfc', u'\xfd'])")
data.csv
def test_csv_column_select(self):
"""Build up a CSV and test selecting a column"""
data = tablib.Dataset()
data.csv = self.founders.csv
headers = data.headers
self.assertTrue(isinstance(headers[0], unicode))
orig_first_name = self.founders[self.headers[0]]
csv_first_name = data[headers[0]]
self.assertEqual(orig_first_name, csv_first_name)
def test_csv_column_delete(self):
"""Build up a CSV and test deleting a column"""
data = tablib.Dataset()
data.csv = self.founders.csv
target_header = data.headers[0]
self.assertTrue(isinstance(target_header, unicode))
del data[target_header]
self.assertTrue(target_header not in data.headers)
def test_csv_column_sort(self):
"""Build up a CSV and test sorting a column by name"""
data = tablib.Dataset()
data.csv = self.founders.csv
orig_target_header = self.founders.headers[1]
target_header = data.headers[1]
self.founders.sort(orig_target_header)
data.sort(target_header)
self.assertEqual(self.founders[orig_target_header], data[target_header])
def test_unicode_renders_markdown_table(self):
# add another entry to test right field width for
# integer
self.founders.append(('Old', 'Man', 100500))
self.assertEqual(
"""
first_name|last_name |gpa
----------|----------|------
John |Adams |90
George |Washington|67
Thomas |Jefferson |50
Old |Man |100500
""".strip(),
unicode(self.founders)
)
def test_databook_add_sheet_accepts_only_dataset_instances(self):
class NotDataset(object):
def append(self, item):
pass
dataset = NotDataset()
dataset.append(self.john)
self.assertRaises(tablib.InvalidDatasetType, book.add_sheet, dataset)
def test_databook_add_sheet_accepts_dataset_subclasses(self):
class DatasetSubclass(tablib.Dataset):
pass
# just checking if subclass of tablib.Dataset can be added to Databook
dataset = DatasetSubclass()
dataset.append(self.john)
dataset.append(self.tom)
try:
book.add_sheet(dataset)
except tablib.InvalidDatasetType:
self.fail("Subclass of tablib.Dataset should be accepted by Databook.add_sheet")
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
spaceninja/mltshp | test/unit/external_relationship_tests.py | 1 | 1114 | from models import User, ExternalRelationship
from base import BaseTestCase
class ExternalRelationshipTests(BaseTestCase):
def test_add_relationship(self):
"""
ExternalRelationship.add_relationship should correctly save a new entry.
If a duplicate entry is passed in, no errors are thrown and no new entries are saved.
"""
user = User(name='example',email='example@example.com', email_confirmed=1)
user.save()
ExternalRelationship.add_relationship(user, '1000', ExternalRelationship.TWITTER)
all_relationships = ExternalRelationship.all()
self.assertEqual(1, len(all_relationships))
self.assertEqual(user.id, all_relationships[0].user_id)
self.assertEqual(1000, all_relationships[0].service_id)
self.assertNotEqual(None, all_relationships[0].created_at)
self.assertNotEqual(None, all_relationships[0].updated_at)
ExternalRelationship.add_relationship(user, '1000', ExternalRelationship.TWITTER)
self.assertEqual(1, len(all_relationships))
| mpl-2.0 |
danithaca/mxnet | example/neural-style/model_vgg19.py | 16 | 5189 | import find_mxnet
import mxnet as mx
import os, sys
from collections import namedtuple
ConvExecutor = namedtuple('ConvExecutor', ['executor', 'data', 'data_grad', 'style', 'content', 'arg_dict'])
def get_symbol():
# declare symbol
data = mx.sym.Variable("data")
conv1_1 = mx.symbol.Convolution(name='conv1_1', data=data , num_filter=64, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu1_1 = mx.symbol.Activation(name='relu1_1', data=conv1_1 , act_type='relu')
conv1_2 = mx.symbol.Convolution(name='conv1_2', data=relu1_1 , num_filter=64, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu1_2 = mx.symbol.Activation(name='relu1_2', data=conv1_2 , act_type='relu')
pool1 = mx.symbol.Pooling(name='pool1', data=relu1_2 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv2_1 = mx.symbol.Convolution(name='conv2_1', data=pool1 , num_filter=128, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu2_1 = mx.symbol.Activation(name='relu2_1', data=conv2_1 , act_type='relu')
conv2_2 = mx.symbol.Convolution(name='conv2_2', data=relu2_1 , num_filter=128, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu2_2 = mx.symbol.Activation(name='relu2_2', data=conv2_2 , act_type='relu')
pool2 = mx.symbol.Pooling(name='pool2', data=relu2_2 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv3_1 = mx.symbol.Convolution(name='conv3_1', data=pool2 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu3_1 = mx.symbol.Activation(name='relu3_1', data=conv3_1 , act_type='relu')
conv3_2 = mx.symbol.Convolution(name='conv3_2', data=relu3_1 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu3_2 = mx.symbol.Activation(name='relu3_2', data=conv3_2 , act_type='relu')
conv3_3 = mx.symbol.Convolution(name='conv3_3', data=relu3_2 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu3_3 = mx.symbol.Activation(name='relu3_3', data=conv3_3 , act_type='relu')
conv3_4 = mx.symbol.Convolution(name='conv3_4', data=relu3_3 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu3_4 = mx.symbol.Activation(name='relu3_4', data=conv3_4 , act_type='relu')
pool3 = mx.symbol.Pooling(name='pool3', data=relu3_4 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv4_1 = mx.symbol.Convolution(name='conv4_1', data=pool3 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu4_1 = mx.symbol.Activation(name='relu4_1', data=conv4_1 , act_type='relu')
conv4_2 = mx.symbol.Convolution(name='conv4_2', data=relu4_1 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu4_2 = mx.symbol.Activation(name='relu4_2', data=conv4_2 , act_type='relu')
conv4_3 = mx.symbol.Convolution(name='conv4_3', data=relu4_2 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu4_3 = mx.symbol.Activation(name='relu4_3', data=conv4_3 , act_type='relu')
conv4_4 = mx.symbol.Convolution(name='conv4_4', data=relu4_3 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu4_4 = mx.symbol.Activation(name='relu4_4', data=conv4_4 , act_type='relu')
pool4 = mx.symbol.Pooling(name='pool4', data=relu4_4 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv5_1 = mx.symbol.Convolution(name='conv5_1', data=pool4 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), no_bias=False, workspace=1024)
relu5_1 = mx.symbol.Activation(name='relu5_1', data=conv5_1 , act_type='relu')
# style and content layers
style = mx.sym.Group([relu1_1, relu2_1, relu3_1, relu4_1, relu5_1])
content = mx.sym.Group([relu4_2])
return style, content
def get_executor(style, content, input_size, ctx):
out = mx.sym.Group([style, content])
# make executor
arg_shapes, output_shapes, aux_shapes = out.infer_shape(data=(1, 3, input_size[0], input_size[1]))
arg_names = out.list_arguments()
arg_dict = dict(zip(arg_names, [mx.nd.zeros(shape, ctx=ctx) for shape in arg_shapes]))
grad_dict = {"data": arg_dict["data"].copyto(ctx)}
# init with pretrained weight
pretrained = mx.nd.load("./model/vgg19.params")
for name in arg_names:
if name == "data":
continue
key = "arg:" + name
if key in pretrained:
pretrained[key].copyto(arg_dict[name])
else:
print("Skip argument %s" % name)
executor = out.bind(ctx=ctx, args=arg_dict, args_grad=grad_dict, grad_req="write")
return ConvExecutor(executor=executor,
data=arg_dict["data"],
data_grad=grad_dict["data"],
style=executor.outputs[:-1],
content=executor.outputs[-1],
arg_dict=arg_dict)
def get_model(input_size, ctx):
style, content = get_symbol()
return get_executor(style, content, input_size, ctx)
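# A minimal usage sketch (assumes ./model/vgg19.params exists and that the
# input image has already been preprocessed to shape (1, 3, H, W)):
#
#   model = get_model((224, 224), mx.cpu())
#   model.data[:] = preprocessed_image        # copy the input in place
#   model.executor.forward()
#   style_features = [s.copyto(mx.cpu()) for s in model.style]
#   content_feature = model.content.copyto(mx.cpu())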
| apache-2.0 |
tntnatbry/tensorflow | tensorflow/contrib/distributions/python/ops/bijectors/exp_impl.py | 107 | 2076 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exp bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distributions.python.ops.bijectors import power_transform
__all__ = [
"Exp",
]
class Exp(power_transform.PowerTransform):
"""Compute `Y = g(X) = exp(X)`.
Example Use:
```python
# Create the Y=g(X)=exp(X) transform which works only on Tensors with 1
# batch ndim and 2 event ndims (i.e., vector of matrices).
exp = Exp(event_ndims=2)
x = [[[1., 2],
[3, 4]],
[[5, 6],
[7, 8]]]
exp(x) == exp.forward(x)
log(x) == exp.inverse(x)
```
Note: the exp(.) is applied element-wise but the Jacobian is a reduction
over the event space.
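
  For instance, with `event_ndims=2` the log-det-Jacobian is reduced over
  the last two dimensions of the input. A sketch of the relationship:

  ```python
  exp.forward_log_det_jacobian(x)  # ~ reduce_sum(x, axis=[-2, -1])
  exp.inverse_log_det_jacobian(y)  # ~ -reduce_sum(log(y), axis=[-2, -1])
  ```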
"""
def __init__(self,
event_ndims=0,
validate_args=False,
name="exp"):
"""Instantiates the `Exp` bijector.
Args:
event_ndims: Scalar `int32` `Tensor` indicating the number of dimensions
associated with a particular draw from the distribution.
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str` name given to ops managed by this object.
"""
super(Exp, self).__init__(
event_ndims=event_ndims,
validate_args=validate_args,
name=name)
| apache-2.0 |
charlietrypsin/ATD_gui | unpickler.py | 1 | 1282 | import cPickle as pickle
from os import listdir, walk
import argparse
parser = argparse.ArgumentParser(description='pull data from .txt files')
parser.add_argument ('dataFn', help='Folder Name', action='store')
args = parser.parse_args()
dataFn = args.dataFn
for dirpath, dirnames, filenames in walk(dataFn):
# print(dataFn)
# print(dirnames)
# print(filenames)
# print(dirpath)
    for name in dirnames:
        # print(name)
        if '.acal' in name:
            print(name)
# print(dataFn)
# print(pickle.load(open(dataFn + '\cal.calibration','r')))
# print('Unpickling files')
objects = []
with (open(dataFn + '\\' + '170314.calibration', "rb")) as openfile:
while True:
try:
objects.append(pickle.load(openfile))
except EOFError:
break
print(objects)
cal = objects[0]
print(cal.waveVelocity)
print(cal.coefficientA)
print(cal.coefficientB)
ccs = cal._calculateOmega(6.840,3640,13)
print(ccs)
# calOb = Pickler(open(dataFn + '\cal.calibration','rb'))
# print('Unpickling files')
# print(calOb)
# cal0 = open(dataFn + '\cal.calibration', 'rb')
# cal = pickle.loads(cal0)
# pickle.Unpickler(cal)
# print('Reading pickled file')
# obj = range(0,100)
# pickle.Pickler(obj. 'w')
# print(obj) | gpl-2.0 |
Orav/kbengine | kbe/src/lib/python/Lib/lib2to3/fixes/fix_ws_comma.py | 5 | 1129 | """Fixer that changes 'a ,b' into 'a, b'.
This also changes '{a :b}' into '{a: b}', but does not touch other
uses of colons. It does not touch other uses of whitespace.
"""
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
class FixWsComma(fixer_base.BaseFix):
    explicit = True # The user must ask for this fixer
PATTERN = """
any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]>
"""
COMMA = pytree.Leaf(token.COMMA, ",")
COLON = pytree.Leaf(token.COLON, ":")
SEPS = (COMMA, COLON)
def transform(self, node, results):
new = node.clone()
comma = False
for child in new.children:
if child in self.SEPS:
prefix = child.prefix
if prefix.isspace() and "\n" not in prefix:
child.prefix = ""
comma = True
else:
if comma:
prefix = child.prefix
if not prefix:
child.prefix = " "
comma = False
return new
| lgpl-3.0 |
tatsuhiro-t/grpc | src/python/src/grpc/framework/base/packets/interfaces.py | 5 | 3105 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Interfaces defined and used by the base layer of RPC Framework."""
import abc
# packets is referenced from specifications in this module.
from grpc.framework.base import interfaces
from grpc.framework.base.packets import packets # pylint: disable=unused-import
class ForeLink(object):
"""Accepts back-to-front tickets and emits front-to-back tickets."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def accept_back_to_front_ticket(self, ticket):
"""Accept a packets.BackToFrontPacket.
Args:
ticket: Any packets.BackToFrontPacket.
"""
raise NotImplementedError()
@abc.abstractmethod
def join_rear_link(self, rear_link):
"""Mates this object with a peer with which it will exchange tickets."""
raise NotImplementedError()
class RearLink(object):
"""Accepts front-to-back tickets and emits back-to-front tickets."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def accept_front_to_back_ticket(self, ticket):
"""Accepts a packets.FrontToBackPacket.
Args:
ticket: Any packets.FrontToBackPacket.
"""
raise NotImplementedError()
@abc.abstractmethod
def join_fore_link(self, fore_link):
"""Mates this object with a peer with which it will exchange tickets."""
raise NotImplementedError()
class Front(ForeLink, interfaces.Front):
"""Clientish objects that operate by sending and receiving tickets."""
__metaclass__ = abc.ABCMeta
class Back(RearLink, interfaces.Back):
"""Serverish objects that operate by sending and receiving tickets."""
__metaclass__ = abc.ABCMeta
| bsd-3-clause |
Reddine/dzlibs | tweeza/items/forms.py | 9 | 1931 | # -*- coding: utf-8 -*-
from flask.ext.wtf import Form
from wtforms import TextField, TextAreaField, SubmitField, SelectField
from wtforms.fields.html5 import URLField
from flask.ext.babel import lazy_gettext as _
from flask_wtf.file import FileField
class BaseForm(Form):
"""
this contains shared fields among other forms
"""
blog_post = URLField(_('Blog post URL'))
ar_title = TextField(_('Item title in Arabic'))
en_title = TextField(_('Item title in English'))
fr_title = TextField(_('Item title in French'))
thumbnail = FileField(_('Thumbnail (minimum dimensions 230x230)'))
tags = TextField(_('Tags'))
category = SelectField(_('Category'), choices=[], coerce=int)
def set_categories(self, categories, lang):
choices = []
for category in categories:
choices.append((category.id, category.get_name(lang)))
self.category.choices = choices
class AddItemForm(BaseForm):
github = URLField(_('Remote repository URL'))
description = TextAreaField(_('Description'))
files = FileField(_('Files'))
license = SelectField(_('License'), choices=[], coerce=int)
submit = SubmitField(_('Add item'))
def set_licenses(self, licenses):
choices = []
for license in licenses:
choices.append((license.id, license.name))
self.license.choices = choices
class EditGithubItemForm(BaseForm):
github = URLField(_('Remote repository URL'))
submit = SubmitField(_('Update'))
class EditItemForm(BaseForm):
files = FileField(_('Files'))
description = TextAreaField(_('Description'))
license = SelectField(_('License'), choices=[], coerce=int)
submit = SubmitField(_('Update'))
def set_licenses(self, licenses):
choices = []
for license in licenses:
choices.append((license.id, license.name))
self.license.choices = choices
| mpl-2.0 |
alexholehouse/SBMLIntegrator | libsbml-5.0.0/src/bindings/python/test/annotation/TestDate_newSetters.py | 1 | 8745 | #
# @file TestDate_newSetters.py
# @brief Date unit tests
#
# @author Akiya Jouraku (Python conversion)
# @author Sarah Keating
#
# $Id: TestDate_newSetters.py 11531 2010-07-22 17:36:00Z mhucka $
# $HeadURL: https://sbml.svn.sourceforge.net/svnroot/sbml/trunk/libsbml/src/bindings/python/test/annotation/TestDate_newSetters.py $
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/annotation/test/TestDate_newSetters.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
import sys
import unittest
import libsbml
class TestDate_newSetters(unittest.TestCase):
def test_Date_setDateAsString(self):
date = libsbml.Date(2007,10,23,14,15,16,1,3,0)
self.assert_( date != None )
i = date.setDateAsString( "20081-12-30T12:15:45+02:00")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_(( "2007-10-23T14:15:16+03:00" == date.getDateAsString() ))
i = date.setDateAsString( "200-12-30T12:15:45+02:00")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_(( "2007-10-23T14:15:16+03:00" == date.getDateAsString() ))
i = date.setDateAsString("")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_(( "2000-01-01T00:00:00Z" == date.getDateAsString() ))
i = date.setDateAsString( "2008-12-30T12:15:45+02:00")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getYear() == 2008 )
self.assert_( date.getMonth() == 12 )
self.assert_( date.getDay() == 30 )
self.assert_( date.getHour() == 12 )
self.assert_( date.getMinute() == 15 )
self.assert_( date.getSecond() == 45 )
self.assert_( date.getSignOffset() == 1 )
self.assert_( date.getHoursOffset() == 2 )
self.assert_( date.getMinutesOffset() == 0 )
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setDay(self):
date = libsbml.Date(2005,2,12,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setDay(29)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getDay() == 1 )
i = date.setDay(31)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getDay() == 1 )
i = date.setDay(15)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getDay() == 15 )
self.assert_(( "2005-02-15T12:15:45+02:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setHour(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setHour(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getHour() == 0 )
i = date.setHour(12121)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getHour() == 0 )
i = date.setHour(9)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getHour() == 9 )
self.assert_(( "2005-12-30T09:15:45+02:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setHoursOffset(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setHoursOffset(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getHoursOffset() == 0 )
i = date.setHoursOffset(11)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getHoursOffset() == 11 )
self.assert_(( "2005-12-30T12:15:45+11:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setMinute(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setMinute(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getMinute() == 0 )
i = date.setMinute(12121)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getMinute() == 0 )
i = date.setMinute(32)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getMinute() == 32 )
self.assert_(( "2005-12-30T12:32:45+02:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setMinutesOffset(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setMinutesOffset(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getMinutesOffset() == 0 )
i = date.setMinutesOffset(60)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getMinutesOffset() == 0 )
i = date.setMinutesOffset(45)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getMinutesOffset() == 45 )
self.assert_(( "2005-12-30T12:15:45+02:45" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setMonth(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setMonth(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getMonth() == 1 )
i = date.setMonth(12121)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getMonth() == 1 )
i = date.setMonth(11)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getMonth() == 11 )
self.assert_(( "2005-11-30T12:15:45+02:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setSecond(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setSecond(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getSecond() == 0 )
i = date.setSecond(12121)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getSecond() == 0 )
i = date.setSecond(32)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getSecond() == 32 )
self.assert_(( "2005-12-30T12:15:32+02:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def test_Date_setYear(self):
date = libsbml.Date(2005,12,30,12,15,45,1,2,0)
self.assert_( date != None )
i = date.setYear(434)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getYear() == 2000 )
i = date.setYear(12121)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( date.getYear() == 2000 )
i = date.setYear(2008)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( date.getYear() == 2008 )
self.assert_(( "2008-12-30T12:15:45+02:00" == date.getDateAsString() ))
_dummyList = [ date ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestDate_newSetters))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
| gpl-3.0 |
bengosney/romrescue.org | dogs/admin.py | 1 | 4979 | # Standard Library
import json
import os
# Django
from django.contrib import admin
# Third Party
from adminsortable2.admin import SortableAdminMixin, SortableInlineAdminMixin
from image_cropping import ImageCroppingMixin
# First Party
from dogs import models
from modulestatus.admin import statusAdmin
from romrescue.actions import export_as_csv_action
class DogPhotoInline(SortableInlineAdminMixin, ImageCroppingMixin, admin.TabularInline):
model = models.DogPhoto
extra = 3
class YoutubeInline(SortableInlineAdminMixin, ImageCroppingMixin, admin.TabularInline):
model = models.YoutubeVideo
extra = 1
class AboutInline(SortableInlineAdminMixin, admin.TabularInline):
model = models.AboutInfo
extra = 3
class KeyPointsAdmin(SortableAdminMixin, statusAdmin, admin.ModelAdmin):
model = models.KeyPoints
def get_querysety(self, request):
qs = self.model.admin_objects.get_queryset()
ordering = self.ordering or ()
if ordering:
qs = qs.order_by(*ordering)
return qs
def formfield_for_choice_field(self, db_field, request, **kwargs):
if db_field.name == "icon":
data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../icons/icons.json")
with open(data_path) as data_file:
data = json.load(data_file)
kwargs["choices"] = [(icon, icon) for icon in data["icons"]]
return super().formfield_for_choice_field(db_field, request, **kwargs)
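# Action factories: each returns a closure bound to one tag or status so the
# admin can expose a separate bulk action per Filter / per status choice.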
def make_tag_action(tag):
def tag_action(modeladmin, request, queryset):
for dog in queryset:
dog.filters.add(tag)
tag_action.short_description = f"Tag dog with {tag.name}"
tag_action.__name__ = f"tag_dog_with_{tag.slug}"
return tag_action
def make_status_action(status, name):
def status_action(modeladmin, request, queryset):
queryset.update(dogStatus=status)
status_action.short_description = f"Change status to {name}"
status_action.__name__ = f"change_status_to_{status}"
return status_action
def set_price_to_default(modeladmin, request, queryset):
queryset.update(cost=models.Dog.DEFAULT_COST)
class DogAdmin(SortableAdminMixin, statusAdmin, admin.ModelAdmin):
model = models.Dog
inlines = [AboutInline, DogPhotoInline, YoutubeInline]
filter_horizontal = ("keypoints",)
list_display = ("name", "reserved", "location", "dogStatus", "all_filters")
list_per_page = 25
actions = ["add_tag_dog", set_price_to_default]
def __init__(self, model, admin_site):
super().__init__(model, admin_site)
self.list_filter = ["dogStatus", "reserved", "location", "promoted", "filters", "rescue"] + list(
self.list_filter
)
def get_actions(self, request):
actions = super().get_actions(request)
for tag in models.Filter.objects.all():
action = make_tag_action(tag)
actions[action.__name__] = (action, action.__name__, action.short_description)
for status in models.Dog.STATUS:
action = make_status_action(status[0], status[1])
actions[action.__name__] = (action, action.__name__, action.short_description)
return actions
class Media:
css = {"screen": ("/static/pages/css/admin.css",)}
class StatusAdmin(SortableAdminMixin, admin.ModelAdmin):
model = models.Status
list_display = ("title", "show_arrival_date")
class DogPhoto(ImageCroppingMixin, admin.ModelAdmin):
model = models.DogPhoto
class RescueAdmin(admin.ModelAdmin):
model = models.Rescue
list_display = ("name", "logo", "website")
list_per_page = 25
class FilterAdmin(admin.ModelAdmin):
model = models.Filter
class SponsorshipInfoLinksAdmin(admin.ModelAdmin):
model = models.SponsorshipInfoLink
list_display = ("title", "link", "file")
class SponsorAdmin(admin.ModelAdmin):
model = models.SponsorSubmission
readonly_fields = ("created",)
list_filter = ("created",)
list_display = (
"name",
"email",
"dog",
"created",
)
list_per_page = 25
actions = [export_as_csv_action("CSV Export", fields=["name", "email", "created", "enquiry"])]
def has_add_permission(self, request, obj=None):
return False
def has_delete_permission(self, request, obj=None):
return False
class SponsorLevelAdmin(admin.ModelAdmin):
models = models.SponsorshipLevel
list_display = (
"name",
"cost",
)
admin.site.register(models.KeyPoints, KeyPointsAdmin)
admin.site.register(models.Dog, DogAdmin)
admin.site.register(models.Status, StatusAdmin)
admin.site.register(models.Rescue, RescueAdmin)
admin.site.register(models.Filter, FilterAdmin)
admin.site.register(models.SponsorshipInfoLink, SponsorshipInfoLinksAdmin)
admin.site.register(models.SponsorSubmission, SponsorAdmin)
admin.site.register(models.SponsorshipLevel, SponsorLevelAdmin)
| gpl-3.0 |
alerta/alerta-contrib | plugins/amqp/listener.py | 1 | 1246 | #!/usr/bin/env python
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin
AMQP_URL = 'mongodb://localhost:27017/kombu'
AMQP_TOPIC = 'notify'
class FanoutConsumer(ConsumerMixin):
def __init__(self, conn):
self.connection = conn
self.channel = self.connection.channel()
def get_consumers(self, Consumer, channel):
exchange = Exchange(
name=AMQP_TOPIC,
type='fanout',
channel=self.channel,
durable=True
)
queues = [
Queue(
name='',
exchange=exchange,
routing_key='',
channel=self.channel,
exclusive=True
)
]
return [
Consumer(queues=queues, accept=['json'], callbacks=[self.on_message])
]
def on_message(self, body, message):
try:
print(body)
except Exception as e:
print(str(e))
message.ack()
if __name__ == '__main__':
from kombu.utils.debug import setup_logging
setup_logging(loglevel='DEBUG', loggers=[''])
with Connection(AMQP_URL) as conn:
consumer = FanoutConsumer(conn)
consumer.run()
| mit |
iemejia/beam | sdks/python/apache_beam/examples/snippets/transforms/elementwise/partition.py | 5 | 4767 | # coding=utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
def partition_function(test=None):
# pylint: disable=line-too-long, expression-not-assigned
# [START partition_function]
import apache_beam as beam
durations = ['annual', 'biennial', 'perennial']
def by_duration(plant, num_partitions):
return durations.index(plant['duration'])
with beam.Pipeline() as pipeline:
annuals, biennials, perennials = (
pipeline
| 'Gardening plants' >> beam.Create([
{'icon': '🍓', 'name': 'Strawberry', 'duration': 'perennial'},
{'icon': '🥕', 'name': 'Carrot', 'duration': 'biennial'},
{'icon': '🍆', 'name': 'Eggplant', 'duration': 'perennial'},
{'icon': '🍅', 'name': 'Tomato', 'duration': 'annual'},
{'icon': '🥔', 'name': 'Potato', 'duration': 'perennial'},
])
| 'Partition' >> beam.Partition(by_duration, len(durations))
)
annuals | 'Annuals' >> beam.Map(lambda x: print('annual: {}'.format(x)))
biennials | 'Biennials' >> beam.Map(
lambda x: print('biennial: {}'.format(x)))
perennials | 'Perennials' >> beam.Map(
lambda x: print('perennial: {}'.format(x)))
# [END partition_function]
# pylint: enable=line-too-long, expression-not-assigned
if test:
test(annuals, biennials, perennials)
def partition_lambda(test=None):
# pylint: disable=line-too-long, expression-not-assigned
# [START partition_lambda]
import apache_beam as beam
durations = ['annual', 'biennial', 'perennial']
with beam.Pipeline() as pipeline:
annuals, biennials, perennials = (
pipeline
| 'Gardening plants' >> beam.Create([
{'icon': '🍓', 'name': 'Strawberry', 'duration': 'perennial'},
{'icon': '🥕', 'name': 'Carrot', 'duration': 'biennial'},
{'icon': '🍆', 'name': 'Eggplant', 'duration': 'perennial'},
{'icon': '🍅', 'name': 'Tomato', 'duration': 'annual'},
{'icon': '🥔', 'name': 'Potato', 'duration': 'perennial'},
])
| 'Partition' >> beam.Partition(
lambda plant, num_partitions: durations.index(plant['duration']),
len(durations),
)
)
annuals | 'Annuals' >> beam.Map(lambda x: print('annual: {}'.format(x)))
biennials | 'Biennials' >> beam.Map(
lambda x: print('biennial: {}'.format(x)))
perennials | 'Perennials' >> beam.Map(
lambda x: print('perennial: {}'.format(x)))
# [END partition_lambda]
# pylint: enable=line-too-long, expression-not-assigned
if test:
test(annuals, biennials, perennials)
def partition_multiple_arguments(test=None):
# pylint: disable=expression-not-assigned
# [START partition_multiple_arguments]
import apache_beam as beam
import json
def split_dataset(plant, num_partitions, ratio):
assert num_partitions == len(ratio)
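    # Hash the JSON-serialized element into one of sum(ratio) buckets, then
    # map the bucket to a partition so that partition i receives roughly
    # ratio[i] / sum(ratio) of the elements (deterministic per element).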
bucket = sum(map(ord, json.dumps(plant))) % sum(ratio)
total = 0
for i, part in enumerate(ratio):
total += part
if bucket < total:
return i
return len(ratio) - 1
with beam.Pipeline() as pipeline:
train_dataset, test_dataset = (
pipeline
| 'Gardening plants' >> beam.Create([
{'icon': '🍓', 'name': 'Strawberry', 'duration': 'perennial'},
{'icon': '🥕', 'name': 'Carrot', 'duration': 'biennial'},
{'icon': '🍆', 'name': 'Eggplant', 'duration': 'perennial'},
{'icon': '🍅', 'name': 'Tomato', 'duration': 'annual'},
{'icon': '🥔', 'name': 'Potato', 'duration': 'perennial'},
])
| 'Partition' >> beam.Partition(split_dataset, 2, ratio=[8, 2])
)
train_dataset | 'Train' >> beam.Map(lambda x: print('train: {}'.format(x)))
test_dataset | 'Test' >> beam.Map(lambda x: print('test: {}'.format(x)))
# [END partition_multiple_arguments]
# pylint: enable=expression-not-assigned
if test:
test(train_dataset, test_dataset)
| apache-2.0 |
tensorflow/tensorflow | tensorflow/python/kernel_tests/linalg/sparse/csr_sparse_matrix_test.py | 14 | 10085 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""CSR sparse matrix tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg.sparse import sparse_csr_matrix_ops
from tensorflow.python.platform import test
class CSRSparseMatrixTest(test.TestCase):
@classmethod
def setUpClass(cls): # pylint: disable=g-missing-super-call
cls._gpu_available = test_util.is_gpu_available()
@test_util.run_in_graph_and_eager_modes
def testConstructorFromSparseTensor(self):
if not self._gpu_available:
return
a_indices = np.array([[0, 0], [2, 3], [2, 4], [3, 0]])
a_values = [1.0, 5.0, -1.0, -2.0]
a_dense_shape = [5, 6]
a_st = sparse_tensor.SparseTensor(a_indices, a_values, a_dense_shape)
a_st = math_ops.cast(a_st, dtypes.float32)
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_st)
self.assertEqual(a_sm.shape, a_dense_shape)
a_st_rt = a_sm.to_sparse_tensor()
a_st_rt = self.evaluate(a_st_rt)
self.assertAllEqual(a_indices, a_st_rt.indices)
self.assertAllClose(a_values, a_st_rt.values)
self.assertAllEqual(a_dense_shape, a_st_rt.dense_shape)
@test_util.run_in_graph_and_eager_modes
def testConstructorFromDenseTensorNoIndices(self):
if not self._gpu_available:
return
sparsify = lambda m: m * (m > 0)
dense_shape = [5, 7, 13]
a_mats = sparsify(np.random.randn(*dense_shape)).astype(np.float32)
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_mats)
self.assertEqual(a_sm.shape, a_mats.shape)
a_sm_rt = a_sm.to_dense()
a_sm_nnz = a_sm.nnz()
a_sm_nnz, a_sm_rt = self.evaluate([a_sm_nnz, a_sm_rt])
# Count number of nonzero entries for each batch using bincount.
nz = np.bincount(a_mats.nonzero()[0], minlength=a_mats.shape[0])
self.assertAllEqual(nz, a_sm_nnz)
self.assertAllClose(a_mats, a_sm_rt)
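# Plain-NumPy sketch of the per-batch nonzero count checked above (assumption:
# illustrative values, independent of TensorFlow). nonzero()[0] holds the
# batch index of every nonzero entry, so bincount yields one count per batch:
#
#     >>> m = np.array([[[1., 0.], [0., 2.]], [[0., 0.], [3., 0.]]])
#     >>> np.bincount(m.nonzero()[0], minlength=m.shape[0])
#     array([2, 1])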
@test_util.run_in_graph_and_eager_modes
def testConstructorFromDenseTensorWithIndices(self):
if not self._gpu_available:
return
dense_shape = [5, 7, 13]
a_mats = np.random.randn(*dense_shape).astype(np.float32)
indices = np.array([[0, 0, 0],
[1, 0, 0]], dtype=np.int64)
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_mats, indices=indices)
self.assertEqual(a_sm.shape, a_mats.shape)
a_sm_st = a_sm.to_sparse_tensor()
a_sm_st = self.evaluate(a_sm_st)
# Count number of nonzero entries for each batch using bincount.
self.assertAllEqual(indices, a_sm_st.indices)
self.assertAllEqual(dense_shape, a_sm.shape)
self.assertAllEqual(dense_shape, a_sm_st.dense_shape)
self.assertAllClose([a_mats[tuple(x)] for x in indices], a_sm_st.values)
@test_util.run_in_graph_and_eager_modes
def testConj(self):
if not self._gpu_available:
return
sparsify = lambda m: m * (m.real > 0)
dense_shape = [5, 7, 13]
a_mats = sparsify(
(np.random.randn(*dense_shape) + 1.j * np.random.randn(*dense_shape))
.astype(np.complex64))
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_mats)
a_sm_conj = a_sm.conj()
self.assertIsInstance(a_sm_conj, sparse_csr_matrix_ops.CSRSparseMatrix)
a_sm_conj_dense = a_sm_conj.to_dense()
a_sm_conj_dense = self.evaluate(a_sm_conj_dense)
self.assertAllClose(a_mats.conj(), a_sm_conj_dense)
@test_util.run_in_graph_and_eager_modes
def testTranspose(self):
if not self._gpu_available:
return
for conjugate in False, True:
sparsify = lambda m: m * (m > 0)
dense_shape = [5, 7, 13]
a_mats = sparsify((np.random.randn(*dense_shape) +
1.j * np.random.randn(*dense_shape))).astype(
np.complex64)
expected = np.transpose(a_mats, (0, 2, 1))
if conjugate:
expected = np.conj(expected)
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_mats)
if conjugate:
a_sm_t = a_sm.hermitian_transpose()
else:
a_sm_t = a_sm.transpose()
self.assertIsInstance(a_sm_t, sparse_csr_matrix_ops.CSRSparseMatrix)
a_sm_t_dense = a_sm_t.to_dense()
a_sm_t_dense = self.evaluate(a_sm_t_dense)
self.assertAllClose(expected, a_sm_t_dense)
class SparseMatrixMatmulTest(test.TestCase):
@classmethod
def setUpClass(cls): # pylint: disable=g-missing-super-call
cls._gpu_available = test_util.is_gpu_available()
def _testSparseSparse(self, transpose_a, transpose_b, adjoint_a, adjoint_b):
if not self._gpu_available:
return
sparsify = lambda m: m * (m > 0)
dense_shape_a = [5, 13, 7] if transpose_a or adjoint_a else [5, 7, 13]
dense_shape_b = [5, 15, 13] if transpose_b or adjoint_b else [5, 13, 15]
dtypes_to_test = [np.float32, np.complex64]
for dtype in dtypes_to_test:
a_mats = sparsify((np.random.randn(*dense_shape_a) +
1.j * np.random.randn(*dense_shape_a))).astype(dtype)
b_mats = sparsify((np.random.randn(*dense_shape_b) +
1.j * np.random.randn(*dense_shape_b))).astype(dtype)
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_mats)
b_sm = sparse_csr_matrix_ops.CSRSparseMatrix(b_mats)
c_dense = test_util.matmul_without_tf32(
a_mats,
b_mats,
transpose_a=transpose_a,
transpose_b=transpose_b,
adjoint_a=adjoint_a,
adjoint_b=adjoint_b)
c_sm = sparse_csr_matrix_ops.matmul(
a_sm,
b_sm,
transpose_a=transpose_a,
transpose_b=transpose_b,
adjoint_a=adjoint_a,
adjoint_b=adjoint_b)
self.assertIsInstance(c_sm, sparse_csr_matrix_ops.CSRSparseMatrix)
c_sm_dense = c_sm.to_dense()
c_dense, c_sm_dense = self.evaluate([c_dense, c_sm_dense])
self.assertAllClose(c_dense, c_sm_dense)
@test_util.run_in_graph_and_eager_modes
def testSparseSparse(self):
for (t_a, t_b, adj_a, adj_b) in itertools.product(*(([False, True],) * 4)):
if (t_a and adj_a) or (t_b and adj_b):
continue
self._testSparseSparse(t_a, t_b, adj_a, adj_b)
def _testSparseDense(self, transpose_a, transpose_b, adjoint_a, adjoint_b):
if not self._gpu_available:
return
sparsify = lambda m: m * (m > 0)
dense_shape_a = [5, 13, 7] if transpose_a or adjoint_a else [5, 7, 13]
dense_shape_b = [5, 15, 13] if transpose_b or adjoint_b else [5, 13, 15]
dtypes_to_test = [np.float32, np.complex64]
for dtype in dtypes_to_test:
a_mats = sparsify((np.random.randn(*dense_shape_a) +
1.j * np.random.randn(*dense_shape_a))).astype(dtype)
b_mats = (np.random.randn(*dense_shape_b) +
1.j * np.random.randn(*dense_shape_b)).astype(dtype)
a_sm = sparse_csr_matrix_ops.CSRSparseMatrix(a_mats)
c_dense = test_util.matmul_without_tf32(
a_mats,
b_mats,
transpose_a=transpose_a,
transpose_b=transpose_b,
adjoint_a=adjoint_a,
adjoint_b=adjoint_b)
c_sm_dense = sparse_csr_matrix_ops.matmul(
a_sm,
b_mats,
transpose_a=transpose_a,
transpose_b=transpose_b,
adjoint_a=adjoint_a,
adjoint_b=adjoint_b)
c_dense, c_sm_dense = self.evaluate([c_dense, c_sm_dense])
self.assertAllClose(c_dense, c_sm_dense)
@test_util.run_in_graph_and_eager_modes
def testSparseDense(self):
for (t_a, t_b, adj_a, adj_b) in itertools.product(*(([False, True],) * 4)):
if (t_a and adj_a) or (t_b and adj_b):
continue
self._testSparseDense(t_a, t_b, adj_a, adj_b)
def _testDenseSparse(self, transpose_a, transpose_b, adjoint_a, adjoint_b):
if not self._gpu_available:
return
sparsify = lambda m: m * (m > 0)
dense_shape_a = [5, 13, 7] if transpose_a or adjoint_a else [5, 7, 13]
dense_shape_b = [5, 15, 13] if transpose_b or adjoint_b else [5, 13, 15]
dtypes_to_test = [np.float32, np.complex64]
for dtype in dtypes_to_test:
a_mats = (np.random.randn(*dense_shape_a) +
1.j * np.random.randn(*dense_shape_a)).astype(dtype)
b_mats = sparsify((np.random.randn(*dense_shape_b) +
1.j * np.random.randn(*dense_shape_b))).astype(dtype)
b_sm = sparse_csr_matrix_ops.CSRSparseMatrix(b_mats)
c_dense = test_util.matmul_without_tf32(
a_mats,
b_mats,
transpose_a=transpose_a,
transpose_b=transpose_b,
adjoint_a=adjoint_a,
adjoint_b=adjoint_b)
c_sm_dense = sparse_csr_matrix_ops.matmul(
a_mats,
b_sm,
transpose_a=transpose_a,
transpose_b=transpose_b,
adjoint_a=adjoint_a,
adjoint_b=adjoint_b)
c_dense, c_sm_dense = self.evaluate([c_dense, c_sm_dense])
self.assertAllClose(c_dense, c_sm_dense)
@test_util.run_in_graph_and_eager_modes
def testDenseSparse(self):
for (t_a, t_b, adj_a, adj_b) in itertools.product(*(([False, True],) * 4)):
if (t_a and adj_a) or (t_b and adj_b):
continue
self._testDenseSparse(t_a, t_b, adj_a, adj_b)
if __name__ == "__main__":
test.main()
| apache-2.0 |
xaviercobain88/framework-python | openerp/addons/base_report_designer/openerp_sxw2rml/openerp_sxw2rml.py | 112 | 14182 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c):
#
# 2005 pyopenoffice.py Martin Simon (http://www.bezirksreiter.de)
# 2005 Fabien Pinckaers, TINY SPRL. (http://tiny.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#!/usr/bin/python
"""
OpenERP SXW2RML - OpenERP's report engine
OpenERP SXW2RML is part of the OpenERP Report Project.
OpenERP Report is a module that allows you to render high-quality PDF documents
from an OpenOffice template (.sxw) and any relational database.
"""
__version__ = '0.9'
import re
import string
import os
import zipfile
import xml.dom.minidom
from reportlab.lib.units import toLength
import base64
import copy
class DomApiGeneral:
"""General DOM API utilities."""
def __init__(self, content_string="", file=""):
self.content_string = content_string
self.re_digits = re.compile(r"(.*?\d)(pt|cm|mm|inch|in)")
def _unitTuple(self, string):
"""Split values and units to a tuple."""
temp = self.re_digits.findall(string)
if not temp:
return (string,"")
else:
return (temp[0])
def stringPercentToFloat(self, string):
temp = string.replace("""%""","")
return float(temp)/100
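# e.g. stringPercentToFloat("115%") -> 1.15; used below when resolving style
# attributes that express a size as a percentage of an inherited value.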
def findChildrenByName(self, parent, name, attr_dict=None):
"""Helper functions. Does not work recursively.
Optional: also test for certain attribute/value pairs."""
if attr_dict is None:
attr_dict = {}
children = []
for c in parent.childNodes:
if c.nodeType == c.ELEMENT_NODE and c.nodeName == name:
children.append(c)
if attr_dict == {}:
return children
else:
return self._selectForAttributes(nodelist=children,attr_dict=attr_dict)
def _selectForAttributes(self, nodelist, attr_dict):
"Helper function."""
selected_nodes = []
for n in nodelist:
check = 1
for a in attr_dict.keys():
if n.getAttribute(a) != attr_dict[a]:
# at least one incorrect attribute value?
check = 0
if check:
selected_nodes.append(n)
return selected_nodes
def _stringToTuple(self, s):
"""Helper function."""
try:
temp = string.split(s,",")
return int(temp[0]),int(temp[1])
except:
return None
def _tupleToString(self, t):
try:
return self.openOfficeStringUtf8("%s,%s" % (t[0],t[1]))
except:
return None
def _lengthToFloat(self, value):
v = value
if not self.re_digits.search(v):
return v
try:
if v[-4:] == "inch":
# OO files use "inch" instead of "in" in Reportlab units
v = v[:-2]
except:
pass
try:
c = round(toLength(v))
return c
except:
return v
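# e.g. _lengthToFloat("2.54cm") -> 72.0 (reportlab points) via toLength;
# values the regex does not recognize are returned unchanged.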
def openOfficeStringUtf8(self, string):
if type(string) == unicode:
return string.encode("utf-8")
tempstring = unicode(string,"cp1252").encode("utf-8")
return tempstring
class DomApi(DomApiGeneral):
"""This class provides a DOM-API for XML-Files from an SXW-Archive."""
def __init__(self, xml_content, xml_styles):
DomApiGeneral.__init__(self)
self.content_dom = xml.dom.minidom.parseString(xml_content)
self.styles_dom = xml.dom.minidom.parseString(xml_styles)
body = self.content_dom.getElementsByTagName("office:body")
self.body = body and body[0]
# TODO:
self.style_dict = {}
self.style_properties_dict = {}
# ******** always use the following order:
self.buildStyleDict()
self.buildStylePropertiesDict()
if len(self.styles_dom.getElementsByTagName("style:page-master")) != 0:
self.page_master = self.styles_dom.getElementsByTagName("style:page-master")[0]
if len(self.styles_dom.getElementsByTagName("style:page-layout")) != 0:
self.page_master = self.styles_dom.getElementsByTagName("style:page-layout")[0]
self.document = self.content_dom.getElementsByTagName("office:document-content")[0]
def buildStylePropertiesDict(self):
for s in self.style_dict.keys():
self.style_properties_dict[s] = self.getStylePropertiesDict(s)
def updateWithPercents(self, dict, updatedict):
"""Sometimes you find values like "115%" in the style hierarchy."""
if not updatedict:
# no style hierarchies for this style? =>
return
new_updatedict = copy.copy(updatedict)
for u in new_updatedict.keys():
try:
if new_updatedict[u].find("""%""") != -1 and dict.has_key(u):
number = float(self.re_digits.search(dict[u]).group(1))
unit = self.re_digits.search(dict[u]).group(2)
new_number = self.stringPercentToFloat(new_updatedict[u]) * number
if unit == "pt":
new_number = int(new_number)
# no floats allowed for "pt"
# OOo just takes the int, does not round (try it out!)
new_updatedict[u] = "%s%s" % (new_number,unit)
else:
dict[u] = new_updatedict[u]
except:
dict[u] = new_updatedict[u]
dict.update(new_updatedict)
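# Worked example (assumption: illustrative values): if dict holds
# {"fo:font-size": "10pt"} and updatedict holds {"fo:font-size": "115%"},
# the percentage is resolved against the inherited value: 1.15 * 10 -> 11
# (truncated to int because the unit is "pt"), so the merged dict ends up
# with {"fo:font-size": "11pt"}.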
def normalizeStyleProperties(self):
"""Transfer all style:style-properties attributes from the
self.style_properties_hierarchical dict to the automatic-styles
from content.xml. Use this function to preprocess content.xml for
XSLT transformations etc. Do not try to implement this function
with XSLT - believe me, it's a terrible task..."""
styles_styles = self.styles_dom.getElementsByTagName("style:style")
automatic_styles = self.content_dom.getElementsByTagName("office:automatic-styles")[0]
for s in styles_styles:
automatic_styles.appendChild(s.cloneNode(deep=1))
content_styles = self.content_dom.getElementsByTagName("style:style")
# these are the content_styles with styles_styles added!!!
for s in content_styles:
c = self.findChildrenByName(s,"style:properties")
if c == []:
# some derived automatic styles do not have "style:properties":
temp = self.content_dom.createElement("style:properties")
s.appendChild(temp)
c = self.findChildrenByName(s,"style:properties")
c = c[0]
dict = self.style_properties_dict[(s.getAttribute("style:name")).encode("utf-8")] or {}
for attribute in dict.keys():
c.setAttribute(self.openOfficeStringUtf8(attribute),self.openOfficeStringUtf8(dict[attribute]))
def transferStylesXml(self):
"""Transfer certain sub-trees from styles.xml to the normalized content.xml
(see above). It is not necessary to do this - for example - with paragraph styles.
the "normalized" style properties contain all information needed for
further processing."""
# TODO: What about table styles etc.?
outline_styles = self.styles_dom.getElementsByTagName("text:outline-style")
t = self.content_dom.createElement("transferredfromstylesxml")
self.document.insertBefore(t,self.body)
t_new = self.body.previousSibling
try:
page_master = self.page_master
t_new.appendChild(page_master.cloneNode(deep=1))
t_new.appendChild(outline_styles[0].cloneNode(deep=1))
except:
pass
def normalizeLength(self):
"""Normalize all lengthes to floats (i.e: 1 inch = 72).
Always use this after "normalizeContent" and "transferStyles"!"""
# TODO: The complex attributes of table cell styles are not transferred yet.
#all_styles = self.content_dom.getElementsByTagName("style:properties")
#all_styles += self.content_dom.getElementsByTagName("draw:image")
all_styles = self.content_dom.getElementsByTagName("*")
for s in all_styles:
for x in s._attrs.keys():
v = s.getAttribute(x)
s.setAttribute(x,"%s" % self._lengthToFloat(v))
# convert float to string first!
def normalizeTableColumns(self):
"""Handle this strange table:number-columns-repeated attribute."""
columns = self.content_dom.getElementsByTagName("table:table-column")
for c in columns:
if c.hasAttribute("table:number-columns-repeated"):
number = int(c.getAttribute("table:number-columns-repeated"))
c.removeAttribute("table:number-columns-repeated")
for i in range(number-1):
(c.parentNode).insertBefore(c.cloneNode(deep=1),c)
def buildStyleDict(self):
"""Store all style:style-nodes from content.xml and styles.xml in self.style_dict.
Caution: in this dict the nodes from two dom apis are merged!"""
for st in (self.styles_dom,self.content_dom):
for s in st.getElementsByTagName("style:style"):
name = s.getAttribute("style:name").encode("utf-8")
self.style_dict[name] = s
return True
def toxml(self):
return self.content_dom.toxml(encoding="utf-8")
def getStylePropertiesDict(self, style_name):
res = {}
if self.style_dict[style_name].hasAttribute("style:parent-style-name"):
parent = self.style_dict[style_name].getAttribute("style:parent-style-name").encode("utf-8")
res = self.getStylePropertiesDict(parent)
children = self.style_dict[style_name].childNodes
for c in children:
if c.nodeType == c.ELEMENT_NODE and c.nodeName.find("properties")>0 :
for attr in c._attrs.keys():
res[attr] = c.getAttribute(attr).encode("utf-8")
return res
class PyOpenOffice(object):
"""This is the main class which provides all functionality."""
def __init__(self, path='.', save_pict=False):
self.path = path
self.save_pict = save_pict
self.images = {}
def oo_read(self, fname):
z = zipfile.ZipFile(fname,"r")
content = z.read('content.xml')
style = z.read('styles.xml')
all = z.namelist()
for a in all:
if a[:9]=='Pictures/' and len(a)>10:
pic_content = z.read(a)
self.images[a[9:]] = pic_content
if self.save_pict:
f=open(os.path.join(self.path, os.path.basename(a)),"wb")
f.write(pic_content)
f.close()
z.close()
return content,style
def oo_replace(self, content):
regex = [
(r"<para[^>]*/>", ""),
(r"<para(.*)>(.*?)<text:line-break[^>]*/>", "<para$1>$2</para><para$1>"),
]
for key,val in regex:
content = re.sub(key, val, content)
return content
def unpackNormalize(self, sourcefile):
c,s = self.oo_read(sourcefile)
c = self.oo_replace(c)
dom = DomApi(c,s)
dom.normalizeStyleProperties()
dom.transferStylesXml()
dom.normalizeLength()
dom.normalizeTableColumns()
new_c = dom.toxml()
return new_c
def sxw2rml(sxw_file, xsl, output='.', save_pict=False):
from lxml import etree
from StringIO import StringIO
tool = PyOpenOffice(output, save_pict = save_pict)
res = tool.unpackNormalize(sxw_file)
f = StringIO(xsl)
styledoc = etree.parse(f)
style = etree.XSLT(styledoc)
f = StringIO(res)
doc = etree.parse(f)
result = style(doc)
root = etree.XPathEvaluator(result)("/document/stylesheet")
if root:
root=root[0]
images = etree.Element("images")
for img in tool.images:
node = etree.Element('image', name=img)
node.text = base64.encodestring(tool.images[img])
images.append(node)
root.append(images)
try:
xml = str(result)
return xml
except:
return result
if __name__ == "__main__":
import optparse
parser = optparse.OptionParser(
version="OpenERP Report v%s" % __version__,
usage = 'openerp_sxw2rml.py [options] file.sxw')
parser.add_option("-v", "--verbose", default=False, dest="verbose", help="enable basic debugging")
parser.add_option("-o", "--output", dest="output", default='.', help="directory of image output")
(opt, args) = parser.parse_args()
if len(args) != 1:
parser.error("Incorrect number of arguments.")
import sys
fname = args[0]
f = fname
xsl_file = 'normalized_oo2rml.xsl'
z = zipfile.ZipFile(fname,"r")
mimetype = z.read('mimetype')
if mimetype.split('/')[-1] == 'vnd.oasis.opendocument.text' :
xsl_file = 'normalized_odt2rml.xsl'
xsl = file(os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]), xsl_file)).read()
result = sxw2rml(f, xsl, output=opt.output, save_pict=False)
print result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Schibum/naclports | build_tools/check_sums.py | 1 | 1075 | #!/usr/bin/env python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tool which checks the sha1 sums of all packages.
"""
from __future__ import print_function
import argparse
import os
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(os.path.dirname(SCRIPT_DIR), 'lib'))
import naclports
import naclports.source_package
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-v', '--verbose', action='store_true',
help='Output extra information.')
options = parser.parse_args(args)
if options.verbose:
naclports.SetVerbose(True)
count = 0
for package in naclports.source_package.SourcePackageIterator():
package.Download()
if not package.Verify():
return 1
count += 1
naclports.Log("Verfied checksums for %d packages" % count)
return 0
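# Typical invocation (assumption: run from a naclports checkout); downloads
# each source package and verifies its recorded sha1, exiting non-zero on the
# first mismatch:
#
#     build_tools/check_sums.py --verbose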
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
haeusser/tensorflow | tensorflow/contrib/tensor_forest/hybrid/python/models/k_feature_decisions_to_data_then_nn.py | 189 | 1874 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A model that places a soft decision tree embedding before a neural net."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.hybrid.python import hybrid_model
from tensorflow.contrib.tensor_forest.hybrid.python.layers import decisions_to_data
from tensorflow.contrib.tensor_forest.hybrid.python.layers import fully_connected
from tensorflow.python.training import adagrad
class KFeatureDecisionsToDataThenNN(hybrid_model.HybridModel):
"""A model that places a soft decision tree embedding before a neural net."""
def __init__(self,
params,
device_assigner=None,
optimizer_class=adagrad.AdagradOptimizer,
**kwargs):
super(KFeatureDecisionsToDataThenNN, self).__init__(
params,
device_assigner=device_assigner,
optimizer_class=optimizer_class,
**kwargs)
self.layers = [decisions_to_data.KFeatureDecisionsToDataLayer(
params, 0, device_assigner),
fully_connected.FullyConnectedLayer(
params, 1, device_assigner=device_assigner)]
| apache-2.0 |
Bysmyyr/chromium-crosswalk | tools/telemetry/third_party/gsutilz/third_party/boto/boto/ec2/cloudwatch/dimension.py | 152 | 1533 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
class Dimension(dict):
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Name':
self._name = value
elif name == 'Value':
if self._name in self:
self[self._name].append(value)
else:
self[self._name] = [value]
else:
setattr(self, name, value)
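# Usage sketch (assumption: events fed in the order boto's SAX handler would
# deliver them). Repeated Name/Value pairs accumulate into value lists:
#
#     d = Dimension()
#     d.endElement('Name', 'InstanceId', None)
#     d.endElement('Value', 'i-12345', None)
#     d.endElement('Name', 'InstanceId', None)
#     d.endElement('Value', 'i-67890', None)
#     # d == {'InstanceId': ['i-12345', 'i-67890']}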
| bsd-3-clause |
SarahBA/b2share | b2share/modules/communities/signals.py | 3 | 2085 | # -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2016 University of Tuebingen, CERN.
# Copyright (C) 2015 University of Tuebingen.
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Community module signals."""
from blinker import Namespace
_signals = Namespace()
before_community_insert = _signals.signal('before-community-insert')
"""Signal is sent before a community is inserted.
Example subscriber
.. code-block:: python
def listener(sender, *args, **kwargs):
sender['key'] = sum(args)
from b2share.modules.communities.signals import before_community_insert
before_community_insert.connect(listener)
"""
after_community_insert = _signals.signal('after-community-insert')
"""Signal sent after a community is inserted."""
before_community_update = _signals.signal('before-community-update')
"""Signal is sent before a community is update."""
after_community_update = _signals.signal('after-community-update')
"""Signal sent after a community is updated."""
before_community_delete = _signals.signal('before-community-delete')
"""Signal is sent before a community is delete."""
after_community_delete = _signals.signal('after-community-delete')
"""Signal sent after a community is delete."""
| gpl-2.0 |
Srisai85/scikit-learn | examples/covariance/plot_outlier_detection.py | 235 | 3891 | """
==========================================
Outlier detection with several methods.
==========================================
When the amount of contamination is known, this example illustrates two
different ways of performing :ref:`outlier_detection`:
- based on a robust estimator of covariance, which is assuming that the
data are Gaussian distributed and performs better than the One-Class SVM
in that case.
- using the One-Class SVM and its ability to capture the shape of the
data set, hence performing better when the data is strongly
non-Gaussian, i.e. with two well-separated clusters;
The ground truth about inliers and outliers is given by the points' colors
while the orange-filled area indicates which points are reported as inliers
by each method.
Here, we assume that we know the fraction of outliers in the datasets.
Thus rather than using the 'predict' method of the objects, we set the
threshold on the decision_function to separate out the corresponding
fraction.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.font_manager
from scipy import stats
from sklearn import svm
from sklearn.covariance import EllipticEnvelope
# Example settings
n_samples = 200
outliers_fraction = 0.25
clusters_separation = [0, 1, 2]
# define two outlier detection tools to be compared
classifiers = {
"One-Class SVM": svm.OneClassSVM(nu=0.95 * outliers_fraction + 0.05,
kernel="rbf", gamma=0.1),
"robust covariance estimator": EllipticEnvelope(contamination=.1)}
# Compare given classifiers under given settings
xx, yy = np.meshgrid(np.linspace(-7, 7, 500), np.linspace(-7, 7, 500))
n_inliers = int((1. - outliers_fraction) * n_samples)
n_outliers = int(outliers_fraction * n_samples)
ground_truth = np.ones(n_samples, dtype=int)
ground_truth[-n_outliers:] = 0
# Fit the problem with varying cluster separation
for i, offset in enumerate(clusters_separation):
np.random.seed(42)
# Data generation
X1 = 0.3 * np.random.randn(n_inliers // 2, 2) - offset
X2 = 0.3 * np.random.randn(n_inliers // 2, 2) + offset
X = np.r_[X1, X2]
# Add outliers
X = np.r_[X, np.random.uniform(low=-6, high=6, size=(n_outliers, 2))]
# Fit the model with the One-Class SVM
plt.figure(figsize=(10, 5))
for i, (clf_name, clf) in enumerate(classifiers.items()):
# fit the data and tag outliers
clf.fit(X)
y_pred = clf.decision_function(X).ravel()
threshold = stats.scoreatpercentile(y_pred,
100 * outliers_fraction)
y_pred = y_pred > threshold
n_errors = (y_pred != ground_truth).sum()
# plot the levels lines and the points
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
subplot = plt.subplot(1, 2, i + 1)
subplot.set_title("Outlier detection")
subplot.contourf(xx, yy, Z, levels=np.linspace(Z.min(), threshold, 7),
cmap=plt.cm.Blues_r)
a = subplot.contour(xx, yy, Z, levels=[threshold],
linewidths=2, colors='red')
subplot.contourf(xx, yy, Z, levels=[threshold, Z.max()],
colors='orange')
b = subplot.scatter(X[:-n_outliers, 0], X[:-n_outliers, 1], c='white')
c = subplot.scatter(X[-n_outliers:, 0], X[-n_outliers:, 1], c='black')
subplot.axis('tight')
subplot.legend(
[a.collections[0], b, c],
['learned decision function', 'true inliers', 'true outliers'],
prop=matplotlib.font_manager.FontProperties(size=11))
subplot.set_xlabel("%d. %s (errors: %d)" % (i + 1, clf_name, n_errors))
subplot.set_xlim((-7, 7))
subplot.set_ylim((-7, 7))
plt.subplots_adjust(0.04, 0.1, 0.96, 0.94, 0.1, 0.26)
plt.show()
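# The decision_function/threshold trick above, in isolation (assumption: toy
# scores). Given a known contamination fraction, scoreatpercentile converts
# raw scores into a binary inlier mask without calling predict():
#
#     scores = np.array([-2.0, -0.5, 0.1, 0.4, 1.3, 2.2, 2.9, 3.5])
#     threshold = stats.scoreatpercentile(scores, 100 * 0.25)
#     inlier_mask = scores > threshold  # flags the top 75% as inliers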
| bsd-3-clause |
openstack/python-neutronclient | neutronclient/tests/unit/test_validators.py | 13 | 4101 | # Copyright 2014 NEC Corporation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from neutronclient.common import exceptions
from neutronclient.common import validators
class FakeParsedArgs(object):
pass
class ValidatorTest(testtools.TestCase):
def _test_validate_int(self, attr_val, attr_name='attr1',
min_value=1, max_value=10):
obj = FakeParsedArgs()
setattr(obj, attr_name, attr_val)
ret = validators.validate_int_range(obj, attr_name,
min_value, max_value)
# Come here only if there is no exception.
self.assertIsNone(ret)
def _test_validate_int_error(self, attr_val, expected_msg,
attr_name='attr1', expected_exc=None,
min_value=1, max_value=10):
if expected_exc is None:
expected_exc = exceptions.CommandError
e = self.assertRaises(expected_exc,
self._test_validate_int,
attr_val, attr_name, min_value, max_value)
self.assertEqual(expected_msg, str(e))
def test_validate_int_min_max(self):
self._test_validate_int(1)
self._test_validate_int(10)
self._test_validate_int('1')
self._test_validate_int('10')
self._test_validate_int('0x0a')
self._test_validate_int_error(
0, 'attr1 "0" should be an integer [1:10].')
self._test_validate_int_error(
11, 'attr1 "11" should be an integer [1:10].')
self._test_validate_int_error(
'0x10', 'attr1 "0x10" should be an integer [1:10].')
def test_validate_int_min_only(self):
self._test_validate_int(1, max_value=None)
self._test_validate_int(10, max_value=None)
self._test_validate_int(11, max_value=None)
self._test_validate_int_error(
0, 'attr1 "0" should be an integer greater than or equal to 1.',
max_value=None)
def test_validate_int_max_only(self):
self._test_validate_int(0, min_value=None)
self._test_validate_int(1, min_value=None)
self._test_validate_int(10, min_value=None)
self._test_validate_int_error(
11, 'attr1 "11" should be an integer smaller than or equal to 10.',
min_value=None)
def test_validate_int_no_limit(self):
self._test_validate_int(0, min_value=None, max_value=None)
self._test_validate_int(1, min_value=None, max_value=None)
self._test_validate_int(10, min_value=None, max_value=None)
self._test_validate_int(11, min_value=None, max_value=None)
self._test_validate_int_error(
'abc', 'attr1 "abc" should be an integer.',
min_value=None, max_value=None)
def _test_validate_subnet(self, attr_val, attr_name='attr1'):
obj = FakeParsedArgs()
setattr(obj, attr_name, attr_val)
ret = validators.validate_ip_subnet(obj, attr_name)
# Come here only if there is no exception.
self.assertIsNone(ret)
def test_validate_ip_subnet(self):
self._test_validate_subnet('192.168.2.0/24')
self._test_validate_subnet('192.168.2.3/20')
self._test_validate_subnet('192.168.2.1')
e = self.assertRaises(exceptions.CommandError,
self._test_validate_subnet,
'192.168.2.256')
self.assertEqual('attr1 "192.168.2.256" is not a valid CIDR.', str(e))
| apache-2.0 |
teanet/Nazabore | Script/archive.py | 1 | 4184 | #!/usr/bin/env python
import os
import logging
import argparse
import subprocess
import hashlib
logging.basicConfig(level=logging.INFO,
format="%(asctime)s %(levelname)s %(message)s")
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.join(script_dir, "../")
build_dir = os.path.join(root_dir, "Build")
plist_path = os.path.join(root_dir, "Cashback/Info.plist")
versionNumber = subprocess.check_output(["/usr/libexec/PlistBuddy", "-c", "Print CFBundleShortVersionString", plist_path]).strip()
buildNumber = int(subprocess.check_output(["/usr/libexec/PlistBuddy", "-c", "Print CFBundleVersion", plist_path]))
archive_name = "archive_{}_{}".format(versionNumber, buildNumber + 1)
archive_path = os.path.join(build_dir, archive_name)
archive_full_path = archive_path + ".xcarchive"
team_id = "695938Y5Q8"
app_id = "1416612668"
bundle_id = "ru.doublegis.cashback"
scheme = "Cashback"
ipa_name = "Cashback.ipa"
provisioning_uuid = "80b67dac-e7ef-46f2-96bd-0b3adf576a10"
workspace = os.path.join(root_dir, "Cashback.xcworkspace")
def main():
parser = create_parser()
args = parser.parse_args()
logging.debug("Passed arguments: {}".format(args))
increment_build()
create_archive()
# upload_dsym()
create_ipa()
upload_ipa(args)
def increment_build():
new_build_number = buildNumber + 1
logging.info("Increment build to: {}".format(new_build_number))
logging.info("Archive new path: {}".format(archive_full_path))
subprocess.check_call(["/usr/libexec/PlistBuddy", "-c", "Set CFBundleVersion {}".format(new_build_number), plist_path])
def create_parser():
parser = argparse.ArgumentParser(description="Generating v4iOS module..")
parser.add_argument("--name", "-n", help="Module name", default="")
parser.add_argument("--login", "-l", help="Login", required=True)
parser.add_argument("--password", "-p", help="Password", required=True)
return parser
def create_archive():
xcodebuild = [
"xcodebuild",
"archive",
"-workspace", workspace,
"-archivePath", archive_path,
"-scheme", scheme,
]
p1 = subprocess.Popen(xcodebuild, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
p2 = subprocess.check_call(["xcpretty", "-cs"], stdin=p1.stdout)
p1.wait()
def upload_dsym():
subprocess.check_call([
os.path.join(root_dir, "Pods/Fabric/upload-symbols"),
"-gsp", os.path.join(root_dir, "Cashback/Supporting Files/GoogleService-Info.plist"),
"-p", "ios",
os.path.join(archive_full_path, "dSYMs")
])
def upload_ipa(args):
ipa_path = os.path.join(build_dir, ipa_name)
logging.info("Upload ipa: {}".format(ipa_path))
subprocess.check_call([
"/Applications/Xcode.app/Contents/Applications/Application Loader.app/Contents/Frameworks/ITunesSoftwareService.framework/Support/altool",
"--upload-app",
"-f", ipa_path,
"-t", "ios",
"-u", args.login,
"-p", args.password,
"--output-format", "normal"
])
def create_ipa():
plsit = """
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>provisioningProfiles</key>
<dict>
<key>{}</key>
<string>{}</string>
</dict>
<key>signingCertificate</key>
<string>iOS Distribution</string>
<key>signingStyle</key>
<string>manual</string>
<key>method</key>
<string>app-store</string>
<key>teamID</key>
<string>{}</string>
</dict>
</plist>
""".format(bundle_id, provisioning_uuid, team_id)
plist_path = os.path.join(root_dir, "Build/ipa.plist")
with open(plist_path, "w") as text_file:
text_file.write(plist)
subprocess.check_call([
"xcodebuild",
"-exportArchive",
"-archivePath", archive_full_path,
"-exportOptionsPlist", plist_path,
"-exportPath", os.path.join(root_dir, "Build/"),
])
if __name__ == "__main__":
main()
| mit |
doganaltunbay/odoo | setup/win32/win32_setup.py | 363 | 1310 | # -*- coding: utf-8 -*-
import os
import glob
import py2exe
from distutils.core import setup
execfile(os.path.join(os.path.dirname(__file__), '..', '..', 'openerp', 'release.py'))
def generate_files():
actions = {
'start': ['stop', 'start'],
'stop': ['stop'],
}
files = []
if os.name == 'nt':
files.append(("Microsoft.VC90.CRT", glob.glob('C:\Microsoft.VC90.CRT\*.*')))
for action, steps in actions.items():
fname = action + '.bat'
files.append(fname)
with open(fname, 'w') as fp:
fp.write('@PATH=%WINDIR%\system32;%WINDIR%;%WINDIR%\System32\Wbem;.\n')
for step in steps:
fp.write('@net %s %s\n' % (step, nt_service_name))
return files
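# For example, with nt_service_name = 'odoo-server-8.0' (assumption:
# illustrative value; the real name comes from release.py via execfile above),
# the generated start.bat contains:
#
#     @PATH=%WINDIR%\system32;%WINDIR%;%WINDIR%\System32\Wbem;.
#     @net stop odoo-server-8.0
#     @net start odoo-server-8.0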
setup(
service=["win32_service"],
version=version,
license=license,
url=url,
author=author,
author_email=author_email,
data_files=generate_files(),
options={
"py2exe": {
"excludes": [
'Tkconstants',
'Tkinter',
'tcl',
'_imagingtk',
'PIL._imagingtk',
'ImageTk',
'PIL.ImageTk',
'FixTk'
],
"skip_archive": 1,
"optimize": 2,
}
},
)
| agpl-3.0 |
Jenselme/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/treewalkers/pulldom.py | 1729 | 2302 | from __future__ import absolute_import, division, unicode_literals
from xml.dom.pulldom import START_ELEMENT, END_ELEMENT, \
COMMENT, IGNORABLE_WHITESPACE, CHARACTERS
from . import _base
from ..constants import voidElements
class TreeWalker(_base.TreeWalker):
def __iter__(self):
ignore_until = None
previous = None
for event in self.tree:
if previous is not None and \
(ignore_until is None or previous[1] is ignore_until):
if previous[1] is ignore_until:
ignore_until = None
for token in self.tokens(previous, event):
yield token
if token["type"] == "EmptyTag":
ignore_until = previous[1]
previous = event
if ignore_until is None or previous[1] is ignore_until:
for token in self.tokens(previous, None):
yield token
elif ignore_until is not None:
raise ValueError("Illformed DOM event stream: void element without END_ELEMENT")
def tokens(self, event, next):
type, node = event
if type == START_ELEMENT:
name = node.nodeName
namespace = node.namespaceURI
attrs = {}
for attr in list(node.attributes.keys()):
attr = node.getAttributeNode(attr)
attrs[(attr.namespaceURI, attr.localName)] = attr.value
if name in voidElements:
for token in self.emptyTag(namespace,
name,
attrs,
not next or next[1] is not node):
yield token
else:
yield self.startTag(namespace, name, attrs)
elif type == END_ELEMENT:
name = node.nodeName
namespace = node.namespaceURI
if name not in voidElements:
yield self.endTag(namespace, name)
elif type == COMMENT:
yield self.comment(node.nodeValue)
elif type in (IGNORABLE_WHITESPACE, CHARACTERS):
for token in self.text(node.nodeValue):
yield token
else:
yield self.unknown(type)
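# Minimal driving sketch (assumption: fed straight from xml.dom.pulldom; in
# html5lib this walker is normally obtained via treewalkers.getTreeWalker):
#
#     from xml.dom import pulldom
#     walker = TreeWalker(pulldom.parseString('<p>hi</p>'))
#     for token in walker:
#         print(token)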
| mpl-2.0 |
kwlzn/pants | src/python/pants/backend/jvm/tasks/classpath_products.py | 8 | 13071 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from twitter.common.collections import OrderedSet
from pants.backend.jvm.targets.exclude import Exclude
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.exceptions import TaskError
from pants.build_graph.build_graph import BuildGraph
from pants.goal.products import UnionProducts
class ClasspathEntry(object):
"""Represents a java classpath entry.
:API: public
"""
def __init__(self, path):
self._path = path
@property
def path(self):
"""Returns the pants internal path of this classpath entry.
Suitable for use in constructing classpaths for pants executions and pants generated artifacts.
:API: public
:rtype: string
"""
return self._path
def is_excluded_by(self, excludes):
"""Returns `True` if this classpath entry should be excluded given the `excludes` in play.
:param excludes: The excludes to check this classpath entry against.
:type excludes: list of :class:`pants.backend.jvm.targets.exclude.Exclude`
:rtype: bool
"""
return False
def __hash__(self):
return hash(self.path)
def __eq__(self, other):
return isinstance(other, ClasspathEntry) and self.path == other.path
def __ne__(self, other):
return not self == other
def __repr__(self):
return 'ClasspathEntry(path={!r})'.format(self.path)
@classmethod
def is_artifact_classpath_entry(cls, classpath_entry):
"""
:API: public
"""
return isinstance(classpath_entry, ArtifactClasspathEntry)
@classmethod
def is_internal_classpath_entry(cls, classpath_entry):
"""
:API: public
"""
return not cls.is_artifact_classpath_entry(classpath_entry)
class ArtifactClasspathEntry(ClasspathEntry):
"""Represents a resolved third party classpath entry.
:API: public
"""
def __init__(self, path, coordinate, cache_path):
super(ArtifactClasspathEntry, self).__init__(path)
self._coordinate = coordinate
self._cache_path = cache_path
@property
def coordinate(self):
"""Returns the maven coordinate that used to resolve this classpath entry's artifact.
:rtype: :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
"""
return self._coordinate
@property
def cache_path(self):
"""Returns the external cache path of this classpath entry.
For example, the `~/.m2/repository` or `~/.ivy2/cache` location of the resolved artifact for
maven and ivy resolvers respectively.
Suitable for use in constructing classpaths for external tools that should not be subject to
potential volatility in pants own internal caches.
:API: public
:rtype: string
"""
return self._cache_path
def is_excluded_by(self, excludes):
return any(_matches_exclude(self.coordinate, exclude) for exclude in excludes)
def __hash__(self):
return hash((self.path, self.coordinate, self.cache_path))
def __eq__(self, other):
return (isinstance(other, ArtifactClasspathEntry) and
self.path == other.path and
self.coordinate == other.coordinate and
self.cache_path == other.cache_path)
def __ne__(self, other):
return not self == other
def __repr__(self):
return ('ArtifactClasspathEntry(path={!r}, coordinate={!r}, cache_path={!r})'
.format(self.path, self.coordinate, self.cache_path))
def _matches_exclude(coordinate, exclude):
if not coordinate.org == exclude.org:
return False
if not exclude.name:
return True
if coordinate.name == exclude.name:
return True
return False
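# Exclude semantics in brief (assumption: illustrative coordinates; Exclude is
# imported above, M2Coordinate lives in pants.backend.jvm.jar_dependency_utils):
# an org-only exclude matches every artifact in that org, while an org+name
# exclude matches a single artifact.
#
#     _matches_exclude(M2Coordinate('org.example', 'util'),
#                      Exclude('org.example'))          # True
#     _matches_exclude(M2Coordinate('org.example', 'util'),
#                      Exclude('org.example', 'core'))  # False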
def _not_excluded_filter(excludes):
def not_excluded(product_to_target):
path_tuple = product_to_target[0]
conf, classpath_entry = path_tuple
return not classpath_entry.is_excluded_by(excludes)
return not_excluded
class ClasspathProducts(object):
"""
:API: public
"""
def __init__(self, pants_workdir, classpaths=None, excludes=None):
self._classpaths = classpaths or UnionProducts()
self._excludes = excludes or UnionProducts()
self._pants_workdir = pants_workdir
@staticmethod
def init_func(pants_workdir):
"""
:API: public
"""
return lambda: ClasspathProducts(pants_workdir)
def copy(self):
"""Returns a copy of this ClasspathProducts.
Edits to the copy's classpaths or exclude associations will not affect the classpaths or
excludes in the original. The copy is shallow though, so edits to the the copy's product values
will mutate the original's product values. See `UnionProducts.copy`.
:API: public
:rtype: :class:`ClasspathProducts`
"""
return ClasspathProducts(pants_workdir=self._pants_workdir,
classpaths=self._classpaths.copy(),
excludes=self._excludes.copy())
def add_for_targets(self, targets, classpath_elements):
"""Adds classpath path elements to the products of all the provided targets."""
for target in targets:
self.add_for_target(target, classpath_elements)
def add_for_target(self, target, classpath_elements):
"""Adds classpath path elements to the products of the provided target."""
self._add_elements_for_target(target, self._wrap_path_elements(classpath_elements))
def add_jars_for_targets(self, targets, conf, resolved_jars):
"""Adds jar classpath elements to the products of the provided targets.
The resolved jars are added in a way that works with excludes.
"""
classpath_entries = []
for jar in resolved_jars:
if not jar.pants_path:
raise TaskError('Jar: {!s} has no specified path.'.format(jar.coordinate))
cp_entry = ArtifactClasspathEntry(jar.pants_path, jar.coordinate, jar.cache_path)
classpath_entries.append((conf, cp_entry))
for target in targets:
self._add_elements_for_target(target, classpath_entries)
def add_excludes_for_targets(self, targets):
"""Add excludes from the provided targets.
Does not look up transitive excludes.
:param targets: The targets to add excludes for.
:type targets: list of :class:`pants.build_graph.target.Target`
"""
for target in targets:
self._add_excludes_for_target(target)
def remove_for_target(self, target, classpath_elements):
"""Removes the given entries for the target."""
self._classpaths.remove_for_target(target, self._wrap_path_elements(classpath_elements))
def get_for_target(self, target):
"""Gets the classpath products for the given target.
Products are returned in order, respecting target excludes.
:param target: The target to lookup classpath products for.
:returns: The ordered (conf, path) tuples, with paths being either classfile directories or
jars.
:rtype: list of (string, string)
"""
return self.get_for_targets([target])
def get_for_targets(self, targets):
"""Gets the classpath products for the given targets.
Products are returned in order, respecting target excludes.
:param targets: The targets to lookup classpath products for.
:returns: The ordered (conf, path) tuples, with paths being either classfile directories or
jars.
:rtype: list of (string, string)
"""
cp_entries = self.get_classpath_entries_for_targets(targets)
return [(conf, cp_entry.path) for conf, cp_entry in cp_entries]
def get_classpath_entries_for_targets(self, targets, respect_excludes=True):
"""Gets the classpath products for the given targets.
Products are returned in order, optionally respecting target excludes.
:param targets: The targets to lookup classpath products for.
:param bool respect_excludes: `True` to respect excludes; `False` to ignore them.
:returns: The ordered (conf, classpath entry) tuples.
:rtype: list of (string, :class:`ClasspathEntry`)
"""
# Remove duplicates while preserving ordering.
return list(OrderedSet([cp for cp, target in self.get_product_target_mappings_for_targets(
targets, respect_excludes)]))
def get_product_target_mappings_for_targets(self, targets, respect_excludes=True):
"""Gets the classpath products-target associations for the given targets.
Product-target tuples are returned in order, optionally respecting target excludes.
:param targets: The targets to lookup classpath products for.
:param bool respect_excludes: `True` to respect excludes; `False` to ignore them.
:returns: The ordered (classpath products, target) tuples.
"""
classpath_target_tuples = self._classpaths.get_product_target_mappings_for_targets(targets)
if respect_excludes:
return self._filter_by_excludes(classpath_target_tuples, targets)
else:
return classpath_target_tuples
def get_artifact_classpath_entries_for_targets(self, targets, respect_excludes=True):
"""Gets the artifact classpath products for the given targets.
Products are returned in order, optionally respecting target excludes, and the products only
include external artifact classpath elements (ie: resolved jars).
:param targets: The targets to lookup classpath products for.
:param bool respect_excludes: `True` to respect excludes; `False` to ignore them.
:returns: The ordered (conf, classpath entry) tuples.
:rtype: list of (string, :class:`ArtifactClasspathEntry`)
"""
classpath_tuples = self.get_classpath_entries_for_targets(targets,
respect_excludes=respect_excludes)
return [(conf, cp_entry) for conf, cp_entry in classpath_tuples
if ClasspathEntry.is_artifact_classpath_entry(cp_entry)]
def get_internal_classpath_entries_for_targets(self, targets, respect_excludes=True):
"""Gets the internal classpath products for the given targets.
Products are returned in order, optionally respecting target excludes, and the products only
include internal artifact classpath elements (ie: no resolved jars).
:param targets: The targets to lookup classpath products for.
:param bool respect_excludes: `True` to respect excludes; `False` to ignore them.
:returns: The ordered (conf, classpath entry) tuples.
:rtype: list of (string, :class:`ClasspathEntry`)
"""
classpath_tuples = self.get_classpath_entries_for_targets(targets,
respect_excludes=respect_excludes)
return [(conf, cp_entry) for conf, cp_entry in classpath_tuples
if ClasspathEntry.is_internal_classpath_entry(cp_entry)]
def _filter_by_excludes(self, classpath_target_tuples, root_targets):
# Excludes are always applied transitively, so regardless of whether a transitive
# set of targets was included here, their closure must be included.
closure = BuildGraph.closure(root_targets, bfs=True)
excludes = self._excludes.get_for_targets(closure)
return filter(_not_excluded_filter(excludes), classpath_target_tuples)
def _add_excludes_for_target(self, target):
if target.is_exported:
self._excludes.add_for_target(target, [Exclude(target.provides.org,
target.provides.name)])
if isinstance(target, JvmTarget) and target.excludes:
self._excludes.add_for_target(target, target.excludes)
def _wrap_path_elements(self, classpath_elements):
return [(element[0], ClasspathEntry(element[1])) for element in classpath_elements]
def _add_elements_for_target(self, target, elements):
self._validate_classpath_tuples(elements, target)
self._classpaths.add_for_target(target, elements)
def _validate_classpath_tuples(self, classpath, target):
"""Validates that all files are located within the working directory, to simplify relativization.
:param classpath: The list of classpath tuples. Each tuple is a 2-tuple of ivy_conf and
ClasspathEntry.
:param target: The target that the classpath tuple is being registered for.
:raises: `TaskError` when the path is outside the work directory
"""
for classpath_tuple in classpath:
conf, classpath_entry = classpath_tuple
path = classpath_entry.path
if os.path.relpath(path, self._pants_workdir).startswith(os.pardir):
raise TaskError(
'Classpath entry {} for target {} is located outside the working directory "{}".'
.format(path, target.address.spec, self._pants_workdir))
def __eq__(self, other):
return (isinstance(other, ClasspathProducts) and
self._pants_workdir == other._pants_workdir and
self._classpaths == other._classpaths and
self._excludes == other._excludes)
def __ne__(self, other):
return not self == other
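# Usage sketch (assumption: `workdir`, `target` and `jar_path` supplied by the
# calling task; add_for_target raises TaskError for paths outside the pants
# workdir):
#
#     products = ClasspathProducts(workdir)
#     products.add_for_target(target, [('default', jar_path)])
#     for conf, path in products.get_for_target(target):
#         print(conf, path)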
| apache-2.0 |
mtndesign/mtnvim | myvim/bundle/ropevim/ftplugin/python/libs/rope/refactor/importutils/__init__.py | 90 | 12748 | """A package for handling imports
This package provides tools for modifying module imports after
refactorings or as a separate task.
"""
import rope.base.evaluate
from rope.base.change import ChangeSet, ChangeContents
from rope.refactor import occurrences, rename
from rope.refactor.importutils import module_imports, actions
from rope.refactor.importutils.importinfo import NormalImport, FromImport
import rope.base.codeanalyze
class ImportOrganizer(object):
"""Perform some import-related commands
Each method returns a `rope.base.change.Change` object.
"""
def __init__(self, project):
self.project = project
self.pycore = project.pycore
self.import_tools = ImportTools(self.pycore)
def organize_imports(self, resource, offset=None):
return self._perform_command_on_import_tools(
self.import_tools.organize_imports, resource, offset)
def expand_star_imports(self, resource, offset=None):
return self._perform_command_on_import_tools(
self.import_tools.expand_stars, resource, offset)
def froms_to_imports(self, resource, offset=None):
return self._perform_command_on_import_tools(
self.import_tools.froms_to_imports, resource, offset)
def relatives_to_absolutes(self, resource, offset=None):
return self._perform_command_on_import_tools(
self.import_tools.relatives_to_absolutes, resource, offset)
def handle_long_imports(self, resource, offset=None):
return self._perform_command_on_import_tools(
self.import_tools.handle_long_imports, resource, offset)
def _perform_command_on_import_tools(self, method, resource, offset):
pymodule = self.pycore.resource_to_pyobject(resource)
before_performing = pymodule.source_code
import_filter = None
if offset is not None:
import_filter = self._line_filter(
pymodule.lines.get_line_number(offset))
result = method(pymodule, import_filter=import_filter)
if result is not None and result != before_performing:
changes = ChangeSet(method.__name__.replace('_', ' ') +
' in <%s>' % resource.path)
changes.add_change(ChangeContents(resource, result))
return changes
def _line_filter(self, lineno):
def import_filter(import_stmt):
return import_stmt.start_line <= lineno < import_stmt.end_line
return import_filter
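# Driving sketch (assumption: `project` is an open rope Project and `resource`
# one of its Python files):
#
#     organizer = ImportOrganizer(project)
#     change = organizer.organize_imports(resource)
#     if change is not None:  # None means the imports were already clean
#         project.do(change)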
class ImportTools(object):
def __init__(self, pycore):
self.pycore = pycore
def get_import(self, resource):
"""The import statement for `resource`"""
module_name = self.pycore.modname(resource)
return NormalImport(((module_name, None), ))
def get_from_import(self, resource, name):
"""The from import statement for `name` in `resource`"""
module_name = self.pycore.modname(resource)
names = []
if isinstance(name, list):
names = [(imported, None) for imported in name]
else:
names = [(name, None),]
return FromImport(module_name, 0, tuple(names))
def module_imports(self, module, imports_filter=None):
return module_imports.ModuleImports(self.pycore, module,
imports_filter)
def froms_to_imports(self, pymodule, import_filter=None):
pymodule = self._clean_up_imports(pymodule, import_filter)
module_imports = self.module_imports(pymodule, import_filter)
for import_stmt in module_imports.imports:
if import_stmt.readonly or \
not self._is_transformable_to_normal(import_stmt.import_info):
continue
pymodule = self._from_to_normal(pymodule, import_stmt)
# Adding normal imports in place of froms
module_imports = self.module_imports(pymodule, import_filter)
for import_stmt in module_imports.imports:
if not import_stmt.readonly and \
self._is_transformable_to_normal(import_stmt.import_info):
import_stmt.import_info = \
NormalImport(((import_stmt.import_info.module_name, None),))
module_imports.remove_duplicates()
return module_imports.get_changed_source()
def expand_stars(self, pymodule, import_filter=None):
module_imports = self.module_imports(pymodule, import_filter)
module_imports.expand_stars()
return module_imports.get_changed_source()
def _from_to_normal(self, pymodule, import_stmt):
resource = pymodule.get_resource()
from_import = import_stmt.import_info
module_name = from_import.module_name
for name, alias in from_import.names_and_aliases:
imported = name
if alias is not None:
imported = alias
occurrence_finder = occurrences.create_finder(
self.pycore, imported, pymodule[imported], imports=False)
source = rename.rename_in_module(
occurrence_finder, module_name + '.' + name,
pymodule=pymodule, replace_primary=True)
if source is not None:
pymodule = self.pycore.get_string_module(source, resource)
return pymodule
def _clean_up_imports(self, pymodule, import_filter):
resource = pymodule.get_resource()
module_with_imports = self.module_imports(pymodule, import_filter)
module_with_imports.expand_stars()
source = module_with_imports.get_changed_source()
if source is not None:
pymodule = self.pycore.get_string_module(source, resource)
source = self.relatives_to_absolutes(pymodule)
if source is not None:
pymodule = self.pycore.get_string_module(source, resource)
module_with_imports = self.module_imports(pymodule, import_filter)
module_with_imports.remove_duplicates()
module_with_imports.remove_unused_imports()
source = module_with_imports.get_changed_source()
if source is not None:
pymodule = self.pycore.get_string_module(source, resource)
return pymodule
def relatives_to_absolutes(self, pymodule, import_filter=None):
module_imports = self.module_imports(pymodule, import_filter)
to_be_absolute_list = module_imports.get_relative_to_absolute_list()
for name, absolute_name in to_be_absolute_list:
pymodule = self._rename_in_module(pymodule, name, absolute_name)
module_imports = self.module_imports(pymodule, import_filter)
module_imports.get_relative_to_absolute_list()
source = module_imports.get_changed_source()
if source is None:
source = pymodule.source_code
return source
def _is_transformable_to_normal(self, import_info):
if not isinstance(import_info, FromImport):
return False
return True
def organize_imports(self, pymodule,
unused=True, duplicates=True,
selfs=True, sort=True, import_filter=None):
if unused or duplicates:
module_imports = self.module_imports(pymodule, import_filter)
if unused:
module_imports.remove_unused_imports()
if duplicates:
module_imports.remove_duplicates()
source = module_imports.get_changed_source()
if source is not None:
pymodule = self.pycore.get_string_module(
source, pymodule.get_resource())
if selfs:
pymodule = self._remove_self_imports(pymodule, import_filter)
if sort:
return self.sort_imports(pymodule, import_filter)
else:
return pymodule.source_code
def _remove_self_imports(self, pymodule, import_filter=None):
module_imports = self.module_imports(pymodule, import_filter)
to_be_fixed, to_be_renamed = module_imports.get_self_import_fix_and_rename_list()
for name in to_be_fixed:
try:
pymodule = self._rename_in_module(pymodule, name, '', till_dot=True)
except ValueError:
# There is a self import with direct access to it
return pymodule
for name, new_name in to_be_renamed:
pymodule = self._rename_in_module(pymodule, name, new_name)
module_imports = self.module_imports(pymodule, import_filter)
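        # as above, called only for its side effect of fixing the import
        # statements in place before the changed source is generated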
module_imports.get_self_import_fix_and_rename_list()
source = module_imports.get_changed_source()
if source is not None:
pymodule = self.pycore.get_string_module(source, pymodule.get_resource())
return pymodule
def _rename_in_module(self, pymodule, name, new_name, till_dot=False):
old_name = name.split('.')[-1]
old_pyname = rope.base.evaluate.eval_str(pymodule.get_scope(), name)
occurrence_finder = occurrences.create_finder(
self.pycore, old_name, old_pyname, imports=False)
changes = rope.base.codeanalyze.ChangeCollector(pymodule.source_code)
for occurrence in occurrence_finder.find_occurrences(pymodule=pymodule):
start, end = occurrence.get_primary_range()
if till_dot:
new_end = pymodule.source_code.index('.', end) + 1
space = pymodule.source_code[end:new_end - 1].strip()
if not space == '':
for c in space:
if not c.isspace() and c not in '\\':
raise ValueError()
end = new_end
changes.add_change(start, end, new_name)
source = changes.get_changed()
if source is not None:
pymodule = self.pycore.get_string_module(source, pymodule.get_resource())
return pymodule
def sort_imports(self, pymodule, import_filter=None):
module_imports = self.module_imports(pymodule, import_filter)
module_imports.sort_imports()
return module_imports.get_changed_source()
def handle_long_imports(self, pymodule, maxdots=2, maxlength=27,
import_filter=None):
# IDEA: `maxdots` and `maxlength` can be specified in project config
# adding new from imports
module_imports = self.module_imports(pymodule, import_filter)
to_be_fixed = module_imports.handle_long_imports(maxdots, maxlength)
# performing the renaming
pymodule = self.pycore.get_string_module(
module_imports.get_changed_source(),
resource=pymodule.get_resource())
for name in to_be_fixed:
pymodule = self._rename_in_module(pymodule, name,
name.split('.')[-1])
# organizing imports
return self.organize_imports(pymodule, selfs=False, sort=False,
import_filter=import_filter)
def get_imports(pycore, pydefined):
"""A shortcut for getting the `ImportInfo`\s used in a scope"""
pymodule = pydefined.get_module()
module = module_imports.ModuleImports(pycore, pymodule)
if pymodule == pydefined:
return [stmt.import_info for stmt in module.imports]
return module.get_used_imports(pydefined)
def get_module_imports(pycore, pymodule):
"""A shortcut for creating a `module_imports.ModuleImports` object"""
return module_imports.ModuleImports(pycore, pymodule)
def add_import(pycore, pymodule, module_name, name=None):
imports = get_module_imports(pycore, pymodule)
candidates = []
names = []
# from mod import name
if name is not None:
from_import = FromImport(module_name, 0, [(name, None)])
names.append(name)
candidates.append(from_import)
# from pkg import mod
if '.' in module_name:
pkg, mod = module_name.rsplit('.', 1)
candidates.append(FromImport(pkg, 0, [(mod, None)]))
if name:
names.append(mod + '.' + name)
else:
names.append(mod)
# import mod
normal_import = NormalImport([(module_name, None)])
if name:
names.append(module_name + '.' + name)
else:
names.append(module_name)
candidates.append(normal_import)
visitor = actions.AddingVisitor(pycore, candidates)
selected_import = normal_import
for import_statement in imports.imports:
if import_statement.accept(visitor):
selected_import = visitor.import_info
break
imports.add_import(selected_import)
imported_name = names[candidates.index(selected_import)]
return imports.get_changed_source(), imported_name
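# Illustrative sketch (not part of rope); `pycore` and `resource` are assumed
# to come from an open rope project. add_import() merges into an existing
# statement when the AddingVisitor accepts one, else adds a plain
# `import <module_name>`:
#
#   pymodule = pycore.resource_to_pyobject(resource)
#   source, imported = add_import(pycore, pymodule, 'os.path', 'join')
#   # `imported` is the dotted name to use in code ('join', 'path.join' or
#   # 'os.path.join'), matching whichever candidate was selected.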
| gpl-2.0 |
HyperBaton/ansible | test/units/modules/network/check_point/test_cp_mgmt_publish.py | 19 | 2400 | # Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_publish
PAYLOAD = {
"wait_for_task": False
}
RETURN_PAYLOAD = {
"task-id": "53de74b7-8f19-4cbe-99fc-a81ef0759bad"
}
command = 'publish'
failure_msg = '{command failed}'
class TestCheckpointPublish(object):
module = cp_mgmt_publish
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
return connection_class_mock.return_value
def test_command(self, mocker, connection_mock):
connection_mock.send_request.return_value = (200, RETURN_PAYLOAD)
result = self._run_module(PAYLOAD)
assert result['changed']
assert RETURN_PAYLOAD == result[command]
def test_command_fail(self, mocker, connection_mock):
connection_mock.send_request.return_value = (404, failure_msg)
try:
result = self._run_module(PAYLOAD)
except Exception as e:
result = e.args[0]
assert 'Checkpoint device returned error 404 with message ' + failure_msg == result['msg']
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
| gpl-3.0 |
lavvy/osmc | package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x09a.py | 253 | 4623 | data = (
'E ', # 0x00
'Cheng ', # 0x01
'Xin ', # 0x02
'Ai ', # 0x03
'Lu ', # 0x04
'Zhui ', # 0x05
'Zhou ', # 0x06
'She ', # 0x07
'Pian ', # 0x08
'Kun ', # 0x09
'Tao ', # 0x0a
'Lai ', # 0x0b
'Zong ', # 0x0c
'Ke ', # 0x0d
'Qi ', # 0x0e
'Qi ', # 0x0f
'Yan ', # 0x10
'Fei ', # 0x11
'Sao ', # 0x12
'Yan ', # 0x13
'Jie ', # 0x14
'Yao ', # 0x15
'Wu ', # 0x16
'Pian ', # 0x17
'Cong ', # 0x18
'Pian ', # 0x19
'Qian ', # 0x1a
'Fei ', # 0x1b
'Huang ', # 0x1c
'Jian ', # 0x1d
'Huo ', # 0x1e
'Yu ', # 0x1f
'Ti ', # 0x20
'Quan ', # 0x21
'Xia ', # 0x22
'Zong ', # 0x23
'Kui ', # 0x24
'Rou ', # 0x25
'Si ', # 0x26
'Gua ', # 0x27
'Tuo ', # 0x28
'Kui ', # 0x29
'Sou ', # 0x2a
'Qian ', # 0x2b
'Cheng ', # 0x2c
'Zhi ', # 0x2d
'Liu ', # 0x2e
'Pang ', # 0x2f
'Teng ', # 0x30
'Xi ', # 0x31
'Cao ', # 0x32
'Du ', # 0x33
'Yan ', # 0x34
'Yuan ', # 0x35
'Zou ', # 0x36
'Sao ', # 0x37
'Shan ', # 0x38
'Li ', # 0x39
'Zhi ', # 0x3a
'Shuang ', # 0x3b
'Lu ', # 0x3c
'Xi ', # 0x3d
'Luo ', # 0x3e
'Zhang ', # 0x3f
'Mo ', # 0x40
'Ao ', # 0x41
'Can ', # 0x42
'Piao ', # 0x43
'Cong ', # 0x44
'Qu ', # 0x45
'Bi ', # 0x46
'Zhi ', # 0x47
'Yu ', # 0x48
'Xu ', # 0x49
'Hua ', # 0x4a
'Bo ', # 0x4b
'Su ', # 0x4c
'Xiao ', # 0x4d
'Lin ', # 0x4e
'Chan ', # 0x4f
'Dun ', # 0x50
'Liu ', # 0x51
'Tuo ', # 0x52
'Zeng ', # 0x53
'Tan ', # 0x54
'Jiao ', # 0x55
'Tie ', # 0x56
'Yan ', # 0x57
'Luo ', # 0x58
'Zhan ', # 0x59
'Jing ', # 0x5a
'Yi ', # 0x5b
'Ye ', # 0x5c
'Tuo ', # 0x5d
'Bin ', # 0x5e
'Zou ', # 0x5f
'Yan ', # 0x60
'Peng ', # 0x61
'Lu ', # 0x62
'Teng ', # 0x63
'Xiang ', # 0x64
'Ji ', # 0x65
'Shuang ', # 0x66
'Ju ', # 0x67
'Xi ', # 0x68
'Huan ', # 0x69
'Li ', # 0x6a
'Biao ', # 0x6b
'Ma ', # 0x6c
'Yu ', # 0x6d
'Tuo ', # 0x6e
'Xun ', # 0x6f
'Chi ', # 0x70
'Qu ', # 0x71
'Ri ', # 0x72
'Bo ', # 0x73
'Lu ', # 0x74
'Zang ', # 0x75
'Shi ', # 0x76
'Si ', # 0x77
'Fu ', # 0x78
'Ju ', # 0x79
'Zou ', # 0x7a
'Zhu ', # 0x7b
'Tuo ', # 0x7c
'Nu ', # 0x7d
'Jia ', # 0x7e
'Yi ', # 0x7f
'Tai ', # 0x80
'Xiao ', # 0x81
'Ma ', # 0x82
'Yin ', # 0x83
'Jiao ', # 0x84
'Hua ', # 0x85
'Luo ', # 0x86
'Hai ', # 0x87
'Pian ', # 0x88
'Biao ', # 0x89
'Li ', # 0x8a
'Cheng ', # 0x8b
'Yan ', # 0x8c
'Xin ', # 0x8d
'Qin ', # 0x8e
'Jun ', # 0x8f
'Qi ', # 0x90
'Qi ', # 0x91
'Ke ', # 0x92
'Zhui ', # 0x93
'Zong ', # 0x94
'Su ', # 0x95
'Can ', # 0x96
'Pian ', # 0x97
'Zhi ', # 0x98
'Kui ', # 0x99
'Sao ', # 0x9a
'Wu ', # 0x9b
'Ao ', # 0x9c
'Liu ', # 0x9d
'Qian ', # 0x9e
'Shan ', # 0x9f
'Piao ', # 0xa0
'Luo ', # 0xa1
'Cong ', # 0xa2
'Chan ', # 0xa3
'Zou ', # 0xa4
'Ji ', # 0xa5
'Shuang ', # 0xa6
'Xiang ', # 0xa7
'Gu ', # 0xa8
'Wei ', # 0xa9
'Wei ', # 0xaa
'Wei ', # 0xab
'Yu ', # 0xac
'Gan ', # 0xad
'Yi ', # 0xae
'Ang ', # 0xaf
'Tou ', # 0xb0
'Xie ', # 0xb1
'Bao ', # 0xb2
'Bi ', # 0xb3
'Chi ', # 0xb4
'Ti ', # 0xb5
'Di ', # 0xb6
'Ku ', # 0xb7
'Hai ', # 0xb8
'Qiao ', # 0xb9
'Gou ', # 0xba
'Kua ', # 0xbb
'Ge ', # 0xbc
'Tui ', # 0xbd
'Geng ', # 0xbe
'Pian ', # 0xbf
'Bi ', # 0xc0
'Ke ', # 0xc1
'Ka ', # 0xc2
'Yu ', # 0xc3
'Sui ', # 0xc4
'Lou ', # 0xc5
'Bo ', # 0xc6
'Xiao ', # 0xc7
'Pang ', # 0xc8
'Bo ', # 0xc9
'Ci ', # 0xca
'Kuan ', # 0xcb
'Bin ', # 0xcc
'Mo ', # 0xcd
'Liao ', # 0xce
'Lou ', # 0xcf
'Nao ', # 0xd0
'Du ', # 0xd1
'Zang ', # 0xd2
'Sui ', # 0xd3
'Ti ', # 0xd4
'Bin ', # 0xd5
'Kuan ', # 0xd6
'Lu ', # 0xd7
'Gao ', # 0xd8
'Gao ', # 0xd9
'Qiao ', # 0xda
'Kao ', # 0xdb
'Qiao ', # 0xdc
'Lao ', # 0xdd
'Zao ', # 0xde
'Biao ', # 0xdf
'Kun ', # 0xe0
'Kun ', # 0xe1
'Ti ', # 0xe2
'Fang ', # 0xe3
'Xiu ', # 0xe4
'Ran ', # 0xe5
'Mao ', # 0xe6
'Dan ', # 0xe7
'Kun ', # 0xe8
'Bin ', # 0xe9
'Fa ', # 0xea
'Tiao ', # 0xeb
'Peng ', # 0xec
'Zi ', # 0xed
'Fa ', # 0xee
'Ran ', # 0xef
'Ti ', # 0xf0
'Pao ', # 0xf1
'Pi ', # 0xf2
'Mao ', # 0xf3
'Fu ', # 0xf4
'Er ', # 0xf5
'Rong ', # 0xf6
'Qu ', # 0xf7
'Gong ', # 0xf8
'Xiu ', # 0xf9
'Gua ', # 0xfa
'Ji ', # 0xfb
'Peng ', # 0xfc
'Zhua ', # 0xfd
'Shao ', # 0xfe
'Sha ', # 0xff
)
| gpl-2.0 |
synicalsyntax/zulip | zproject/dev_settings.py | 1 | 6926 | import os
import pwd
from typing import Optional, Set, Tuple
from six.moves.urllib.parse import SplitResult
ZULIP_ADMINISTRATOR = "desdemona+admin@zulip.com"
# We want LOCAL_UPLOADS_DIR to be an absolute path so that code can
# chdir without having problems accessing it. Unfortunately, this
# means we need a duplicate definition of DEPLOY_ROOT with the one in
# settings.py.
DEPLOY_ROOT = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
LOCAL_UPLOADS_DIR = os.path.join(DEPLOY_ROOT, 'var/uploads')
# We assume dev droplets are the only places where
# users use zulipdev as the user.
IS_DEV_DROPLET = pwd.getpwuid(os.getuid()).pw_name == 'zulipdev'
FORWARD_ADDRESS_CONFIG_FILE = "var/forward_address.ini"
# Check if test_settings.py set EXTERNAL_HOST.
external_host_env = os.getenv('EXTERNAL_HOST')
if external_host_env is None:
if IS_DEV_DROPLET:
# For our droplets, we use the hostname (eg github_username.zulipdev.org) by default.
EXTERNAL_HOST = os.uname()[1].lower() + ":9991"
else:
# For local development environments, we use localhost by
# default, via the "zulipdev.com" hostname.
EXTERNAL_HOST = 'zulipdev.com:9991'
# Serve the main dev realm at the literal name "localhost",
# so it works out of the box even when not on the Internet.
REALM_HOSTS = {
'zulip': 'localhost:9991',
}
else:
EXTERNAL_HOST = external_host_env
REALM_HOSTS = {
'zulip': EXTERNAL_HOST,
}
# TODO: Replace with scripts.lib.zulip_tools.deport when this no longer needs to
# be Python 2 compatible for zthumbor.
r = SplitResult("", EXTERNAL_HOST, "", "", "")
assert r.hostname is not None
EXTERNAL_HOST_WITHOUT_PORT = "[" + r.hostname + "]" if ":" in r.hostname else r.hostname
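# For example: "zulipdev.com:9991" gives "zulipdev.com", while an IPv6 literal
# like "[::1]:9991" gives "[::1]" (r.hostname strips the port and brackets).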
ALLOWED_HOSTS = ['*']
# Uncomment extra backends if you want to test with them. Note that
# for Google and GitHub auth you'll need to do some pre-setup.
AUTHENTICATION_BACKENDS = (
'zproject.backends.DevAuthBackend',
'zproject.backends.EmailAuthBackend',
'zproject.backends.GitHubAuthBackend',
'zproject.backends.GoogleAuthBackend',
'zproject.backends.SAMLAuthBackend',
# 'zproject.backends.AzureADAuthBackend',
'zproject.backends.GitLabAuthBackend',
'zproject.backends.AppleAuthBackend',
) # type: Tuple[str, ...]
EXTERNAL_URI_SCHEME = "http://"
EMAIL_GATEWAY_PATTERN = "%s@" + EXTERNAL_HOST_WITHOUT_PORT
NOTIFICATION_BOT = "notification-bot@zulip.com"
ERROR_BOT = "error-bot@zulip.com"
EMAIL_GATEWAY_BOT = "emailgateway@zulip.com"
PHYSICAL_ADDRESS = "Zulip Headquarters, 123 Octo Stream, South Pacific Ocean"
STAFF_SUBDOMAIN = "zulip"
EXTRA_INSTALLED_APPS = ["zilencer", "analytics", "corporate"]
# Disable Camo in development
CAMO_URI = ''
OPEN_REALM_CREATION = True
INVITES_MIN_USER_AGE_DAYS = 0
EMBEDDED_BOTS_ENABLED = True
SAVE_FRONTEND_STACKTRACES = True
EVENT_LOGS_ENABLED = True
STAGING_ERROR_NOTIFICATIONS = True
SYSTEM_ONLY_REALMS = set() # type: Set[str]
USING_PGROONGA = True
# Flush cache after migration.
POST_MIGRATION_CACHE_FLUSHING = True # type: bool
# Don't require anything about password strength in development
PASSWORD_MIN_LENGTH = 0
PASSWORD_MIN_GUESSES = 0
# SMTP settings for forwarding emails sent in development
# environment to an email account.
EMAIL_HOST = ""
EMAIL_HOST_USER = ""
# Two factor authentication: Use the fake backend for development.
TWO_FACTOR_CALL_GATEWAY = 'two_factor.gateways.fake.Fake'
TWO_FACTOR_SMS_GATEWAY = 'two_factor.gateways.fake.Fake'
# Make sendfile use django to serve files in development
SENDFILE_BACKEND = 'django_sendfile.backends.development'
# Set this to True to send all hotspots in development
ALWAYS_SEND_ALL_HOTSPOTS = False # type: bool
# FAKE_LDAP_MODE supports using a fake LDAP database in the
# development environment, without needing an LDAP server!
#
# Three modes are allowed, and each will set up Zulip and the fake LDAP
# database in a way appropriate for the corresponding mode described
# in https://zulip.readthedocs.io/en/latest/production/authentication-methods.html#ldap-including-active-directory
# (A) If users' email addresses are in LDAP and used as username.
# (B) If LDAP only has usernames but email addresses are of the form
# username@example.com
# (C) If LDAP usernames are completely unrelated to email addresses.
#
# Fake LDAP data has e.g. ("ldapuser1", "ldapuser1@zulip.com") for username/email.
FAKE_LDAP_MODE = None # type: Optional[str]
# FAKE_LDAP_NUM_USERS = 8
if FAKE_LDAP_MODE:
import ldap
from django_auth_ldap.config import LDAPSearch
# To understand these parameters, read the docs in
# prod_settings_template.py and on ReadTheDocs.
LDAP_APPEND_DOMAIN = None
AUTH_LDAP_USER_SEARCH = LDAPSearch("ou=users,dc=zulip,dc=com",
ldap.SCOPE_ONELEVEL, "(uid=%(user)s)")
AUTH_LDAP_REVERSE_EMAIL_SEARCH = LDAPSearch("ou=users,dc=zulip,dc=com",
ldap.SCOPE_ONELEVEL, "(email=%(email)s)")
if FAKE_LDAP_MODE == 'a':
AUTH_LDAP_REVERSE_EMAIL_SEARCH = LDAPSearch("ou=users,dc=zulip,dc=com",
ldap.SCOPE_ONELEVEL, "(uid=%(email)s)")
AUTH_LDAP_USERNAME_ATTR = "uid"
AUTH_LDAP_USER_ATTR_MAP = {
"full_name": "cn",
"avatar": "thumbnailPhoto",
# This won't do much unless one changes the fact that
# all users have LDAP_USER_ACCOUNT_CONTROL_NORMAL in
# zerver/lib/dev_ldap_directory.py
"userAccountControl": "userAccountControl",
}
elif FAKE_LDAP_MODE == 'b':
LDAP_APPEND_DOMAIN = 'zulip.com'
AUTH_LDAP_USER_ATTR_MAP = {
"full_name": "cn",
"avatar": "jpegPhoto",
"custom_profile_field__birthday": "birthDate",
"custom_profile_field__phone_number": "phoneNumber",
}
elif FAKE_LDAP_MODE == 'c':
AUTH_LDAP_USERNAME_ATTR = "uid"
LDAP_EMAIL_ATTR = 'email'
AUTH_LDAP_USER_ATTR_MAP = {
"full_name": "cn",
}
AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipLDAPAuthBackend',)
THUMBOR_URL = 'http://127.0.0.1:9995'
THUMBNAIL_IMAGES = True
SEARCH_PILLS_ENABLED = bool(os.getenv('SEARCH_PILLS_ENABLED', False))
BILLING_ENABLED = True
LANDING_PAGE_NAVBAR_MESSAGE = None
# Test Custom TOS template rendering
TERMS_OF_SERVICE = 'corporate/terms.md'
# Our run-dev.py proxy uses X-Forwarded-Port to communicate to Django
# that the request is actually on port 9991, not port 9992 (the Django
# server's own port); this setting tells Django to read that HTTP
# header. Important for SAML authentication in the development
# environment.
USE_X_FORWARDED_PORT = True
# Override the default SAML entity ID
SOCIAL_AUTH_SAML_SP_ENTITY_ID = "http://localhost:9991"
MEMCACHED_USERNAME = None
| apache-2.0 |
tobyhodges/timers | browsertimer.py | 2 | 8025 | #! /usr/bin/env python
'''
A script to create a simple, browser-based countdown timer.
usage: python browsertimer.py
'''
# Imports
from bokeh.layouts import column, row
from bokeh import plotting as bkplt
from bokeh.models import ColumnDataSource, Button, Slider, CustomJS
# Set default/starting values
default_minutes = 5
default_seconds = 0
# ColumnDataSource data values must be iterable,
# so the values below are placed inside single-element lists
color = ['#78c400']
text_color = ['#ffffff']
start_time = [default_minutes*60 + default_seconds]
time_remaining = list(start_time)
time_string = ['%02d:%02d' % (default_minutes, default_seconds)]
# Create data source for timer plot
source=ColumnDataSource(data=dict(x=[0],
y=[0],
start_time=start_time,
start_mins=[default_minutes],
start_secs=[default_seconds],
time_remaining=time_remaining,
time_string=time_string,
fill_color=color,
text_color=text_color,
interval_id=[0]))
# No tools required for this one
tools = ''
# JS Callbacks
run_timer_JS = CustomJS(args=dict(source=source), code="""
function disable_button(button) {
button_id = button.get('id');
button_element = document.querySelector('#modelid_' + button_id + '>button');
button_element.setAttribute('disabled',true);
}
function enable_button(button) {
button_id = button.get('id');
button_element = document.querySelector('#modelid_' + button_id + '>button');
button_element.removeAttribute('disabled');
}
var data = source.get('data');
interval_id = data['interval_id'];
function countdown() {
start_time = data['start_time'];
time_remaining = data['time_remaining'];
time_string = data['time_string'];
text_color = data['text_color'];
fill_color = data['fill_color'];
if (time_remaining[0] == 0) {
if (text_color[0] == '#ffffff') {
text_color[0] = '#ff0000';
}else {
text_color[0] = '#ffffff';
}
}else{
time_remaining[0]--;
time_string[0] = ('0' + Math.floor(time_remaining[0] / 60)).slice(-2) + ':' + ('0' + Math.floor(time_remaining[0] % 60)).slice(-2);
if (time_remaining[0] <= start_time[0]/10) {
fill_color[0] = '#ff0000';
}
}
source.trigger('change');
}
disable_button(start_button);
enable_button(stop_button);
enable_button(reset_button);
if (interval_id[0] == 0) {
interval_id[0] = setInterval(countdown, 1000);
source.trigger('change');
}
""")
stop_timer_JS = CustomJS(args=dict(source=source), code="""
function disable_button(button) {
button_id = button.get('id');
button_element = document.querySelector('#modelid_' + button_id + '>button');
button_element.setAttribute('disabled',true);
}
function enable_button(button) {
button_id = button.get('id');
button_element = document.querySelector('#modelid_' + button_id + '>button');
button_element.removeAttribute('disabled');
}
var data = source.get('data');
interval_id = data['interval_id'];
if (interval_id[0] != 0){
clearInterval(interval_id[0]);
interval_id[0] = 0;
}
enable_button(start_button);
disable_button(stop_button);
enable_button(reset_button);
source.trigger('change');
""")
reset_timer_JS = CustomJS(args=dict(source=source), code="""
function disable_button(button) {
button_id = button.get('id');
button_element = document.querySelector('#modelid_' + button_id + '>button');
button_element.setAttribute('disabled',true);
}
function enable_button(button) {
button_id = button.get('id');
button_element = document.querySelector('#modelid_' + button_id + '>button');
button_element.removeAttribute('disabled');
}
var data = source.get('data');
interval_id = data['interval_id'];
start_time = data['start_time'];
time_remaining = data['time_remaining'];
time_string = data['time_string'];
text_color = data['text_color'];
fill_color = data['fill_color'];
text_color[0] = '#ffffff';
fill_color[0] = '#78c400';
if (interval_id[0] != 0) {
clearInterval(interval_id[0]);
interval_id[0] = 0;
}
time_remaining[0] = start_time[0];
time_string[0] = ('0' + Math.floor(time_remaining[0] / 60)).slice(-2) + ':' + ('0' + Math.floor(time_remaining[0] % 60)).slice(-2);
enable_button(start_button);
disable_button(stop_button);
source.trigger('change');
""")
set_start_time_JS = CustomJS(args=dict(source=source), code="""
var data = source.get('data');
var input_mins = mins_slider.get('value');
var input_secs = secs_slider.get('value');
time_string = data['time_string'];
time_remaining = data['time_remaining'];
start_button_id = start_button.get('id');
start_button_element = document.querySelector('#modelid_' + start_button_id + '>button');
start_time = data['start_time'];
start_time[0] = (input_mins * 60) + (input_secs);
if (start_button_element.hasAttribute('disabled')) {
} else {
time_remaining[0] = start_time[0];
time_string[0] = ('0' + Math.floor(time_remaining[0] / 60)).slice(-2) + ':' + ('0' + Math.floor(time_remaining[0] % 60)).slice(-2);
}
source.trigger('change');
""")
# Create plot: a color block, with text centered inside
p1 = bkplt.figure(x_range=(-8, 8), y_range=(-5, 5),
plot_width=900, plot_height=600,
title=None, tools=tools)
p1.rect(x='x', y='y',
width=16, height=10,
fill_color='fill_color',
line_color=None,
name='block',
source=source)
p1.text(x='x', y='y',
text='time_string',
text_color='text_color',
alpha=0.75,
text_font_size='128pt',
text_baseline='middle',
text_align='center',
name='timer',
source=source)
# Remove axes, labels & tick lines
p1.ygrid.grid_line_color = None
p1.xgrid.grid_line_color = None
p1.axis.axis_line_color = None
p1.axis.major_label_text_color = None
p1.axis.major_tick_line_color = None
p1.axis.minor_tick_line_color = None
# Sliders
minutes_slider = Slider(start=0, end=99,
value=default_minutes, step=1,
title="Minutes",
callback=set_start_time_JS)
seconds_slider = Slider(start=0, end=59,
value=default_seconds, step=1,
title="Seconds",
callback=set_start_time_JS)
set_start_time_JS.args['mins_slider'] = minutes_slider
set_start_time_JS.args['secs_slider'] = seconds_slider
# Buttons
start_button = Button(label="Start", callback=run_timer_JS)
run_timer_JS.args['start_button'] = stop_timer_JS.args['start_button'] = reset_timer_JS.args['start_button'] = set_start_time_JS.args['start_button'] = start_button
stop_button = Button(label="Stop", disabled=True, callback=stop_timer_JS)
run_timer_JS.args['stop_button'] = stop_timer_JS.args['stop_button'] = reset_timer_JS.args['stop_button'] = stop_button
reset_button = Button(label="Reset", callback=reset_timer_JS)
run_timer_JS.args['reset_button'] = stop_timer_JS.args['reset_button'] = reset_timer_JS.args['reset_button'] = reset_button
# Layout plot & widgets
layout = column(row(minutes_slider, seconds_slider),
row(start_button, stop_button, reset_button),
p1)
# Show figure
bkplt.output_file('browsertimer.html', title="Countdown Timer")
bkplt.show(layout)
| mit |
SpaceKatt/CSPLN | apps/scaffolding/mac/web2py/web2py.app/Contents/Resources/gluon/contrib/gae_retry.py | 43 | 3653 | def autoretry_datastore_timeouts(attempts=5.0, interval=0.1, exponent=2.0):
"""
Copyright (C) 2009 twitter.com/rcb
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
======================================================================
This function wraps the AppEngine Datastore API to autoretry
datastore timeouts at the lowest accessible level.
The benefits of this approach are:
1. Small Footprint: Does not monkey with Model internals
which may break in future releases.
2. Max Performance: Retrying at this lowest level means
serialization and key formatting is not
needlessly repeated on each retry.
At initialization time, execute this:
>>> autoretry_datastore_timeouts()
Should only be called once, subsequent calls have no effect.
>>> autoretry_datastore_timeouts() # no effect
Default (5) attempts: .1, .2, .4, .8, 1.6 seconds
Parameters can each be specified as floats.
:param attempts: maximum number of times to retry.
:param interval: base seconds to sleep between retries.
:param exponent: rate of exponential back-off.
"""
import time
import logging
from google.appengine.api import apiproxy_stub_map
from google.appengine.runtime import apiproxy_errors
from google.appengine.datastore import datastore_pb
attempts = float(attempts)
interval = float(interval)
exponent = float(exponent)
wrapped = apiproxy_stub_map.MakeSyncCall
errors = {datastore_pb.Error.TIMEOUT: 'Timeout',
datastore_pb.Error.CONCURRENT_TRANSACTION: 'TransactionFailedError'}
def wrapper(*args, **kwargs):
count = 0.0
while True:
try:
return wrapped(*args, **kwargs)
except apiproxy_errors.ApplicationError, err:
errno = err.application_error
if errno not in errors:
raise
sleep = (exponent ** count) * interval
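                # with the defaults: 0.1, 0.2, 0.4, 0.8, 1.6 seconds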
count += 1.0
if count > attempts:
raise
msg = "Datastore %s: retry #%d in %s seconds.\n%s"
vals = ''
if count == 1.0:
vals = '\n'.join([str(a) for a in args])
logging.warning(msg % (errors[errno], count, sleep, vals))
time.sleep(sleep)
setattr(wrapper, '_autoretry_datastore_timeouts', False)
if getattr(wrapped, '_autoretry_datastore_timeouts', True):
apiproxy_stub_map.MakeSyncCall = wrapper
| gpl-3.0 |
pabloborrego93/edx-platform | openedx/core/djangoapps/service_status/views.py | 11 | 1336 | """
Django Views for service status app
"""
import json
import time
from django.http import HttpResponse
from dogapi import dog_stats_api
from djcelery import celery
from celery.exceptions import TimeoutError
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
"""
An empty view
"""
return HttpResponse()
@dog_stats_api.timed('status.service.celery.status')
def celery_status(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
@dog_stats_api.timed('status.service.celery.ping')
def celery_ping(_):
"""
A Simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")
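# Example response body (illustrative values; `value` is whatever the
# delayed_ping task returned):
# {"success": true, "task_id": "a1b2c3...", "value": "...", "time": 0.12}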
| agpl-3.0 |
petrutlucian94/nova_dev | nova/filters.py | 21 | 3305 | # Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Filter support
"""
from nova import loadables
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class BaseFilter(object):
"""Base class for all filter classes."""
def _filter_one(self, obj, filter_properties):
"""Return True if it passes the filter, False otherwise.
Override this in a subclass.
"""
return True
def filter_all(self, filter_obj_list, filter_properties):
"""Yield objects that pass the filter.
Can be overridden in a subclass, if you need to base filtering
decisions on all objects. Otherwise, one can just override
_filter_one() to filter a single object.
"""
for obj in filter_obj_list:
if self._filter_one(obj, filter_properties):
yield obj
# Set to true in a subclass if a filter only needs to be run once
# for each request rather than for each instance
run_filter_once_per_request = False
def run_filter_for_index(self, index):
"""Return True if the filter needs to be run for the "index-th"
instance in a request. Only need to override this if a filter
needs anything other than "first only" or "all" behaviour.
"""
if self.run_filter_once_per_request and index > 0:
return False
else:
return True
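# Illustrative sketch (not part of nova): a minimal subclass only overrides
# _filter_one(); `free_ram_mb` and the 'ram_mb' property are hypothetical.
#
#   class EnoughRamFilter(BaseFilter):
#       def _filter_one(self, obj, filter_properties):
#           return obj.free_ram_mb >= filter_properties.get('ram_mb', 0)
#
# filter_all() then yields exactly the hosts for which _filter_one() is True.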
class BaseFilterHandler(loadables.BaseLoader):
"""Base class to handle loading filter classes.
This class should be subclassed where one needs to use filters.
"""
def get_filtered_objects(self, filter_classes, objs,
filter_properties, index=0):
list_objs = list(objs)
LOG.debug(_("Starting with %d host(s)"), len(list_objs))
for filter_cls in filter_classes:
cls_name = filter_cls.__name__
filter = filter_cls()
if filter.run_filter_for_index(index):
objs = filter.filter_all(list_objs,
filter_properties)
if objs is None:
LOG.debug(_("Filter %(cls_name)s says to stop filtering"),
{'cls_name': cls_name})
return
list_objs = list(objs)
if not list_objs:
LOG.info(_("Filter %s returned 0 hosts"), cls_name)
break
LOG.debug(_("Filter %(cls_name)s returned "
"%(obj_len)d host(s)"),
{'cls_name': cls_name, 'obj_len': len(list_objs)})
return list_objs
| apache-2.0 |
openwrt-es/linux | scripts/checkkconfigsymbols.py | 345 | 15854 | #!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-only
"""Find Kconfig symbols that are referenced but not defined."""
# (c) 2014-2017 Valentin Rothberg <valentinrothberg@gmail.com>
# (c) 2014 Stefan Hengelein <stefan.hengelein@fau.de>
#
import argparse
import difflib
import os
import re
import signal
import subprocess
import sys
from multiprocessing import Pool, cpu_count
# regex expressions
OPERATORS = r"&|\(|\)|\||\!"
SYMBOL = r"(?:\w*[A-Z0-9]\w*){2,}"
DEF = r"^\s*(?:menu){,1}config\s+(" + SYMBOL + r")\s*"
EXPR = r"(?:" + OPERATORS + r"|\s|" + SYMBOL + r")+"
DEFAULT = r"default\s+.*?(?:if\s.+){,1}"
STMT = r"^\s*(?:if|select|imply|depends\s+on|(?:" + DEFAULT + r"))\s+" + EXPR
SOURCE_SYMBOL = r"(?:\W|\b)+[D]{,1}CONFIG_(" + SYMBOL + r")"
# regex objects
REGEX_FILE_KCONFIG = re.compile(r".*Kconfig[\.\w+\-]*$")
REGEX_SYMBOL = re.compile(r'(?!\B)' + SYMBOL + r'(?!\B)')
REGEX_SOURCE_SYMBOL = re.compile(SOURCE_SYMBOL)
REGEX_KCONFIG_DEF = re.compile(DEF)
REGEX_KCONFIG_EXPR = re.compile(EXPR)
REGEX_KCONFIG_STMT = re.compile(STMT)
REGEX_KCONFIG_HELP = re.compile(r"^\s+help\s*$")
REGEX_FILTER_SYMBOLS = re.compile(r"[A-Za-z0-9]$")
REGEX_NUMERIC = re.compile(r"0[xX][0-9a-fA-F]+|[0-9]+")
REGEX_QUOTES = re.compile("(\"(.*?)\")")
def parse_options():
"""The user interface of this module."""
usage = "Run this tool to detect Kconfig symbols that are referenced but " \
"not defined in Kconfig. If no option is specified, " \
"checkkconfigsymbols defaults to check your current tree. " \
"Please note that specifying commits will 'git reset --hard\' " \
"your current tree! You may save uncommitted changes to avoid " \
"losing data."
parser = argparse.ArgumentParser(description=usage)
parser.add_argument('-c', '--commit', dest='commit', action='store',
default="",
help="check if the specified commit (hash) introduces "
"undefined Kconfig symbols")
parser.add_argument('-d', '--diff', dest='diff', action='store',
default="",
help="diff undefined symbols between two commits "
"(e.g., -d commmit1..commit2)")
parser.add_argument('-f', '--find', dest='find', action='store_true',
default=False,
help="find and show commits that may cause symbols to be "
"missing (required to run with --diff)")
parser.add_argument('-i', '--ignore', dest='ignore', action='store',
default="",
help="ignore files matching this Python regex "
"(e.g., -i '.*defconfig')")
parser.add_argument('-s', '--sim', dest='sim', action='store', default="",
help="print a list of max. 10 string-similar symbols")
parser.add_argument('--force', dest='force', action='store_true',
default=False,
help="reset current Git tree even when it's dirty")
parser.add_argument('--no-color', dest='color', action='store_false',
default=True,
help="don't print colored output (default when not "
"outputting to a terminal)")
args = parser.parse_args()
if args.commit and args.diff:
sys.exit("Please specify only one option at once.")
if args.diff and not re.match(r"^[\w\-\.\^]+\.\.[\w\-\.\^]+$", args.diff):
sys.exit("Please specify valid input in the following format: "
"\'commit1..commit2\'")
if args.commit or args.diff:
if not args.force and tree_is_dirty():
sys.exit("The current Git tree is dirty (see 'git status'). "
"Running this script may\ndelete important data since it "
"calls 'git reset --hard' for some performance\nreasons. "
" Please run this script in a clean Git tree or pass "
"'--force' if you\nwant to ignore this warning and "
"continue.")
if args.commit:
args.find = False
if args.ignore:
try:
re.match(args.ignore, "this/is/just/a/test.c")
        except re.error:
sys.exit("Please specify a valid Python regex.")
return args
def main():
"""Main function of this module."""
args = parse_options()
global COLOR
COLOR = args.color and sys.stdout.isatty()
if args.sim and not args.commit and not args.diff:
sims = find_sims(args.sim, args.ignore)
if sims:
print("%s: %s" % (yel("Similar symbols"), ', '.join(sims)))
else:
print("%s: no similar symbols found" % yel("Similar symbols"))
sys.exit(0)
# dictionary of (un)defined symbols
defined = {}
undefined = {}
if args.commit or args.diff:
head = get_head()
# get commit range
commit_a = None
commit_b = None
if args.commit:
commit_a = args.commit + "~"
commit_b = args.commit
elif args.diff:
split = args.diff.split("..")
commit_a = split[0]
commit_b = split[1]
undefined_a = {}
undefined_b = {}
# get undefined items before the commit
reset(commit_a)
undefined_a, _ = check_symbols(args.ignore)
# get undefined items for the commit
reset(commit_b)
undefined_b, defined = check_symbols(args.ignore)
# report cases that are present for the commit but not before
for symbol in sorted(undefined_b):
# symbol has not been undefined before
if symbol not in undefined_a:
files = sorted(undefined_b.get(symbol))
undefined[symbol] = files
# check if there are new files that reference the undefined symbol
else:
files = sorted(undefined_b.get(symbol) -
undefined_a.get(symbol))
if files:
undefined[symbol] = files
# reset to head
reset(head)
# default to check the entire tree
else:
undefined, defined = check_symbols(args.ignore)
# now print the output
for symbol in sorted(undefined):
print(red(symbol))
files = sorted(undefined.get(symbol))
print("%s: %s" % (yel("Referencing files"), ", ".join(files)))
sims = find_sims(symbol, args.ignore, defined)
sims_out = yel("Similar symbols")
if sims:
print("%s: %s" % (sims_out, ', '.join(sims)))
else:
print("%s: %s" % (sims_out, "no similar symbols found"))
if args.find:
print("%s:" % yel("Commits changing symbol"))
commits = find_commits(symbol, args.diff)
if commits:
for commit in commits:
commit = commit.split(" ", 1)
print("\t- %s (\"%s\")" % (yel(commit[0]), commit[1]))
else:
print("\t- no commit found")
print() # new line
def reset(commit):
"""Reset current git tree to %commit."""
execute(["git", "reset", "--hard", commit])
def yel(string):
"""
Color %string yellow.
"""
return "\033[33m%s\033[0m" % string if COLOR else string
def red(string):
"""
Color %string red.
"""
return "\033[31m%s\033[0m" % string if COLOR else string
def execute(cmd):
"""Execute %cmd and return stdout. Exit in case of error."""
try:
stdout = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=False)
stdout = stdout.decode(errors='replace')
except subprocess.CalledProcessError as fail:
exit(fail)
return stdout
def find_commits(symbol, diff):
"""Find commits changing %symbol in the given range of %diff."""
commits = execute(["git", "log", "--pretty=oneline",
"--abbrev-commit", "-G",
symbol, diff])
return [x for x in commits.split("\n") if x]
def tree_is_dirty():
"""Return true if the current working tree is dirty (i.e., if any file has
been added, deleted, modified, renamed or copied but not committed)."""
stdout = execute(["git", "status", "--porcelain"])
    for line in stdout.splitlines():
if re.findall(r"[URMADC]{1}", line[:2]):
return True
return False
def get_head():
"""Return commit hash of current HEAD."""
stdout = execute(["git", "rev-parse", "HEAD"])
return stdout.strip('\n')
def partition(lst, size):
"""Partition list @lst into eveni-sized lists of size @size."""
return [lst[i::size] for i in range(size)]
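# For instance, partition([1, 2, 3, 4, 5], 2) returns [[1, 3, 5], [2, 4]]:
# element i goes to sublist i % size.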
def init_worker():
"""Set signal handler to ignore SIGINT."""
signal.signal(signal.SIGINT, signal.SIG_IGN)
def find_sims(symbol, ignore, defined=None):
    """Return a list of max. ten Kconfig symbols that are string-similar to
    @symbol."""
    if defined:
        return difflib.get_close_matches(symbol, set(defined), 10)
    # use None instead of a mutable default argument, which would otherwise
    # accumulate definitions across calls
    defined = []
    pool = Pool(cpu_count(), init_worker)
kfiles = []
for gitfile in get_files():
if REGEX_FILE_KCONFIG.match(gitfile):
kfiles.append(gitfile)
arglist = []
for part in partition(kfiles, cpu_count()):
arglist.append((part, ignore))
for res in pool.map(parse_kconfig_files, arglist):
defined.extend(res[0])
return difflib.get_close_matches(symbol, set(defined), 10)
def get_files():
"""Return a list of all files in the current git directory."""
# use 'git ls-files' to get the worklist
stdout = execute(["git", "ls-files"])
if len(stdout) > 0 and stdout[-1] == "\n":
stdout = stdout[:-1]
files = []
for gitfile in stdout.rsplit("\n"):
if ".git" in gitfile or "ChangeLog" in gitfile or \
".log" in gitfile or os.path.isdir(gitfile) or \
gitfile.startswith("tools/"):
continue
files.append(gitfile)
return files
def check_symbols(ignore):
"""Find undefined Kconfig symbols and return a dict with the symbol as key
and a list of referencing files as value. Files matching %ignore are not
checked for undefined symbols."""
pool = Pool(cpu_count(), init_worker)
try:
return check_symbols_helper(pool, ignore)
except KeyboardInterrupt:
pool.terminate()
pool.join()
sys.exit(1)
def check_symbols_helper(pool, ignore):
"""Helper method for check_symbols(). Used to catch keyboard interrupts in
check_symbols() in order to properly terminate running worker processes."""
source_files = []
kconfig_files = []
defined_symbols = []
referenced_symbols = dict() # {file: [symbols]}
for gitfile in get_files():
if REGEX_FILE_KCONFIG.match(gitfile):
kconfig_files.append(gitfile)
else:
if ignore and not re.match(ignore, gitfile):
continue
# add source files that do not match the ignore pattern
source_files.append(gitfile)
# parse source files
arglist = partition(source_files, cpu_count())
for res in pool.map(parse_source_files, arglist):
referenced_symbols.update(res)
# parse kconfig files
arglist = []
for part in partition(kconfig_files, cpu_count()):
arglist.append((part, ignore))
for res in pool.map(parse_kconfig_files, arglist):
defined_symbols.extend(res[0])
referenced_symbols.update(res[1])
defined_symbols = set(defined_symbols)
# inverse mapping of referenced_symbols to dict(symbol: [files])
inv_map = dict()
for _file, symbols in referenced_symbols.items():
for symbol in symbols:
inv_map[symbol] = inv_map.get(symbol, set())
inv_map[symbol].add(_file)
referenced_symbols = inv_map
undefined = {} # {symbol: [files]}
for symbol in sorted(referenced_symbols):
# filter some false positives
if symbol == "FOO" or symbol == "BAR" or \
symbol == "FOO_BAR" or symbol == "XXX":
continue
if symbol not in defined_symbols:
if symbol.endswith("_MODULE"):
# avoid false positives for kernel modules
if symbol[:-len("_MODULE")] in defined_symbols:
continue
undefined[symbol] = referenced_symbols.get(symbol)
return undefined, defined_symbols
def parse_source_files(source_files):
"""Parse each source file in @source_files and return dictionary with source
files as keys and lists of references Kconfig symbols as values."""
referenced_symbols = dict()
for sfile in source_files:
referenced_symbols[sfile] = parse_source_file(sfile)
return referenced_symbols
def parse_source_file(sfile):
"""Parse @sfile and return a list of referenced Kconfig symbols."""
lines = []
references = []
if not os.path.exists(sfile):
return references
with open(sfile, "r", encoding='utf-8', errors='replace') as stream:
lines = stream.readlines()
for line in lines:
if "CONFIG_" not in line:
continue
symbols = REGEX_SOURCE_SYMBOL.findall(line)
for symbol in symbols:
if not REGEX_FILTER_SYMBOLS.search(symbol):
continue
references.append(symbol)
return references
def get_symbols_in_line(line):
"""Return mentioned Kconfig symbols in @line."""
return REGEX_SYMBOL.findall(line)
def parse_kconfig_files(args):
"""Parse kconfig files and return tuple of defined and references Kconfig
symbols. Note, @args is a tuple of a list of files and the @ignore
pattern."""
kconfig_files = args[0]
ignore = args[1]
defined_symbols = []
referenced_symbols = dict()
for kfile in kconfig_files:
defined, references = parse_kconfig_file(kfile)
defined_symbols.extend(defined)
if ignore and re.match(ignore, kfile):
# do not collect references for files that match the ignore pattern
continue
referenced_symbols[kfile] = references
return (defined_symbols, referenced_symbols)
def parse_kconfig_file(kfile):
"""Parse @kfile and update symbol definitions and references."""
lines = []
defined = []
references = []
skip = False
if not os.path.exists(kfile):
return defined, references
with open(kfile, "r", encoding='utf-8', errors='replace') as stream:
lines = stream.readlines()
for i in range(len(lines)):
line = lines[i]
line = line.strip('\n')
line = line.split("#")[0] # ignore comments
if REGEX_KCONFIG_DEF.match(line):
symbol_def = REGEX_KCONFIG_DEF.findall(line)
defined.append(symbol_def[0])
skip = False
elif REGEX_KCONFIG_HELP.match(line):
skip = True
elif skip:
# ignore content of help messages
pass
elif REGEX_KCONFIG_STMT.match(line):
line = REGEX_QUOTES.sub("", line)
symbols = get_symbols_in_line(line)
# multi-line statements
while line.endswith("\\"):
i += 1
line = lines[i]
line = line.strip('\n')
symbols.extend(get_symbols_in_line(line))
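            # note: bumping `i` does not advance the outer for-loop, so
            # continuation lines are revisited later; on their own they
            # normally fail REGEX_KCONFIG_STMT and are skipped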
for symbol in set(symbols):
if REGEX_NUMERIC.match(symbol):
# ignore numeric values
continue
references.append(symbol)
return defined, references
if __name__ == "__main__":
main()
| gpl-2.0 |
nanditav/15712-TensorFlow | tensorflow/python/training/proximal_adagrad_test.py | 12 | 7941 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for Proximal Adagrad operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class ProximalAdagradOptimizerTest(tf.test.TestCase):
def testProximalAdagradwithoutRegularization(self):
with self.test_session() as sess:
var0 = tf.Variable([0.0, 0.0])
var1 = tf.Variable([0.0, 0.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.ProximalAdagradOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([0.0, 0.0], v0_val)
self.assertAllClose([0.0, 0.0], v1_val)
# Run 3 steps Proximal Adagrad.
for _ in range(3):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([-2.60260963, -4.29698515]),
v0_val)
self.assertAllClose(np.array([-0.28432083, -0.56694895]),
v1_val)
def testProximalAdagradwithoutRegularization2(self):
with self.test_session() as sess:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([4.0, 3.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.ProximalAdagradOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([4.0, 3.0], v1_val)
# Run 3 steps Proximal Adagrad.
for _ in range(3):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([-1.60261, -2.296985]),
v0_val)
self.assertAllClose(np.array([3.715679, 2.433051]),
v1_val)
def testProximalAdagradWithL1(self):
with self.test_session() as sess:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([4.0, 3.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.ProximalAdagradOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([4.0, 3.0], v1_val)
# Run 10 steps Proximal Adagrad
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([0.662907, 0.767398]),
v0_val)
self.assertAllClose(np.array([2.959304, 1.029232]),
v1_val)
def testProximalAdagradWithL1_L2(self):
with self.test_session() as sess:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([4.0, 3.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.ProximalAdagradOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([4.0, 3.0], v1_val)
# Run 10 steps Proximal Adagrad.
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([0.043069, 0.080461]),
v0_val)
self.assertAllClose(np.array([0.004069, 0.008578]),
v1_val)
def applyOptimizer(self, opt, steps=5, is_sparse=False):
if is_sparse:
var0 = tf.Variable([[1.0], [2.0]])
var1 = tf.Variable([[3.0], [4.0]])
grads0 = tf.IndexedSlices(tf.constant([0.1], shape=[1, 1]),
tf.constant([0]),
tf.constant([2, 1]))
grads1 = tf.IndexedSlices(tf.constant([0.02], shape=[1, 1]),
tf.constant([1]),
tf.constant([2, 1]))
else:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([3.0, 4.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
sess = tf.get_default_session()
v0_val, v1_val = sess.run([var0, var1])
if is_sparse:
self.assertAllClose([[1.0], [2.0]], v0_val)
self.assertAllClose([[3.0], [4.0]], v1_val)
else:
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([3.0, 4.0], v1_val)
# Run ProximalAdagrad for a few steps
for _ in range(steps):
update.run()
v0_val, v1_val = sess.run([var0, var1])
return v0_val, v1_val
def testEquivAdagradwithoutRegularization(self):
with self.test_session():
val0, val1 = self.applyOptimizer(
tf.train.ProximalAdagradOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0))
with self.test_session():
val2, val3 = self.applyOptimizer(
tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1))
self.assertAllClose(val0, val2)
self.assertAllClose(val1, val3)
def testEquivSparseAdagradwithoutRegularization(self):
with self.test_session():
val0, val1 = self.applyOptimizer(
tf.train.ProximalAdagradOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
is_sparse=True)
with self.test_session():
val2, val3 = self.applyOptimizer(
tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
is_sparse=True)
self.assertAllClose(val0, val2)
self.assertAllClose(val1, val3)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
DataDog/integrations-core | tokumx/datadog_checks/tokumx/vendor/bson/son.py | 1 | 8713 | # Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for creating and manipulating SON, the Serialized Ocument Notation.
Regular dictionaries can be used instead of SON objects, but not when the order
of keys is important. A SON object can be used just like a normal Python
dictionary."""
import collections
import copy
import re
from datadog_checks.tokumx.vendor.bson.py3compat import iteritems
# This sort of sucks, but seems to be as good as it gets...
# This is essentially the same as re._pattern_type
RE_TYPE = type(re.compile(""))
class SON(dict):
"""SON data.
A subclass of dict that maintains ordering of keys and provides a
few extra niceties for dealing with SON. SON objects can be
converted to and from BSON.
The mapping from Python types to BSON types is as follows:
======================================= ============= ===================
Python Type BSON Type Supported Direction
======================================= ============= ===================
None null both
bool boolean both
int [#int]_ int32 / int64 py -> bson
long int64 py -> bson
`bson.int64.Int64` int64 both
float number (real) both
string string py -> bson
unicode string both
list array both
dict / `SON` object both
datetime.datetime [#dt]_ [#dt2]_ date both
`bson.regex.Regex` regex both
compiled re [#re]_ regex py -> bson
`bson.binary.Binary` binary both
`bson.objectid.ObjectId` oid both
`bson.dbref.DBRef` dbref both
None undefined bson -> py
unicode code bson -> py
`bson.code.Code` code py -> bson
unicode symbol bson -> py
bytes (Python 3) [#bytes]_ binary both
======================================= ============= ===================
Note that to save binary data it must be wrapped as an instance of
`bson.binary.Binary`. Otherwise it will be saved as a BSON string
and retrieved as unicode.
.. [#int] A Python int will be saved as a BSON int32 or BSON int64 depending
on its size. A BSON int32 will always decode to a Python int. A BSON
int64 will always decode to a :class:`~bson.int64.Int64`.
.. [#dt] datetime.datetime instances will be rounded to the nearest
millisecond when saved
.. [#dt2] all datetime.datetime instances are treated as *naive*. clients
should always use UTC.
.. [#re] :class:`~bson.regex.Regex` instances and regular expression
objects from ``re.compile()`` are both saved as BSON regular expressions.
BSON regular expressions are decoded as :class:`~bson.regex.Regex`
instances.
.. [#bytes] The bytes type from Python 3.x is encoded as BSON binary with
subtype 0. In Python 3.x it will be decoded back to bytes. In Python 2.x
it will be decoded to an instance of :class:`~bson.binary.Binary` with
subtype 0.
"""
def __init__(self, data=None, **kwargs):
self.__keys = []
dict.__init__(self)
self.update(data)
self.update(kwargs)
def __new__(cls, *args, **kwargs):
instance = super(SON, cls).__new__(cls, *args, **kwargs)
instance.__keys = []
return instance
def __repr__(self):
result = []
for key in self.__keys:
result.append("(%r, %r)" % (key, self[key]))
return "SON([%s])" % ", ".join(result)
def __setitem__(self, key, value):
if key not in self.__keys:
self.__keys.append(key)
dict.__setitem__(self, key, value)
def __delitem__(self, key):
self.__keys.remove(key)
dict.__delitem__(self, key)
def keys(self):
return list(self.__keys)
def copy(self):
other = SON()
other.update(self)
return other
# TODO this is all from UserDict.DictMixin. it could probably be made more
# efficient.
# second level definitions support higher levels
def __iter__(self):
for k in self.__keys:
yield k
def has_key(self, key):
return key in self.__keys
# third level takes advantage of second level definitions
def iteritems(self):
for k in self:
yield (k, self[k])
def iterkeys(self):
return self.__iter__()
# fourth level uses definitions from lower levels
def itervalues(self):
for _, v in self.iteritems():
yield v
def values(self):
return [v for _, v in self.iteritems()]
def items(self):
return [(key, self[key]) for key in self]
def clear(self):
self.__keys = []
super(SON, self).clear()
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
def pop(self, key, *args):
if len(args) > 1:
raise TypeError("pop expected at most 2 arguments, got "\
+ repr(1 + len(args)))
try:
value = self[key]
except KeyError:
if args:
return args[0]
raise
del self[key]
return value
def popitem(self):
try:
k, v = next(self.iteritems())
except StopIteration:
raise KeyError('container is empty')
del self[k]
return (k, v)
def update(self, other=None, **kwargs):
# Make progressively weaker assumptions about "other"
if other is None:
pass
elif hasattr(other, 'iteritems'): # iteritems saves memory and lookups
for k, v in other.iteritems():
self[k] = v
elif hasattr(other, 'keys'):
for k in other.keys():
self[k] = other[k]
else:
for k, v in other:
self[k] = v
if kwargs:
self.update(kwargs)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __eq__(self, other):
"""Comparison to another SON is order-sensitive while comparison to a
regular dictionary is order-insensitive.
"""
if isinstance(other, SON):
return len(self) == len(other) and self.items() == other.items()
return self.to_dict() == other
def __ne__(self, other):
return not self == other
def __len__(self):
return len(self.__keys)
def to_dict(self):
"""Convert a SON document to a normal Python dictionary instance.
This is trickier than just *dict(...)* because it needs to be
recursive.
"""
def transform_value(value):
if isinstance(value, list):
return [transform_value(v) for v in value]
elif isinstance(value, collections.Mapping):
return dict([
(k, transform_value(v))
for k, v in iteritems(value)])
else:
return value
return transform_value(dict(self))
def __deepcopy__(self, memo):
out = SON()
val_id = id(self)
if val_id in memo:
return memo.get(val_id)
memo[val_id] = out
for k, v in self.iteritems():
if not isinstance(v, RE_TYPE):
v = copy.deepcopy(v, memo)
out[k] = v
return out
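# Illustrative sketch (not part of bson): SON preserves insertion order, which
# is what makes it suitable for order-sensitive BSON documents.
#
#   s = SON([("a", 1), ("b", 2)])
#   s["c"] = 3
#   s.keys()                                  # ['a', 'b', 'c']
#   s == SON([("b", 2), ("a", 1), ("c", 3)])  # False: SON-to-SON is ordered
#   s == {"b": 2, "a": 1, "c": 3}             # True: dict comparison is not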
| bsd-3-clause |
sdague/home-assistant | homeassistant/components/zha/core/channels/lighting.py | 2 | 4177 | """Lighting channels module for Zigbee Home Automation."""
from typing import Optional
import zigpy.zcl.clusters.lighting as lighting
from .. import registries, typing as zha_typing
from ..const import REPORT_CONFIG_DEFAULT
from .base import ClientChannel, ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id)
class Ballast(ZigbeeChannel):
"""Ballast channel."""
@registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id)
class ColorClientChannel(ClientChannel):
"""Color client channel."""
@registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id)
@registries.LIGHT_CLUSTERS.register(lighting.Color.cluster_id)
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id)
class ColorChannel(ZigbeeChannel):
"""Color channel."""
CAPABILITIES_COLOR_XY = 0x08
CAPABILITIES_COLOR_TEMP = 0x10
UNSUPPORTED_ATTRIBUTE = 0x86
REPORT_CONFIG = (
{"attr": "current_x", "config": REPORT_CONFIG_DEFAULT},
{"attr": "current_y", "config": REPORT_CONFIG_DEFAULT},
{"attr": "color_temperature", "config": REPORT_CONFIG_DEFAULT},
)
def __init__(
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
) -> None:
"""Initialize ColorChannel."""
super().__init__(cluster, ch_pool)
self._color_capabilities = None
self._min_mireds = 153
self._max_mireds = 500
@property
def color_loop_active(self) -> Optional[int]:
"""Return cached value of the color_loop_active attribute."""
return self.cluster.get("color_loop_active")
@property
def color_temperature(self) -> Optional[int]:
"""Return cached value of color temperature."""
return self.cluster.get("color_temperature")
@property
def current_x(self) -> Optional[int]:
"""Return cached value of the current_x attribute."""
return self.cluster.get("current_x")
@property
def current_y(self) -> Optional[int]:
"""Return cached value of the current_y attribute."""
return self.cluster.get("current_y")
@property
def min_mireds(self) -> int:
"""Return the coldest color_temp that this channel supports."""
return self.cluster.get("color_temp_physical_min", self._min_mireds)
@property
def max_mireds(self) -> int:
"""Return the warmest color_temp that this channel supports."""
return self.cluster.get("color_temp_physical_max", self._max_mireds)
def get_color_capabilities(self):
"""Return the color capabilities."""
return self._color_capabilities
async def async_configure(self):
"""Configure channel."""
await self.fetch_color_capabilities(False)
await super().async_configure()
async def async_initialize(self, from_cache):
"""Initialize channel."""
await self.fetch_color_capabilities(True)
attributes = ["color_temperature", "current_x", "current_y"]
await self.get_attributes(attributes, from_cache=from_cache)
async def fetch_color_capabilities(self, from_cache):
"""Get the color configuration."""
attributes = [
"color_temp_physical_min",
"color_temp_physical_max",
"color_capabilities",
]
results = await self.get_attributes(attributes, from_cache=from_cache)
capabilities = results.get("color_capabilities")
if capabilities is None:
# ZCL Version 4 devices don't support the color_capabilities
# attribute. In this version XY support is mandatory, but we
# need to probe to determine if the device supports color
# temperature.
capabilities = self.CAPABILITIES_COLOR_XY
result = await self.get_attribute_value(
"color_temperature", from_cache=from_cache
)
if result is not None and result is not self.UNSUPPORTED_ATTRIBUTE:
capabilities |= self.CAPABILITIES_COLOR_TEMP
self._color_capabilities = capabilities
await super().async_initialize(from_cache)
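# Illustrative sketch (not part of the original module): once
# fetch_color_capabilities has populated the bitmask, a light entity can
# test it like this; `channel` is a hypothetical ColorChannel instance.
#
#     caps = channel.get_color_capabilities()
#     supports_xy = bool(caps & ColorChannel.CAPABILITIES_COLOR_XY)
#     supports_color_temp = bool(caps & ColorChannel.CAPABILITIES_COLOR_TEMP)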
| apache-2.0 |
jperon/musite_old | lib/cherrypy/_cpserver.py | 58 | 8275 | """Manage HTTP servers with CherryPy."""
import warnings
import cherrypy
from cherrypy.lib import attributes
from cherrypy._cpcompat import basestring, py3k
# We import * because we want to export check_port
# et al as attributes of this module.
from cherrypy.process.servers import *
class Server(ServerAdapter):
"""An adapter for an HTTP server.
You can set attributes (like socket_host and socket_port)
on *this* object (which is probably cherrypy.server), and call
quickstart. For example::
cherrypy.server.socket_port = 80
cherrypy.quickstart()
"""
socket_port = 8080
"""The TCP port on which to listen for connections."""
_socket_host = '127.0.0.1'
def _get_socket_host(self):
return self._socket_host
def _set_socket_host(self, value):
if value == '':
raise ValueError("The empty string ('') is not an allowed value. "
"Use '0.0.0.0' instead to listen on all active "
"interfaces (INADDR_ANY).")
self._socket_host = value
socket_host = property(
_get_socket_host,
_set_socket_host,
doc="""The hostname or IP address on which to listen for connections.
Host values may be any IPv4 or IPv6 address, or any valid hostname.
The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if
your hosts file prefers IPv6). The string '0.0.0.0' is a special
IPv4 entry meaning "any active interface" (INADDR_ANY), and '::'
is the similar IN6ADDR_ANY for IPv6. The empty string or None are
not allowed.""")
socket_file = None
"""If given, the name of the UNIX socket to use instead of TCP/IP.
When this option is not None, the `socket_host` and `socket_port` options
are ignored."""
socket_queue_size = 5
"""The 'backlog' argument to socket.listen(); specifies the maximum number
of queued connections (default 5)."""
socket_timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
accepted_queue_size = -1
"""The maximum number of requests which will be queued up before
the server refuses to accept it (default -1, meaning no limit)."""
accepted_queue_timeout = 10
"""The timeout in seconds for attempting to add a request to the
queue when the queue is full (default 10)."""
shutdown_timeout = 5
"""The time to wait for HTTP worker threads to clean up."""
protocol_version = 'HTTP/1.1'
"""The version string to write in the Status-Line of all HTTP responses,
for example, "HTTP/1.1" (the default). Depending on the HTTP server used,
this should also limit the supported features used in the response."""
thread_pool = 10
"""The number of worker threads to start up in the pool."""
thread_pool_max = -1
"""The maximum size of the worker-thread pool. Use -1 to indicate no limit.
"""
max_request_header_size = 500 * 1024
"""The maximum number of bytes allowable in the request headers.
If exceeded, the HTTP server should return "413 Request Entity Too Large".
"""
max_request_body_size = 100 * 1024 * 1024
"""The maximum number of bytes allowable in the request body. If exceeded,
the HTTP server should return "413 Request Entity Too Large"."""
instance = None
"""If not None, this should be an HTTP server instance (such as
CPWSGIServer) which cherrypy.server will control. Use this when you need
more control over object instantiation than is available in the various
configuration options."""
ssl_context = None
"""When using PyOpenSSL, an instance of SSL.Context."""
ssl_certificate = None
"""The filename of the SSL certificate to use."""
ssl_certificate_chain = None
"""When using PyOpenSSL, the certificate chain to pass to
Context.load_verify_locations."""
ssl_private_key = None
"""The filename of the private key to use with SSL."""
if py3k:
ssl_module = 'builtin'
"""The name of a registered SSL adaptation module to use with
the builtin WSGI server. Builtin options are: 'builtin' (to
use the SSL library built into recent versions of Python).
You may also register your own classes in the
wsgiserver.ssl_adapters dict."""
else:
ssl_module = 'pyopenssl'
"""The name of a registered SSL adaptation module to use with the
builtin WSGI server. Builtin options are 'builtin' (to use the SSL
library built into recent versions of Python) and 'pyopenssl' (to
use the PyOpenSSL project, which you must install separately). You
may also register your own classes in the wsgiserver.ssl_adapters
dict."""
statistics = False
"""Turns statistics-gathering on or off for aware HTTP servers."""
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
wsgi_version = (1, 0)
"""The WSGI version tuple to use with the builtin WSGI server.
The provided options are (1, 0) [which includes support for PEP 3333,
which declares it covers WSGI version 1.0.1 but still mandates the
wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
You may create and register your own experimental versions of the WSGI
protocol by adding custom classes to the wsgiserver.wsgi_gateways dict."""
def __init__(self):
self.bus = cherrypy.engine
self.httpserver = None
self.interrupt = None
self.running = False
def httpserver_from_self(self, httpserver=None):
"""Return a (httpserver, bind_addr) pair based on self attributes."""
if httpserver is None:
httpserver = self.instance
if httpserver is None:
from cherrypy import _cpwsgi_server
httpserver = _cpwsgi_server.CPWSGIServer(self)
if isinstance(httpserver, basestring):
# Is anyone using this? Can I add an arg?
httpserver = attributes(httpserver)(self)
return httpserver, self.bind_addr
def start(self):
"""Start the HTTP server."""
if not self.httpserver:
self.httpserver, self.bind_addr = self.httpserver_from_self()
ServerAdapter.start(self)
start.priority = 75
def _get_bind_addr(self):
if self.socket_file:
return self.socket_file
if self.socket_host is None and self.socket_port is None:
return None
return (self.socket_host, self.socket_port)
def _set_bind_addr(self, value):
if value is None:
self.socket_file = None
self.socket_host = None
self.socket_port = None
elif isinstance(value, basestring):
self.socket_file = value
self.socket_host = None
self.socket_port = None
else:
try:
self.socket_host, self.socket_port = value
self.socket_file = None
except ValueError:
raise ValueError("bind_addr must be a (host, port) tuple "
"(for TCP sockets) or a string (for Unix "
"domain sockets), not %r" % value)
bind_addr = property(
_get_bind_addr,
_set_bind_addr,
doc='A (host, port) tuple for TCP sockets or '
'a str for Unix domain sockets.')
def base(self):
"""Return the base (scheme://host[:port] or sock file) for this server.
"""
if self.socket_file:
return self.socket_file
host = self.socket_host
if host in ('0.0.0.0', '::'):
# 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY.
# Look up the host name, which should be the
# safest thing to spit out in a URL.
import socket
host = socket.gethostname()
port = self.socket_port
if self.ssl_certificate:
scheme = "https"
if port != 443:
host += ":%s" % port
else:
scheme = "http"
if port != 80:
host += ":%s" % port
return "%s://%s" % (scheme, host)
| gpl-3.0 |
Contraz/demosys-py | demosys/geometry/points.py | 1 | 1066 | import random
import numpy
import moderngl
from demosys.opengl.vao import VAO
def points_random_3d(count, range_x=(-10.0, 10.0), range_y=(-10.0, 10.0), range_z=(-10.0, 10.0), seed=None) -> VAO:
"""
    Generates random positions inside a confined box.
Args:
count (int): Number of points to generate
Keyword Args:
        range_x (tuple): min-max range for the x axis. Example: (-10.0, 10.0)
        range_y (tuple): min-max range for the y axis. Example: (-10.0, 10.0)
        range_z (tuple): min-max range for the z axis. Example: (-10.0, 10.0)
seed (int): The random seed
Returns:
A :py:class:`demosys.opengl.vao.VAO` instance
"""
random.seed(seed)
def gen():
for _ in range(count):
yield random.uniform(*range_x)
yield random.uniform(*range_y)
yield random.uniform(*range_z)
data = numpy.fromiter(gen(), count=count * 3, dtype=numpy.float32)
vao = VAO("geometry:points_random_3d", mode=moderngl.POINTS)
vao.buffer(data, '3f', ['in_position'])
return vao
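if __name__ == '__main__':
    # Illustrative only (no GL context needed): reproduce the interleaved
    # x, y, z layout points_random_3d feeds into its VAO. Building the VAO
    # itself requires an active moderngl context, so it is skipped here.
    count = 10
    random.seed(42)
    def demo_gen():
        for _ in range(count):
            yield random.uniform(-10.0, 10.0)  # x
            yield random.uniform(-10.0, 10.0)  # y
            yield random.uniform(-10.0, 10.0)  # z
    demo = numpy.fromiter(demo_gen(), count=count * 3, dtype=numpy.float32)
    assert demo.shape == (count * 3,)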
| isc |
UdS-TelecommunicationsLab/SDNalytics | sdnalyzer/analyzer/topology.py | 1 | 2673 | # The MIT License (MIT)
#
# Copyright (c) 2015 Saarland University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# Contributor(s): Andreas Schmidt (Saarland University)
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This license applies to all parts of SDNalytics that are not externally
# maintained libraries.
import json
from task import AnalysisTask
from sdnalyzer.store import NodeSample, LinkSample
from sqlalchemy import func
class SimpleTopologyCentrality(AnalysisTask):
def __init__(self):
super(SimpleTopologyCentrality, self).__init__()
self.type = "TopologyCentrality"
def _analyze(self, session):
newest_timestamp = session.execute(func.max(NodeSample.sampled)).scalar()
self.nodes = session.query(NodeSample).filter(NodeSample.sampled == newest_timestamp).all()
newest_timestamp = session.execute(func.max(LinkSample.sampled)).scalar()
self.links = session.query(LinkSample).filter(LinkSample.sampled == newest_timestamp).all()
self.samples.add(newest_timestamp)
def _write_report(self, report):
content = {
"devices": {},
"links": {}
}
for node_sample in self.nodes:
content["devices"][node_sample.node.device_id] = {
"degree": node_sample.degree,
"betweenness": node_sample.betweenness,
"closeness": node_sample.closeness if node_sample.degree != 0 else 0
}
for link_sample in self.links:
link_id = self.generate_link_id(link_sample.link)
content["links"][link_id] = {"betweenness": link_sample.betweenness}
        report.content = json.dumps(content)
| mit |
hsaputra/tensorflow | tensorflow/python/estimator/model_fn.py | 21 | 12770 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and methods related to model_fn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.python.estimator.export.export_output import ExportOutput
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import nest
class ModeKeys(object):
"""Standard names for model modes.
The following standard keys are defined:
* `TRAIN`: training mode.
* `EVAL`: evaluation mode.
* `PREDICT`: inference mode.
"""
TRAIN = 'train'
EVAL = 'eval'
PREDICT = 'infer'
LOSS_METRIC_KEY = 'loss'
AVERAGE_LOSS_METRIC_KEY = 'average_loss'
class EstimatorSpec(
collections.namedtuple('EstimatorSpec', [
'mode', 'predictions', 'loss', 'train_op', 'eval_metric_ops',
'export_outputs', 'training_chief_hooks', 'training_hooks', 'scaffold',
'evaluation_hooks'
])):
"""Ops and objects returned from a `model_fn` and passed to an `Estimator`.
`EstimatorSpec` fully defines the model to be run by an `Estimator`.
"""
def __new__(cls,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metric_ops=None,
export_outputs=None,
training_chief_hooks=None,
training_hooks=None,
scaffold=None,
evaluation_hooks=None):
"""Creates a validated `EstimatorSpec` instance.
Depending on the value of `mode`, different arguments are required. Namely
* For `mode == ModeKeys.TRAIN`: required fields are `loss` and `train_op`.
* For `mode == ModeKeys.EVAL`: required field is `loss`.
* For `mode == ModeKeys.PREDICT`: required fields are `predictions`.
model_fn can populate all arguments independent of mode. In this case, some
arguments will be ignored by an `Estimator`. E.g. `train_op` will be
ignored in eval and infer modes. Example:
```python
def my_model_fn(mode, features, labels):
predictions = ...
loss = ...
train_op = ...
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Alternatively, model_fn can just populate the arguments appropriate to the
given mode. Example:
```python
def my_model_fn(mode, features, labels):
if (mode == tf.estimator.ModeKeys.TRAIN or
mode == tf.estimator.ModeKeys.EVAL):
loss = ...
else:
loss = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = ...
else:
train_op = None
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = ...
else:
predictions = None
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Args:
mode: A `ModeKeys`. Specifies if this is training, evaluation or
prediction.
predictions: Predictions `Tensor` or dict of `Tensor`.
loss: Training loss `Tensor`. Must be either scalar, or with shape `[1]`.
train_op: Op for the training step.
      eval_metric_ops: Dict of metric results keyed by name. The values of the
        dict are the results of calling a metric function, namely a
        `(metric_tensor, update_op)` tuple. `metric_tensor` should be evaluated
        without any impact on state (typically a pure computation based on
        variables). For example, it should not trigger the `update_op` or
        require any input fetching.
export_outputs: Describes the output signatures to be exported to
`SavedModel` and used during serving.
A dict `{name: output}` where:
* name: An arbitrary name for this output.
* output: an `ExportOutput` object such as `ClassificationOutput`,
`RegressionOutput`, or `PredictOutput`.
Single-headed models only need to specify one entry in this dictionary.
Multi-headed models should specify one entry for each head, one of
which must be named using
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.
training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to
run on the chief worker during training.
training_hooks: Iterable of `tf.train.SessionRunHook` objects to run
on all workers during training.
scaffold: A `tf.train.Scaffold` object that can be used to set
initialization, saver, and more to be used in training.
evaluation_hooks: Iterable of `tf.train.SessionRunHook` objects to
run during evaluation.
Returns:
A validated `EstimatorSpec` object.
Raises:
ValueError: If validation fails.
TypeError: If any of the arguments is not the expected type.
"""
# Validate train_op.
if train_op is None:
if mode == ModeKeys.TRAIN:
raise ValueError('Missing train_op.')
else:
_check_is_tensor_or_operation(train_op, 'train_op')
# Validate loss.
if loss is None:
if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
raise ValueError('Missing loss.')
else:
loss = _check_is_tensor(loss, 'loss')
loss_shape = loss.get_shape()
if loss_shape.num_elements() not in (None, 1):
raise ValueError('Loss must be scalar, given: {}'.format(loss))
if not loss_shape.is_compatible_with(tensor_shape.scalar()):
loss = array_ops.reshape(loss, [])
# Validate predictions.
if predictions is None:
if mode == ModeKeys.PREDICT:
raise ValueError('Missing predictions.')
predictions = {}
else:
if isinstance(predictions, dict):
predictions = {
k: _check_is_tensor(v, 'predictions[{}]'.format(k))
for k, v in six.iteritems(predictions)
}
else:
predictions = _check_is_tensor(predictions, 'predictions')
# Validate eval_metric_ops.
if eval_metric_ops is None:
eval_metric_ops = {}
else:
if not isinstance(eval_metric_ops, dict):
raise TypeError(
'eval_metric_ops must be a dict, given: {}'.format(eval_metric_ops))
for key, metric_value_and_update in six.iteritems(eval_metric_ops):
if (not isinstance(metric_value_and_update, tuple) or
len(metric_value_and_update) != 2):
raise TypeError(
'Values of eval_metric_ops must be (metric_value, update_op) '
'tuples, given: {} for key: {}'.format(
metric_value_and_update, key))
metric_value, metric_update = metric_value_and_update
for metric_value_member in nest.flatten(metric_value):
# Allow (possibly nested) tuples for metric values, but require that
# each of them be Tensors or Operations.
_check_is_tensor_or_operation(metric_value_member,
'eval_metric_ops[{}]'.format(key))
_check_is_tensor_or_operation(metric_update,
'eval_metric_ops[{}]'.format(key))
# Validate export_outputs.
if export_outputs is not None:
if not isinstance(export_outputs, dict):
raise TypeError('export_outputs must be dict, given: {}'.format(
export_outputs))
for v in six.itervalues(export_outputs):
if not isinstance(v, ExportOutput):
raise TypeError(
'Values in export_outputs must be ExportOutput objects. '
'Given: {}'.format(export_outputs))
# Note export_outputs is allowed to be empty.
if len(export_outputs) == 1:
(key, value), = export_outputs.items()
if key != signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
export_outputs[
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = value
if len(export_outputs) > 1:
if (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
not in export_outputs):
raise ValueError(
'Multiple export_outputs were provided, but none of them is '
'specified as the default. Do this by naming one of them with '
'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.')
# Validate that all tensors and ops are from the default graph.
default_graph = ops.get_default_graph()
# We enumerate possible error causes here to aid in debugging.
error_message_template = (
'{0} with "{1}" must be from the default graph. '
'Possible causes of this error include: \n\n'
'1) {0} was created outside the context of the default graph.'
'\n\n'
'2) The object passed through to EstimatorSpec was not created '
'in the most recent call to "model_fn".')
if isinstance(predictions, dict):
for key, value in six.iteritems(predictions):
if value.graph is not default_graph:
raise ValueError(error_message_template.format(
'prediction values',
'{0}: {1}'.format(key, value.name)))
elif predictions is not None:
# 'predictions' must be a single Tensor.
if predictions.graph is not default_graph:
raise ValueError(error_message_template.format(
'prediction values', predictions.name))
if loss is not None and loss.graph is not default_graph:
raise ValueError(error_message_template.format('loss', loss.name))
if train_op is not None and train_op.graph is not default_graph:
raise ValueError(error_message_template.format('train_op', train_op.name))
for key, value in list(six.iteritems(eval_metric_ops)):
values = nest.flatten(value)
for value in values:
if value.graph is not default_graph:
raise ValueError(error_message_template.format(
'eval_metric_ops',
'{0}: {1}'.format(key, value.name)))
# Validate hooks.
training_chief_hooks = tuple(training_chief_hooks or [])
training_hooks = tuple(training_hooks or [])
evaluation_hooks = tuple(evaluation_hooks or [])
for hook in training_hooks + training_chief_hooks + evaluation_hooks:
if not isinstance(hook, session_run_hook.SessionRunHook):
raise TypeError(
'All hooks must be SessionRunHook instances, given: {}'.format(
hook))
scaffold = scaffold or monitored_session.Scaffold()
# Validate scaffold.
if not isinstance(scaffold, monitored_session.Scaffold):
raise TypeError(
'scaffold must be tf.train.Scaffold. Given: {}'.format(scaffold))
return super(EstimatorSpec, cls).__new__(
cls,
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
export_outputs=export_outputs,
training_chief_hooks=training_chief_hooks,
training_hooks=training_hooks,
scaffold=scaffold,
evaluation_hooks=evaluation_hooks)
def _replace(self, **kwds):
"""Return a new EstimatorSpec replacing specified fields with new values."""
if 'mode' in kwds:
if self.mode != kwds['mode']:
raise ValueError('mode of EstimatorSpec cannot be changed.')
new_fields = map(kwds.pop, self._fields, list(self))
return EstimatorSpec(*new_fields)
def _check_is_tensor_or_operation(x, name):
if not (isinstance(x, ops.Operation) or isinstance(x, ops.Tensor)):
raise TypeError('{} must be Operation or Tensor, given: {}'.format(name, x))
def _check_is_tensor(x, tensor_name):
"""Returns `x` if it is a `Tensor`, raises TypeError otherwise."""
if not isinstance(x, ops.Tensor):
raise TypeError('{} must be Tensor, given: {}'.format(tensor_name, x))
return x
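# Illustrative sketch (assumes a TensorFlow 1.x runtime; not part of the
# original module): the minimal per-mode fields described in the
# EstimatorSpec docstring above.
#
#     import tensorflow as tf
#     logits = tf.placeholder(tf.float32, [None, 2])
#     spec = EstimatorSpec(mode=ModeKeys.PREDICT,
#                          predictions={'logits': logits})
#     assert 'logits' in spec.predictions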
| apache-2.0 |
jjs0sbw/CSPLN | apps/scaffolding/win/web2py/gluon/contrib/pyrtf/PropertySets.py | 43 | 14016 | """
PropertySets group common attributes together; each property set controls a specific part of the rendering.
PropertySets can be used in different elements of the document.
For example the FramePropertySet is used in paragraphs, tables, cells, etc.
The TextPropertySet can be used for text or in a Paragraph Style.
"""
from types import StringType
from copy import deepcopy
#
# We need some basic Type like fonts, colours and paper definitions
#
def MakeAttributeName( value ) :
assert value and type( value ) is StringType
value = value.replace( ' ', '' )
return value
class AttributedList( list ) :
def __init__( self, accepted_type=None ) :
super( AttributedList, self ).__init__()
self.AcceptedType = accepted_type
self._append = super( AttributedList, self ).append
def append( self, *values ) :
for value in values :
if self.AcceptedType : assert isinstance( value, self.AcceptedType )
self._append( value )
name = getattr( value, 'Name', None )
if name :
name = MakeAttributeName( value.Name )
setattr( self, name, value )
def __deepcopy__( self, memo ) :
result = self.__class__()
result.append( *self[:] )
return result
class Colour :
def __init__( self, name, red, green, blue ) :
self.SetName ( name )
self.SetRed ( red )
self.SetGreen( green )
self.SetBlue ( blue )
def SetName( self, value ) :
self.Name = value
return self
def SetRed( self, value ) :
self.Red = value
return self
def SetGreen( self, value ) :
self.Green = value
return self
def SetBlue( self, value ) :
self.Blue = value
return self
class Colours( AttributedList ) :
def __init__( self ) :
super( Colours, self ).__init__( Colour )
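# Illustrative only (not original code): AttributedList exposes each
# appended item as an attribute, with spaces stripped from its name by
# MakeAttributeName.
#
#     colours = Colours()
#     colours.append( Colour( 'Light Blue', 173, 216, 230 ) )
#     colours.LightBlue.Red        # -> 173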
class Font :
def __init__( self, name, family, character_set = 0, pitch = None, panose = None, alternate = None ) :
self.SetName ( name )
self.SetFamily ( family )
self.SetCharacterSet( character_set )
self.SetPitch ( pitch )
self.SetPanose ( panose )
self.SetAlternate ( alternate )
def SetName( self, value ) :
self.Name = value
return self
def SetFamily( self, value ) :
self.Family = value
return self
def SetCharacterSet( self, value ) :
self.CharacterSet = value
return self
def SetPitch( self, value ) :
self.Pitch = value
return self
def SetPanose( self, value ) :
self.Panose = value
return self
def SetAlternate( self, value ) :
self.Alternate = value
return self
class Fonts( AttributedList ) :
def __init__( self ) :
super( Fonts, self ).__init__( Font )
class Paper :
def __init__( self, name, code, description, width, height ) :
self.SetName ( name )
self.SetCode ( code )
self.SetDescription( description )
self.SetWidth ( width )
self.SetHeight ( height )
def SetName( self, value ) :
self.Name = value
return self
def SetCode( self, value ) :
self.Code = value
return self
def SetDescription( self, value ) :
self.Description = value
return self
def SetWidth( self, value ) :
self.Width = value
return self
def SetHeight( self, value ) :
self.Height = value
return self
class Papers( AttributedList ) :
def __init__( self ) :
super( Papers, self ).__init__( Paper )
#
# Then we have property sets which represent different aspects of Styles
#
class MarginsPropertySet :
def __init__( self, top=None, left=None, bottom=None, right=None ) :
self.SetTop ( top )
self.SetLeft ( left )
self.SetBottom( bottom )
self.SetRight ( right )
def SetTop( self, value ) :
self.Top = value
return self
def SetLeft( self, value ) :
self.Left = value
return self
def SetBottom( self, value ) :
self.Bottom = value
return self
def SetRight( self, value ) :
self.Right = value
return self
class ShadingPropertySet :
HORIZONTAL = 1
VERTICAL = 2
FORWARD_DIAGONAL = 3
BACKWARD_DIAGONAL = 4
VERTICAL_CROSS = 5
DIAGONAL_CROSS = 6
DARK_HORIZONTAL = 7
DARK_VERTICAL = 8
DARK_FORWARD_DIAGONAL = 9
DARK_BACKWARD_DIAGONAL = 10
DARK_VERTICAL_CROSS = 11
DARK_DIAGONAL_CROSS = 12
PATTERNS = [ HORIZONTAL,
VERTICAL,
FORWARD_DIAGONAL,
BACKWARD_DIAGONAL,
VERTICAL_CROSS,
DIAGONAL_CROSS,
DARK_HORIZONTAL,
DARK_VERTICAL,
DARK_FORWARD_DIAGONAL,
DARK_BACKWARD_DIAGONAL,
DARK_VERTICAL_CROSS,
DARK_DIAGONAL_CROSS ]
def __init__( self, shading=None, pattern=None, foreground=None, background=None ) :
self.SetShading ( shading )
self.SetForeground( foreground )
self.SetBackground( background )
self.SetPattern ( pattern )
    def __deepcopy__( self, memo ) :
        # keep the argument order in sync with __init__:
        # ( shading, pattern, foreground, background )
        return ShadingPropertySet( self.Shading,
                                   self.Pattern,
                                   self.Foreground,
                                   self.Background )
def SetShading( self, value ) :
self.Shading = value
return self
def SetPattern( self, value ) :
assert value is None or value in self.PATTERNS
self.Pattern = value
return self
def SetForeground( self, value ) :
assert not value or isinstance( value, Colour )
self.Foreground = value
return self
def SetBackground( self, value ) :
assert not value or isinstance( value, Colour )
self.Background = value
return self
class BorderPropertySet :
SINGLE = 1
DOUBLE = 2
SHADOWED = 3
DOUBLED = 4
DOTTED = 5
DASHED = 6
HAIRLINE = 7
STYLES = [ SINGLE, DOUBLE, SHADOWED, DOUBLED, DOTTED, DASHED, HAIRLINE ]
def __init__( self, width=None, style=None, colour=None, spacing=None ) :
self.SetWidth ( width )
self.SetStyle ( style or self.SINGLE )
self.SetColour ( colour )
self.SetSpacing( spacing )
def SetWidth( self, value ) :
self.Width = value
return self
def SetStyle( self, value ) :
assert value is None or value in self.STYLES
self.Style = value
return self
def SetColour( self, value ) :
assert value is None or isinstance( value, Colour )
self.Colour = value
return self
def SetSpacing( self, value ) :
self.Spacing = value
return self
class FramePropertySet :
def __init__( self, top=None, left=None, bottom=None, right=None ) :
self.SetTop ( top )
self.SetLeft ( left )
self.SetBottom( bottom )
self.SetRight ( right )
def SetTop( self, value ) :
assert value is None or isinstance( value, BorderPropertySet )
self.Top = value
return self
def SetLeft( self, value ) :
assert value is None or isinstance( value, BorderPropertySet )
self.Left = value
return self
def SetBottom( self, value ) :
assert value is None or isinstance( value, BorderPropertySet )
self.Bottom = value
return self
def SetRight( self, value ) :
assert value is None or isinstance( value, BorderPropertySet )
self.Right = value
return self
class TabPropertySet :
DEFAULT_WIDTH = 720
LEFT = 1
RIGHT = 2
CENTER = 3
DECIMAL = 4
ALIGNMENT = [ LEFT, RIGHT, CENTER, DECIMAL ]
DOTS = 1
HYPHENS = 2
UNDERLINE = 3
THICK_LINE = 4
EQUAL_SIGN = 5
LEADERS = [ DOTS, HYPHENS, UNDERLINE, THICK_LINE, EQUAL_SIGN ]
def __init__( self, width=None, alignment=None, leader=None ) :
self.SetWidth ( width )
self.SetAlignment( alignment or self.LEFT )
self.SetLeader ( leader )
def SetWidth( self, value ) :
self.Width = value
return self
def SetAlignment( self, value ) :
assert value in self.ALIGNMENT
self.Alignment = value
return self
def SetLeader( self, value ) :
assert not value or value in self.LEADERS
self.Leader = value
return self
class TextPropertySet :
def __init__( self, font=None, size=None, bold=None, italic=None, underline=None, colour=None, frame=None, expansion=None ) :
self.SetFont ( font )
self.SetSize ( size )
self.SetBold ( bold or False )
self.SetItalic ( italic or False )
self.SetUnderline ( underline or False )
self.SetColour( colour )
self.SetFrame ( frame )
self.SetStrikeThrough ( False )
self.SetDottedUnderline( False )
self.SetDoubleUnderline( False )
self.SetWordUnderline ( False )
self.SetExpansion ( expansion )
def Copy( self ) :
return deepcopy( self )
    def __deepcopy__( self, memo ) :
        # the font must remain a reference to the same font that we are looking at
        # so we want to stop the recursiveness at this point and return an object
        # with the right references.
        result = TextPropertySet( self.Font,
                                  self.Size,
                                  self.Bold,
                                  self.Italic,
                                  self.Underline,
                                  self.Colour,
                                  deepcopy( self.Frame, memo ),
                                  self.Expansion )
        # carry over the flags that the constructor does not accept
        result.SetStrikeThrough  ( self.StrikeThrough )
        result.SetDottedUnderline( self.DottedUnderline )
        result.SetDoubleUnderline( self.DoubleUnderline )
        result.SetWordUnderline  ( self.WordUnderline )
        return result
def SetFont( self, value ) :
assert not value or isinstance( value, Font )
self.Font = value
return self
def SetSize( self, value ) :
self.Size = value
return self
def SetBold( self, value ) :
self.Bold = False
if value : self.Bold = True
return self
def SetItalic( self, value ) :
self.Italic = False
if value : self.Italic = True
return self
def SetUnderline( self, value ) :
self.Underline = False
if value : self.Underline = True
return self
def SetColour( self, value ) :
assert value is None or isinstance( value, Colour )
self.Colour = value
return self
def SetFrame( self, value ) :
assert value is None or isinstance( value, BorderPropertySet )
self.Frame = value
return self
def SetStrikeThrough( self, value ) :
self.StrikeThrough = False
if value : self.StrikeThrough = True
return self
def SetDottedUnderline( self, value ) :
self.DottedUnderline = False
if value : self.DottedUnderline = True
return self
def SetDoubleUnderline( self, value ) :
self.DoubleUnderline = False
if value : self.DoubleUnderline = True
return self
def SetWordUnderline( self, value ) :
self.WordUnderline = False
if value : self.WordUnderline = True
return self
def SetExpansion( self, value ) :
self.Expansion = value
return self
class ParagraphPropertySet :
LEFT = 1
RIGHT = 2
CENTER = 3
JUSTIFY = 4
DISTRIBUTE = 5
ALIGNMENT = [ LEFT, RIGHT, CENTER, JUSTIFY, DISTRIBUTE ]
def __init__( self, alignment=None, space_before=None, space_after=None, tabs=None, first_line_indent=None, left_indent=None, right_indent=None, page_break_before=None ) :
self.SetAlignment ( alignment or self.LEFT )
self.SetSpaceBefore( space_before )
self.SetSpaceAfter ( space_after )
self.Tabs = []
        if tabs : self.SetTabs( *tabs )
self.SetFirstLineIndent( first_line_indent or None )
self.SetLeftIndent ( left_indent or None )
self.SetRightIndent ( right_indent or None )
self.SetPageBreakBefore( page_break_before )
self.SetSpaceBetweenLines( None )
def Copy( self ) :
return deepcopy( self )
def SetAlignment( self, value ) :
assert not value or value in self.ALIGNMENT
self.Alignment = value or self.LEFT
return self
def SetSpaceBefore( self, value ) :
self.SpaceBefore = value
return self
def SetSpaceAfter( self, value ) :
self.SpaceAfter = value
return self
def SetTabs( self, *params ) :
self.Tabs = params
return self
def SetFirstLineIndent( self, value ) :
self.FirstLineIndent = value
return self
def SetLeftIndent( self, value ) :
self.LeftIndent = value
return self
def SetRightIndent( self, value ) :
self.RightIndent = value
return self
def SetSpaceBetweenLines( self, value ) :
self.SpaceBetweenLines = value
return self
def SetPageBreakBefore( self, value ) :
self.PageBreakBefore = False
if value : self.PageBreakBefore = True
return self
# Some short cuts to make the code a bit easier to read
MarginsPS = MarginsPropertySet
ShadingPS = ShadingPropertySet
BorderPS = BorderPropertySet
FramePS = FramePropertySet
TabPS = TabPropertySet
TextPS = TextPropertySet
ParagraphPS = ParagraphPropertySet
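if __name__ == '__main__' :
    # Illustrative only (not part of the original module): compose a few of
    # the property sets above via the short-cut aliases.
    red = Colour( 'Red', 255, 0, 0 )
    border = BorderPS( width=20, style=BorderPS.SINGLE, colour=red )
    text = TextPS( size=24, bold=True, colour=red, frame=border )
    para = ParagraphPS( alignment=ParagraphPS.CENTER, space_before=60 )
    assert text.Bold and text.Frame is border
    assert para.Alignment == ParagraphPS.CENTER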
| gpl-3.0 |