repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values)
---|---|---|---|---|---|
ajgallegog/gem5_arm | src/mem/ruby/network/BasicLink.py | 53 | 2758 | # Copyright (c) 2011 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
# Brad Beckmann
from m5.params import *
from m5.SimObject import SimObject
class BasicLink(SimObject):
type = 'BasicLink'
cxx_header = "mem/ruby/network/BasicLink.hh"
link_id = Param.Int("ID in relation to other links")
latency = Param.Cycles(1, "latency")
# The following bandwidth factor does not translate to the same value for
# both the simple and Garnet models. For the most part, the bandwidth
# factor is the width of the link in bytes, except for certain situations
# with regard to the simple network.
bandwidth_factor = Param.Int("generic bandwidth factor, usually in bytes")
weight = Param.Int(1, "used to restrict routing in shortest path analysis")
class BasicExtLink(BasicLink):
type = 'BasicExtLink'
cxx_header = "mem/ruby/network/BasicLink.hh"
ext_node = Param.RubyController("External node")
int_node = Param.BasicRouter("ID of internal node")
bandwidth_factor = 16
class BasicIntLink(BasicLink):
type = 'BasicIntLink'
cxx_header = "mem/ruby/network/BasicLink.hh"
node_a = Param.BasicRouter("Router on one end")
node_b = Param.BasicRouter("Router on other end")
bandwidth_factor = 16
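# Illustrative sketch: one way these link classes might be instantiated from a
# Ruby topology script. The names `routers` and `cache_ctrl` are hypothetical
# placeholders for objects created elsewhere in the configuration.
def _example_links(routers, cache_ctrl):
    # External link from a controller to its attached router
    # (bandwidth_factor defaults to 16 as declared above).
    ext_link = BasicExtLink(link_id=0, ext_node=cache_ctrl, int_node=routers[0])
    # Internal link between two routers; weight restricts shortest-path routing.
    int_link = BasicIntLink(link_id=1, node_a=routers[0], node_b=routers[1], weight=1)
    return ext_link, int_link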
| bsd-3-clause |
miracle2k/graphql-relay-py | graphql_relay/node/tests/test_plural.py | 1 | 3850 | from collections import namedtuple
from graphql import graphql
from graphql.type import (
GraphQLSchema,
GraphQLObjectType,
GraphQLField,
GraphQLString,
)
from graphql_relay.node.plural import plural_identifying_root_field
userType = GraphQLObjectType(
'User',
fields=lambda: {
'username': GraphQLField(GraphQLString),
'url': GraphQLField(GraphQLString),
}
)
User = namedtuple('User', ['username', 'url'])
queryType = GraphQLObjectType(
'Query',
fields=lambda: {
'usernames': plural_identifying_root_field(
'usernames',
description='Map from a username to the user',
input_type=GraphQLString,
output_type=userType,
resolve_single_input=lambda info, username: User(
username=username,
url='www.facebook.com/' + username + '?lang=' + info.root_value.lang
)
)
}
)
class RootValue:
lang = 'en'
schema = GraphQLSchema(query=queryType)
def test_allows_fetching():
query = '''
{
usernames(usernames:["dschafer", "leebyron", "schrockn"]) {
username
url
}
}
'''
expected = {
'usernames': [
{
'username': 'dschafer',
'url': 'www.facebook.com/dschafer?lang=en'
},
{
'username': 'leebyron',
'url': 'www.facebook.com/leebyron?lang=en'
},
{
'username': 'schrockn',
'url': 'www.facebook.com/schrockn?lang=en'
},
]
}
result = graphql(schema, query, root=RootValue())
assert not result.errors
assert result.data == expected
def test_correctly_introspects():
query = '''
{
__schema {
queryType {
fields {
name
args {
name
type {
kind
ofType {
kind
ofType {
kind
ofType {
name
kind
}
}
}
}
}
type {
kind
ofType {
name
kind
}
}
}
}
}
}
'''
expected = {
'__schema': {
'queryType': {
'fields': [
{
'name': 'usernames',
'args': [
{
'name': 'usernames',
'type': {
'kind': 'NON_NULL',
'ofType': {
'kind': 'LIST',
'ofType': {
'kind': 'NON_NULL',
'ofType': {
'name': 'String',
'kind': 'SCALAR',
}
}
}
}
}
],
'type': {
'kind': 'LIST',
'ofType': {
'name': 'User',
'kind': 'OBJECT',
}
}
}
]
}
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
| mit |
tqchen/tvm | python/tvm/micro/debugger.py | 1 | 6484 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Defines functions for controlling debuggers for micro TVM binaries."""
import abc
import logging
import os
import signal
import subprocess
import threading
from . import transport as _transport
class Debugger(metaclass=abc.ABCMeta):
"""An interface for controlling micro TVM debuggers."""
def __init__(self):
self.on_terminate_callbacks = []
@abc.abstractmethod
def start(self):
"""Start the debugger, but do not block on it.
The runtime will continue to be driven in the background.
"""
raise NotImplementedError()
@abc.abstractmethod
def stop(self):
"""Terminate the debugger."""
raise NotImplementedError()
class GdbDebugger(Debugger):
"""Handles launching, suspending signals, and potentially dealing with terminal issues."""
@abc.abstractmethod
def popen_kwargs(self):
raise NotImplementedError()
def _wait_restore_signal(self):
self.popen.wait()
if not self.did_terminate.is_set():
for callback in self.on_terminate_callbacks:
try:
callback()
except Exception: # pylint: disable=broad-except
logging.warn("on_terminate_callback raised exception", exc_info=True)
def start(self):
kwargs = self.popen_kwargs()
self.did_terminate = threading.Event()
self.old_signal = signal.signal(signal.SIGINT, signal.SIG_IGN)
self.popen = subprocess.Popen(**kwargs)
threading.Thread(target=self._wait_restore_signal).start()
def stop(self):
self.did_terminate.set()
self.popen.terminate()
signal.signal(signal.SIGINT, self.old_signal)
class GdbTransportDebugger(GdbDebugger):
"""A debugger that uses a single GDB subprocess as both the transport and the debugger.
Opens pipes for the target's stdin and stdout, launches GDB and configures GDB's target
arguments to read and write from the pipes using /dev/fd.
"""
def __init__(self, args, **popen_kw):
super(GdbTransportDebugger, self).__init__()
self.args = args
self.popen_kw = popen_kw
def popen_kwargs(self):
stdin_read, stdin_write = os.pipe()
stdout_read, stdout_write = os.pipe()
os.set_inheritable(stdin_read, True)
os.set_inheritable(stdout_write, True)
sysname = os.uname()[0]
if sysname == "Darwin":
args = [
"lldb",
"-O",
f"target create {self.args[0]}",
"-O",
f"settings set target.input-path /dev/fd/{stdin_read}",
"-O",
f"settings set target.output-path /dev/fd/{stdout_write}",
]
if len(self.args) > 1:
args.extend(
["-O", "settings set target.run-args {}".format(" ".join(self.args[1:]))]
)
elif sysname == "Linux":
args = (
["gdb", "--args"] + self.args + [f"</dev/fd/{stdin_read}", f">/dev/fd/{stdout_write}"]
)
else:
raise NotImplementedError(f"System {sysname} is not yet supported")
self.stdin = os.fdopen(stdin_write, "wb", buffering=0)
self.stdout = os.fdopen(stdout_read, "rb", buffering=0)
return {
"args": args,
"pass_fds": [stdin_read, stdout_write],
}
def _wait_for_process_death(self):
self.popen.wait()
self.stdin.close()
self.stdout.close()
def start(self):
to_return = super(GdbTransportDebugger, self).start()
threading.Thread(target=self._wait_for_process_death, daemon=True).start()
return to_return
def stop(self):
self.stdin.close()
self.stdout.close()
super(GdbTransportDebugger, self).stop()
class _Transport(_transport.Transport):
def __init__(self, gdb_transport_debugger):
self.gdb_transport_debugger = gdb_transport_debugger
def open(self):
pass # Pipes opened by parent class.
def write(self, data):
return self.gdb_transport_debugger.stdin.write(data)
def read(self, n):
return self.gdb_transport_debugger.stdout.read(n)
def close(self):
pass # Pipes closed by parent class.
def transport(self):
return self._Transport(self)
class GdbRemoteDebugger(GdbDebugger):
"""A Debugger that invokes GDB and attaches to a remote GDBserver-based target."""
def __init__(
self, gdb_binary, remote_hostport, debug_binary, wrapping_context_manager=None, **popen_kw
):
super(GdbRemoteDebugger, self).__init__()
self.gdb_binary = gdb_binary
self.remote_hostport = remote_hostport
self.debug_binary = debug_binary
self.wrapping_context_manager = wrapping_context_manager
self.popen_kw = popen_kw
def popen_kwargs(self):
kwargs = {
"args": [
self.gdb_binary,
"-iex",
f"file {self.debug_binary}",
"-iex",
f"target remote {self.remote_hostport}",
],
}
kwargs.update(self.popen_kw)
return kwargs
def start(self):
if self.wrapping_context_manager is not None:
self.wrapping_context_manager.__enter__()
super(GdbRemoteDebugger, self).start()
def stop(self):
try:
super(GdbRemoteDebugger, self).stop()
finally:
if self.wrapping_context_manager is not None:
self.wrapping_context_manager.__exit__(None, None, None)
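# Illustrative sketch: one way a GdbRemoteDebugger might be driven. The gdb
# binary name, host:port and ELF path below are assumptions, not values taken
# from TVM itself.
def _example_remote_session():
    debugger = GdbRemoteDebugger(
        gdb_binary="arm-none-eabi-gdb",       # assumed cross-toolchain gdb
        remote_hostport="localhost:3333",     # assumed GDB server endpoint
        debug_binary="build/firmware.elf",    # assumed binary with debug symbols
    )
    debugger.start()
    try:
        pass  # drive the target here while GDB runs in the background
    finally:
        debugger.stop()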
| apache-2.0 |
nbenoit/bitcoin | contrib/seeds/makeseeds.py | 24 | 5717 | #!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 337600
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
"130.211.129.106", "178.63.107.226",
"83.81.130.26", "88.198.17.7", "148.251.238.178", "176.9.46.6",
"54.173.72.127", "54.174.10.182", "54.183.64.54", "54.194.231.211",
"54.66.214.167", "54.66.220.137", "54.67.33.14", "54.77.251.214",
"54.94.195.96", "54.94.200.247"
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/Satoshi:0.13.(1|2|99)/|/Satoshi:0.14.(0|1|2|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(ips):
'''Filter out hosts with more nodes per IP'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
# Sift out ips by type
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv4 by ASN
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
# Skip entries without a valid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
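# Illustrative sketch: a synthetic seeder dump line laid out the way parseline()
# expects its whitespace-separated fields (0: address, 2: last-success time,
# 7: 30-day uptime %, 8: blocks, 9: service flags in hex, 10: protocol version,
# 11: quoted user agent). The values themselves are made up.
def _example_parseline():
    line = ('1.2.3.4:8333 1 1500000000 0 0 0 0 100.00% 500000 '
            '00000001 70015 "/Satoshi:0.14.2/"')
    return parseline(line)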
| mit |
gylian/sickrage | sickbeard/notifiers/trakt.py | 1 | 9490 | # Author: Dieter Blomme <dieterblomme@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
from sickbeard import logger
from sickbeard.exceptions import ex
from lib.trakt import TraktAPI
from lib.trakt.exceptions import traktException, traktServerBusy, traktAuthException
class TraktNotifier:
"""
A "notifier" for trakt.tv which keeps track of what has and hasn't been added to your library.
"""
def notify_snatch(self, ep_name):
pass
def notify_download(self, ep_name):
pass
def notify_subtitle_download(self, ep_name, lang):
pass
def notify_git_update(self, new_version):
pass
def update_library(self, ep_obj):
"""
Sends a request to trakt indicating that the given episode is part of our library.
ep_obj: The TVEpisode object to add to trakt
"""
trakt_id = sickbeard.indexerApi(ep_obj.show.indexer).config['trakt_id']
trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
if sickbeard.USE_TRAKT:
try:
# URL parameters
data = {
'shows': [
{
'title': ep_obj.show.name,
'year': ep_obj.show.startyear,
'ids': {},
'seasons': [
{
'number': ep_obj.season,
'episodes': [
{
'number': ep_obj.episode
}
]
}
]
}
]
}
if trakt_id == 'tvdb_id':
data['shows'][0]['ids']['tvdb'] = ep_obj.show.indexerid
else:
data['shows'][0]['ids']['tvrage'] = ep_obj.show.indexerid
# update library
trakt_api.traktRequest("sync/collection", data, method='POST')
if sickbeard.TRAKT_REMOVE_SERIESLIST:
data = {
'shows': [
{
'title': ep_obj.show.name,
'year': ep_obj.show.startyear,
'ids': {}
}
]
}
if trakt_id == 'tvdb_id':
data['shows'][0]['ids']['tvdb'] = ep_obj.show.indexerid
else:
data['shows'][0]['ids']['tvrage'] = ep_obj.show.indexerid
trakt_api.traktRequest("sync/watchlist/remove", data, method='POST')
except (traktException, traktAuthException, traktServerBusy) as e:
logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
def update_watchlist (self, show_obj = None, s = None, e = None, data_show = None, data_episode = None, update = "add"):
"""
Sends a request to trakt indicating that the given episode is part of our library.
show_obj: The TVShow object to add to trakt
s: season number
e: episode number
data_show: structured object of shows traktv type
data_episode: structured object of episodes traktv type
update: type of action, either add or remove
"""
trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
if sickbeard.USE_TRAKT:
data = {}
try:
# URL parameters
if show_obj is not None:
trakt_id = sickbeard.indexerApi(show_obj.indexer).config['trakt_id']
data = {
'shows': [
{
'title': show_obj.name,
'year': show_obj.startyear,
'ids': {},
}
]
}
if trakt_id == 'tvdb_id':
data['shows'][0]['ids']['tvdb'] = show_obj.indexerid
else:
data['shows'][0]['ids']['tvrage'] = show_obj.indexerid
elif data_show is not None:
data.update(data_show)
else:
logger.log(u"There's a coding problem, contact the developer. At least one of data_show or show_obj must be provided", logger.WARNING)
return False
if data_episode is not None:
data['shows'][0].update(data_episode)
elif s is not None:
# traktv URL parameters
season = {
'season': [
{
'number': s,
}
]
}
if e is not None:
# traktv URL parameters
episode = {
'episodes': [
{
'number': e
}
]
}
season['season'][0].update(episode)
data['shows'][0].update(season)
trakt_url = "sync/watchlist"
if update=="remove":
trakt_url += "/remove"
trakt_api.traktRequest(trakt_url, data, method='POST')
except (traktException, traktAuthException, traktServerBusy) as e:
logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
return False
return True
def trakt_show_data_generate(self, data):
showList = []
for indexer, indexerid, title, year in data:
trakt_id = sickbeard.indexerApi(indexer).config['trakt_id']
show = {'title': title, 'year': year, 'ids': {}}
if trakt_id == 'tvdb_id':
show['ids']['tvdb'] = indexerid
else:
show['ids']['tvrage'] = indexerid
showList.append(show)
post_data = {'shows': showList}
return post_data
def trakt_episode_data_generate(self, data):
# Find how many unique season we have
uniqueSeasons = []
for season, episode in data:
if season not in uniqueSeasons:
uniqueSeasons.append(season)
#build the query
seasonsList = []
for searchedSeason in uniqueSeasons:
episodesList = []
for season, episode in data:
if season == searchedSeason:
episodesList.append({'number': episode})
seasonsList.append({'number': searchedSeason, 'episodes': episodesList})
post_data = {'seasons': seasonsList}
return post_data
def test_notify(self, username, password, disable_ssl, blacklist_name=None):
"""
Sends a test notification to trakt with the given authentication info and returns a boolean
representing success.
api: The api string to use
username: The username to use
password: The password to use
blacklist_name: slug of trakt list used to hide not interested show
Returns: True if the request succeeded, False otherwise
"""
try:
trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, username, password, disable_ssl, sickbeard.TRAKT_TIMEOUT)
trakt_api.validateAccount()
if blacklist_name and blacklist_name is not None:
trakt_lists = trakt_api.traktRequest("users/" + username + "/lists")
found = False
for trakt_list in trakt_lists:
if (trakt_list['ids']['slug'] == blacklist_name):
return "Test notice sent successfully to Trakt"
if not found:
return "Trakt blacklist doesn't exists"
else:
return "Test notice sent successfully to Trakt"
except (traktException, traktAuthException, traktServerBusy) as e:
logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
return "Test notice failed to Trakt: %s" % ex(e)
notifier = TraktNotifier
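# Illustrative sketch: the payload shape produced by the helper above for a few
# (season, episode) pairs, derived from trakt_episode_data_generate().
def _example_episode_payload():
    data = [(1, 1), (1, 2), (2, 3)]
    # Expected result:
    # {'seasons': [{'number': 1, 'episodes': [{'number': 1}, {'number': 2}]},
    #              {'number': 2, 'episodes': [{'number': 3}]}]}
    return TraktNotifier().trakt_episode_data_generate(data)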
| gpl-3.0 |
osharim/xhtml2pdf | test/pdfjoiner.py | 154 | 1447 | # -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__reversion__ = "$Revision: 20 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2007-10-09 12:58:24 +0200 (Di, 09 Okt 2007) $"
from sx.pisa3 import pisa
from sx.pisa3 import pisa_pdf
if __name__=="__main__":
pdf = pisa_pdf.pisaPDF()
subPdf = pisa.pisaDocument(
u"""
Hello <strong>World</strong>
""")
pdf.addDocument(subPdf)
raw = open("test-loremipsum.pdf", "rb").read()
pdf.addFromString(raw)
pdf.addFromURI("test-loremipsum.pdf")
pdf.addFromFile(open("test-loremipsum.pdf", "rb"))
datauri = pisa.makeDataURIFromFile("test-loremipsum.pdf")
pdf.addFromURI(datauri)
# Write the result to a file and open it
filename = __file__ + ".pdf"
result = pdf.getvalue()
open(filename, "wb").write(result)
pisa.startViewer(filename)
| apache-2.0 |
adiyengar/Spirit | spirit/core/utils/markdown/markdown.py | 8 | 1352 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import mistune
from .block import BlockLexer
from .inline import InlineLexer
from .renderer import Renderer
class Markdown(mistune.Markdown):
def __init__(self, renderer=None, **kwargs):
if renderer is None:
renderer = Renderer()
if kwargs.get('block', None) is None:
kwargs['block'] = BlockLexer
if kwargs.get('inline', None) is None:
kwargs['inline'] = InlineLexer
super(Markdown, self).__init__(renderer=renderer, **kwargs)
def render(self, text):
return super(Markdown, self).render(text).strip()
def get_mentions(self):
return self.inline.mentions
def parse_audio_link(self):
return self.renderer.audio_link(link=self.token['link'])
def parse_image_link(self):
return self.renderer.image_link(src=self.token['src'], title=self.token['title'], text=self.token['text'])
def parse_video_link(self):
return self.renderer.video_link(link=self.token['link'])
def parse_youtube(self):
return self.renderer.youtube(video_id=self.token['video_id'])
def parse_vimeo(self):
return self.renderer.vimeo(video_id=self.token['video_id'])
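# Illustrative sketch: rendering a snippet with the customised Markdown class
# defined above. The input string is only an example.
def _example_render():
    md = Markdown()  # defaults to Renderer, BlockLexer and InlineLexer
    html = md.render('*hello world*')
    return html, md.get_mentions()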
| mit |
spektom/incubator-airflow | airflow/providers/amazon/aws/sensors/sqs.py | 4 | 3705 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Reads and then deletes the message from SQS queue
"""
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.sqs import SQSHook
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils.decorators import apply_defaults
class SQSSensor(BaseSensorOperator):
"""
Get messages from an SQS queue and then deletes the message from the SQS queue.
If deletion of messages fails, an AirflowException is thrown; otherwise, the message
is pushed through XCom with the key ``messages``.
:param aws_conn_id: AWS connection id
:type aws_conn_id: str
:param sqs_queue: The SQS queue url (templated)
:type sqs_queue: str
:param max_messages: The maximum number of messages to retrieve for each poke (templated)
:type max_messages: int
:param wait_time_seconds: The time in seconds to wait for receiving messages (default: 1 second)
:type wait_time_seconds: int
"""
template_fields = ('sqs_queue', 'max_messages')
@apply_defaults
def __init__(self,
sqs_queue,
aws_conn_id='aws_default',
max_messages=5,
wait_time_seconds=1,
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.sqs_queue = sqs_queue
self.aws_conn_id = aws_conn_id
self.max_messages = max_messages
self.wait_time_seconds = wait_time_seconds
def poke(self, context):
"""
Check for message on subscribed queue and write to xcom the message with key ``messages``
:param context: the context object
:type context: dict
:return: ``True`` if message is available or ``False``
"""
sqs_hook = SQSHook(aws_conn_id=self.aws_conn_id)
sqs_conn = sqs_hook.get_conn()
self.log.info('SQSSensor checking for message on queue: %s', self.sqs_queue)
messages = sqs_conn.receive_message(QueueUrl=self.sqs_queue,
MaxNumberOfMessages=self.max_messages,
WaitTimeSeconds=self.wait_time_seconds)
self.log.info("received message %s", str(messages))
if 'Messages' in messages and messages['Messages']:
entries = [{'Id': message['MessageId'], 'ReceiptHandle': message['ReceiptHandle']}
for message in messages['Messages']]
result = sqs_conn.delete_message_batch(QueueUrl=self.sqs_queue,
Entries=entries)
if 'Successful' in result:
context['ti'].xcom_push(key='messages', value=messages)
return True
else:
raise AirflowException(
'Delete SQS Messages failed ' + str(result) + ' for messages ' + str(messages))
return False
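# Illustrative sketch: wiring the sensor into a DAG. The DAG id, queue URL and
# dates are placeholders, not values from the Airflow codebase.
def _example_dag():
    from datetime import datetime
    from airflow import DAG

    with DAG(dag_id="sqs_example", start_date=datetime(2020, 1, 1),
             schedule_interval=None) as dag:
        SQSSensor(
            task_id="wait_for_message",
            sqs_queue="https://sqs.us-east-1.amazonaws.com/123456789012/example-queue",
            aws_conn_id="aws_default",
            max_messages=5,
            wait_time_seconds=1,
        )
    return dag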
| apache-2.0 |
Danfocus/Flexget | flexget/utils/qualities.py | 2 | 17972 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from past.builtins import basestring
import re
import copy
import logging
log = logging.getLogger('utils.qualities')
class QualityComponent(object):
""""""
def __init__(self, type, value, name, regexp=None, modifier=None, defaults=None):
"""
:param type: Type of quality component. (resolution, source, codec, or audio)
:param value: Value used to sort this component with others of like type.
:param name: Canonical name for this quality component.
:param regexp: Regexps used to match this component.
:param modifier: An integer that affects sorting above all other components.
:param defaults: An iterable defining defaults for other quality components if this component matches.
"""
if type not in ['resolution', 'source', 'codec', 'audio']:
raise ValueError('%s is not a valid quality component type.' % type)
self.type = type
self.value = value
self.name = name
self.modifier = modifier
self.defaults = defaults or []
# compile regexp
if regexp is None:
regexp = re.escape(name)
self.regexp = re.compile('(?<![^\W_])(' + regexp + ')(?![^\W_])', re.IGNORECASE)
def matches(self, text):
"""Test if quality matches to text.
:param string text: data te be tested against
:returns: tuple (matches, remaining text without quality data)
"""
match = self.regexp.search(text)
if not match:
return False, ""
else:
# remove matching part from the text
text = text[: match.start()] + text[match.end() :]
return True, text
def __hash__(self):
return hash(self.type + str(self.value))
def __bool__(self):
return bool(self.value)
def __eq__(self, other):
if isinstance(other, basestring):
other = _registry.get(other)
if not isinstance(other, QualityComponent):
raise TypeError('Cannot compare %r and %r' % (self, other))
if other.type == self.type:
return self.value == other.value
else:
raise TypeError('Cannot compare %s and %s' % (self.type, other.type))
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
if isinstance(other, basestring):
other = _registry.get(other)
if not isinstance(other, QualityComponent):
raise TypeError('Cannot compare %r and %r' % (self, other))
if other.type == self.type:
return self.value < other.value
else:
raise TypeError('Cannot compare %s and %s' % (self.type, other.type))
def __ge__(self, other):
return not self.__lt__(other)
def __le__(self, other):
return self.__lt__(other) or self.__eq__(other)
def __gt__(self, other):
return not self.__le__(other)
def __add__(self, other):
if not isinstance(other, int):
raise TypeError()
l = globals().get('_' + self.type + 's')
index = l.index(self) + other
if index >= len(l):
index = -1
return l[index]
def __sub__(self, other):
if not isinstance(other, int):
raise TypeError()
l = globals().get('_' + self.type + 's')
index = l.index(self) - other
if index < 0:
index = 0
return l[index]
def __repr__(self):
return '<%s(name=%s,value=%s)>' % (self.type.title(), self.name, self.value)
def __str__(self):
return self.name
def __deepcopy__(self, memo=None):
# No mutable attributes, return a regular copy
return copy.copy(self)
_resolutions = [
QualityComponent('resolution', 10, '360p'),
QualityComponent('resolution', 20, '368p', '368p?'),
QualityComponent('resolution', 30, '480p', '480p?'),
QualityComponent('resolution', 40, '576p', '576p?'),
QualityComponent('resolution', 45, 'hr'),
QualityComponent('resolution', 50, '720i'),
QualityComponent('resolution', 60, '720p', '(1280x)?720(p|hd)?x?([56]0)?'),
QualityComponent('resolution', 70, '1080i'),
QualityComponent('resolution', 80, '1080p', '(1920x)?1080p?x?([56]0)?'),
QualityComponent('resolution', 90, '2160p', '((3840x)?2160p?x?([56]0)?)|4k'),
]
_sources = [
QualityComponent('source', 10, 'workprint', modifier=-8),
QualityComponent('source', 20, 'cam', '(?:hd)?cam', modifier=-7),
QualityComponent('source', 30, 'ts', '(?:hd)?ts|telesync', modifier=-6),
QualityComponent('source', 40, 'tc', 'tc|telecine', modifier=-5),
QualityComponent('source', 50, 'r5', 'r[2-8c]', modifier=-4),
QualityComponent('source', 60, 'hdrip', 'hd[\W_]?rip', modifier=-3),
QualityComponent('source', 70, 'ppvrip', 'ppv[\W_]?rip', modifier=-2),
QualityComponent('source', 80, 'preair', modifier=-1),
QualityComponent('source', 90, 'tvrip', 'tv[\W_]?rip'),
QualityComponent('source', 100, 'dsr', 'dsr|ds[\W_]?rip'),
QualityComponent('source', 110, 'sdtv', '(?:[sp]dtv|dvb)(?:[\W_]?rip)?'),
QualityComponent('source', 120, 'dvdscr', '(?:(?:dvd|web)[\W_]?)?scr(?:eener)?', modifier=0),
QualityComponent('source', 130, 'bdscr', 'bdscr(?:eener)?'),
QualityComponent('source', 140, 'webrip', 'web[\W_]?rip'),
QualityComponent('source', 150, 'hdtv', 'a?hdtv(?:[\W_]?rip)?'),
QualityComponent('source', 160, 'webdl', 'web(?:[\W_]?(dl|hd))?'),
QualityComponent('source', 170, 'dvdrip', 'dvd(?:[\W_]?rip)?'),
QualityComponent('source', 175, 'remux'),
QualityComponent('source', 180, 'bluray', '(?:b[dr][\W_]?rip|blu[\W_]?ray(?:[\W_]?rip)?)'),
]
_codecs = [
QualityComponent('codec', 10, 'divx'),
QualityComponent('codec', 20, 'xvid'),
QualityComponent('codec', 30, 'h264', '[hx].?264'),
QualityComponent('codec', 35, 'vp9'),
QualityComponent('codec', 40, 'h265', '[hx].?265|hevc'),
QualityComponent('codec', 50, '10bit', '10.?bit|hi10p'),
]
channels = '(?:(?:[^\w+]?[1-7][\W_]?(?:0|1|ch)))'
_audios = [
QualityComponent('audio', 10, 'mp3'),
# TODO: No idea what order these should go in or if we need different regexps
QualityComponent('audio', 20, 'aac', 'aac%s?' % channels),
QualityComponent('audio', 30, 'dd5.1', 'dd%s' % channels),
QualityComponent('audio', 40, 'ac3', 'ac3%s?' % channels),
QualityComponent('audio', 45, 'dd+5.1', 'dd[p+]%s' % channels),
QualityComponent('audio', 50, 'flac', 'flac%s?' % channels),
# The DTSs are a bit backwards, but the more specific one needs to be parsed first
QualityComponent('audio', 60, 'dtshd', 'dts[\W_]?hd(?:[\W_]?ma)?%s?' % channels),
QualityComponent('audio', 70, 'dts'),
QualityComponent('audio', 80, 'truehd', 'truehd%s?' % channels),
]
_UNKNOWNS = {
'resolution': QualityComponent('resolution', 0, 'unknown'),
'source': QualityComponent('source', 0, 'unknown'),
'codec': QualityComponent('codec', 0, 'unknown'),
'audio': QualityComponent('audio', 0, 'unknown'),
}
# For wiki generating help
'''for type in (_resolutions, _sources, _codecs, _audios):
print '{{{#!td style="vertical-align: top"'
for item in reversed(type):
print '- ' + item.name
print '}}}'
'''
_registry = {}
for items in (_resolutions, _sources, _codecs, _audios):
for item in items:
_registry[item.name] = item
def all_components():
return iter(_registry.values())
class Quality(object):
"""Parses and stores the quality of an entry in the four component categories."""
def __init__(self, text=''):
"""
:param text: A string to parse quality from
"""
self.text = text
self.clean_text = text
if text:
self.parse(text)
else:
self.resolution = _UNKNOWNS['resolution']
self.source = _UNKNOWNS['source']
self.codec = _UNKNOWNS['codec']
self.audio = _UNKNOWNS['audio']
def parse(self, text):
"""Parses a string to determine the quality in the four component categories.
:param text: The string to parse
"""
self.text = text
self.clean_text = text
self.resolution = self._find_best(_resolutions, _UNKNOWNS['resolution'], False)
self.source = self._find_best(_sources, _UNKNOWNS['source'])
self.codec = self._find_best(_codecs, _UNKNOWNS['codec'])
self.audio = self._find_best(_audios, _UNKNOWNS['audio'])
# If any of the matched components have defaults, set them now.
for component in self.components:
for default in component.defaults:
default = _registry[default]
if not getattr(self, default.type):
setattr(self, default.type, default)
def _find_best(self, qlist, default=None, strip_all=True):
"""Finds the highest matching quality component from `qlist`"""
result = None
search_in = self.clean_text
for item in qlist:
match = item.matches(search_in)
if match[0]:
result = item
self.clean_text = match[1]
if strip_all:
# In some cases we want to strip all found quality components,
# even though we're going to return only the last of them.
search_in = self.clean_text
if item.modifier is not None:
# If this item has a modifier, do not proceed to check higher qualities in the list
break
return result or default
@property
def name(self):
name = ' '.join(
str(p) for p in (self.resolution, self.source, self.codec, self.audio) if p.value != 0
)
return name or 'unknown'
@property
def components(self):
return [self.resolution, self.source, self.codec, self.audio]
@property
def _comparator(self):
modifier = sum(c.modifier for c in self.components if c.modifier)
return [modifier] + self.components
def __contains__(self, other):
if isinstance(other, basestring):
other = Quality(other)
if not other or not self:
return False
for cat in ('resolution', 'source', 'audio', 'codec'):
othercat = getattr(other, cat)
if othercat and othercat != getattr(self, cat):
return False
return True
def __bool__(self):
return any(self._comparator)
def __eq__(self, other):
if isinstance(other, basestring):
other = Quality(other)
if not isinstance(other, Quality):
if other is None:
return False
raise TypeError('Cannot compare %r and %r' % (self, other))
return self._comparator == other._comparator
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
if isinstance(other, basestring):
other = Quality(other)
if not isinstance(other, Quality):
raise TypeError('Cannot compare %r and %r' % (self, other))
return self._comparator < other._comparator
def __ge__(self, other):
return not self.__lt__(other)
def __le__(self, other):
return self.__lt__(other) or self.__eq__(other)
def __gt__(self, other):
return not self.__le__(other)
def __repr__(self):
return '<Quality(resolution=%s,source=%s,codec=%s,audio=%s)>' % (
self.resolution,
self.source,
self.codec,
self.audio,
)
def __str__(self):
return self.name
def __hash__(self):
# Make these usable as dict keys
return hash(self.name)
def get(quality_name):
"""Returns a quality object based on canonical quality name."""
found_components = {}
for part in quality_name.lower().split():
component = _registry.get(part)
if not component:
raise ValueError('`%s` is not a valid quality string' % part)
if component.type in found_components:
raise ValueError('`%s` cannot be defined twice in a quality' % component.type)
found_components[component.type] = component
if not found_components:
raise ValueError('No quality specified')
result = Quality()
for type, component in found_components.items():
setattr(result, type, component)
return result
class RequirementComponent(object):
"""Represents requirements for a given component type. Can evaluate whether a given QualityComponent
meets those requirements."""
def __init__(self, type):
self.type = type
self.reset()
def reset(self):
self.min = None
self.max = None
self.acceptable = set()
self.none_of = set()
def allows(self, comp, loose=False):
if comp.type != self.type:
raise TypeError('Cannot compare %r against %s' % (comp, self.type))
if comp in self.none_of:
return False
if loose:
return True
if comp in self.acceptable:
return True
if self.min or self.max:
if self.min and comp < self.min:
return False
if self.max and comp > self.max:
return False
return True
if not self.acceptable:
return True
return False
def add_requirement(self, text):
if '-' in text:
min, max = text.split('-')
min, max = _registry[min], _registry[max]
if min.type != max.type != self.type:
raise ValueError('Component type mismatch: %s' % text)
self.min, self.max = min, max
elif '|' in text:
quals = text.split('|')
quals = {_registry[qual] for qual in quals}
if any(qual.type != self.type for qual in quals):
raise ValueError('Component type mismatch: %s' % text)
self.acceptable |= quals
else:
qual = _registry[text.strip('!<>=+')]
if qual.type != self.type:
raise ValueError('Component type mismatch!')
if text in _registry:
self.acceptable.add(qual)
else:
if text[0] == '<':
if text[1] != '=':
qual -= 1
self.max = qual
elif text[0] == '>' or text.endswith('+'):
if text[1] != '=' and not text.endswith('+'):
qual += 1
self.min = qual
elif text[0] == '!':
self.none_of.add(qual)
def __eq__(self, other):
return (self.min, self.max, self.acceptable, self.none_of) == (
other.min,
other.max,
other.acceptable,
other.none_of,
)
def __hash__(self):
return hash(
tuple(
[self.min, self.max, tuple(sorted(self.acceptable)), tuple(sorted(self.none_of))]
)
)
class Requirements(object):
"""Represents requirements for allowable qualities. Can determine whether a given Quality passes requirements."""
def __init__(self, req=''):
self.text = ''
self.resolution = RequirementComponent('resolution')
self.source = RequirementComponent('source')
self.codec = RequirementComponent('codec')
self.audio = RequirementComponent('audio')
if req:
self.parse_requirements(req)
@property
def components(self):
return [self.resolution, self.source, self.codec, self.audio]
def parse_requirements(self, text):
"""
Parses a requirements string.
:param text: The string containing quality requirements.
"""
text = text.lower()
if self.text:
self.text += ' '
self.text += text
if self.text == 'any':
for component in self.components:
component.reset()
return
text = text.replace(',', ' ')
parts = text.split()
try:
for part in parts:
if '-' in part:
found = _registry[part.split('-')[0]]
elif '|' in part:
found = _registry[part.split('|')[0]]
else:
found = _registry[part.strip('!<>=+')]
for component in self.components:
if found.type == component.type:
component.add_requirement(part)
except KeyError as e:
raise ValueError('%s is not a valid quality component.' % e.args[0])
def allows(self, qual, loose=False):
"""Determine whether this set of requirements allows a given quality.
:param Quality qual: The quality to evaluate.
:param bool loose: If True, only ! (not) requirements will be enforced.
:rtype: bool
:returns: True if given quality passes all component requirements.
"""
if isinstance(qual, basestring):
qual = Quality(qual)
for r_component, q_component in zip(self.components, qual.components):
if not r_component.allows(q_component, loose=loose):
return False
return True
def __eq__(self, other):
if isinstance(other, str):
other = Requirements(other)
return self.components == other.components
def __hash__(self):
return hash(tuple(self.components))
def __str__(self):
return self.text or 'any'
def __repr__(self):
return '<Requirements(%s)>' % self
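# Illustrative sketch: parsing a release title into a Quality and checking it
# against a Requirements spec. The title string is only an example.
def _example_quality_check():
    qual = Quality('My.Show.S01E01.720p.HDTV.x264')
    reqs = Requirements('720p+ hdtv')
    # qual.name -> '720p hdtv h264'; reqs.allows(qual) -> True
    return qual.name, reqs.allows(qual)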
| mit |
gundalow/ansible | test/units/plugins/become/test_sudo.py | 11 | 1252 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2020 Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible import context
from ansible.plugins.loader import become_loader, shell_loader
def test_sudo(mocker, parser, reset_cli_args):
options = parser.parse_args([])
context._init_global_context(options)
sudo = become_loader.get('sudo')
sh = shell_loader.get('sh')
sh.executable = "/bin/bash"
sudo.set_options(direct={
'become_user': 'foo',
'become_flags': '-n -s -H',
})
cmd = sudo.build_become_command('/bin/foo', sh)
assert re.match(r"""sudo\s+-n -s -H\s+-u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd
sudo.set_options(direct={
'become_user': 'foo',
'become_flags': '-n -s -H',
'become_pass': 'testpass',
})
cmd = sudo.build_become_command('/bin/foo', sh)
assert re.match(r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd
| gpl-3.0 |
xukunfeng/ardupilot | mk/PX4/Tools/genmsg/src/genmsg/msgs.py | 161 | 12254 | # Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
"""
ROS msg library for Python
Implements: U{http://ros.org/wiki/msg}
"""
import os
import sys
from . base import InvalidMsgSpec, EXT_MSG, MSG_DIR, SEP, log
from . names import is_legal_resource_name, is_legal_resource_base_name, package_resource_name, resource_name
#TODOXXX: unit test
def bare_msg_type(msg_type):
"""
Compute the bare data type, e.g. for arrays, get the underlying array item type
:param msg_type: ROS msg type (e.g. 'std_msgs/String'), ``str``
:returns: base type, ``str``
"""
if msg_type is None:
return None
if '[' in msg_type:
return msg_type[:msg_type.find('[')]
return msg_type
def resolve_type(msg_type, package_context):
"""
Resolve type name based on current package context.
NOTE: in ROS Diamondback, 'Header' resolves to
'std_msgs/Header'. In previous releases, it resolves to
'roslib/Header' (REP 100).
e.g.::
resolve_type('String', 'std_msgs') -> 'std_msgs/String'
resolve_type('String[]', 'std_msgs') -> 'std_msgs/String[]'
resolve_type('std_msgs/String', 'foo') -> 'std_msgs/String'
resolve_type('uint16', 'std_msgs') -> 'uint16'
resolve_type('uint16[]', 'std_msgs') -> 'uint16[]'
"""
bt = bare_msg_type(msg_type)
if bt in BUILTIN_TYPES:
return msg_type
elif bt == HEADER:
return HEADER_FULL_NAME
elif SEP in msg_type:
return msg_type
else:
return "%s%s%s"%(package_context, SEP, msg_type)
#NOTE: this assumes that we aren't going to support multi-dimensional
def parse_type(msg_type):
"""
Parse ROS message field type
:param msg_type: ROS field type, ``str``
:returns: base_type, is_array, array_length, ``(str, bool, int)``
:raises: :exc:`ValueError` If *msg_type* cannot be parsed
"""
if not msg_type:
raise ValueError("Invalid empty type")
if '[' in msg_type:
var_length = msg_type.endswith('[]')
splits = msg_type.split('[')
if len(splits) > 2:
raise ValueError("Currently only support 1-dimensional array types: %s"%msg_type)
if var_length:
return msg_type[:-2], True, None
else:
try:
length = int(splits[1][:-1])
return splits[0], True, length
except ValueError:
raise ValueError("Invalid array dimension: [%s]"%splits[1][:-1])
else:
return msg_type, False, None
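# Illustrative examples, derived from the logic above, of how parse_type()
# decomposes field types.
def _example_parse_type():
    # [('uint16', False, None), ('uint16', True, None), ('uint16', True, 4)]
    return [parse_type(t) for t in ('uint16', 'uint16[]', 'uint16[4]')]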
################################################################################
# name validation
def is_valid_msg_type(x):
"""
:returns: True if the name is a syntatically legal message type name, ``bool``
"""
if not x or len(x) != len(x.strip()):
return False
base = bare_msg_type(x)
if not is_legal_resource_name(base):
return False
# parse array indices
x = x[len(base):]
state = 0
i = 0
for c in x:
if state == 0:
if c != '[':
return False
state = 1 #open
elif state == 1:
if c == ']':
state = 0 #closed
else:
try:
int(c)
except:
return False
return state == 0
def is_valid_constant_type(x):
"""
:returns: ``True`` if the name is a legal constant type. Only simple types are allowed, ``bool``
"""
return x in PRIMITIVE_TYPES
def is_valid_msg_field_name(x):
"""
:returns: ``True`` if the name is a syntatically legal message field name, ``bool``
"""
return is_legal_resource_base_name(x)
# msg spec representation ##########################################
class Constant(object):
"""
Container class for holding a Constant declaration
Attributes:
- ``type``
- ``name``
- ``val``
- ``val_text``
"""
__slots__ = ['type', 'name', 'val', 'val_text']
def __init__(self, type_, name, val, val_text):
"""
:param type_: constant type, ``str``
:param name: constant name, ``str``
:param val: constant value, ``str``
:param val_text: Original text definition of *val*, ``str``
"""
if type_ is None or name is None or val is None or val_text is None:
raise ValueError('Constant must have non-None parameters')
self.type = type_
self.name = name.strip() #names are always stripped of whitespace
self.val = val
self.val_text = val_text
def __eq__(self, other):
if not isinstance(other, Constant):
return False
return self.type == other.type and self.name == other.name and self.val == other.val
def __repr__(self):
return "%s %s=%s"%(self.type, self.name, self.val)
def __str__(self):
return "%s %s=%s"%(self.type, self.name, self.val)
class Field(object):
"""
Container class for storing information about a single field in a MsgSpec
Attributes:
- ``name``
- ``type``
- ``base_type``
- ``is_array``
- ``array_len``
- ``is_builtin``
- ``is_header``
"""
def __init__(self, name, type):
self.name = name
self.type = type
(self.base_type, self.is_array, self.array_len) = parse_type(type)
self.is_header = is_header_type(self.type)
self.is_builtin = is_builtin(self.base_type)
def __eq__(self, other):
if not isinstance(other, Field):
return False
else:
return self.name == other.name and \
self.type == other.type
def __repr__(self):
return "[%s, %s, %s, %s, %s]"%(self.name, self.type, self.base_type, self.is_array, self.array_len)
class MsgSpec(object):
"""
Container class for storing loaded msg description files. Field
types and names are stored in separate lists with 1-to-1
correspondence. MsgSpec can also return an md5 of the source text.
"""
def __init__(self, types, names, constants, text, full_name, package = '', short_name = ''):
"""
:param types: list of field types, in order of declaration, ``[str]]``
:param names: list of field names, in order of declaration, ``[str]]``
:param constants: List of :class:`Constant` declarations, ``[Constant]``
:param text: text of declaration, ``str``
:raises: :exc:`InvalidMsgSpec` If spec is invalid (e.g. fields with the same name)
"""
alt_package, alt_short_name = package_resource_name(full_name)
if not package:
package = alt_package
if not short_name:
short_name = alt_short_name
self.types = types
if len(set(names)) != len(names):
raise InvalidMsgSpec("Duplicate field names in message: %s"%names)
self.names = names
self.constants = constants
assert len(self.types) == len(self.names), "len(%s) != len(%s)"%(self.types, self.names)
#Header.msg support
if (len(self.types)):
self.header_present = self.types[0] == HEADER_FULL_NAME and self.names[0] == 'header'
else:
self.header_present = False
self.text = text
self.full_name = full_name
self.short_name = short_name
self.package = package
try:
self._parsed_fields = [Field(name, type) for (name, type) in zip(self.names, self.types)]
except ValueError as e:
raise InvalidMsgSpec("invalid field: %s"%(e))
def fields(self):
"""
:returns: zip list of types and names (e.g. [('int32', 'x'), ('int32', 'y')], ``[(str,str),]``
"""
return list(zip(self.types, self.names)) #py3k
def parsed_fields(self):
"""
:returns: list of :class:`Field` classes, ``[Field,]``
"""
return self._parsed_fields
def has_header(self):
"""
:returns: ``True`` if msg decription contains a 'Header header'
declaration at the beginning, ``bool``
"""
return self.header_present
def __eq__(self, other):
if not other or not isinstance(other, MsgSpec):
return False
return self.types == other.types and self.names == other.names and \
self.constants == other.constants and self.text == other.text and \
self.full_name == other.full_name and self.short_name == other.short_name and \
self.package == other.package
def __ne__(self, other):
if not other or not isinstance(other, MsgSpec):
return True
return not self.__eq__(other)
def __repr__(self):
if self.constants:
return "MsgSpec[%s, %s, %s]"%(repr(self.constants), repr(self.types), repr(self.names))
else:
return "MsgSpec[%s, %s]"%(repr(self.types), repr(self.names))
def __str__(self):
return self.text
# .msg file routines ##############################################################
# adjustable constants, in case we change our minds
HEADER = 'Header'
TIME = 'time'
DURATION = 'duration'
HEADER_FULL_NAME = 'std_msgs/Header'
def is_header_type(msg_type):
"""
:param msg_type: message type name, ``str``
:returns: ``True`` if *msg_type* refers to the ROS Header type, ``bool``
"""
# for backwards compatibility, include roslib/Header. REP 100
return msg_type in [HEADER, HEADER_FULL_NAME, 'roslib/Header']
# time and duration types are represented as aggregate data structures
# for the purposes of serialization from the perspective of
# roslib.msgs. genmsg_py will do additional special handling is required
# to convert them into rospy.msg.Time/Duration instances.
## time as msg spec. time is unsigned
TIME_MSG = "uint32 secs\nuint32 nsecs"
## duration as msg spec. duration is just like time except signed
DURATION_MSG = "int32 secs\nint32 nsecs"
## primitive types are those for which we allow constants, i.e. have primitive representation
PRIMITIVE_TYPES = ['int8','uint8','int16','uint16','int32','uint32','int64','uint64','float32','float64',
'string',
'bool',
# deprecated:
'char','byte']
BUILTIN_TYPES = PRIMITIVE_TYPES + [TIME, DURATION]
def is_builtin(msg_type_name):
"""
:param msg_type_name: name of message type, ``str``
:returns: True if msg_type_name is a builtin/primitive type, ``bool``
"""
return msg_type_name in BUILTIN_TYPES
| gpl-3.0 |
madprime/django_study_enrollment | users/views.py | 2 | 2215 | from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from registration import signals
from registration.models import RegistrationProfile
from registration.backends.default.views import RegistrationView as BaseRegistrationView
from registration.backends.default.views import ActivationView
from users.forms import RegistrationForm
from study_enrollment.settings import IS_ELIGIBLE_FLAG
from study_enrollment.mixins import ReqsMetMixin
from study_enrollment.models import UserEnrollment
class RegistrationView(BaseRegistrationView):
"""
A registration view with the following workflow:
1. User signs up with email as username, inactive account is created.
2. Email is sent to user with activation link.
3. User clicks activation link, account is now active.
"""
def register(self, request, **cleaned_data):
"""
Adapted from the django-registration default backend.
"""
email, password = cleaned_data['email'], cleaned_data['password1']
if Site._meta.installed:
site = Site.objects.get_current()
else:
site = RequestSite(request)
new_user = RegistrationProfile.objects.create_inactive_user(email, email,
password, site)
signals.user_registered.send(sender=self.__class__,
user=new_user,
request=request)
return new_user
class EnrollmentRegistrationView(ReqsMetMixin, RegistrationView):
"""
A customization of RegistrationView for use with study_enrollment
In this implementation, study requirements must be met before
the user is invited to create an account.
"""
def register(self, request, **cleaned_data):
new_user = super(EnrollmentRegistrationView, self).register(request, **cleaned_data)
# Get or create a new UserEnrollment and add is_eligible = True
user_enrollment, _ = UserEnrollment.objects.get_or_create(user=new_user)
user_enrollment.is_eligible = True
user_enrollment.save()
request.session.pop(IS_ELIGIBLE_FLAG)
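# Illustrative sketch: one way the view might be wired into a URLconf. The URL
# pattern and name are hypothetical, not taken from the project.
def _example_urlpatterns():
    from django.conf.urls import url
    return [
        url(r'^register/$',
            EnrollmentRegistrationView.as_view(form_class=RegistrationForm),
            name='registration_register'),
    ]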
| mit |
Zentyal/samba | third_party/pep8/testsuite/test_api.py | 36 | 15157 | # -*- coding: utf-8 -*-
import os.path
import shlex
import sys
import unittest
import pep8
from testsuite.support import ROOT_DIR, PseudoFile
E11 = os.path.join(ROOT_DIR, 'testsuite', 'E11.py')
class DummyChecker(object):
def __init__(self, tree, filename):
pass
def run(self):
if False:
yield
class APITestCase(unittest.TestCase):
"""Test the public methods."""
def setUp(self):
self._saved_stdout = sys.stdout
self._saved_stderr = sys.stderr
self._saved_checks = pep8._checks
sys.stdout = PseudoFile()
sys.stderr = PseudoFile()
pep8._checks = dict((k, dict((f, (vals[0][:], vals[1]))
for (f, vals) in v.items()))
for (k, v) in self._saved_checks.items())
def tearDown(self):
sys.stdout = self._saved_stdout
sys.stderr = self._saved_stderr
pep8._checks = self._saved_checks
def reset(self):
del sys.stdout[:], sys.stderr[:]
def test_register_physical_check(self):
def check_dummy(physical_line, line_number):
if False:
yield
pep8.register_check(check_dummy, ['Z001'])
self.assertTrue(check_dummy in pep8._checks['physical_line'])
codes, args = pep8._checks['physical_line'][check_dummy]
self.assertTrue('Z001' in codes)
self.assertEqual(args, ['physical_line', 'line_number'])
options = pep8.StyleGuide().options
self.assertTrue(any(func == check_dummy
for name, func, args in options.physical_checks))
def test_register_logical_check(self):
def check_dummy(logical_line, tokens):
if False:
yield
pep8.register_check(check_dummy, ['Z401'])
self.assertTrue(check_dummy in pep8._checks['logical_line'])
codes, args = pep8._checks['logical_line'][check_dummy]
self.assertTrue('Z401' in codes)
self.assertEqual(args, ['logical_line', 'tokens'])
pep8.register_check(check_dummy, [])
pep8.register_check(check_dummy, ['Z402', 'Z403'])
codes, args = pep8._checks['logical_line'][check_dummy]
self.assertEqual(codes, ['Z401', 'Z402', 'Z403'])
self.assertEqual(args, ['logical_line', 'tokens'])
options = pep8.StyleGuide().options
self.assertTrue(any(func == check_dummy
for name, func, args in options.logical_checks))
def test_register_ast_check(self):
pep8.register_check(DummyChecker, ['Z701'])
self.assertTrue(DummyChecker in pep8._checks['tree'])
codes, args = pep8._checks['tree'][DummyChecker]
self.assertTrue('Z701' in codes)
self.assertTrue(args is None)
options = pep8.StyleGuide().options
self.assertTrue(any(cls == DummyChecker
for name, cls, args in options.ast_checks))
def test_register_invalid_check(self):
class InvalidChecker(DummyChecker):
def __init__(self, filename):
pass
def check_dummy(logical, tokens):
if False:
yield
pep8.register_check(InvalidChecker, ['Z741'])
pep8.register_check(check_dummy, ['Z441'])
for checkers in pep8._checks.values():
self.assertTrue(DummyChecker not in checkers)
self.assertTrue(check_dummy not in checkers)
self.assertRaises(TypeError, pep8.register_check)
def test_styleguide(self):
report = pep8.StyleGuide().check_files()
self.assertEqual(report.total_errors, 0)
self.assertFalse(sys.stdout)
self.assertFalse(sys.stderr)
self.reset()
report = pep8.StyleGuide().check_files(['missing-file'])
stdout = sys.stdout.getvalue().splitlines()
self.assertEqual(len(stdout), report.total_errors)
self.assertEqual(report.total_errors, 1)
# < 3.3 returns IOError; >= 3.3 returns FileNotFoundError
self.assertTrue(stdout[0].startswith("missing-file:1:1: E902 "))
self.assertFalse(sys.stderr)
self.reset()
report = pep8.StyleGuide().check_files([E11])
stdout = sys.stdout.getvalue().splitlines()
self.assertEqual(len(stdout), report.total_errors)
self.assertEqual(report.total_errors, 17)
self.assertFalse(sys.stderr)
self.reset()
# Passing the paths in the constructor gives same result
report = pep8.StyleGuide(paths=[E11]).check_files()
stdout = sys.stdout.getvalue().splitlines()
self.assertEqual(len(stdout), report.total_errors)
self.assertEqual(report.total_errors, 17)
self.assertFalse(sys.stderr)
self.reset()
def test_styleguide_options(self):
        # Instantiate a simple checker
pep8style = pep8.StyleGuide(paths=[E11])
# Check style's attributes
self.assertEqual(pep8style.checker_class, pep8.Checker)
self.assertEqual(pep8style.paths, [E11])
self.assertEqual(pep8style.runner, pep8style.input_file)
self.assertEqual(pep8style.options.ignore_code, pep8style.ignore_code)
self.assertEqual(pep8style.options.paths, pep8style.paths)
# Check unset options
for o in ('benchmark', 'config', 'count', 'diff',
'doctest', 'quiet', 'show_pep8', 'show_source',
'statistics', 'testsuite', 'verbose'):
oval = getattr(pep8style.options, o)
self.assertTrue(oval in (None, False), msg='%s = %r' % (o, oval))
# Check default options
self.assertTrue(pep8style.options.repeat)
self.assertEqual(pep8style.options.benchmark_keys,
['directories', 'files',
'logical lines', 'physical lines'])
self.assertEqual(pep8style.options.exclude,
['.svn', 'CVS', '.bzr', '.hg',
'.git', '__pycache__', '.tox'])
self.assertEqual(pep8style.options.filename, ['*.py'])
self.assertEqual(pep8style.options.format, 'default')
self.assertEqual(pep8style.options.select, ())
self.assertEqual(pep8style.options.ignore, ('E226', 'E24'))
self.assertEqual(pep8style.options.max_line_length, 79)
def test_styleguide_ignore_code(self):
def parse_argv(argstring):
_saved_argv = sys.argv
sys.argv = shlex.split('pep8 %s /dev/null' % argstring)
try:
return pep8.StyleGuide(parse_argv=True)
finally:
sys.argv = _saved_argv
options = parse_argv('').options
self.assertEqual(options.select, ())
self.assertEqual(
options.ignore,
('E121', 'E123', 'E126', 'E226', 'E24', 'E704')
)
options = parse_argv('--doctest').options
self.assertEqual(options.select, ())
self.assertEqual(options.ignore, ())
options = parse_argv('--ignore E,W').options
self.assertEqual(options.select, ())
self.assertEqual(options.ignore, ('E', 'W'))
options = parse_argv('--select E,W').options
self.assertEqual(options.select, ('E', 'W'))
self.assertEqual(options.ignore, ('',))
options = parse_argv('--select E --ignore E24').options
self.assertEqual(options.select, ('E',))
self.assertEqual(options.ignore, ('',))
options = parse_argv('--ignore E --select E24').options
self.assertEqual(options.select, ('E24',))
self.assertEqual(options.ignore, ('',))
options = parse_argv('--ignore W --select E24').options
self.assertEqual(options.select, ('E24',))
self.assertEqual(options.ignore, ('',))
pep8style = pep8.StyleGuide(paths=[E11])
self.assertFalse(pep8style.ignore_code('E112'))
self.assertFalse(pep8style.ignore_code('W191'))
self.assertTrue(pep8style.ignore_code('E241'))
pep8style = pep8.StyleGuide(select='E', paths=[E11])
self.assertFalse(pep8style.ignore_code('E112'))
self.assertTrue(pep8style.ignore_code('W191'))
self.assertFalse(pep8style.ignore_code('E241'))
pep8style = pep8.StyleGuide(select='W', paths=[E11])
self.assertTrue(pep8style.ignore_code('E112'))
self.assertFalse(pep8style.ignore_code('W191'))
self.assertTrue(pep8style.ignore_code('E241'))
pep8style = pep8.StyleGuide(select=('F401',), paths=[E11])
self.assertEqual(pep8style.options.select, ('F401',))
self.assertEqual(pep8style.options.ignore, ('',))
self.assertFalse(pep8style.ignore_code('F'))
self.assertFalse(pep8style.ignore_code('F401'))
self.assertTrue(pep8style.ignore_code('F402'))
def test_styleguide_excluded(self):
pep8style = pep8.StyleGuide(paths=[E11])
self.assertFalse(pep8style.excluded('./foo/bar'))
self.assertFalse(pep8style.excluded('./foo/bar/main.py'))
self.assertTrue(pep8style.excluded('./CVS'))
self.assertTrue(pep8style.excluded('./.tox'))
self.assertTrue(pep8style.excluded('./subdir/CVS'))
self.assertTrue(pep8style.excluded('__pycache__'))
self.assertTrue(pep8style.excluded('./__pycache__'))
self.assertTrue(pep8style.excluded('subdir/__pycache__'))
self.assertFalse(pep8style.excluded('draftCVS'))
self.assertFalse(pep8style.excluded('./CVSoup'))
self.assertFalse(pep8style.excluded('./CVS/subdir'))
def test_styleguide_checks(self):
pep8style = pep8.StyleGuide(paths=[E11])
# Default lists of checkers
self.assertTrue(len(pep8style.options.physical_checks) > 4)
self.assertTrue(len(pep8style.options.logical_checks) > 10)
self.assertEqual(len(pep8style.options.ast_checks), 0)
# Sanity check
for name, check, args in pep8style.options.physical_checks:
self.assertEqual(check.__name__, name)
self.assertEqual(args[0], 'physical_line')
for name, check, args in pep8style.options.logical_checks:
self.assertEqual(check.__name__, name)
self.assertEqual(args[0], 'logical_line')
# Do run E11 checks
options = pep8.StyleGuide().options
self.assertTrue(any(func == pep8.indentation
for name, func, args in options.logical_checks))
options = pep8.StyleGuide(select=['E']).options
self.assertTrue(any(func == pep8.indentation
for name, func, args in options.logical_checks))
options = pep8.StyleGuide(ignore=['W']).options
self.assertTrue(any(func == pep8.indentation
for name, func, args in options.logical_checks))
options = pep8.StyleGuide(ignore=['E12']).options
self.assertTrue(any(func == pep8.indentation
for name, func, args in options.logical_checks))
# Do not run E11 checks
options = pep8.StyleGuide(select=['W']).options
self.assertFalse(any(func == pep8.indentation
for name, func, args in options.logical_checks))
options = pep8.StyleGuide(ignore=['E']).options
self.assertFalse(any(func == pep8.indentation
for name, func, args in options.logical_checks))
options = pep8.StyleGuide(ignore=['E11']).options
self.assertFalse(any(func == pep8.indentation
for name, func, args in options.logical_checks))
def test_styleguide_init_report(self):
pep8style = pep8.StyleGuide(paths=[E11])
self.assertEqual(pep8style.options.reporter, pep8.StandardReport)
self.assertEqual(type(pep8style.options.report), pep8.StandardReport)
class MinorityReport(pep8.BaseReport):
pass
report = pep8style.init_report(MinorityReport)
self.assertEqual(pep8style.options.report, report)
self.assertEqual(type(report), MinorityReport)
pep8style = pep8.StyleGuide(paths=[E11], reporter=MinorityReport)
self.assertEqual(type(pep8style.options.report), MinorityReport)
self.assertEqual(pep8style.options.reporter, MinorityReport)
def test_styleguide_check_files(self):
pep8style = pep8.StyleGuide(paths=[E11])
report = pep8style.check_files()
self.assertTrue(report.total_errors)
self.assertRaises(TypeError, pep8style.check_files, 42)
# < 3.3 raises TypeError; >= 3.3 raises AttributeError
self.assertRaises(Exception, pep8style.check_files, [42])
def test_check_unicode(self):
# Do not crash if lines are Unicode (Python 2.x)
pep8.register_check(DummyChecker, ['Z701'])
source = '#\n'
if hasattr(source, 'decode'):
source = source.decode('ascii')
pep8style = pep8.StyleGuide()
count_errors = pep8style.input_file('stdin', lines=[source])
self.assertFalse(sys.stdout)
self.assertFalse(sys.stderr)
self.assertEqual(count_errors, 0)
def test_check_nullbytes(self):
pep8.register_check(DummyChecker, ['Z701'])
pep8style = pep8.StyleGuide()
count_errors = pep8style.input_file('stdin', lines=['\x00\n'])
stdout = sys.stdout.getvalue()
if 'SyntaxError' in stdout:
# PyPy 2.2 returns a SyntaxError
expected = "stdin:1:2: E901 SyntaxError"
else:
expected = "stdin:1:1: E901 TypeError"
self.assertTrue(stdout.startswith(expected),
msg='Output %r does not start with %r' %
(stdout, expected))
self.assertFalse(sys.stderr)
self.assertEqual(count_errors, 1)
def test_styleguide_unmatched_triple_quotes(self):
pep8.register_check(DummyChecker, ['Z701'])
lines = [
'def foo():\n',
' """test docstring""\'\n',
]
pep8style = pep8.StyleGuide()
pep8style.input_file('stdin', lines=lines)
stdout = sys.stdout.getvalue()
expected = 'stdin:2:5: E901 TokenError: EOF in multi-line string'
self.assertTrue(expected in stdout)
def test_styleguide_continuation_line_outdented(self):
pep8.register_check(DummyChecker, ['Z701'])
lines = [
'def foo():\n',
' pass\n',
'\n',
'\\\n',
'\n',
'def bar():\n',
' pass\n',
]
pep8style = pep8.StyleGuide()
count_errors = pep8style.input_file('stdin', lines=lines)
self.assertEqual(count_errors, 2)
stdout = sys.stdout.getvalue()
expected = (
'stdin:6:1: '
'E122 continuation line missing indentation or outdented'
)
self.assertTrue(expected in stdout)
expected = 'stdin:6:1: E302 expected 2 blank lines, found 1'
self.assertTrue(expected in stdout)
# TODO: runner
# TODO: input_file
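# Usage sketch: the same public API exercised by the tests above can be driven
# directly outside the unittest machinery. The file path is a placeholder.
#
#     import pep8
#     style = pep8.StyleGuide(ignore=['E501'])
#     report = style.check_files(['some_module.py'])
#     print(report.total_errors)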
| gpl-3.0 |
dbmi-pitt/DIKB-Micropublication | scripts/mp-scripts/Bio/PopGen/Async/Local.py | 5 | 2004 | # Copyright 2007 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
'''
Asynchronous local execution.
Supports multicore architectures.
'''
from Bio.PopGen.Async import Async, FileRetriever
import thread
class Local(Async):
'''Execution on Local machine.
'''
def __init__(self, num_cores = 1):
'''Constructor.
parameters:
num_cores - Number of cores (for multiprocessor machines,
multiply accordingly)
'''
Async.__init__(self)
self.num_cores = num_cores
self.cores_used = 0
def _run_program(self, id, hook, parameters, input_files):
'''Run program.
For parameters, please check Async.run_program.
Either runs a program if a core is available or
schedules it.
'''
self.access_ds.acquire()
self.waiting.append((id, hook, parameters, input_files))
if self.cores_used < self.num_cores:
self.cores_used += 1
thread.start_new_thread(self.start_work, ())
self.access_ds.release()
def start_work(self):
'''Starts work.
Thread initial point.
While there are tasks to be done, runs them.
The thread dies as soon as there is nothing waiting to be
executed.
'''
self.access_ds.acquire()
while (len(self.waiting) > 0):
id, hook, parameters, input_files = self.waiting[0]
del self.waiting[0]
self.running[id] = True
self.access_ds.release()
ret_code, output_files = hook.run_job(parameters, input_files)
self.access_ds.acquire()
del self.running[id]
self.done[id] = ret_code, output_files
self.cores_used -= 1
self.access_ds.release()
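# Usage sketch (illustrative): a Local executor is created with the number of
# cores to use; jobs are submitted through the run_program() interface of the
# Async base class (see Bio.PopGen.Async), which ends up in _run_program()
# above. The hook object is anything providing run_job(parameters, input_files).
#
#     executor = Local(num_cores=2)
#     # submit jobs via the Async interface; finished jobs show up in
#     # executor.done keyed by job id, as (ret_code, output_files) tuples.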
| apache-2.0 |
Fat-Zer/FreeCAD_sf_master | src/Mod/Fem/femsolver/elmer/equations/linear.py | 12 | 4560 | # ***************************************************************************
# * Copyright (c) 2017 Markus Hovorka <m.hovorka@live.de> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD FEM solver Elmer equation object _Linear"
__author__ = "Markus Hovorka"
__url__ = "https://www.freecadweb.org"
## \addtogroup FEM
# @{
from . import equation
# the linear equation object defines some attributes for some various elmer equations
# these various elmer equations are based on the linear equation object
# thus in ObjectsFem module is no method to add a linear equation object
LINEAR_SOLVER = ["Direct", "Iterative"]
LINEAR_DIRECT = ["Banded", "umfpack"]
LINEAR_ITERATIVE = [
"CG",
"CGS",
"BiCGStab",
"BiCGStabl",
"TFQMR",
"GMRES",
"GCR",
]
LINEAR_PRECONDITIONING = [
"None",
"Diagonal",
"ILU0",
"ILU1",
"ILU2",
"ILU3",
"ILU4",
]
class Proxy(equation.Proxy):
def __init__(self, obj):
super(Proxy, self).__init__(obj)
obj.addProperty(
"App::PropertyEnumeration",
"LinearSolverType",
"Linear System",
""
)
obj.LinearSolverType = LINEAR_SOLVER
obj.LinearSolverType = "Iterative"
obj.addProperty(
"App::PropertyEnumeration",
"LinearDirectMethod",
"Linear System",
""
)
obj.LinearDirectMethod = LINEAR_DIRECT
obj.addProperty(
"App::PropertyEnumeration",
"LinearIterativeMethod",
"Linear System",
""
)
obj.LinearIterativeMethod = LINEAR_ITERATIVE
obj.LinearIterativeMethod = "BiCGStab"
obj.addProperty(
"App::PropertyInteger",
"BiCGstablDegree",
"Linear System",
""
)
obj.addProperty(
"App::PropertyEnumeration",
"LinearPreconditioning",
"Linear System",
""
)
obj.LinearPreconditioning = LINEAR_PRECONDITIONING
obj.LinearPreconditioning = "ILU0"
obj.addProperty(
"App::PropertyFloat",
"LinearTolerance",
"Linear System",
""
)
obj.LinearTolerance = 1e-8
obj.addProperty(
"App::PropertyInteger",
"LinearIterations",
"Linear System",
""
)
obj.LinearIterations = 500
obj.addProperty(
"App::PropertyFloat",
"SteadyStateTolerance",
"Steady State",
""
)
obj.SteadyStateTolerance = 1e-5
obj.addProperty(
"App::PropertyBool",
"Stabilize",
"Base",
""
)
obj.Stabilize = True
obj.addProperty(
"App::PropertyBool",
"Bubbles",
"Base",
""
)
obj.Bubbles = False
class ViewProxy(equation.ViewProxy):
pass
## @}
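# Usage sketch (illustrative): equation objects derived from this Proxy expose
# the properties added above, so they can be tuned from the FreeCAD Python
# console. 'eq' stands for any such derived Elmer equation object; it is not
# created by this module.
#
#     eq.LinearSolverType = "Iterative"
#     eq.LinearIterativeMethod = "GCR"
#     eq.LinearPreconditioning = "ILU1"
#     eq.LinearTolerance = 1e-10
#     eq.LinearIterations = 1000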
| lgpl-2.1 |
hynnet/openwrt-mt7620 | staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/lib-tk/FixTk.py | 96 | 2938 | import sys, os
# Delay import _tkinter until we have set TCL_LIBRARY,
# so that Tcl_FindExecutable has a chance to locate its
# encoding directory.
# Unfortunately, we cannot know the TCL_LIBRARY directory
# if we don't know the tcl version, which we cannot find out
# without import Tcl. Fortunately, Tcl will itself look in
# <TCL_LIBRARY>\..\tcl<TCL_VERSION>, so anything close to
# the real Tcl library will do.
# Expand symbolic links on Vista
try:
import ctypes
ctypes.windll.kernel32.GetFinalPathNameByHandleW
except (ImportError, AttributeError):
def convert_path(s):
return s
else:
def convert_path(s):
assert isinstance(s, str) # sys.prefix contains only bytes
udir = s.decode("mbcs")
hdir = ctypes.windll.kernel32.\
CreateFileW(udir, 0x80, # FILE_READ_ATTRIBUTES
1, # FILE_SHARE_READ
None, 3, # OPEN_EXISTING
0x02000000, # FILE_FLAG_BACKUP_SEMANTICS
None)
if hdir == -1:
# Cannot open directory, give up
return s
buf = ctypes.create_unicode_buffer(u"", 32768)
res = ctypes.windll.kernel32.\
GetFinalPathNameByHandleW(hdir, buf, len(buf),
0) # VOLUME_NAME_DOS
ctypes.windll.kernel32.CloseHandle(hdir)
if res == 0:
# Conversion failed (e.g. network location)
return s
s = buf[:res].encode("mbcs")
# Ignore leading \\?\
if s.startswith("\\\\?\\"):
s = s[4:]
if s.startswith("UNC"):
s = "\\" + s[3:]
return s
prefix = os.path.join(sys.prefix,"tcl")
if not os.path.exists(prefix):
# devdir/../tcltk/lib
prefix = os.path.join(sys.prefix, os.path.pardir, "tcltk", "lib")
prefix = os.path.abspath(prefix)
# if this does not exist, no further search is needed
if os.path.exists(prefix):
prefix = convert_path(prefix)
if "TCL_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tcl"):
tcldir = os.path.join(prefix,name)
if os.path.isdir(tcldir):
os.environ["TCL_LIBRARY"] = tcldir
# Compute TK_LIBRARY, knowing that it has the same version
# as Tcl
import _tkinter
ver = str(_tkinter.TCL_VERSION)
if "TK_LIBRARY" not in os.environ:
v = os.path.join(prefix, 'tk'+ver)
if os.path.exists(os.path.join(v, "tclIndex")):
os.environ['TK_LIBRARY'] = v
# We don't know the Tix version, so we must search the entire
# directory
if "TIX_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tix"):
tixdir = os.path.join(prefix,name)
if os.path.isdir(tixdir):
os.environ["TIX_LIBRARY"] = tixdir
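# Usage sketch: this module is imported only for its side effects. Importing it
# before Tkinter/_tkinter gives Tcl a chance to locate its library directories
# via the environment variables set above.
#
#     import FixTk   # sets TCL_LIBRARY / TK_LIBRARY / TIX_LIBRARY if needed
#     import Tkinter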
| gpl-2.0 |
jostep/tensorflow | tensorflow/python/estimator/canned/metric_keys.py | 48 | 1578 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Enum for model prediction keys."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.estimator import model_fn
class MetricKeys(object):
"""Metric key strings."""
LOSS = model_fn.LOSS_METRIC_KEY
LOSS_MEAN = model_fn.AVERAGE_LOSS_METRIC_KEY
ACCURACY = 'accuracy'
# This is the best the model could do by always predicting one class.
# Should be < ACCURACY in a trained model.
ACCURACY_BASELINE = 'accuracy_baseline'
AUC = 'auc'
AUC_PR = 'auc_precision_recall'
LABEL_MEAN = 'label/mean'
PREDICTION_MEAN = 'prediction/mean'
# The following require a threshold applied, should be float in range (0, 1).
ACCURACY_AT_THRESHOLD = 'accuracy/positive_threshold_%g'
PRECISION_AT_THRESHOLD = 'precision/positive_threshold_%g'
RECALL_AT_THRESHOLD = 'recall/positive_threshold_%g'
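# Usage sketch (illustrative): these keys index the metrics dict produced when
# evaluating a canned estimator; the threshold-based keys are format strings.
# The 'metrics' dict below is a placeholder for such an evaluation result.
#
#     # metrics[MetricKeys.ACCURACY]
#     # metrics[MetricKeys.PRECISION_AT_THRESHOLD % 0.5]
#     # -> looks up 'precision/positive_threshold_0.5'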
| apache-2.0 |
fxia22/ASM_xf | PythonD/lib/python2.4/UserString.py | 13 | 7963 | #!/usr/bin/env python
## vim:ts=4:et:nowrap
"""A user-defined wrapper around string objects
Note: string objects have grown methods in Python 1.6
This module requires Python 1.6 or later.
"""
from types import StringTypes
import sys
__all__ = ["UserString","MutableString"]
class UserString:
def __init__(self, seq):
if isinstance(seq, StringTypes):
self.data = seq
elif isinstance(seq, UserString):
self.data = seq.data[:]
else:
self.data = str(seq)
def __str__(self): return str(self.data)
def __repr__(self): return repr(self.data)
def __int__(self): return int(self.data)
def __long__(self): return long(self.data)
def __float__(self): return float(self.data)
def __complex__(self): return complex(self.data)
def __hash__(self): return hash(self.data)
def __cmp__(self, string):
if isinstance(string, UserString):
return cmp(self.data, string.data)
else:
return cmp(self.data, string)
def __contains__(self, char):
return char in self.data
def __len__(self): return len(self.data)
def __getitem__(self, index): return self.__class__(self.data[index])
def __getslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
return self.__class__(self.data[start:end])
def __add__(self, other):
if isinstance(other, UserString):
return self.__class__(self.data + other.data)
elif isinstance(other, StringTypes):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + str(other))
def __radd__(self, other):
if isinstance(other, StringTypes):
return self.__class__(other + self.data)
else:
return self.__class__(str(other) + self.data)
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
def __mod__(self, args):
return self.__class__(self.data % args)
# the following methods are defined in alphabetical order:
def capitalize(self): return self.__class__(self.data.capitalize())
def center(self, width, *args):
return self.__class__(self.data.center(width, *args))
def count(self, sub, start=0, end=sys.maxint):
return self.data.count(sub, start, end)
def decode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.decode(encoding, errors))
else:
return self.__class__(self.data.decode(encoding))
else:
return self.__class__(self.data.decode())
def encode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.encode(encoding, errors))
else:
return self.__class__(self.data.encode(encoding))
else:
return self.__class__(self.data.encode())
def endswith(self, suffix, start=0, end=sys.maxint):
return self.data.endswith(suffix, start, end)
def expandtabs(self, tabsize=8):
return self.__class__(self.data.expandtabs(tabsize))
def find(self, sub, start=0, end=sys.maxint):
return self.data.find(sub, start, end)
def index(self, sub, start=0, end=sys.maxint):
return self.data.index(sub, start, end)
def isalpha(self): return self.data.isalpha()
def isalnum(self): return self.data.isalnum()
def isdecimal(self): return self.data.isdecimal()
def isdigit(self): return self.data.isdigit()
def islower(self): return self.data.islower()
def isnumeric(self): return self.data.isnumeric()
def isspace(self): return self.data.isspace()
def istitle(self): return self.data.istitle()
def isupper(self): return self.data.isupper()
def join(self, seq): return self.data.join(seq)
def ljust(self, width, *args):
return self.__class__(self.data.ljust(width, *args))
def lower(self): return self.__class__(self.data.lower())
def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
def replace(self, old, new, maxsplit=-1):
return self.__class__(self.data.replace(old, new, maxsplit))
def rfind(self, sub, start=0, end=sys.maxint):
return self.data.rfind(sub, start, end)
def rindex(self, sub, start=0, end=sys.maxint):
return self.data.rindex(sub, start, end)
def rjust(self, width, *args):
return self.__class__(self.data.rjust(width, *args))
def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
def split(self, sep=None, maxsplit=-1):
return self.data.split(sep, maxsplit)
def rsplit(self, sep=None, maxsplit=-1):
return self.data.rsplit(sep, maxsplit)
def splitlines(self, keepends=0): return self.data.splitlines(keepends)
def startswith(self, prefix, start=0, end=sys.maxint):
return self.data.startswith(prefix, start, end)
def strip(self, chars=None): return self.__class__(self.data.strip(chars))
def swapcase(self): return self.__class__(self.data.swapcase())
def title(self): return self.__class__(self.data.title())
def translate(self, *args):
return self.__class__(self.data.translate(*args))
def upper(self): return self.__class__(self.data.upper())
def zfill(self, width): return self.__class__(self.data.zfill(width))
class MutableString(UserString):
"""mutable string objects
    Python strings are immutable objects. This has the advantage that
strings may be used as dictionary keys. If this property isn't needed
and you insist on changing string values in place instead, you may cheat
and use MutableString.
But the purpose of this class is an educational one: to prevent
people from inventing their own mutable string class derived
    from UserString and thereby forget to remove (override) the
__hash__ method inherited from UserString. This would lead to
errors that would be very hard to track down.
A faster and better solution is to rewrite your program using lists."""
def __init__(self, string=""):
self.data = string
def __hash__(self):
raise TypeError, "unhashable type (it is mutable)"
def __setitem__(self, index, sub):
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + sub + self.data[index+1:]
def __delitem__(self, index):
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + self.data[index+1:]
def __setslice__(self, start, end, sub):
start = max(start, 0); end = max(end, 0)
if isinstance(sub, UserString):
self.data = self.data[:start]+sub.data+self.data[end:]
elif isinstance(sub, StringTypes):
self.data = self.data[:start]+sub+self.data[end:]
else:
self.data = self.data[:start]+str(sub)+self.data[end:]
def __delslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
self.data = self.data[:start] + self.data[end:]
def immutable(self):
return UserString(self.data)
def __iadd__(self, other):
if isinstance(other, UserString):
self.data += other.data
elif isinstance(other, StringTypes):
self.data += other
else:
self.data += str(other)
return self
def __imul__(self, n):
self.data *= n
return self
if __name__ == "__main__":
# execute the regression test to stdout, if called as a script:
import os
called_in_dir, called_as = os.path.split(sys.argv[0])
called_as, py = os.path.splitext(called_as)
if '-q' in sys.argv:
from test import test_support
test_support.verbose = 0
__import__('test.test_' + called_as.lower())
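# Usage sketch (illustrative):
#
#     >>> s = UserString("hello")
#     >>> s.upper()                # returns a new UserString
#     'HELLO'
#     >>> m = MutableString("hello")
#     >>> m[0] = "H"               # in-place edit, only possible on MutableString
#     >>> str(m)
#     'Hello'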
| gpl-2.0 |
le9i0nx/ansible | lib/ansible/modules/cloud/openstack/os_port.py | 25 | 12185 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_port
short_description: Add/Update/Delete ports from an OpenStack cloud.
extends_documentation_fragment: openstack
author: "Davide Agnello (@dagnello)"
version_added: "2.0"
description:
- Add, Update or Remove ports from an OpenStack cloud. A I(state) of
'present' will ensure the port is created or updated if required.
options:
network:
description:
- Network ID or name this port belongs to.
required: true
name:
description:
- Name that has to be given to the port.
required: false
default: None
fixed_ips:
description:
- Desired IP and/or subnet for this port. Subnet is referenced by
subnet_id and IP is referenced by ip_address.
required: false
default: None
admin_state_up:
description:
- Sets admin state.
required: false
default: None
mac_address:
description:
- MAC address of this port.
required: false
default: None
security_groups:
description:
- Security group(s) ID(s) or name(s) associated with the port (comma
separated string or YAML list)
required: false
default: None
no_security_groups:
description:
- Do not associate a security group with this port.
required: false
default: False
allowed_address_pairs:
description:
- "Allowed address pairs list. Allowed address pairs are supported with
dictionary structure.
e.g. allowed_address_pairs:
- ip_address: 10.1.0.12
mac_address: ab:cd:ef:12:34:56
- ip_address: ..."
required: false
default: None
extra_dhcp_opts:
description:
- "Extra dhcp options to be assigned to this port. Extra options are
supported with dictionary structure.
e.g. extra_dhcp_opts:
- opt_name: opt name1
opt_value: value1
- opt_name: ..."
required: false
default: None
device_owner:
description:
- The ID of the entity that uses this port.
required: false
default: None
device_id:
description:
- Device ID of device using this port.
required: false
default: None
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
availability_zone:
description:
- Ignored. Present for backwards compatibility
required: false
'''
EXAMPLES = '''
# Create a port
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
network: foo
# Create a port with a static IP
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
network: foo
fixed_ips:
- ip_address: 10.1.0.21
# Create a port with No security groups
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
network: foo
no_security_groups: True
# Update the existing 'port1' port with multiple security groups (version 1)
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/d
username: admin
password: admin
project_name: admin
name: port1
security_groups: 1496e8c7-4918-482a-9172-f4f00fc4a3a5,057d4bdf-6d4d-472...
# Update the existing 'port1' port with multiple security groups (version 2)
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/d
username: admin
password: admin
project_name: admin
name: port1
security_groups:
- 1496e8c7-4918-482a-9172-f4f00fc4a3a5
- 057d4bdf-6d4d-472...
'''
RETURN = '''
id:
description: Unique UUID.
returned: success
type: string
name:
description: Name given to the port.
returned: success
type: string
network_id:
description: Network ID this port belongs in.
returned: success
type: string
security_groups:
description: Security group(s) associated with this port.
returned: success
type: list
status:
description: Port's status.
returned: success
type: string
fixed_ips:
description: Fixed ip(s) associated with this port.
returned: success
type: list
tenant_id:
description: Tenant id associated with this port.
returned: success
type: string
allowed_address_pairs:
description: Allowed address pairs with this port.
returned: success
type: list
admin_state_up:
description: Admin state up flag for this port.
returned: success
type: bool
'''
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs
def _needs_update(module, port, cloud):
"""Check for differences in the updatable values.
NOTE: We don't currently allow name updates.
"""
compare_simple = ['admin_state_up',
'mac_address',
'device_owner',
'device_id']
compare_dict = ['allowed_address_pairs',
'extra_dhcp_opts']
compare_list = ['security_groups']
for key in compare_simple:
if module.params[key] is not None and module.params[key] != port[key]:
return True
for key in compare_dict:
if module.params[key] is not None and module.params[key] != port[key]:
return True
for key in compare_list:
if module.params[key] is not None and (set(module.params[key]) !=
set(port[key])):
return True
# NOTE: if port was created or updated with 'no_security_groups=True',
# subsequent updates without 'no_security_groups' flag or
# 'no_security_groups=False' and no specified 'security_groups', will not
# result in an update to the port where the default security group is
# applied.
if module.params['no_security_groups'] and port['security_groups'] != []:
return True
if module.params['fixed_ips'] is not None:
for item in module.params['fixed_ips']:
if 'ip_address' in item:
# if ip_address in request does not match any in existing port,
# update is required.
if not any(match['ip_address'] == item['ip_address']
for match in port['fixed_ips']):
return True
if 'subnet_id' in item:
return True
for item in port['fixed_ips']:
# if ip_address in existing port does not match any in request,
# update is required.
if not any(match.get('ip_address') == item['ip_address']
for match in module.params['fixed_ips']):
return True
return False
def _system_state_change(module, port, cloud):
state = module.params['state']
if state == 'present':
if not port:
return True
return _needs_update(module, port, cloud)
if state == 'absent' and port:
return True
return False
def _compose_port_args(module, cloud):
port_kwargs = {}
optional_parameters = ['name',
'fixed_ips',
'admin_state_up',
'mac_address',
'security_groups',
'allowed_address_pairs',
'extra_dhcp_opts',
'device_owner',
'device_id']
for optional_param in optional_parameters:
if module.params[optional_param] is not None:
port_kwargs[optional_param] = module.params[optional_param]
if module.params['no_security_groups']:
port_kwargs['security_groups'] = []
return port_kwargs
def get_security_group_id(module, cloud, security_group_name_or_id):
security_group = cloud.get_security_group(security_group_name_or_id)
if not security_group:
module.fail_json(msg="Security group: %s, was not found"
% security_group_name_or_id)
return security_group['id']
def main():
argument_spec = openstack_full_argument_spec(
network=dict(required=False),
name=dict(required=False),
fixed_ips=dict(type='list', default=None),
admin_state_up=dict(type='bool', default=None),
mac_address=dict(default=None),
security_groups=dict(default=None, type='list'),
no_security_groups=dict(default=False, type='bool'),
allowed_address_pairs=dict(type='list', default=None),
extra_dhcp_opts=dict(type='list', default=None),
device_owner=dict(default=None),
device_id=dict(default=None),
state=dict(default='present', choices=['absent', 'present']),
)
module_kwargs = openstack_module_kwargs(
mutually_exclusive=[
['no_security_groups', 'security_groups'],
]
)
module = AnsibleModule(argument_spec,
supports_check_mode=True,
**module_kwargs)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
name = module.params['name']
state = module.params['state']
try:
cloud = shade.openstack_cloud(**module.params)
if module.params['security_groups']:
# translate security_groups to UUID's if names where provided
module.params['security_groups'] = [
get_security_group_id(module, cloud, v)
for v in module.params['security_groups']
]
port = None
network_id = None
if name:
port = cloud.get_port(name)
if module.check_mode:
module.exit_json(changed=_system_state_change(module, port, cloud))
changed = False
if state == 'present':
if not port:
network = module.params['network']
if not network:
module.fail_json(
msg="Parameter 'network' is required in Port Create"
)
port_kwargs = _compose_port_args(module, cloud)
network_object = cloud.get_network(network)
if network_object:
network_id = network_object['id']
else:
module.fail_json(
msg="Specified network was not found."
)
port = cloud.create_port(network_id, **port_kwargs)
changed = True
else:
if _needs_update(module, port, cloud):
port_kwargs = _compose_port_args(module, cloud)
port = cloud.update_port(port['id'], **port_kwargs)
changed = True
module.exit_json(changed=changed, id=port['id'], port=port)
if state == 'absent':
if port:
cloud.delete_port(port['id'])
changed = True
module.exit_json(changed=changed)
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
| gpl-3.0 |
cgwalters/imagefactory | imgfac/PersistentImageManager.py | 4 | 2885 | # encoding: utf-8
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ApplicationConfiguration import ApplicationConfiguration
class PersistentImageManager(object):
""" Abstract base class for the Persistence managers """
_default_manager = None
@classmethod
def default_manager(cls):
if not cls._default_manager:
appconfig = ApplicationConfiguration().configuration
class_name = appconfig['image_manager'].capitalize() + "PersistentImageManager"
kwargs = appconfig['image_manager_args']
# The current defaults are 'file' for class name and
# { "storage_location": "/var/lib/imagefactory/storage" } for the args
pim_module = __import__(class_name, globals(), locals(), [ class_name ], -1)
pim_class = getattr(pim_module, class_name)
cls._default_manager = pim_class(**kwargs)
return cls._default_manager
def __init__(self, storage_path = None):
raise NotImplementedError("PersistentImageManager is an abstract class. You must instantiate a real manager.")
def image_with_id(self, image_id):
"""
TODO: Docstring for image_with_id
@param image_id TODO
@return TODO
"""
raise NotImplementedError("image_with_id() not implemented - cannot continue")
def images_from_query(self, query):
"""
TODO: Docstring for images_from_query
@param image_id TODO
@return TODO
"""
raise NotImplementedError("images_from_query() not implemented - cannot continue")
def add_image(self, image):
"""
TODO: Docstring for add_image
@param image TODO
@return TODO
"""
raise NotImplementedError("add_image() not implemented - cannot continue")
def save_image(self, image):
"""
TODO: Docstring for save_image
@param image TODO
@return TODO
"""
raise NotImplementedError("save_image() not implemented - cannot continue")
def delete_image_with_id(self, image_id):
"""
TODO: Docstring for delete_image_with_id
@param image_id TODO
@return TODO
"""
raise NotImplementedError("delete_image_with_id() not implemented - cannot continue")
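# Usage sketch (illustrative): concrete managers follow the naming convention
# used by default_manager() above -- an 'image_manager' value of "file" maps to
# a class named FilePersistentImageManager living in a module of the same name.
# The backend name and class below are hypothetical.
#
#     # in MongoPersistentImageManager.py
#     # class MongoPersistentImageManager(PersistentImageManager):
#     #     def __init__(self, storage_location=None):
#     #         ...
#     #     def image_with_id(self, image_id):
#     #         ...
#     # selected via image_manager: "mongo" plus image_manager_args in the
#     # application configuration.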
| apache-2.0 |
kusha/dialog | dialog/link_parser.py | 1 | 4683 | #!/usr/bin/env python3
"""
Link parser Python bindings.
"""
__author__ = "Mark Birger"
__date__ = "19 Jan 2015"
import subprocess, re, shelve, sys
from dialog import STORAGEPATH
def parse(string):
"""
Link-parser output data parser.
"""
global STORAGEPATH
cache = shelve.open(STORAGEPATH + "/sentences")
if string in cache:
return cache[string]
proc = subprocess.Popen(
['link-grammar-5.2.5/link-parser/link-parser', '-postscript', '-graphics', '-verbosity=0'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout_data = proc.communicate(input=string.encode('utf-8'))[0]
stdout = stdout_data.decode('utf-8')
if proc.returncode != 0:
print("ERROR: dialog system is unable to run link-parser")
sys.exit(1)
# filter newlines
r_unwanted = re.compile("[\n\t\r]")
stdout = r_unwanted.sub("", stdout)
# find needed information
parsed = re.findall(r"\[(.*?)\]\[(.*)\]\[.*?\]", stdout)[0]
result = {}
# creating structure
result["words"] = re.findall(r"\((.*?)\)", parsed[0])
result["links"] = []
links = re.findall(r"(\[(\d+) (\d+) (\d+) \((.*?)\)\])", parsed[1])
for link in links:
link = list(link) # was returned tuple
del link[3] # ignoring height level of the link
del link[0]
link[0] = int(link[0])
link[1] = int(link[1])
link[2] = generalize_link(link[2])
result["links"].append(link)
cache[string] = result
return result
def generalize_link(link_type):
return re.findall(r"^[A-Z]*", link_type)[0]
def compare(flexibles, sentence_self, sentence_input):
# print(sentence_self)
# print(sentence_input)
# print(flexibles)
subs_self = substitute(sentence_self, flexibles)
subs_input = substitute(sentence_input)
# print(subs_self)
# print(subs_input)
equal_links = 0
for link1 in subs_self:
for link2 in subs_input:
# if link1[0] in link2[0] and \
# link1[1] in link2[1] and \
# print(subs_self, subs_input)
# print(link1[0], link2[0])
# print(link1[1], link2[1])
# print(link1[2], link2[2])
if (link1[0][0] in link2[0][0]) == link1[0][1] and \
(link1[1][0] in link2[1][0]) == link1[1][1] and \
link1[2] == link2[2]:
# print("OK")
# print(link1, "\t", link2)
equal_links += 1
    # TODO: understand why it is a problem here
if len(subs_self) != 0:
# print(similarity, len(subs_self), len(subs_input))
similarity = equal_links/len(subs_self)
else:
similarity = 0
return similarity, equal_links
def word_links(idx, sentence):
important = []
for link in sentence["links"]:
copy = link[:]
if link[0] == idx:
copy[0] = None
copy[1] = sentence["words"][link[1]]
elif link[1] == idx:
copy[0] = sentence["words"][link[0]]
copy[1] = None
else:
continue
# copy[2] = generalize_link(copy[2])
important.append(copy)
return important
def extract(idx, sentence1, sentence2):
"""
Extracts word from sentence with similar structure.
"""
# print(idx, sentence1, sentence2)
important = word_links(idx, sentence1)
# print(important)
for word in range(len(sentence2["words"])):
links = word_links(word, sentence2)
# print(word, links)
needed = important[:]
for link in links:
if link in needed:
needed.remove(link)
if len(needed) == 0:
            # TODO: check whether there can be more results
result = re.findall(r"\w+", sentence2["words"][word])[0]
return result
def substitute(sentence, clean=[]):
words_wo_flex = [[word, True] for word in sentence["words"]]
for idx in clean:
pos_tag = re.findall(r"\..*$", words_wo_flex[idx][0])
if len(pos_tag):
words_wo_flex[idx][0] = pos_tag[0]
else:
words_wo_flex[idx][0] = "."
words_wo_flex[idx][1] = False
result = []
for link in sentence["links"]:
first = words_wo_flex[link[0]]
second = words_wo_flex[link[1]]
result.append([first, second, link[2]])
return result
# s1 = parse("What is your mark, Mark")
# print(s1["words"])
# print(s1["links"])
# s2 = parse("What is your mark, John")
# print(s2["words"])
# print(s2["links"])
# print()
# word = extract(6, s1, s2)
# print(word)
| mit |
roopali8/keystone | keystone/tests/unit/backend/core_ldap.py | 5 | 5520 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ldap
from oslo_config import cfg
from keystone.common import cache
from keystone.common import ldap as common_ldap
from keystone.common.ldap import core as common_ldap_core
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
from keystone.tests.unit import fakeldap
from keystone.tests.unit.ksfixtures import database
CONF = cfg.CONF
def create_group_container(identity_api):
# Create the groups base entry (ou=Groups,cn=example,cn=com)
group_api = identity_api.driver.group
conn = group_api.get_connection()
dn = 'ou=Groups,cn=example,cn=com'
conn.add_s(dn, [('objectclass', ['organizationalUnit']),
('ou', ['Groups'])])
class BaseBackendLdapCommon(object):
"""Mixin class to set up generic LDAP backends."""
def setUp(self):
super(BaseBackendLdapCommon, self).setUp()
common_ldap.register_handler('fake://', fakeldap.FakeLdap)
self.load_backends()
self.load_fixtures(default_fixtures)
self.addCleanup(common_ldap_core._HANDLERS.clear)
self.addCleanup(self.clear_database)
def _get_domain_fixture(self):
"""Domains in LDAP are read-only, so just return the static one."""
return self.resource_api.get_domain(CONF.identity.default_domain_id)
def clear_database(self):
for shelf in fakeldap.FakeShelves:
fakeldap.FakeShelves[shelf].clear()
def get_config(self, domain_id):
# Only one conf structure unless we are using separate domain backends
return CONF
def config_overrides(self):
super(BaseBackendLdapCommon, self).config_overrides()
self.config_fixture.config(group='identity', driver='ldap')
def config_files(self):
config_files = super(BaseBackendLdapCommon, self).config_files()
config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
return config_files
def get_user_enabled_vals(self, user):
user_dn = (
self.identity_api.driver.user._id_to_dn_string(user['id']))
enabled_attr_name = CONF.ldap.user_enabled_attribute
ldap_ = self.identity_api.driver.user.get_connection()
res = ldap_.search_s(user_dn,
ldap.SCOPE_BASE,
u'(sn=%s)' % user['name'])
if enabled_attr_name in res[0][1]:
return res[0][1][enabled_attr_name]
else:
return None
class BaseBackendLdap(object):
"""Mixin class to set up an all-LDAP configuration."""
def setUp(self):
# NOTE(dstanek): The database must be setup prior to calling the
# parent's setUp. The parent's setUp uses services (like
# credentials) that require a database.
self.useFixture(database.Database())
super(BaseBackendLdap, self).setUp()
def load_fixtures(self, fixtures):
# Override super impl since need to create group container.
create_group_container(self.identity_api)
super(BaseBackendLdap, self).load_fixtures(fixtures)
class BaseBackendLdapIdentitySqlEverythingElse(unit.SQLDriverOverrides):
"""Mixin base for Identity LDAP, everything else SQL backend tests."""
def config_files(self):
config_files = super(BaseBackendLdapIdentitySqlEverythingElse,
self).config_files()
config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf'))
return config_files
def setUp(self):
sqldb = self.useFixture(database.Database())
super(BaseBackendLdapIdentitySqlEverythingElse, self).setUp()
self.clear_database()
self.load_backends()
cache.configure_cache_region(cache.REGION)
sqldb.recreate()
self.load_fixtures(default_fixtures)
# defaulted by the data load
self.user_foo['enabled'] = True
def config_overrides(self):
super(BaseBackendLdapIdentitySqlEverythingElse,
self).config_overrides()
self.config_fixture.config(group='identity', driver='ldap')
self.config_fixture.config(group='resource', driver='sql')
self.config_fixture.config(group='assignment', driver='sql')
class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object):
"""Mixin base class to test mapping of default LDAP backend.
The default configuration is not to enable mapping when using a single
backend LDAP driver. However, a cloud provider might want to enable
the mapping, hence hiding the LDAP IDs from any clients of keystone.
Setting backward_compatible_ids to False will enable this mapping.
"""
def config_overrides(self):
super(BaseBackendLdapIdentitySqlEverythingElseWithMapping,
self).config_overrides()
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
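# Usage sketch (illustrative): these mixins are meant to be combined with a
# concrete identity test case; the combined class and base names below are
# assumptions, not defined in this module.
#
#     # class LDAPIdentity(BaseBackendLdapCommon, BaseBackendLdap,
#     #                    unit.TestCase):
#     #     pass
#
# BaseBackendLdapIdentitySqlEverythingElse is used instead when only the
# identity driver should be LDAP and everything else stays in SQL.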
| apache-2.0 |
chacoroot/planetary | openerp/tools/image.py | 26 | 10435 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
try:
import cStringIO as StringIO
except ImportError:
import StringIO
from PIL import Image
from PIL import ImageEnhance
from random import randint
# ----------------------------------------
# Image resizing
# ----------------------------------------
def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', filetype=None, avoid_if_small=False):
""" Function to resize an image. The image will be resized to the given
size, while keeping the aspect ratios, and holes in the image will be
filled with transparent background. The image will not be stretched if
smaller than the expected size.
Steps of the resizing:
- Compute width and height if not specified.
- if avoid_if_small: if both image sizes are smaller than the requested
sizes, the original image is returned. This is used to avoid adding
transparent content around images that we do not want to alter but
just resize if too big. This is used for example when storing images
in the 'image' field: we keep the original image, resized to a maximal
size, without adding transparent content around it if smaller.
        - create a thumbnail of the source image by using the thumbnail
function. Aspect ratios are preserved when using it. Note that if the
source image is smaller than the expected size, it will not be
extended, but filled to match the size.
- create a transparent background that will hold the final image.
- paste the thumbnail on the transparent background and center it.
:param base64_source: base64-encoded version of the source
image; if False, returns False
:param size: 2-tuple(width, height). A None value for any of width or
height mean an automatically computed value based respectivelly
on height or width of the source image.
:param encoding: the output encoding
:param filetype: the output filetype, by default the source image's
:type filetype: str, any PIL image format (supported for creation)
:param avoid_if_small: do not resize if image height and width
are smaller than the expected size.
"""
if not base64_source:
return False
if size == (None, None):
return base64_source
image_stream = StringIO.StringIO(base64_source.decode(encoding))
image = Image.open(image_stream)
# store filetype here, as Image.new below will lose image.format
filetype = (filetype or image.format).upper()
filetype = {
'BMP': 'PNG',
}.get(filetype, filetype)
asked_width, asked_height = size
if asked_width is None:
asked_width = int(image.size[0] * (float(asked_height) / image.size[1]))
if asked_height is None:
asked_height = int(image.size[1] * (float(asked_width) / image.size[0]))
size = asked_width, asked_height
# check image size: do not create a thumbnail if avoiding smaller images
if avoid_if_small and image.size[0] <= size[0] and image.size[1] <= size[1]:
return base64_source
if image.size != size:
image = image_resize_and_sharpen(image, size)
if image.mode not in ["1", "L", "P", "RGB", "RGBA"]:
image = image.convert("RGB")
background_stream = StringIO.StringIO()
image.save(background_stream, filetype)
return background_stream.getvalue().encode(encoding)
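# Usage sketch (illustrative): resizing a base64-encoded image read from disk,
# mirroring the small test at the bottom of this module. File names are
# placeholders.
#
#     data = file('logo.png', 'rb').read().encode('base64')
#     resized = image_resize_image(data, size=(256, None))  # height computed
#     file('logo_small.png', 'wb').write(resized.decode('base64'))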
def image_resize_and_sharpen(image, size, preserve_aspect_ratio=False, factor=2.0):
"""
Create a thumbnail by resizing while keeping ratio.
A sharpen filter is applied for a better looking result.
:param image: PIL.Image.Image()
:param size: 2-tuple(width, height)
:param preserve_aspect_ratio: boolean (default: False)
:param factor: Sharpen factor (default: 2.0)
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
image.thumbnail(size, Image.ANTIALIAS)
if preserve_aspect_ratio:
size = image.size
sharpener = ImageEnhance.Sharpness(image)
resized_image = sharpener.enhance(factor)
# create a transparent image for background and paste the image on it
image = Image.new('RGBA', size, (255, 255, 255, 0))
image.paste(resized_image, ((size[0] - resized_image.size[0]) / 2, (size[1] - resized_image.size[1]) / 2))
return image
def image_save_for_web(image, fp=None, format=None):
"""
Save image optimized for web usage.
:param image: PIL.Image.Image()
:param fp: File name or file object. If not specified, a bytestring is returned.
:param format: File format if could not be deduced from image.
"""
opt = dict(format=image.format or format)
if image.format == 'PNG':
opt.update(optimize=True)
if image.mode != 'P':
# Floyd Steinberg dithering by default
image = image.convert('RGBA').convert('P', palette=Image.WEB, colors=256)
elif image.format == 'JPEG':
opt.update(optimize=True, quality=80)
if fp:
image.save(fp, **opt)
else:
img = StringIO.StringIO()
image.save(img, **opt)
return img.getvalue()
def image_resize_image_big(base64_source, size=(1024, 1024), encoding='base64', filetype=None, avoid_if_small=True):
""" Wrapper on image_resize_image, to resize images larger than the standard
'big' image size: 1024x1024px.
:param size, encoding, filetype, avoid_if_small: refer to image_resize_image
"""
return image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)
def image_resize_image_medium(base64_source, size=(128, 128), encoding='base64', filetype=None, avoid_if_small=False):
""" Wrapper on image_resize_image, to resize to the standard 'medium'
image size: 180x180.
:param size, encoding, filetype, avoid_if_small: refer to image_resize_image
"""
return image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)
def image_resize_image_small(base64_source, size=(64, 64), encoding='base64', filetype=None, avoid_if_small=False):
""" Wrapper on image_resize_image, to resize to the standard 'small' image
size: 50x50.
:param size, encoding, filetype, avoid_if_small: refer to image_resize_image
"""
return image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)
# ----------------------------------------
# Colors
# ---------------------------------------
def image_colorize(original, randomize=True, color=(255, 255, 255)):
""" Add a color to the transparent background of an image.
:param original: file object on the original image file
:param randomize: randomize the background color
:param color: background-color, if not randomize
"""
# create a new image, based on the original one
original = Image.open(StringIO.StringIO(original))
image = Image.new('RGB', original.size)
    # generate the background color, paste it as background
if randomize:
color = (randint(32, 224), randint(32, 224), randint(32, 224))
image.paste(color)
image.paste(original, mask=original)
# return the new image
buffer = StringIO.StringIO()
image.save(buffer, 'PNG')
return buffer.getvalue()
# ----------------------------------------
# Misc image tools
# ---------------------------------------
def image_get_resized_images(base64_source, return_big=False, return_medium=True, return_small=True,
big_name='image', medium_name='image_medium', small_name='image_small',
avoid_resize_big=True, avoid_resize_medium=False, avoid_resize_small=False):
""" Standard tool function that returns a dictionary containing the
big, medium and small versions of the source image. This function
is meant to be used for the methods of functional fields for
models using images.
Default parameters are given to be used for the getter of functional
image fields, for example with res.users or res.partner. It returns
only image_medium and image_small values, to update those fields.
:param base64_source: base64-encoded version of the source
            image; if False, all returned values will be False
:param return_{..}: if set, computes and return the related resizing
of the image
:param {..}_name: key of the resized image in the return dictionary;
'image', 'image_medium' and 'image_small' by default.
:param avoid_resize_[..]: see avoid_if_small parameter
:return return_dict: dictionary with resized images, depending on
previous parameters.
"""
return_dict = dict()
if return_big:
return_dict[big_name] = image_resize_image_big(base64_source, avoid_if_small=avoid_resize_big)
if return_medium:
return_dict[medium_name] = image_resize_image_medium(base64_source, avoid_if_small=avoid_resize_medium)
if return_small:
return_dict[small_name] = image_resize_image_small(base64_source, avoid_if_small=avoid_resize_small)
return return_dict
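# Usage sketch (illustrative): typical use inside a functional field getter, as
# described in the docstring above. Model and field names are placeholders.
#
#     def _get_image(self, cr, uid, ids, name, args, context=None):
#         result = {}
#         for obj in self.browse(cr, uid, ids, context=context):
#             result[obj.id] = tools.image_get_resized_images(obj.image)
#         return result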
if __name__=="__main__":
import sys
assert len(sys.argv)==3, 'Usage to Test: image.py SRC.png DEST.png'
img = file(sys.argv[1],'rb').read().encode('base64')
new = image_resize_image(img, (128,100))
file(sys.argv[2], 'wb').write(new.decode('base64'))
| agpl-3.0 |
bpilania/SDNGini | backend/src/gloader/xml/dom/expatbuilder.py | 10 | 36484 | """Facility to use the Expat parser to load a minidom instance
from a string or file.
This avoids all the overhead of SAX and pulldom to gain performance.
"""
# Warning!
#
# This module is tightly bound to the implementation details of the
# minidom DOM and can't be used with other DOM implementations. This
# is due, in part, to a lack of appropriate methods in the DOM (there is
# no way to create Entity and Notation nodes via the DOM Level 2
# interface), and for performance. The latter is the cause of some fairly
# cryptic code.
#
# Performance hacks:
#
# - .character_data_handler() has an extra case in which continuing
# data is appended to an existing Text node; this can be a
# speedup since pyexpat can break up character data into multiple
# callbacks even though we set the buffer_text attribute on the
# parser. This also gives us the advantage that we don't need a
# separate normalization pass.
#
# - Determining that a node exists is done using an identity comparison
# with None rather than a truth test; this avoids searching for and
# calling any methods on the node object if it exists. (A rather
# nice speedup is achieved this way as well!)
from xml.dom import xmlbuilder, minidom, Node
from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE
from xml.parsers import expat
from xml.dom.minidom import _append_child, _set_attribute_node
from xml.dom.NodeFilter import NodeFilter
from xml.dom.minicompat import *
TEXT_NODE = Node.TEXT_NODE
CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE
DOCUMENT_NODE = Node.DOCUMENT_NODE
FILTER_ACCEPT = xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
FILTER_REJECT = xmlbuilder.DOMBuilderFilter.FILTER_REJECT
FILTER_SKIP = xmlbuilder.DOMBuilderFilter.FILTER_SKIP
FILTER_INTERRUPT = xmlbuilder.DOMBuilderFilter.FILTER_INTERRUPT
theDOMImplementation = minidom.getDOMImplementation()
# Expat typename -> TypeInfo
_typeinfo_map = {
"CDATA": minidom.TypeInfo(None, "cdata"),
"ENUM": minidom.TypeInfo(None, "enumeration"),
"ENTITY": minidom.TypeInfo(None, "entity"),
"ENTITIES": minidom.TypeInfo(None, "entities"),
"ID": minidom.TypeInfo(None, "id"),
"IDREF": minidom.TypeInfo(None, "idref"),
"IDREFS": minidom.TypeInfo(None, "idrefs"),
"NMTOKEN": minidom.TypeInfo(None, "nmtoken"),
"NMTOKENS": minidom.TypeInfo(None, "nmtokens"),
}
class ElementInfo(NewStyle):
__slots__ = '_attr_info', '_model', 'tagName'
def __init__(self, tagName, model=None):
self.tagName = tagName
self._attr_info = []
self._model = model
def __getstate__(self):
return self._attr_info, self._model, self.tagName
def __setstate__(self, state):
self._attr_info, self._model, self.tagName = state
def getAttributeType(self, aname):
for info in self._attr_info:
if info[1] == aname:
t = info[-2]
if t[0] == "(":
return _typeinfo_map["ENUM"]
else:
return _typeinfo_map[info[-2]]
return minidom._no_type
def getAttributeTypeNS(self, namespaceURI, localName):
return minidom._no_type
def isElementContent(self):
if self._model:
type = self._model[0]
return type not in (expat.model.XML_CTYPE_ANY,
expat.model.XML_CTYPE_MIXED)
else:
return False
def isEmpty(self):
if self._model:
return self._model[0] == expat.model.XML_CTYPE_EMPTY
else:
return False
def isId(self, aname):
for info in self._attr_info:
if info[1] == aname:
return info[-2] == "ID"
return False
def isIdNS(self, euri, ename, auri, aname):
# not sure this is meaningful
return self.isId((auri, aname))
def _intern(builder, s):
return builder._intern_setdefault(s, s)
def _parse_ns_name(builder, name):
assert ' ' in name
parts = name.split(' ')
intern = builder._intern_setdefault
if len(parts) == 3:
uri, localname, prefix = parts
prefix = intern(prefix, prefix)
qname = "%s:%s" % (prefix, localname)
qname = intern(qname, qname)
localname = intern(localname, localname)
else:
uri, localname = parts
prefix = EMPTY_PREFIX
qname = localname = intern(localname, localname)
return intern(uri, uri), localname, prefix, qname
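# For example, with namespace_separator=" " expat reports an element name
# such as "http://example.com/ns item p" (uri, localname, prefix), which
# _parse_ns_name() turns into:
#   ("http://example.com/ns", "item", "p", "p:item")
# A name without a space is handled by the callers as an unqualified name.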
class ExpatBuilder:
"""Document builder that uses Expat to build a ParsedXML.DOM document
instance."""
def __init__(self, options=None):
if options is None:
options = xmlbuilder.Options()
self._options = options
if self._options.filter is not None:
self._filter = FilterVisibilityController(self._options.filter)
else:
self._filter = None
# This *really* doesn't do anything in this case, so
# override it with something fast & minimal.
self._finish_start_element = id
self._parser = None
self.reset()
def createParser(self):
"""Create a new parser object."""
return expat.ParserCreate()
def getParser(self):
"""Return the parser object, creating a new one if needed."""
if not self._parser:
self._parser = self.createParser()
self._intern_setdefault = self._parser.intern.setdefault
self._parser.buffer_text = True
self._parser.ordered_attributes = True
self._parser.specified_attributes = True
self.install(self._parser)
return self._parser
def reset(self):
"""Free all data structures used during DOM construction."""
self.document = theDOMImplementation.createDocument(
EMPTY_NAMESPACE, None, None)
self.curNode = self.document
self._elem_info = self.document._elem_info
self._cdata = False
def install(self, parser):
"""Install the callbacks needed to build the DOM into the parser."""
# This creates circular references!
parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
parser.StartElementHandler = self.first_element_handler
parser.EndElementHandler = self.end_element_handler
parser.ProcessingInstructionHandler = self.pi_handler
if self._options.entities:
parser.EntityDeclHandler = self.entity_decl_handler
parser.NotationDeclHandler = self.notation_decl_handler
if self._options.comments:
parser.CommentHandler = self.comment_handler
if self._options.cdata_sections:
parser.StartCdataSectionHandler = self.start_cdata_section_handler
parser.EndCdataSectionHandler = self.end_cdata_section_handler
parser.CharacterDataHandler = self.character_data_handler_cdata
else:
parser.CharacterDataHandler = self.character_data_handler
parser.ExternalEntityRefHandler = self.external_entity_ref_handler
parser.XmlDeclHandler = self.xml_decl_handler
parser.ElementDeclHandler = self.element_decl_handler
parser.AttlistDeclHandler = self.attlist_decl_handler
def parseFile(self, file):
"""Parse a document from a file object, returning the document
node."""
parser = self.getParser()
first_buffer = True
try:
while 1:
buffer = file.read(16*1024)
if not buffer:
break
parser.Parse(buffer, 0)
if first_buffer and self.document.documentElement:
self._setup_subset(buffer)
first_buffer = False
parser.Parse("", True)
except ParseEscape:
pass
doc = self.document
self.reset()
self._parser = None
return doc
def parseString(self, string):
"""Parse a document from a string, returning the document node."""
parser = self.getParser()
try:
parser.Parse(string, True)
self._setup_subset(string)
except ParseEscape:
pass
doc = self.document
self.reset()
self._parser = None
return doc
def _setup_subset(self, buffer):
"""Load the internal subset if there might be one."""
if self.document.doctype:
extractor = InternalSubsetExtractor()
extractor.parseString(buffer)
subset = extractor.getSubset()
self.document.doctype.internalSubset = subset
def start_doctype_decl_handler(self, doctypeName, systemId, publicId,
has_internal_subset):
doctype = self.document.implementation.createDocumentType(
doctypeName, publicId, systemId)
doctype.ownerDocument = self.document
self.document.childNodes.append(doctype)
self.document.doctype = doctype
if self._filter and self._filter.acceptNode(doctype) == FILTER_REJECT:
self.document.doctype = None
del self.document.childNodes[-1]
doctype = None
self._parser.EntityDeclHandler = None
self._parser.NotationDeclHandler = None
if has_internal_subset:
if doctype is not None:
doctype.entities._seq = []
doctype.notations._seq = []
self._parser.CommentHandler = None
self._parser.ProcessingInstructionHandler = None
self._parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
def end_doctype_decl_handler(self):
if self._options.comments:
self._parser.CommentHandler = self.comment_handler
self._parser.ProcessingInstructionHandler = self.pi_handler
if not (self._elem_info or self._filter):
self._finish_end_element = id
def pi_handler(self, target, data):
node = self.document.createProcessingInstruction(target, data)
_append_child(self.curNode, node)
if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
self.curNode.removeChild(node)
def character_data_handler_cdata(self, data):
childNodes = self.curNode.childNodes
if self._cdata:
if ( self._cdata_continue
and childNodes[-1].nodeType == CDATA_SECTION_NODE):
childNodes[-1].appendData(data)
return
node = self.document.createCDATASection(data)
self._cdata_continue = True
elif childNodes and childNodes[-1].nodeType == TEXT_NODE:
node = childNodes[-1]
value = node.data + data
d = node.__dict__
d['data'] = d['nodeValue'] = value
return
else:
node = minidom.Text()
d = node.__dict__
d['data'] = d['nodeValue'] = data
d['ownerDocument'] = self.document
_append_child(self.curNode, node)
def character_data_handler(self, data):
childNodes = self.curNode.childNodes
if childNodes and childNodes[-1].nodeType == TEXT_NODE:
node = childNodes[-1]
d = node.__dict__
d['data'] = d['nodeValue'] = node.data + data
return
node = minidom.Text()
d = node.__dict__
d['data'] = d['nodeValue'] = node.data + data
d['ownerDocument'] = self.document
_append_child(self.curNode, node)
def entity_decl_handler(self, entityName, is_parameter_entity, value,
base, systemId, publicId, notationName):
if is_parameter_entity:
# we don't care about parameter entities for the DOM
return
if not self._options.entities:
return
node = self.document._create_entity(entityName, publicId,
systemId, notationName)
if value is not None:
# internal entity
# node *should* be readonly, but we'll cheat
child = self.document.createTextNode(value)
node.childNodes.append(child)
self.document.doctype.entities._seq.append(node)
if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
del self.document.doctype.entities._seq[-1]
def notation_decl_handler(self, notationName, base, systemId, publicId):
node = self.document._create_notation(notationName, publicId, systemId)
self.document.doctype.notations._seq.append(node)
if self._filter and self._filter.acceptNode(node) == FILTER_ACCEPT:
del self.document.doctype.notations._seq[-1]
def comment_handler(self, data):
node = self.document.createComment(data)
_append_child(self.curNode, node)
if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
self.curNode.removeChild(node)
def start_cdata_section_handler(self):
self._cdata = True
self._cdata_continue = False
def end_cdata_section_handler(self):
self._cdata = False
self._cdata_continue = False
def external_entity_ref_handler(self, context, base, systemId, publicId):
return 1
def first_element_handler(self, name, attributes):
if self._filter is None and not self._elem_info:
self._finish_end_element = id
self.getParser().StartElementHandler = self.start_element_handler
self.start_element_handler(name, attributes)
def start_element_handler(self, name, attributes):
node = self.document.createElement(name)
_append_child(self.curNode, node)
self.curNode = node
if attributes:
for i in range(0, len(attributes), 2):
a = minidom.Attr(attributes[i], EMPTY_NAMESPACE,
None, EMPTY_PREFIX)
value = attributes[i+1]
d = a.childNodes[0].__dict__
d['data'] = d['nodeValue'] = value
d = a.__dict__
d['value'] = d['nodeValue'] = value
d['ownerDocument'] = self.document
_set_attribute_node(node, a)
if node is not self.document.documentElement:
self._finish_start_element(node)
def _finish_start_element(self, node):
if self._filter:
# To be general, we'd have to call isSameNode(), but this
# is sufficient for minidom:
if node is self.document.documentElement:
return
filt = self._filter.startContainer(node)
if filt == FILTER_REJECT:
                # ignore this node & all descendants
Rejecter(self)
elif filt == FILTER_SKIP:
                # ignore this node, but make its children become
                # children of the parent node
Skipper(self)
else:
return
self.curNode = node.parentNode
node.parentNode.removeChild(node)
node.unlink()
# If this ever changes, Namespaces.end_element_handler() needs to
# be changed to match.
#
def end_element_handler(self, name):
curNode = self.curNode
self.curNode = curNode.parentNode
self._finish_end_element(curNode)
def _finish_end_element(self, curNode):
info = self._elem_info.get(curNode.tagName)
if info:
self._handle_white_text_nodes(curNode, info)
if self._filter:
if curNode is self.document.documentElement:
return
if self._filter.acceptNode(curNode) == FILTER_REJECT:
self.curNode.removeChild(curNode)
curNode.unlink()
def _handle_white_text_nodes(self, node, info):
if (self._options.whitespace_in_element_content
or not info.isElementContent()):
return
# We have element type information and should remove ignorable
        # whitespace; identify text nodes which contain only
# whitespace.
L = []
for child in node.childNodes:
if child.nodeType == TEXT_NODE and not child.data.strip():
L.append(child)
# Remove ignorable whitespace from the tree.
for child in L:
node.removeChild(child)
def element_decl_handler(self, name, model):
info = self._elem_info.get(name)
if info is None:
self._elem_info[name] = ElementInfo(name, model)
else:
assert info._model is None
info._model = model
def attlist_decl_handler(self, elem, name, type, default, required):
info = self._elem_info.get(elem)
if info is None:
info = ElementInfo(elem)
self._elem_info[elem] = info
info._attr_info.append(
[None, name, None, None, default, 0, type, required])
def xml_decl_handler(self, version, encoding, standalone):
self.document.version = version
self.document.encoding = encoding
# This is still a little ugly, thanks to the pyexpat API. ;-(
if standalone >= 0:
if standalone:
self.document.standalone = True
else:
self.document.standalone = False
# Don't include FILTER_INTERRUPT, since that's checked separately
# where allowed.
_ALLOWED_FILTER_RETURNS = (FILTER_ACCEPT, FILTER_REJECT, FILTER_SKIP)
class FilterVisibilityController(NewStyle):
"""Wrapper around a DOMBuilderFilter which implements the checks
to make the whatToShow filter attribute work."""
__slots__ = 'filter',
def __init__(self, filter):
self.filter = filter
def startContainer(self, node):
mask = self._nodetype_mask[node.nodeType]
if self.filter.whatToShow & mask:
val = self.filter.startContainer(node)
if val == FILTER_INTERRUPT:
raise ParseEscape
if val not in _ALLOWED_FILTER_RETURNS:
raise ValueError, \
"startContainer() returned illegal value: " + repr(val)
return val
else:
return FILTER_ACCEPT
def acceptNode(self, node):
mask = self._nodetype_mask[node.nodeType]
if self.filter.whatToShow & mask:
val = self.filter.acceptNode(node)
if val == FILTER_INTERRUPT:
raise ParseEscape
if val == FILTER_SKIP:
# move all child nodes to the parent, and remove this node
parent = node.parentNode
for child in node.childNodes[:]:
parent.appendChild(child)
# node is handled by the caller
return FILTER_REJECT
if val not in _ALLOWED_FILTER_RETURNS:
raise ValueError, \
"acceptNode() returned illegal value: " + repr(val)
return val
else:
return FILTER_ACCEPT
_nodetype_mask = {
Node.ELEMENT_NODE: NodeFilter.SHOW_ELEMENT,
Node.ATTRIBUTE_NODE: NodeFilter.SHOW_ATTRIBUTE,
Node.TEXT_NODE: NodeFilter.SHOW_TEXT,
Node.CDATA_SECTION_NODE: NodeFilter.SHOW_CDATA_SECTION,
Node.ENTITY_REFERENCE_NODE: NodeFilter.SHOW_ENTITY_REFERENCE,
Node.ENTITY_NODE: NodeFilter.SHOW_ENTITY,
Node.PROCESSING_INSTRUCTION_NODE: NodeFilter.SHOW_PROCESSING_INSTRUCTION,
Node.COMMENT_NODE: NodeFilter.SHOW_COMMENT,
Node.DOCUMENT_NODE: NodeFilter.SHOW_DOCUMENT,
Node.DOCUMENT_TYPE_NODE: NodeFilter.SHOW_DOCUMENT_TYPE,
Node.DOCUMENT_FRAGMENT_NODE: NodeFilter.SHOW_DOCUMENT_FRAGMENT,
Node.NOTATION_NODE: NodeFilter.SHOW_NOTATION,
}
class FilterCrutch(NewStyle):
__slots__ = '_builder', '_level', '_old_start', '_old_end'
def __init__(self, builder):
self._level = 0
self._builder = builder
parser = builder._parser
self._old_start = parser.StartElementHandler
self._old_end = parser.EndElementHandler
parser.StartElementHandler = self.start_element_handler
parser.EndElementHandler = self.end_element_handler
class Rejecter(FilterCrutch):
__slots__ = ()
def __init__(self, builder):
FilterCrutch.__init__(self, builder)
parser = builder._parser
for name in ("ProcessingInstructionHandler",
"CommentHandler",
"CharacterDataHandler",
"StartCdataSectionHandler",
"EndCdataSectionHandler",
"ExternalEntityRefHandler",
):
setattr(parser, name, None)
def start_element_handler(self, *args):
self._level = self._level + 1
def end_element_handler(self, *args):
if self._level == 0:
# restore the old handlers
parser = self._builder._parser
self._builder.install(parser)
parser.StartElementHandler = self._old_start
parser.EndElementHandler = self._old_end
else:
self._level = self._level - 1
class Skipper(FilterCrutch):
__slots__ = ()
def start_element_handler(self, *args):
node = self._builder.curNode
self._old_start(*args)
if self._builder.curNode is not node:
self._level = self._level + 1
def end_element_handler(self, *args):
if self._level == 0:
# We're popping back out of the node we're skipping, so we
# shouldn't need to do anything but reset the handlers.
self._builder._parser.StartElementHandler = self._old_start
self._builder._parser.EndElementHandler = self._old_end
self._builder = None
else:
self._level = self._level - 1
self._old_end(*args)
# framework document used by the fragment builder.
# Takes a string for the doctype, subset string, and namespace attrs string.
_FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID = \
"http://xml.python.org/entities/fragment-builder/internal"
_FRAGMENT_BUILDER_TEMPLATE = (
'''\
<!DOCTYPE wrapper
%%s [
<!ENTITY fragment-builder-internal
SYSTEM "%s">
%%s
]>
<wrapper %%s
>&fragment-builder-internal;</wrapper>'''
% _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID)
class FragmentBuilder(ExpatBuilder):
"""Builder which constructs document fragments given XML source
text and a context node.
The context node is expected to provide information about the
namespace declarations which are in scope at the start of the
fragment.
"""
def __init__(self, context, options=None):
if context.nodeType == DOCUMENT_NODE:
self.originalDocument = context
self.context = context
else:
self.originalDocument = context.ownerDocument
self.context = context
ExpatBuilder.__init__(self, options)
def reset(self):
ExpatBuilder.reset(self)
self.fragment = None
def parseFile(self, file):
"""Parse a document fragment from a file object, returning the
fragment node."""
return self.parseString(file.read())
def parseString(self, string):
"""Parse a document fragment from a string, returning the
fragment node."""
self._source = string
parser = self.getParser()
doctype = self.originalDocument.doctype
ident = ""
if doctype:
subset = doctype.internalSubset or self._getDeclarations()
if doctype.publicId:
ident = ('PUBLIC "%s" "%s"'
% (doctype.publicId, doctype.systemId))
elif doctype.systemId:
ident = 'SYSTEM "%s"' % doctype.systemId
else:
subset = ""
nsattrs = self._getNSattrs() # get ns decls from node's ancestors
document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
try:
parser.Parse(document, 1)
except:
self.reset()
raise
fragment = self.fragment
self.reset()
## self._parser = None
return fragment
def _getDeclarations(self):
"""Re-create the internal subset from the DocumentType node.
This is only needed if we don't already have the
internalSubset as a string.
"""
doctype = self.context.ownerDocument.doctype
s = ""
if doctype:
for i in range(doctype.notations.length):
notation = doctype.notations.item(i)
if s:
s = s + "\n "
s = "%s<!NOTATION %s" % (s, notation.nodeName)
if notation.publicId:
s = '%s PUBLIC "%s"\n "%s">' \
% (s, notation.publicId, notation.systemId)
else:
s = '%s SYSTEM "%s">' % (s, notation.systemId)
for i in range(doctype.entities.length):
entity = doctype.entities.item(i)
if s:
s = s + "\n "
s = "%s<!ENTITY %s" % (s, entity.nodeName)
if entity.publicId:
s = '%s PUBLIC "%s"\n "%s"' \
% (s, entity.publicId, entity.systemId)
elif entity.systemId:
s = '%s SYSTEM "%s"' % (s, entity.systemId)
else:
s = '%s "%s"' % (s, entity.firstChild.data)
if entity.notationName:
s = "%s NOTATION %s" % (s, entity.notationName)
s = s + ">"
return s
def _getNSattrs(self):
return ""
def external_entity_ref_handler(self, context, base, systemId, publicId):
if systemId == _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID:
# this entref is the one that we made to put the subtree
# in; all of our given input is parsed in here.
old_document = self.document
old_cur_node = self.curNode
parser = self._parser.ExternalEntityParserCreate(context)
# put the real document back, parse into the fragment to return
self.document = self.originalDocument
self.fragment = self.document.createDocumentFragment()
self.curNode = self.fragment
try:
parser.Parse(self._source, 1)
finally:
self.curNode = old_cur_node
self.document = old_document
self._source = None
return -1
else:
return ExpatBuilder.external_entity_ref_handler(
self, context, base, systemId, publicId)
class Namespaces:
"""Mix-in class for builders; adds support for namespaces."""
def _initNamespaces(self):
# list of (prefix, uri) ns declarations. Namespace attrs are
# constructed from this and added to the element's attrs.
self._ns_ordered_prefixes = []
def createParser(self):
"""Create a new namespace-handling parser."""
parser = expat.ParserCreate(namespace_separator=" ")
parser.namespace_prefixes = True
return parser
def install(self, parser):
"""Insert the namespace-handlers onto the parser."""
ExpatBuilder.install(self, parser)
if self._options.namespace_declarations:
parser.StartNamespaceDeclHandler = (
self.start_namespace_decl_handler)
def start_namespace_decl_handler(self, prefix, uri):
"""Push this namespace declaration on our storage."""
self._ns_ordered_prefixes.append((prefix, uri))
def start_element_handler(self, name, attributes):
if ' ' in name:
uri, localname, prefix, qname = _parse_ns_name(self, name)
else:
uri = EMPTY_NAMESPACE
qname = name
localname = None
prefix = EMPTY_PREFIX
node = minidom.Element(qname, uri, prefix, localname)
node.ownerDocument = self.document
_append_child(self.curNode, node)
self.curNode = node
if self._ns_ordered_prefixes:
for prefix, uri in self._ns_ordered_prefixes:
if prefix:
a = minidom.Attr(_intern(self, 'xmlns:' + prefix),
XMLNS_NAMESPACE, prefix, "xmlns")
else:
a = minidom.Attr("xmlns", XMLNS_NAMESPACE,
"xmlns", EMPTY_PREFIX)
# we're only interested in the URI as text at this point
uri = uri or ""
d = a.childNodes[0].__dict__
d['data'] = d['nodeValue'] = uri
d = a.__dict__
d['value'] = d['nodeValue'] = uri
d['ownerDocument'] = self.document
_set_attribute_node(node, a)
del self._ns_ordered_prefixes[:]
if attributes:
_attrs = node._attrs
_attrsNS = node._attrsNS
for i in range(0, len(attributes), 2):
aname = attributes[i]
value = attributes[i+1]
if ' ' in aname:
uri, localname, prefix, qname = _parse_ns_name(self, aname)
a = minidom.Attr(qname, uri, localname, prefix)
_attrs[qname] = a
_attrsNS[(uri, localname)] = a
else:
a = minidom.Attr(aname, EMPTY_NAMESPACE,
aname, EMPTY_PREFIX)
_attrs[aname] = a
_attrsNS[(EMPTY_NAMESPACE, aname)] = a
d = a.childNodes[0].__dict__
d['data'] = d['nodeValue'] = value
d = a.__dict__
d['ownerDocument'] = self.document
d['value'] = d['nodeValue'] = value
d['ownerElement'] = node
if __debug__:
# This only adds some asserts to the original
# end_element_handler(), so we only define this when -O is not
# used. If changing one, be sure to check the other to see if
# it needs to be changed as well.
#
def end_element_handler(self, name):
curNode = self.curNode
if ' ' in name:
uri, localname, prefix, qname = _parse_ns_name(self, name)
assert (curNode.namespaceURI == uri
and curNode.localName == localname
and curNode.prefix == prefix), \
"element stack messed up! (namespace)"
else:
assert curNode.nodeName == name, \
"element stack messed up - bad nodeName"
assert curNode.namespaceURI == EMPTY_NAMESPACE, \
"element stack messed up - bad namespaceURI"
self.curNode = curNode.parentNode
self._finish_end_element(curNode)
class ExpatBuilderNS(Namespaces, ExpatBuilder):
"""Document builder that supports namespaces."""
def reset(self):
ExpatBuilder.reset(self)
self._initNamespaces()
class FragmentBuilderNS(Namespaces, FragmentBuilder):
"""Fragment builder that supports namespaces."""
def reset(self):
FragmentBuilder.reset(self)
self._initNamespaces()
def _getNSattrs(self):
"""Return string of namespace attributes from this element and
ancestors."""
# XXX This needs to be re-written to walk the ancestors of the
# context to build up the namespace information from
# declarations, elements, and attributes found in context.
# Otherwise we have to store a bunch more data on the DOM
# (though that *might* be more reliable -- not clear).
attrs = ""
context = self.context
L = []
while context:
if hasattr(context, '_ns_prefix_uri'):
for prefix, uri in context._ns_prefix_uri.items():
# add every new NS decl from context to L and attrs string
if prefix in L:
continue
L.append(prefix)
if prefix:
declname = "xmlns:" + prefix
else:
declname = "xmlns"
if attrs:
attrs = "%s\n %s='%s'" % (attrs, declname, uri)
else:
attrs = " %s='%s'" % (declname, uri)
context = context.parentNode
return attrs
class ParseEscape(Exception):
"""Exception raised to short-circuit parsing in InternalSubsetExtractor."""
pass
class InternalSubsetExtractor(ExpatBuilder):
"""XML processor which can rip out the internal document type subset."""
subset = None
def getSubset(self):
"""Return the internal subset as a string."""
return self.subset
def parseFile(self, file):
try:
ExpatBuilder.parseFile(self, file)
except ParseEscape:
pass
def parseString(self, string):
try:
ExpatBuilder.parseString(self, string)
except ParseEscape:
pass
def install(self, parser):
parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
parser.StartElementHandler = self.start_element_handler
def start_doctype_decl_handler(self, name, publicId, systemId,
has_internal_subset):
if has_internal_subset:
parser = self.getParser()
self.subset = []
parser.DefaultHandler = self.subset.append
parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
else:
raise ParseEscape()
def end_doctype_decl_handler(self):
s = ''.join(self.subset).replace('\r\n', '\n').replace('\r', '\n')
self.subset = s
raise ParseEscape()
def start_element_handler(self, name, attrs):
raise ParseEscape()
def parse(file, namespaces=1):
"""Parse a document, returning the resulting Document node.
'file' may be either a file name or an open file object.
"""
if namespaces:
builder = ExpatBuilderNS()
else:
builder = ExpatBuilder()
if isinstance(file, StringTypes):
fp = open(file, 'rb')
try:
result = builder.parseFile(fp)
finally:
fp.close()
else:
result = builder.parseFile(file)
return result
def parseString(string, namespaces=1):
"""Parse a document from a string, returning the resulting
Document node.
"""
if namespaces:
builder = ExpatBuilderNS()
else:
builder = ExpatBuilder()
return builder.parseString(string)
def parseFragment(file, context, namespaces=1):
"""Parse a fragment of a document, given the context from which it
was originally extracted. context should be the parent of the
node(s) which are in the fragment.
'file' may be either a file name or an open file object.
"""
if namespaces:
builder = FragmentBuilderNS(context)
else:
builder = FragmentBuilder(context)
if isinstance(file, StringTypes):
fp = open(file, 'rb')
try:
result = builder.parseFile(fp)
finally:
fp.close()
else:
result = builder.parseFile(file)
return result
def parseFragmentString(string, context, namespaces=1):
"""Parse a fragment of a document from a string, given the context
from which it was originally extracted. context should be the
parent of the node(s) which are in the fragment.
"""
if namespaces:
builder = FragmentBuilderNS(context)
else:
builder = FragmentBuilder(context)
return builder.parseString(string)
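# Usage sketch (hypothetical markup; the context element supplies the
# namespace scope for the fragment):
#
#   doc = parseString("<root><a/></root>")
#   frag = parseFragmentString("<b>text</b><c/>", doc.documentElement)
#   # frag is a DocumentFragment whose ownerDocument is doc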
def makeBuilder(options):
"""Create a builder based on an Options object."""
if options.namespaces:
return ExpatBuilderNS(options)
else:
return ExpatBuilder(options)
| mit |
hmit/livestreamer | src/livestreamer/packages/flashmedia/amf.py | 37 | 3315 | from .error import AMFError
from .packet import Packet
from .types import AMF0String, AMF0Value, U8, U16BE, U32BE
class AMFHeader(Packet):
exception = AMFError
def __init__(self, name, value, must_understand=False):
self.name = name
self.value = value
self.must_understand = must_understand
@property
def size(self):
size = 4+1
size += AMF0String.size(self.name)
size += AMF0Value.size(self.value)
return size
def _serialize(self, packet):
packet += AMF0String(self.name)
packet += U8(int(self.must_understand))
packet += U32BE(self.size)
packet += AMF0Value(self.value)
@classmethod
def _deserialize(cls, io):
name = AMF0String.read(io)
must_understand = bool(U8.read(io))
length = U32BE.read(io)
value = AMF0Value.read(io)
return cls(name, value, must_understand)
class AMFMessage(Packet):
exception = AMFError
def __init__(self, target_uri, response_uri, value):
self.target_uri = target_uri
self.response_uri = response_uri
self.value = value
@property
def size(self):
size = 4
size += AMF0String.size(self.target_uri)
size += AMF0String.size(self.response_uri)
size += AMF0Value.size(self.value)
return size
def _serialize(self, packet):
packet += AMF0String(self.target_uri)
packet += AMF0String(self.response_uri)
packet += U32BE(self.size)
packet += AMF0Value.pack(self.value)
@classmethod
def _deserialize(cls, io):
target_uri = AMF0String.read(io)
response_uri = AMF0String.read(io)
length = U32BE.read(io)
value = AMF0Value.read(io)
return cls(target_uri, response_uri, value)
class AMFPacket(Packet):
exception = AMFError
def __init__(self, version, headers=None, messages=None):
if headers is None:
headers = []
if messages is None:
messages = []
self.version = version
self.headers = headers
self.messages = messages
@property
def size(self):
size = 2+2+2
for header in self.headers:
size += header.size
for message in self.messages:
size += message.size
return size
def _serialize(self, packet):
packet += U16BE(self.version)
packet += U16BE(len(self.headers))
for header in self.headers:
header.serialize(packet)
packet += U16BE(len(self.messages))
for message in self.messages:
message.serialize(packet)
@classmethod
def _deserialize(cls, io):
version = U16BE.read(io)
        if version not in (0, 3):
raise AMFError("AMF version must be 0 or 3")
headers = []
header_count = U16BE.read(io)
for i in range(header_count):
header = AMFHeader.deserialize(io)
headers.append(header)
messages = []
message_count = U16BE.read(io)
for i in range(message_count):
message = AMFMessage.deserialize(io)
messages.append(message)
return cls(version, headers, messages)
__all__ = ["AMFPacket", "AMFHeader", "AMFMessage"]
| bsd-2-clause |
xzturn/tensorflow | tensorflow/python/distribute/device_util_test.py | 22 | 3457 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for device utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.distribute import device_util
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class DeviceUtilTest(test.TestCase):
@test_util.run_deprecated_v1
def testCurrentDeviceWithGlobalGraph(self):
with ops.device("/cpu:0"):
self.assertEqual(device_util.current(), "/device:CPU:0")
with ops.device("/job:worker"):
with ops.device("/cpu:0"):
self.assertEqual(device_util.current(), "/job:worker/device:CPU:0")
with ops.device("/cpu:0"):
with ops.device("/gpu:0"):
self.assertEqual(device_util.current(), "/device:GPU:0")
def testCurrentDeviceWithNonGlobalGraph(self):
with ops.Graph().as_default():
with ops.device("/cpu:0"):
self.assertEqual(device_util.current(), "/device:CPU:0")
def testCurrentDeviceWithEager(self):
with context.eager_mode():
with ops.device("/cpu:0"):
self.assertEqual(device_util.current(),
"/job:localhost/replica:0/task:0/device:CPU:0")
@test_util.run_deprecated_v1
def testCanonicalizeWithoutDefaultDevice(self):
self.assertEqual(
device_util.canonicalize("/cpu:0"),
"/replica:0/task:0/device:CPU:0")
self.assertEqual(
device_util.canonicalize("/job:worker/cpu:0"),
"/job:worker/replica:0/task:0/device:CPU:0")
self.assertEqual(
device_util.canonicalize("/job:worker/task:1/cpu:0"),
"/job:worker/replica:0/task:1/device:CPU:0")
def testCanonicalizeWithDefaultDevice(self):
self.assertEqual(
device_util.canonicalize("/job:worker/task:1/cpu:0", default="/gpu:0"),
"/job:worker/replica:0/task:1/device:CPU:0")
self.assertEqual(
device_util.canonicalize("/job:worker/task:1", default="/gpu:0"),
"/job:worker/replica:0/task:1/device:GPU:0")
self.assertEqual(
device_util.canonicalize("/cpu:0", default="/job:worker"),
"/job:worker/replica:0/task:0/device:CPU:0")
def testResolveWithDeviceScope(self):
with ops.device("/gpu:0"):
self.assertEqual(
device_util.resolve("/job:worker/task:1/cpu:0"),
"/job:worker/replica:0/task:1/device:CPU:0")
self.assertEqual(
device_util.resolve("/job:worker/task:1"),
"/job:worker/replica:0/task:1/device:GPU:0")
with ops.device("/job:worker"):
self.assertEqual(
device_util.resolve("/cpu:0"),
"/job:worker/replica:0/task:0/device:CPU:0")
if __name__ == "__main__":
test.main()
| apache-2.0 |
pansapiens/mytardis | tardis/tardis_portal/tests/test_ands_doi.py | 5 | 2809 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2010, Monash e-Research Centre
# (Monash University, Australia)
# Copyright (c) 2010, VeRSI Consortium
# (Victorian eResearch Strategic Initiative, Australia)
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the VeRSI, the VeRSI Consortium members, nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
test_ands_doi.py
Tests for the ANDS DOI service integration (DOIService).
"""
from django.conf import settings
from django.test import TestCase
from tardis.tardis_portal.ands_doi import DOIService
from tardis.tardis_portal.models import User, Experiment, Schema, ParameterName
class ANDSDOITestCase(TestCase):
def setUp(self):
self.user = User.objects.create_user('test', '', 'test')
self.expt = Experiment(title='test exp1',
institution_name='monash',
created_by=self.user,
)
self.schema, _ = Schema.objects.get_or_create(namespace=settings.DOI_NAMESPACE)
self.doi_name, _ = ParameterName.objects.get_or_create(schema=self.schema, full_name='DOI', name='doi')
self.expt.save()
settings.DOI_ENABLE = True
def tearDown(self):
settings.DOI_ENABLE = False
def test_init(self):
doi_service = DOIService(self.expt)
def test_get_doi_none(self):
doi_service = DOIService(self.expt)
self.assertEquals(None, doi_service.get_doi())
| bsd-3-clause |
vriera/micropython | drivers/nrf24l01/nrf24l01test.py | 70 | 2887 | """Test for nrf24l01 module."""
import struct
import pyb
from pyb import Pin, SPI
from nrf24l01 import NRF24L01
pipes = (b'\xf0\xf0\xf0\xf0\xe1', b'\xf0\xf0\xf0\xf0\xd2')
def master():
nrf = NRF24L01(SPI(2), Pin('Y5'), Pin('Y4'), payload_size=8)
nrf.open_tx_pipe(pipes[0])
nrf.open_rx_pipe(1, pipes[1])
nrf.start_listening()
num_needed = 16
num_successes = 0
num_failures = 0
led_state = 0
print('NRF24L01 master mode, sending %d packets...' % num_needed)
while num_successes < num_needed and num_failures < num_needed:
# stop listening and send packet
nrf.stop_listening()
millis = pyb.millis()
led_state = max(1, (led_state << 1) & 0x0f)
print('sending:', millis, led_state)
try:
nrf.send(struct.pack('ii', millis, led_state))
except OSError:
pass
# start listening again
nrf.start_listening()
# wait for response, with 250ms timeout
start_time = pyb.millis()
timeout = False
while not nrf.any() and not timeout:
if pyb.elapsed_millis(start_time) > 250:
timeout = True
if timeout:
            print('failed, response timed out')
num_failures += 1
else:
# recv packet
got_millis, = struct.unpack('i', nrf.recv())
# print response and round-trip delay
print('got response:', got_millis, '(delay', pyb.millis() - got_millis, 'ms)')
num_successes += 1
# delay then loop
pyb.delay(250)
    print('master finished sending; successes=%d, failures=%d' % (num_successes, num_failures))
def slave():
nrf = NRF24L01(SPI(2), Pin('Y5'), Pin('Y4'), payload_size=8)
nrf.open_tx_pipe(pipes[1])
nrf.open_rx_pipe(1, pipes[0])
nrf.start_listening()
print('NRF24L01 slave mode, waiting for packets... (ctrl-C to stop)')
while True:
pyb.wfi()
if nrf.any():
while nrf.any():
buf = nrf.recv()
millis, led_state = struct.unpack('ii', buf)
print('received:', millis, led_state)
for i in range(4):
if led_state & (1 << i):
pyb.LED(i + 1).on()
else:
pyb.LED(i + 1).off()
pyb.delay(15)
nrf.stop_listening()
try:
nrf.send(struct.pack('i', millis))
except OSError:
pass
print('sent response')
nrf.start_listening()
print('NRF24L01 test module loaded')
print('NRF24L01 pinout for test:')
print(' CE on Y4')
print(' CSN on Y5')
print(' SCK on Y6')
print(' MISO on Y7')
print(' MOSI on Y8')
print('run nrf24l01test.slave() on slave, then nrf24l01test.master() on master')
| mit |
wkschwartz/django | tests/messages_tests/test_fallback.py | 18 | 6869 | import random
from django.contrib.messages import constants
from django.contrib.messages.storage.fallback import (
CookieStorage, FallbackStorage,
)
from django.test import SimpleTestCase
from django.utils.crypto import get_random_string
from .base import BaseTests
from .test_cookie import set_cookie_data, stored_cookie_messages_count
from .test_session import set_session_data, stored_session_messages_count
class FallbackTests(BaseTests, SimpleTestCase):
storage_class = FallbackStorage
def get_request(self):
self.session = {}
request = super().get_request()
request.session = self.session
return request
def get_cookie_storage(self, storage):
return storage.storages[-2]
def get_session_storage(self, storage):
return storage.storages[-1]
def stored_cookie_messages_count(self, storage, response):
return stored_cookie_messages_count(self.get_cookie_storage(storage), response)
def stored_session_messages_count(self, storage, response):
return stored_session_messages_count(self.get_session_storage(storage))
def stored_messages_count(self, storage, response):
"""
Return the storage totals from both cookie and session backends.
"""
return (
self.stored_cookie_messages_count(storage, response) +
self.stored_session_messages_count(storage, response)
)
def test_get(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
# Set initial cookie data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
self.assertEqual(list(storage), example_messages)
def test_get_empty(self):
request = self.get_request()
storage = self.storage_class(request)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
self.assertEqual(list(storage), [])
def test_get_fallback(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages[:4] + [CookieStorage.not_finished])
set_session_data(session_storage, example_messages[4:])
self.assertEqual(list(storage), example_messages)
def test_get_fallback_only(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, [CookieStorage.not_finished], encode_empty=True)
set_session_data(session_storage, example_messages)
self.assertEqual(list(storage), example_messages)
def test_flush_used_backends(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
set_cookie_data(cookie_storage, ['cookie', CookieStorage.not_finished])
set_session_data(session_storage, ['session'])
# When updating, previously used but no longer needed backends are
# flushed.
response = self.get_response()
list(storage)
storage.update(response)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_no_fallback(self):
"""
(1) A short number of messages whose data size doesn't exceed what is
allowed in a cookie will all be stored in the CookieBackend.
(2) If the CookieBackend can store all messages, the SessionBackend
won't be written to at all.
"""
storage = self.get_storage()
response = self.get_response()
# Overwrite the _store method of the fallback storage to prove it isn't
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._store = None
for i in range(5):
storage.add(constants.INFO, str(i) * 100)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 5)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_session_fallback(self):
"""
If the data exceeds what is allowed in a cookie, messages which did
not fit are stored in the SessionBackend.
"""
storage = self.get_storage()
response = self.get_response()
# see comment in CookieTests.test_cookie_max_length()
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
# Generate the same (tested) content every time that does not get run
# through zlib compression.
random.seed(42)
for i in range(5):
storage.add(constants.INFO, get_random_string(msg_size))
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
def test_session_fallback_only(self):
"""
Large messages, none of which fit in a cookie, are stored in the
SessionBackend (and nothing is stored in the CookieBackend).
"""
storage = self.get_storage()
response = self.get_response()
# Generate the same (tested) content every time that does not get run
# through zlib compression.
random.seed(42)
storage.add(constants.INFO, get_random_string(5000))
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 0)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
| bsd-3-clause |
sramsay64/python-iview | cherrypy/test/test_httpauth.py | 5 | 6357 | from hashlib import md5, sha1
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy.lib import httpauth
from cherrypy.test import helper
class HTTPAuthTest(helper.CPWebCase):
def setup_server():
class Root:
def index(self):
return "This is public."
index.exposed = True
class DigestProtected:
def index(self):
return "Hello %s, you've been authorized." % (
cherrypy.request.login)
index.exposed = True
class BasicProtected:
def index(self):
return "Hello %s, you've been authorized." % (
cherrypy.request.login)
index.exposed = True
class BasicProtected2:
def index(self):
return "Hello %s, you've been authorized." % (
cherrypy.request.login)
index.exposed = True
def fetch_users():
return {'test': 'test'}
def sha_password_encrypter(password):
return sha1(ntob(password)).hexdigest()
def fetch_password(username):
return sha1(ntob('test')).hexdigest()
conf = {
'/digest': {
'tools.digest_auth.on': True,
'tools.digest_auth.realm': 'localhost',
'tools.digest_auth.users': fetch_users
},
'/basic': {
'tools.basic_auth.on': True,
'tools.basic_auth.realm': 'localhost',
'tools.basic_auth.users': {
'test': md5(ntob('test')).hexdigest()
}
},
'/basic2': {
'tools.basic_auth.on': True,
'tools.basic_auth.realm': 'localhost',
'tools.basic_auth.users': fetch_password,
'tools.basic_auth.encrypt': sha_password_encrypter
}
}
root = Root()
root.digest = DigestProtected()
root.basic = BasicProtected()
root.basic2 = BasicProtected2()
cherrypy.tree.mount(root, config=conf)
setup_server = staticmethod(setup_server)
def testPublic(self):
self.getPage("/")
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html;charset=utf-8')
self.assertBody('This is public.')
def testBasic(self):
self.getPage("/basic/")
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"')
self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZX60')])
self.assertStatus(401)
self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZXN0')])
self.assertStatus('200 OK')
self.assertBody("Hello test, you've been authorized.")
def testBasic2(self):
self.getPage("/basic2/")
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"')
self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZX60')])
self.assertStatus(401)
self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZXN0')])
self.assertStatus('200 OK')
self.assertBody("Hello test, you've been authorized.")
def testDigest(self):
self.getPage("/digest/")
self.assertStatus(401)
value = None
for k, v in self.headers:
if k.lower() == "www-authenticate":
if v.startswith("Digest"):
value = v
break
if value is None:
self._handlewebError(
"Digest authentification scheme was not found")
value = value[7:]
items = value.split(', ')
tokens = {}
for item in items:
key, value = item.split('=')
tokens[key.lower()] = value
missing_msg = "%s is missing"
bad_value_msg = "'%s' was expecting '%s' but found '%s'"
nonce = None
if 'realm' not in tokens:
self._handlewebError(missing_msg % 'realm')
elif tokens['realm'] != '"localhost"':
self._handlewebError(bad_value_msg %
('realm', '"localhost"', tokens['realm']))
if 'nonce' not in tokens:
self._handlewebError(missing_msg % 'nonce')
else:
nonce = tokens['nonce'].strip('"')
if 'algorithm' not in tokens:
self._handlewebError(missing_msg % 'algorithm')
elif tokens['algorithm'] != '"MD5"':
self._handlewebError(bad_value_msg %
('algorithm', '"MD5"', tokens['algorithm']))
if 'qop' not in tokens:
self._handlewebError(missing_msg % 'qop')
elif tokens['qop'] != '"auth"':
self._handlewebError(bad_value_msg %
('qop', '"auth"', tokens['qop']))
# Test a wrong 'realm' value
base_auth = (
'Digest '
'username="test", '
'realm="wrong realm", '
'nonce="%s", '
'uri="/digest/", '
'algorithm=MD5, '
'response="%s", '
'qop=auth, '
'nc=%s, '
'cnonce="1522e61005789929"'
)
auth = base_auth % (nonce, '', '00000001')
params = httpauth.parseAuthorization(auth)
response = httpauth._computeDigestResponse(params, 'test')
auth = base_auth % (nonce, response, '00000001')
self.getPage('/digest/', [('Authorization', auth)])
self.assertStatus(401)
# Test that must pass
base_auth = (
'Digest '
'username="test", '
'realm="localhost", '
'nonce="%s", '
'uri="/digest/", '
'algorithm=MD5, '
'response="%s", '
'qop=auth, '
'nc=%s, '
'cnonce="1522e61005789929"'
)
auth = base_auth % (nonce, '', '00000001')
params = httpauth.parseAuthorization(auth)
response = httpauth._computeDigestResponse(params, 'test')
auth = base_auth % (nonce, response, '00000001')
self.getPage('/digest/', [('Authorization', auth)])
self.assertStatus('200 OK')
self.assertBody("Hello test, you've been authorized.")
| gpl-3.0 |
roselleebarle04/django | tests/migrations/test_loader.py | 165 | 13346 | from __future__ import unicode_literals
from unittest import skipIf
from django.db import connection, connections
from django.db.migrations.exceptions import AmbiguityError, NodeNotFoundError
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, modify_settings, override_settings
from django.utils import six
class RecorderTests(TestCase):
"""
Tests recording migrations as applied or not.
"""
def test_apply(self):
"""
Tests marking migrations as applied/unapplied.
"""
recorder = MigrationRecorder(connection)
self.assertEqual(
set((x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"),
set(),
)
recorder.record_applied("myapp", "0432_ponies")
self.assertEqual(
set((x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"),
{("myapp", "0432_ponies")},
)
# That should not affect records of another database
recorder_other = MigrationRecorder(connections['other'])
self.assertEqual(
set((x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"),
set(),
)
recorder.record_unapplied("myapp", "0432_ponies")
self.assertEqual(
set((x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"),
set(),
)
class LoaderTests(TestCase):
"""
Tests the disk and database loader, and running through migrations
in memory.
"""
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
@modify_settings(INSTALLED_APPS={'append': 'basic'})
def test_load(self):
"""
Makes sure the loader can load the migrations for the test apps,
and then render them out to a new Apps.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0002_second")),
[
("migrations", "0001_initial"),
("migrations", "0002_second"),
],
)
# Now render it out!
project_state = migration_loader.project_state(("migrations", "0002_second"))
self.assertEqual(len(project_state.models), 2)
author_state = project_state.models["migrations", "author"]
self.assertEqual(
[x for x, y in author_state.fields],
["id", "name", "slug", "age", "rating"]
)
book_state = project_state.models["migrations", "book"]
self.assertEqual(
[x for x, y in book_state.fields],
["id", "author"]
)
# Ensure we've included unmigrated apps in there too
self.assertIn("basic", project_state.real_apps)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"})
def test_load_unmigrated_dependency(self):
"""
Makes sure the loader can load migrations with a dependency on an unmigrated app.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0001_initial")),
[
('contenttypes', '0001_initial'),
('auth', '0001_initial'),
("migrations", "0001_initial"),
],
)
# Now render it out!
project_state = migration_loader.project_state(("migrations", "0001_initial"))
self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1)
book_state = project_state.models["migrations", "book"]
self.assertEqual(
[x for x, y in book_state.fields],
["id", "user"]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
def test_run_before(self):
"""
Makes sure the loader uses Migration.run_before.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0002_second")),
[
("migrations", "0001_initial"),
("migrations", "0003_third"),
("migrations", "0002_second"),
],
)
@override_settings(MIGRATION_MODULES={
"migrations": "migrations.test_migrations_first",
"migrations2": "migrations2.test_migrations_2_first",
})
@modify_settings(INSTALLED_APPS={'append': 'migrations2'})
def test_first(self):
"""
Makes sure the '__first__' migrations build correctly.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "second")),
[
("migrations", "thefirst"),
("migrations2", "0001_initial"),
("migrations2", "0002_second"),
("migrations", "second"),
],
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_name_match(self):
"Tests prefix name matching"
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.get_migration_by_prefix("migrations", "0001").name,
"0001_initial",
)
with self.assertRaises(AmbiguityError):
migration_loader.get_migration_by_prefix("migrations", "0")
with self.assertRaises(KeyError):
migration_loader.get_migration_by_prefix("migrations", "blarg")
def test_load_import_error(self):
with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}):
with self.assertRaises(ImportError):
MigrationLoader(connection)
def test_load_module_file(self):
with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}):
loader = MigrationLoader(connection)
self.assertIn(
"migrations", loader.unmigrated_apps,
"App with migrations module file not in unmigrated apps."
)
@skipIf(six.PY2, "PY2 doesn't load empty dirs.")
def test_load_empty_dir(self):
with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}):
loader = MigrationLoader(connection)
self.assertIn(
"migrations", loader.unmigrated_apps,
"App missing __init__.py in migrations module not in unmigrated apps."
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_loading_squashed(self):
"Tests loading a squashed migration"
migration_loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
# Loading with nothing applied should just give us the one node
self.assertEqual(
len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
1,
)
# However, fake-apply one migration and it should now use the old two
recorder.record_applied("migrations", "0001_initial")
migration_loader.build_graph()
self.assertEqual(
len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
2,
)
recorder.flush()
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
def test_loading_squashed_complex(self):
"Tests loading a complex set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
def num_nodes():
plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
return len(plan - loader.applied_migrations)
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
recorder.record_applied("migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 to 5 cannot use the squashed migration
recorder.record_applied("migrations", "3_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "4_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
        # Starting at 5 to 7 we are past the squashed migrations
recorder.record_applied("migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
recorder.record_applied("migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
recorder.record_applied("migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
recorder.flush()
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]})
def test_loading_squashed_complex_multi_apps(self):
loader = MigrationLoader(connection)
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
expected_plan = {
('app1', '4_auto'),
('app1', '2_squashed_3'),
('app2', '1_squashed_2'),
('app1', '1_auto')
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]})
def test_loading_squashed_complex_multi_apps_partially_applied(self):
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
recorder.record_applied('app1', '1_auto')
recorder.record_applied('app1', '2_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app1', '4_auto'),
('app1', '3_auto'),
('app2', '1_squashed_2'),
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"})
def test_loading_squashed_erroneous(self):
"Tests loading a complex but erroneous set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
def num_nodes():
plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
return len(plan - loader.applied_migrations)
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
recorder.record_applied("migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 or 4 we'd need to use non-existing migrations
msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). "
"Django tried to replace migration migrations.5_auto with any of "
"[migrations.3_squashed_5] but wasn't able to because some of the replaced "
"migrations are already applied.")
recorder.record_applied("migrations", "3_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
recorder.record_applied("migrations", "4_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
        # Starting at 5 to 7 we are past the squashed migrations
recorder.record_applied("migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
recorder.record_applied("migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
recorder.record_applied("migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
recorder.flush()
| bsd-3-clause |
rahuldan/sympy | sympy/stats/tests/test_continuous_rv.py | 12 | 19866 | from __future__ import division
from sympy.stats import (P, E, where, density, variance, covariance, skewness,
given, pspace, cdf, ContinuousRV, sample,
Arcsin, Benini, Beta, BetaPrime, Cauchy,
Chi, ChiSquared,
ChiNoncentral, Dagum, Erlang, Exponential,
FDistribution, FisherZ, Frechet, Gamma, GammaInverse,
Gompertz, Kumaraswamy, Laplace, Logistic,
LogNormal, Maxwell, Nakagami, Normal, Pareto,
QuadraticU, RaisedCosine, Rayleigh, ShiftedGompertz,
StudentT, Triangular, Uniform, UniformSum,
VonMises, Weibull, WignerSemicircle, correlation,
moment, cmoment, smoment)
from sympy import (Symbol, Abs, exp, S, N, pi, simplify, Interval, erf, erfc,
Eq, log, lowergamma, Sum, symbols, sqrt, And, gamma, beta,
Piecewise, Integral, sin, cos, besseli, factorial, binomial,
floor, expand_func)
from sympy.stats.crv_types import NormalDistribution
from sympy.stats.rv import ProductPSpace
from sympy.utilities.pytest import raises, XFAIL, slow
from sympy.core.compatibility import range
oo = S.Infinity
x, y, z = map(Symbol, 'xyz')
def test_single_normal():
mu = Symbol('mu', real=True, finite=True)
sigma = Symbol('sigma', real=True, positive=True, finite=True)
X = Normal('x', 0, 1)
Y = X*sigma + mu
assert simplify(E(Y)) == mu
assert simplify(variance(Y)) == sigma**2
pdf = density(Y)
x = Symbol('x')
assert (pdf(x) ==
2**S.Half*exp(-(mu - x)**2/(2*sigma**2))/(2*pi**S.Half*sigma))
assert P(X**2 < 1) == erf(2**S.Half/2)
assert E(X, Eq(X, mu)) == mu
@XFAIL
def test_conditional_1d():
X = Normal('x', 0, 1)
Y = given(X, X >= 0)
assert density(Y) == 2 * density(X)
assert Y.pspace.domain.set == Interval(0, oo)
assert E(Y) == sqrt(2) / sqrt(pi)
assert E(X**2) == E(Y**2)
def test_ContinuousDomain():
X = Normal('x', 0, 1)
assert where(X**2 <= 1).set == Interval(-1, 1)
assert where(X**2 <= 1).symbol == X.symbol
    assert where(And(X**2 <= 1, X >= 0)).set == Interval(0, 1)
raises(ValueError, lambda: where(sin(X) > 1))
Y = given(X, X >= 0)
assert Y.pspace.domain.set == Interval(0, oo)
@slow
def test_multiple_normal():
X, Y = Normal('x', 0, 1), Normal('y', 0, 1)
assert E(X + Y) == 0
assert variance(X + Y) == 2
assert variance(X + X) == 4
assert covariance(X, Y) == 0
assert covariance(2*X + Y, -X) == -2*variance(X)
assert skewness(X) == 0
assert skewness(X + Y) == 0
assert correlation(X, Y) == 0
assert correlation(X, X + Y) == correlation(X, X - Y)
assert moment(X, 2) == 1
assert cmoment(X, 3) == 0
assert moment(X + Y, 4) == 12
assert cmoment(X, 2) == variance(X)
assert smoment(X*X, 2) == 1
assert smoment(X + Y, 3) == skewness(X + Y)
assert E(X, Eq(X + Y, 0)) == 0
assert variance(X, Eq(X + Y, 0)) == S.Half
@slow
def test_symbolic():
mu1, mu2 = symbols('mu1 mu2', real=True, finite=True)
s1, s2 = symbols('sigma1 sigma2', real=True, finite=True, positive=True)
rate = Symbol('lambda', real=True, positive=True, finite=True)
X = Normal('x', mu1, s1)
Y = Normal('y', mu2, s2)
Z = Exponential('z', rate)
a, b, c = symbols('a b c', real=True, finite=True)
assert E(X) == mu1
assert E(X + Y) == mu1 + mu2
assert E(a*X + b) == a*E(X) + b
assert variance(X) == s1**2
assert simplify(variance(X + a*Y + b)) == variance(X) + a**2*variance(Y)
assert E(Z) == 1/rate
assert E(a*Z + b) == a*E(Z) + b
assert E(X + a*Z + b) == mu1 + a/rate + b
def test_cdf():
X = Normal('x', 0, 1)
d = cdf(X)
assert P(X < 1) == d(1)
assert d(0) == S.Half
d = cdf(X, X > 0) # given X>0
assert d(0) == 0
Y = Exponential('y', 10)
d = cdf(Y)
assert d(-5) == 0
assert P(Y > 3) == 1 - d(3)
raises(ValueError, lambda: cdf(X + Y))
Z = Exponential('z', 1)
f = cdf(Z)
z = Symbol('z')
assert f(z) == Piecewise((1 - exp(-z), z >= 0), (0, True))
def test_sample():
z = Symbol('z')
Z = ContinuousRV(z, exp(-z), set=Interval(0, oo))
assert sample(Z) in Z.pspace.domain.set
sym, val = list(Z.pspace.sample().items())[0]
assert sym == Z and val in Interval(0, oo)
def test_ContinuousRV():
x = Symbol('x')
pdf = sqrt(2)*exp(-x**2/2)/(2*sqrt(pi)) # Normal distribution
# X and Y should be equivalent
X = ContinuousRV(x, pdf)
Y = Normal('y', 0, 1)
assert variance(X) == variance(Y)
assert P(X > 0) == P(Y > 0)
def test_arcsin():
a = Symbol("a", real=True)
b = Symbol("b", real=True)
X = Arcsin('x', a, b)
assert density(X)(x) == 1/(pi*sqrt((-x + b)*(x - a)))
def test_benini():
alpha = Symbol("alpha", positive=True)
b = Symbol("beta", positive=True)
sigma = Symbol("sigma", positive=True)
X = Benini('x', alpha, b, sigma)
assert density(X)(x) == ((alpha/x + 2*b*log(x/sigma)/x)
*exp(-alpha*log(x/sigma) - b*log(x/sigma)**2))
def test_beta():
a, b = symbols('alpha beta', positive=True)
B = Beta('x', a, b)
assert pspace(B).domain.set == Interval(0, 1)
dens = density(B)
x = Symbol('x')
assert dens(x) == x**(a - 1)*(1 - x)**(b - 1) / beta(a, b)
# This is too slow
# assert E(B) == a / (a + b)
# assert variance(B) == (a*b) / ((a+b)**2 * (a+b+1))
# Full symbolic solution is too much, test with numeric version
a, b = 1, 2
B = Beta('x', a, b)
assert expand_func(E(B)) == a / S(a + b)
assert expand_func(variance(B)) == (a*b) / S((a + b)**2 * (a + b + 1))
def test_betaprime():
alpha = Symbol("alpha", positive=True)
betap = Symbol("beta", positive=True)
X = BetaPrime('x', alpha, betap)
assert density(X)(x) == x**(alpha - 1)*(x + 1)**(-alpha - betap)/beta(alpha, betap)
def test_cauchy():
x0 = Symbol("x0")
gamma = Symbol("gamma", positive=True)
X = Cauchy('x', x0, gamma)
assert density(X)(x) == 1/(pi*gamma*(1 + (x - x0)**2/gamma**2))
def test_chi():
k = Symbol("k", integer=True)
X = Chi('x', k)
assert density(X)(x) == 2**(-k/2 + 1)*x**(k - 1)*exp(-x**2/2)/gamma(k/2)
def test_chi_noncentral():
k = Symbol("k", integer=True)
l = Symbol("l")
X = ChiNoncentral("x", k, l)
assert density(X)(x) == (x**k*l*(x*l)**(-k/2)*
exp(-x**2/2 - l**2/2)*besseli(k/2 - 1, x*l))
def test_chi_squared():
k = Symbol("k", integer=True)
X = ChiSquared('x', k)
assert density(X)(x) == 2**(-k/2)*x**(k/2 - 1)*exp(-x/2)/gamma(k/2)
def test_dagum():
p = Symbol("p", positive=True)
b = Symbol("b", positive=True)
a = Symbol("a", positive=True)
X = Dagum('x', p, a, b)
assert density(X)(x) == a*p*(x/b)**(a*p)*((x/b)**a + 1)**(-p - 1)/x
def test_erlang():
k = Symbol("k", integer=True, positive=True)
l = Symbol("l", positive=True)
X = Erlang("x", k, l)
assert density(X)(x) == x**(k - 1)*l**k*exp(-x*l)/gamma(k)
def test_exponential():
rate = Symbol('lambda', positive=True, real=True, finite=True)
X = Exponential('x', rate)
assert E(X) == 1/rate
assert variance(X) == 1/rate**2
assert skewness(X) == 2
assert skewness(X) == smoment(X, 3)
assert smoment(2*X, 4) == smoment(X, 4)
assert moment(X, 3) == 3*2*1/rate**3
assert P(X > 0) == S(1)
assert P(X > 1) == exp(-rate)
assert P(X > 10) == exp(-10*rate)
assert where(X <= 1).set == Interval(0, 1)
def test_f_distribution():
d1 = Symbol("d1", positive=True)
d2 = Symbol("d2", positive=True)
X = FDistribution("x", d1, d2)
assert density(X)(x) == (d2**(d2/2)*sqrt((d1*x)**d1*(d1*x + d2)**(-d1 - d2))
/(x*beta(d1/2, d2/2)))
def test_fisher_z():
d1 = Symbol("d1", positive=True)
d2 = Symbol("d2", positive=True)
X = FisherZ("x", d1, d2)
assert density(X)(x) == (2*d1**(d1/2)*d2**(d2/2)*(d1*exp(2*x) + d2)
**(-d1/2 - d2/2)*exp(d1*x)/beta(d1/2, d2/2))
def test_frechet():
a = Symbol("a", positive=True)
s = Symbol("s", positive=True)
m = Symbol("m", real=True)
X = Frechet("x", a, s=s, m=m)
assert density(X)(x) == a*((x - m)/s)**(-a - 1)*exp(-((x - m)/s)**(-a))/s
def test_gamma():
k = Symbol("k", positive=True)
theta = Symbol("theta", positive=True)
X = Gamma('x', k, theta)
assert density(X)(x) == x**(k - 1)*theta**(-k)*exp(-x/theta)/gamma(k)
assert cdf(X, meijerg=True)(z) == Piecewise(
(-k*lowergamma(k, 0)/gamma(k + 1) +
k*lowergamma(k, z/theta)/gamma(k + 1), z >= 0),
(0, True))
# assert simplify(variance(X)) == k*theta**2 # handled numerically below
assert E(X) == moment(X, 1)
k, theta = symbols('k theta', real=True, finite=True, positive=True)
X = Gamma('x', k, theta)
assert simplify(E(X)) == k*theta
# can't get things to simplify on this one so we use subs
assert variance(X).subs(k, 5) == (k*theta**2).subs(k, 5)
# The following is too slow
# assert simplify(skewness(X)).subs(k, 5) == (2/sqrt(k)).subs(k, 5)
def test_gamma_inverse():
a = Symbol("a", positive=True)
b = Symbol("b", positive=True)
X = GammaInverse("x", a, b)
assert density(X)(x) == x**(-a - 1)*b**a*exp(-b/x)/gamma(a)
def test_gompertz():
b = Symbol("b", positive=True)
eta = Symbol("eta", positive=True)
X = Gompertz("x", b, eta)
assert density(X)(x) == b*eta*exp(eta)*exp(b*x)*exp(-eta*exp(b*x))
def test_kumaraswamy():
a = Symbol("a", positive=True)
b = Symbol("b", positive=True)
X = Kumaraswamy("x", a, b)
assert density(X)(x) == x**(a - 1)*a*b*(-x**a + 1)**(b - 1)
def test_laplace():
mu = Symbol("mu")
b = Symbol("b", positive=True)
X = Laplace('x', mu, b)
assert density(X)(x) == exp(-Abs(x - mu)/b)/(2*b)
def test_logistic():
mu = Symbol("mu", real=True)
s = Symbol("s", positive=True)
X = Logistic('x', mu, s)
assert density(X)(x) == exp((-x + mu)/s)/(s*(exp((-x + mu)/s) + 1)**2)
def test_lognormal():
mean = Symbol('mu', real=True, finite=True)
std = Symbol('sigma', positive=True, real=True, finite=True)
X = LogNormal('x', mean, std)
# The sympy integrator can't do this too well
#assert E(X) == exp(mean+std**2/2)
#assert variance(X) == (exp(std**2)-1) * exp(2*mean + std**2)
# Right now, only density function and sampling works
    # Test sampling: with a std of 0, the only sample value is e^mean
for i in range(3):
X = LogNormal('x', i, 0)
assert S(sample(X)) == N(exp(i))
# The sympy integrator can't do this too well
#assert E(X) ==
mu = Symbol("mu", real=True)
sigma = Symbol("sigma", positive=True)
X = LogNormal('x', mu, sigma)
assert density(X)(x) == (sqrt(2)*exp(-(-mu + log(x))**2
/(2*sigma**2))/(2*x*sqrt(pi)*sigma))
X = LogNormal('x', 0, 1) # Mean 0, standard deviation 1
assert density(X)(x) == sqrt(2)*exp(-log(x)**2/2)/(2*x*sqrt(pi))
def test_maxwell():
a = Symbol("a", positive=True)
X = Maxwell('x', a)
assert density(X)(x) == (sqrt(2)*x**2*exp(-x**2/(2*a**2))/
(sqrt(pi)*a**3))
assert E(X) == 2*sqrt(2)*a/sqrt(pi)
assert simplify(variance(X)) == a**2*(-8 + 3*pi)/pi
def test_nakagami():
mu = Symbol("mu", positive=True)
omega = Symbol("omega", positive=True)
X = Nakagami('x', mu, omega)
assert density(X)(x) == (2*x**(2*mu - 1)*mu**mu*omega**(-mu)
*exp(-x**2*mu/omega)/gamma(mu))
assert simplify(E(X, meijerg=True)) == (sqrt(mu)*sqrt(omega)
*gamma(mu + S.Half)/gamma(mu + 1))
assert simplify(variance(X, meijerg=True)) == (
omega - omega*gamma(mu + S(1)/2)**2/(gamma(mu)*gamma(mu + 1)))
def test_pareto():
xm, beta = symbols('xm beta', positive=True, finite=True)
alpha = beta + 5
X = Pareto('x', xm, alpha)
dens = density(X)
x = Symbol('x')
assert dens(x) == x**(-(alpha + 1))*xm**(alpha)*(alpha)
# These fail because SymPy can not deduce that 1/xm != 0
# assert simplify(E(X)) == alpha*xm/(alpha-1)
# assert simplify(variance(X)) == xm**2*alpha / ((alpha-1)**2*(alpha-2))
def test_pareto_numeric():
xm, beta = 3, 2
alpha = beta + 5
X = Pareto('x', xm, alpha)
assert E(X) == alpha*xm/S(alpha - 1)
assert variance(X) == xm**2*alpha / S(((alpha - 1)**2*(alpha - 2)))
# Skewness tests too slow. Try shortcutting function?
def test_raised_cosine():
mu = Symbol("mu", real=True)
s = Symbol("s", positive=True)
X = RaisedCosine("x", mu, s)
assert density(X)(x) == (Piecewise(((cos(pi*(x - mu)/s) + 1)/(2*s),
And(x <= mu + s, mu - s <= x)), (0, True)))
def test_rayleigh():
sigma = Symbol("sigma", positive=True)
X = Rayleigh('x', sigma)
assert density(X)(x) == x*exp(-x**2/(2*sigma**2))/sigma**2
assert E(X) == sqrt(2)*sqrt(pi)*sigma/2
assert variance(X) == -pi*sigma**2/2 + 2*sigma**2
def test_shiftedgompertz():
b = Symbol("b", positive=True)
eta = Symbol("eta", positive=True)
X = ShiftedGompertz("x", b, eta)
assert density(X)(x) == b*(eta*(1 - exp(-b*x)) + 1)*exp(-b*x)*exp(-eta*exp(-b*x))
def test_studentt():
nu = Symbol("nu", positive=True)
X = StudentT('x', nu)
assert density(X)(x) == (1 + x**2/nu)**(-nu/2 - 1/2)/(sqrt(nu)*beta(1/2, nu/2))
@XFAIL
def test_triangular():
a = Symbol("a")
b = Symbol("b")
c = Symbol("c")
X = Triangular('x', a, b, c)
assert density(X)(x) == Piecewise(
((2*x - 2*a)/((-a + b)*(-a + c)), And(a <= x, x < c)),
(2/(-a + b), x == c),
((-2*x + 2*b)/((-a + b)*(b - c)), And(x <= b, c < x)),
(0, True))
def test_quadratic_u():
a = Symbol("a", real=True)
b = Symbol("b", real=True)
X = QuadraticU("x", a, b)
assert density(X)(x) == (Piecewise((12*(x - a/2 - b/2)**2/(-a + b)**3,
And(x <= b, a <= x)), (0, True)))
def test_uniform():
l = Symbol('l', real=True, finite=True)
w = Symbol('w', positive=True, finite=True)
X = Uniform('x', l, l + w)
assert simplify(E(X)) == l + w/2
assert simplify(variance(X)) == w**2/12
# With numbers all is well
X = Uniform('x', 3, 5)
assert P(X < 3) == 0 and P(X > 5) == 0
assert P(X < 4) == P(X > 4) == S.Half
def test_uniform_P():
""" This stopped working because SingleContinuousPSpace.compute_density no
longer calls integrate on a DiracDelta but rather just solves directly.
    integrate used to call UniformDistribution.expectation, which special-cased
    substituting out the Min and Max terms that Uniform produces
I decided to regress on this class for general cleanliness (and I suspect
speed) of the algorithm.
"""
l = Symbol('l', real=True, finite=True)
w = Symbol('w', positive=True, finite=True)
X = Uniform('x', l, l + w)
assert P(X < l) == 0 and P(X > l + w) == 0
@XFAIL
def test_uniformsum():
n = Symbol("n", integer=True)
_k = Symbol("k")
X = UniformSum('x', n)
assert density(X)(x) == (Sum((-1)**_k*(-_k + x)**(n - 1)
*binomial(n, _k), (_k, 0, floor(x)))/factorial(n - 1))
def test_von_mises():
mu = Symbol("mu")
k = Symbol("k", positive=True)
X = VonMises("x", mu, k)
assert density(X)(x) == exp(k*cos(x - mu))/(2*pi*besseli(0, k))
def test_weibull():
a, b = symbols('a b', positive=True)
X = Weibull('x', a, b)
assert simplify(E(X)) == simplify(a * gamma(1 + 1/b))
assert simplify(variance(X)) == simplify(a**2 * gamma(1 + 2/b) - E(X)**2)
# Skewness tests too slow. Try shortcutting function?
def test_weibull_numeric():
# Test for integers and rationals
a = 1
bvals = [S.Half, 1, S(3)/2, 5]
for b in bvals:
X = Weibull('x', a, b)
assert simplify(E(X)) == simplify(a * gamma(1 + 1/S(b)))
assert simplify(variance(X)) == simplify(
a**2 * gamma(1 + 2/S(b)) - E(X)**2)
# Not testing Skew... it's slow with int/frac values > 3/2
def test_wignersemicircle():
R = Symbol("R", positive=True)
X = WignerSemicircle('x', R)
assert density(X)(x) == 2*sqrt(-x**2 + R**2)/(pi*R**2)
assert E(X) == 0
def test_prefab_sampling():
N = Normal('X', 0, 1)
L = LogNormal('L', 0, 1)
E = Exponential('Ex', 1)
P = Pareto('P', 1, 3)
W = Weibull('W', 1, 1)
U = Uniform('U', 0, 1)
B = Beta('B', 2, 5)
G = Gamma('G', 1, 3)
variables = [N, L, E, P, W, U, B, G]
niter = 10
for var in variables:
for i in range(niter):
assert sample(var) in var.pspace.domain.set
def test_input_value_assertions():
a, b = symbols('a b')
p, q = symbols('p q', positive=True)
m, n = symbols('m n', positive=False, real=True)
raises(ValueError, lambda: Normal('x', 3, 0))
raises(ValueError, lambda: Normal('x', m, n))
Normal('X', a, p) # No error raised
raises(ValueError, lambda: Exponential('x', m))
Exponential('Ex', p) # No error raised
for fn in [Pareto, Weibull, Beta, Gamma]:
raises(ValueError, lambda: fn('x', m, p))
raises(ValueError, lambda: fn('x', p, n))
fn('x', p, q) # No error raised
@XFAIL
def test_unevaluated():
X = Normal('x', 0, 1)
assert E(X, evaluate=False) == (
Integral(sqrt(2)*x*exp(-x**2/2)/(2*sqrt(pi)), (x, -oo, oo)))
assert E(X + 1, evaluate=False) == (
Integral(sqrt(2)*x*exp(-x**2/2)/(2*sqrt(pi)), (x, -oo, oo)) + 1)
assert P(X > 0, evaluate=False) == (
Integral(sqrt(2)*exp(-x**2/2)/(2*sqrt(pi)), (x, 0, oo)))
assert P(X > 0, X**2 < 1, evaluate=False) == (
Integral(sqrt(2)*exp(-x**2/2)/(2*sqrt(pi)*
Integral(sqrt(2)*exp(-x**2/2)/(2*sqrt(pi)),
(x, -1, 1))), (x, 0, 1)))
def test_probability_unevaluated():
T = Normal('T', 30, 3)
assert type(P(T > 33, evaluate=False)) == Integral
def test_density_unevaluated():
X = Normal('X', 0, 1)
Y = Normal('Y', 0, 2)
assert isinstance(density(X+Y, evaluate=False)(z), Integral)
def test_NormalDistribution():
nd = NormalDistribution(0, 1)
x = Symbol('x')
assert nd.cdf(x) == (1 - erfc(sqrt(2)*x/2))/2 + S.One/2
assert isinstance(nd.sample(), float) or nd.sample().is_Number
assert nd.expectation(1, x) == 1
assert nd.expectation(x, x) == 0
assert nd.expectation(x**2, x) == 1
def test_random_parameters():
mu = Normal('mu', 2, 3)
meas = Normal('T', mu, 1)
assert density(meas, evaluate=False)(z)
assert isinstance(pspace(meas), ProductPSpace)
#assert density(meas, evaluate=False)(z) == Integral(mu.pspace.pdf *
# meas.pspace.pdf, (mu.symbol, -oo, oo)).subs(meas.symbol, z)
def test_random_parameters_given():
mu = Normal('mu', 2, 3)
meas = Normal('T', mu, 1)
assert given(meas, Eq(mu, 5)) == Normal('T', 5, 1)
def test_conjugate_priors():
mu = Normal('mu', 2, 3)
x = Normal('x', mu, 1)
assert isinstance(simplify(density(mu, Eq(x, y), evaluate=False)(z)),
Integral)
def test_difficult_univariate():
""" Since using solve in place of deltaintegrate we're able to perform
substantially more complex density computations on single continuous random
variables """
x = Normal('x', 0, 1)
assert density(x**3)
assert density(exp(x**2))
assert density(log(x))
def test_issue_10003():
X = Exponential('x', 3)
G = Gamma('g', 1, 2)
assert P(X < -1) == S.Zero
assert P(G < -1) == S.Zero
| bsd-3-clause |
mmagnus/rna-pdb-tools | rna_tools/tools/mini_moderna3/moderna/sequence/ModernaAlphabet.py | 2 | 6090 | #!/usr/bin/env python
#
# ModernaAlphabet.py
#
# Defines alphabet for RNA including modifications.
#
# http://iimcb.genesilico.pl/moderna/
#
__author__ = "Magdalena Rother, Tomasz Puton, Kristian Rother"
__copyright__ = "Copyright 2008, The Moderna Project"
__credits__ = ["Janusz Bujnicki"]
__license__ = "GPL"
__maintainer__ = "Magdalena Rother"
__email__ = "mmusiel@genesilico.pl"
__status__ = "Production"
from rna_tools.tools.mini_moderna3.moderna.Constants import MODIFICATION_NAMES_TABLE_PATH, ANY_RESIDUE, \
RESIDUE_WITHOUT_ONE_LETTER_ABBREV
from rna_tools.tools.mini_moderna3.moderna.util.Errors import AlphabetError
class AlphabetEntry(object):
"""
Collects information about nomenclature for one residue.
Arguments:
- long abbreviation e.g. m1A, m66Am, A
- short abbreviation - one letter abbreviation or ANY RESIDUE (X)
if there is no such abbreviation for residue
- pdb abbreviation - abbreviation used in pdb e.g. MMA, MTA, UNK
- full name
- original base - stores name of original base for modification
If a base is not modified then long_abbrev, short_abbrev,
pdb_abbrev and original_base are equal.
"""
def __init__(self, long_abbrev=None, short_abbrev=None, pdb_abbrev=None, \
full_name=None, original_base=None, new_abbrev=None, \
category=None):
self.long_abbrev = long_abbrev
self.short_abbrev = short_abbrev
self.pdb_abbrev = pdb_abbrev
self.full_name = full_name
self.original_base = original_base
self.new_abbrev = new_abbrev
self.category = category
def __str__(self):
return '<'+self.new_abbrev+'>'
def __repr__(self):
return self.new_abbrev
def __eq__(self, other_ae):
if self.short_abbrev == ANY_RESIDUE or other_ae.short_abbrev == ANY_RESIDUE:
return False
elif self.long_abbrev == other_ae.long_abbrev:
return True
else:
return False
@property
def purine(self):
if self.original_base in ['A', 'G']:
return True
@property
def pyrimidine(self):
if self.original_base in ['U', 'C']:
return True
def get_new_notation_tuple(self):
"""Converts an AlphabetEntry to a (string, length) tuple"""
if self.short_abbrev in ['A', 'G', 'C', 'U', 'Q']:
string = self.new_abbrev
length = len(self.new_abbrev)
else:
string = '0' * (4 - len(self.new_abbrev)) + self.new_abbrev
length = 4
return string, length
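# A minimal sketch of how get_new_notation_tuple() pads entries; the
# abbreviations below are hypothetical and only illustrate the two branches:
# AlphabetEntry(short_abbrev='A', new_abbrev='A').get_new_notation_tuple()
# gives ('A', 1), while an entry with short_abbrev='X' and new_abbrev='1A'
# gives ('001A', 4) - zero-padded to a fixed width of four characters.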
class Alphabet(dict):
"""
Instance of this class contains a dict with nomenclature for modifications:
{ key: AlphabetEntry }
key - long abbreviation e.g. m1A, m66Am, A
    AlphabetEntry contains information such as the short abbreviation, pdb abbreviation, full name and original base.
    To learn more see the documentation for AlphabetEntry.
    There is another dict available as Alphabet._short_original:
{ key: AlphabetEntry}
key - short abbreviation (one letter code)
"""
def __init__(self, table_path = MODIFICATION_NAMES_TABLE_PATH):
dict.__init__(self)
self.table_path = table_path
self.parse_alphabet_table()
self._short_original = {}
self._new_original = {}
self.set_short_original()
self.set_new_original()
def parse_alphabet_table(self):
"""Parses table with different kinds of abbreviations."""
try:
infile = open(self.table_path)
except IOError:
raise AlphabetError('File does not exist: %s ' % self.table_path)
for line in infile:
if line.startswith('#') or line == '' or line == '\n':
continue
tokens = line.replace('\n','').split('\t')
tokens = [t.strip() for t in tokens]
if len(tokens) == 7:
aentry = AlphabetEntry()
aentry.new_abbrev = tokens[0]
aentry.original_base = tokens[1]
aentry.long_abbrev = tokens[2]
aentry.full_name = tokens[3]
if len(tokens[4]) == 1:
aentry.short_abbrev = tokens[4]
else:
aentry.short_abbrev = RESIDUE_WITHOUT_ONE_LETTER_ABBREV
aentry.pdb_abbrev = tokens[5]
aentry.category = tokens[6]
self[tokens[2]] = aentry
            elif len(tokens) > 0:
                raise AlphabetError('Wrong line format: %s' % line)
def set_short_original(self):
"""Creates short_original dict."""
for abbrev in list(self.keys()):
if self[abbrev].short_abbrev != RESIDUE_WITHOUT_ONE_LETTER_ABBREV:
self._short_original[self[abbrev].short_abbrev] = self[abbrev]
# add one defined entry for all entries without one letter abbreviations
if RESIDUE_WITHOUT_ONE_LETTER_ABBREV in self:
self._short_original[RESIDUE_WITHOUT_ONE_LETTER_ABBREV] = self[RESIDUE_WITHOUT_ONE_LETTER_ABBREV]
def set_new_original(self):
"""Creates new_original dict."""
for abbrev in list(self.keys()):
if self[abbrev].new_abbrev not in list(self._new_original.keys()):
self._new_original[self[abbrev].new_abbrev] = self[abbrev]
def get_short_original(self, short_abbrev):
"""Returns proper alphabet entry"""
try:
return self._short_original[short_abbrev]
except KeyError:
raise AlphabetError('This residue [%s] has no one letter abbreviation, cannot return alphabet entry'%short_abbrev)
def get_new_original(self, new_abbrev):
"""Returns proper alphabet entry"""
try:
return self._new_original[new_abbrev]
except KeyError:
raise AlphabetError('This residue [%s] has no new abbreviation, cannot return alphabet entry'%new_abbrev)
# initialize the Alphabet once, for all other classes to import.
alphabet = Alphabet()
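# A minimal usage sketch; 'm1A' follows the long-abbreviation examples in the
# docstrings above, but the exact entries depend on the table behind
# MODIFICATION_NAMES_TABLE_PATH:
# entry = alphabet['m1A']             # lookup by long abbreviation
# entry.original_base                 # 'A' for a methylated adenosine
# alphabet.get_short_original('A')    # lookup by one-letter abbreviation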
| gpl-3.0 |
dsfsdgsbngfggb/odoo | addons/delivery/sale.py | 42 | 4541 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
_inherit = 'sale.order.line'
_columns = {
'is_delivery': fields.boolean("Is a Delivery"),
}
_defaults = {
'is_delivery': False
}
class sale_order(osv.Model):
_inherit = 'sale.order'
_columns = {
'carrier_id': fields.many2one(
"delivery.carrier", string="Delivery Method",
help="Complete this field if you plan to invoice the shipping based on picking."),
}
def onchange_partner_id(self, cr, uid, ids, part, context=None):
result = super(sale_order, self).onchange_partner_id(cr, uid, ids, part, context=context)
if part:
dtype = self.pool.get('res.partner').browse(cr, uid, part, context=context).property_delivery_carrier.id
            # TDE NOTE: not sure the added 'if dtype' is valid
if dtype:
result['value']['carrier_id'] = dtype
return result
def _delivery_unset(self, cr, uid, ids, context=None):
sale_obj = self.pool['sale.order.line']
line_ids = sale_obj.search(cr, uid, [('order_id', 'in', ids), ('is_delivery', '=', True)],context=context)
sale_obj.unlink(cr, uid, line_ids, context=context)
def delivery_set(self, cr, uid, ids, context=None):
line_obj = self.pool.get('sale.order.line')
grid_obj = self.pool.get('delivery.grid')
carrier_obj = self.pool.get('delivery.carrier')
acc_fp_obj = self.pool.get('account.fiscal.position')
self._delivery_unset(cr, uid, ids, context=context)
currency_obj = self.pool.get('res.currency')
line_ids = []
for order in self.browse(cr, uid, ids, context=context):
grid_id = carrier_obj.grid_get(cr, uid, [order.carrier_id.id], order.partner_shipping_id.id)
if not grid_id:
raise osv.except_osv(_('No Grid Available!'), _('No grid matching for this carrier!'))
if order.state not in ('draft', 'sent'):
raise osv.except_osv(_('Order not in Draft State!'), _('The order state have to be draft to add delivery lines.'))
grid = grid_obj.browse(cr, uid, grid_id, context=context)
taxes = grid.carrier_id.product_id.taxes_id.filtered(lambda t: t.company_id.id == order.company_id.id)
fpos = order.fiscal_position or False
taxes_ids = acc_fp_obj.map_tax(cr, uid, fpos, taxes)
price_unit = grid_obj.get_price(cr, uid, grid.id, order, time.strftime('%Y-%m-%d'), context)
if order.company_id.currency_id.id != order.pricelist_id.currency_id.id:
price_unit = currency_obj.compute(cr, uid, order.company_id.currency_id.id, order.pricelist_id.currency_id.id,
price_unit, context=dict(context or {}, date=order.date_order))
values = {
'order_id': order.id,
'name': grid.carrier_id.name,
'product_uom_qty': 1,
'product_uom': grid.carrier_id.product_id.uom_id.id,
'product_id': grid.carrier_id.product_id.id,
'price_unit': price_unit,
'tax_id': [(6, 0, taxes_ids)],
'is_delivery': True,
}
if order.order_line:
values['sequence'] = order.order_line[-1].sequence + 1
line_id = line_obj.create(cr, uid, values, context=context)
line_ids.append(line_id)
return line_ids
| agpl-3.0 |
YOTOV-LIMITED/kuma | vendor/packages/translate/filters/decoration.py | 25 | 10556 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2004-2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""functions to get decorative/informative text out of strings..."""
import re
import unicodedata
from translate.lang import data
def spacestart(str1):
"""returns all the whitespace from the start of the string"""
newstring = u""
for c in str1:
if c.isspace():
newstring += c
else:
break
return newstring
def spaceend(str1):
"""returns all the whitespace from the end of the string"""
newstring = u""
for n in range(len(str1)):
c = str1[-1-n]
if c.isspace():
newstring = c + newstring
else:
break
return newstring
def puncstart(str1, punctuation):
"""returns all the punctuation from the start of the string"""
newstring = u""
for c in str1:
if c in punctuation or c.isspace():
newstring += c
else:
break
return newstring
def puncend(str1, punctuation):
"""returns all the punctuation from the end of the string"""
# An implementation with regular expressions was slightly slower.
newstring = u""
for n in range(len(str1)):
c = str1[-1-n]
if c in punctuation or c.isspace():
newstring = c + newstring
else:
break
return newstring.replace(u"\u00a0", u" ")
def ispurepunctuation(str1):
"""checks whether the string is entirely punctuation"""
for c in str1:
if c.isalnum():
return False
return len(str1)
def isvalidaccelerator(accelerator, acceptlist=None):
"""returns whether the given accelerator character is valid
:type accelerator: character
:param accelerator: A character to be checked for accelerator validity
:type acceptlist: String
:param acceptlist: A list of characters that are permissible as
accelerators
:rtype: Boolean
:return: True if the supplied character is an acceptable accelerator
"""
assert isinstance(accelerator, unicode)
assert isinstance(acceptlist, unicode) or acceptlist is None
if len(accelerator) == 0:
return False
if acceptlist is not None:
acceptlist = data.normalize(acceptlist)
if accelerator in acceptlist:
return True
return False
else:
# Old code path - ensures that we don't get a large number of
# regressions
accelerator = accelerator.replace("_", "")
if accelerator in u"-?":
return True
if not accelerator.isalnum():
return False
# We don't want to have accelerators on characters with diacritics,
# so let's see if the character can decompose.
decomposition = unicodedata.decomposition(accelerator)
# Next we strip out any extra information like <this>
decomposition = re.sub("<[^>]+>", "", decomposition).strip()
return decomposition.count(" ") == 0
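# A few illustrative calls, consistent with the rules above:
# isvalidaccelerator(u"a") is True (plain alphanumeric character)
# isvalidaccelerator(u"\u00e9") is False (decomposes into base letter + accent)
# isvalidaccelerator(u"a", u"bc") is False (not in the supplied accept list)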
def findaccelerators(str1, accelmarker, acceptlist=None):
"""returns all the accelerators and locations in str1 marked with a
given marker"""
accelerators = []
badaccelerators = []
currentpos = 0
while currentpos >= 0:
currentpos = str1.find(accelmarker, currentpos)
if currentpos >= 0:
accelstart = currentpos
currentpos += len(accelmarker)
# we assume accelerators are single characters
accelend = currentpos + 1
if accelend > len(str1):
break
accelerator = str1[currentpos:accelend]
currentpos = accelend
if isvalidaccelerator(accelerator, acceptlist):
accelerators.append((accelstart, accelerator))
else:
badaccelerators.append((accelstart, accelerator))
return accelerators, badaccelerators
def findmarkedvariables(str1, startmarker, endmarker, ignorelist=[]):
"""returns all the variables and locations in str1 marked with a given
marker"""
variables = []
currentpos = 0
while currentpos >= 0:
variable = None
currentpos = str1.find(startmarker, currentpos)
if currentpos >= 0:
startmatch = currentpos
currentpos += len(startmarker)
if endmarker is None:
# handle case without an end marker - use any non-alphanumeric
# character as the end marker, var must be len > 1
endmatch = currentpos
for n in range(currentpos, len(str1)):
if not (str1[n].isalnum() or str1[n] == '_'):
endmatch = n
break
if currentpos == endmatch:
endmatch = len(str1)
if currentpos < endmatch:
variable = str1[currentpos:endmatch]
currentpos = endmatch
elif type(endmarker) == int:
# setting endmarker to an int means it is a fixed-length
# variable string (usually endmarker==1)
endmatch = currentpos + endmarker
if endmatch > len(str1):
break
variable = str1[currentpos:endmatch]
currentpos = endmatch
else:
endmatch = str1.find(endmarker, currentpos)
if endmatch == -1:
break
# search backwards in case there's an intervening startmarker
# (if not it's OK)...
start2 = str1.rfind(startmarker, currentpos, endmatch)
if start2 != -1:
startmatch2 = start2
start2 += len(startmarker)
if start2 != currentpos:
currentpos = start2
startmatch = startmatch2
variable = str1[currentpos:endmatch]
currentpos = endmatch + len(endmarker)
if variable is not None and variable not in ignorelist:
if (not variable or
variable.replace("_", "").replace(".", "").isalnum()):
variables.append((startmatch, variable))
return variables
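# An illustrative call: with an integer endmarker each variable is a
# fixed-length slice after the start marker, e.g. printf-style specifiers:
# findmarkedvariables("%s and %d", "%", 1) returns [(0, 's'), (7, 'd')]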
def getaccelerators(accelmarker, acceptlist=None):
"""returns a function that gets a list of accelerators marked using
accelmarker"""
def getmarkedaccelerators(str1):
"""returns all the accelerators in str1 marked with a given marker"""
acclocs, badlocs = findaccelerators(str1, accelmarker, acceptlist)
accelerators = [accelerator for accelstart, accelerator in acclocs]
badaccelerators = [accelerator for accelstart, accelerator in badlocs]
return accelerators, badaccelerators
return getmarkedaccelerators
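# An illustrative call: a checker for '&'-marked accelerators splits valid
# and invalid ones, e.g. getaccelerators(u"&")(u"&File") returns ([u'F'], [])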
def getvariables(startmarker, endmarker):
"""returns a function that gets a list of variables marked using
startmarker and endmarker"""
def getmarkedvariables(str1):
"""returns all the variables in str1 marked with a given marker"""
varlocs = findmarkedvariables(str1, startmarker, endmarker)
variables = [variable for accelstart, variable in varlocs]
return variables
return getmarkedvariables
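# An illustrative call for python-style named substitutions:
# getvariables(u"%(", u")s")(u"%(name)s found") returns [u'name']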
def getnumbers(str1):
"""returns any numbers that are in the string"""
# TODO: handle locale-based periods e.g. 2,5 for Afrikaans
assert isinstance(str1, unicode)
numbers = []
innumber = False
degreesign = u'\xb0'
lastnumber = ""
carryperiod = ""
for chr1 in str1:
if chr1.isdigit():
innumber = True
elif innumber:
if not (chr1 == '.' or chr1 == degreesign):
innumber = False
if lastnumber:
numbers.append(lastnumber)
lastnumber = ""
if innumber:
if chr1 == degreesign:
lastnumber += chr1
elif chr1 == '.':
carryperiod += chr1
else:
lastnumber += carryperiod + chr1
carryperiod = ""
else:
carryperiod = ""
if innumber:
if lastnumber:
numbers.append(lastnumber)
return numbers
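# An illustrative call: a period inside a digit run is kept while a trailing
# period is dropped, e.g. getnumbers(u"2.5 degrees, then 30.") returns
# [u'2.5', u'30']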
_function_re = re.compile(r'''((?:
[\w\.]+ # function or module name - any alpha-numeric character, _, or .
(?:(?:::|->|\.)\w+)* # (optional) C++ style Class::Method() syntax or pointer->Method() or module.function()
\(\) # Must close with ()
)+)
''', re.VERBOSE) # shouldn't be locale aware
# Reference functions:
# pam_*_item() IO::String NULL() POE::Component::Client::LDAP->new()
# POE::Wheel::Null mechanize.UserAgent POSIX::sigaction()
# window.resizeBy() @fptr()
def getfunctions(str1):
"""returns the functions() that are in a string, while ignoring the
trailing punctuation in the given parameter"""
if u"()" in str1:
return _function_re.findall(str1)
else:
return []
def getemails(str1):
"""returns the email addresses that are in a string"""
return re.findall('[\w\.\-]+@[\w\.\-]+', str1)
def geturls(str1):
"""returns the URIs in a string"""
# TODO turn this into a verbose and compiled regex
URLPAT = 'https?:[\w/\.:;+\-~\%#\$?=&,()]+|' + \
'www\.[\w/\.:;+\-~\%#\$?=&,()]+|' + \
'ftp:[\w/\.:;+\-~\%#?=&,]+'
return re.findall(URLPAT, str1)
def countaccelerators(accelmarker, acceptlist=None):
"""returns a function that counts the number of accelerators marked
with the given marker"""
def countmarkedaccelerators(str1):
"""returns all the variables in str1 marked with a given marker"""
acclocs, badlocs = findaccelerators(str1, accelmarker, acceptlist)
return len(acclocs), len(badlocs)
return countmarkedaccelerators
| mpl-2.0 |
EzyInsights/Diamond | src/diamond/handler/multigraphite.py | 58 | 2412 | # coding=utf-8
"""
Send metrics to a [graphite](http://graphite.wikidot.com/) using the default
interface. Unlike GraphiteHandler, this one supports multiple graphite servers.
Specify them as a comma-separated list of hosts.
"""
from Handler import Handler
from graphite import GraphiteHandler
from copy import deepcopy
class MultiGraphiteHandler(Handler):
"""
Implements the abstract Handler class, sending data to multiple
graphite servers by using two instances of GraphiteHandler
"""
def __init__(self, config=None):
"""
Create a new instance of the MultiGraphiteHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
self.handlers = []
# Initialize Options
hosts = self.config['host']
for host in hosts:
config = deepcopy(self.config)
config['host'] = host
self.handlers.append(GraphiteHandler(config))
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(MultiGraphiteHandler, self).get_default_config_help()
config.update({
'host': 'Hostname, Hostname, Hostname',
'port': 'Port',
'proto': 'udp or tcp',
'timeout': '',
'batch': 'How many to store before sending to the graphite server',
'max_backlog_multiplier': 'how many batches to store before trimming', # NOQA
'trim_backlog_multiplier': 'Trim down how many batches',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(MultiGraphiteHandler, self).get_default_config()
config.update({
'host': ['localhost'],
'port': 2003,
'proto': 'tcp',
'timeout': 15,
'batch': 1,
'max_backlog_multiplier': 5,
'trim_backlog_multiplier': 4,
})
return config
def process(self, metric):
"""
Process a metric by passing it to GraphiteHandler
instances
"""
for handler in self.handlers:
handler.process(metric)
def flush(self):
"""Flush metrics in queue"""
for handler in self.handlers:
handler.flush()
| mit |
wkoathp/glance | glance/tests/functional/db/base.py | 5 | 92939 | # Copyright 2010-2012 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import uuid
import mock
from oslo_utils import timeutils
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from glance.common import exception
from glance import context
from glance.tests import functional
import glance.tests.functional.db as db_tests
from glance.tests import utils as test_utils
# The default sort order of results is whatever sort key is specified,
# plus created_at and id for ties. When we're not specifying a sort_key,
# we get the default (created_at). Some tests below expect the fixtures to be
# returned in array-order, so if the created_at timestamps are the same,
# these tests rely on the UUID* values being in order
UUID1, UUID2, UUID3 = sorted([str(uuid.uuid4()) for x in range(3)])
def build_image_fixture(**kwargs):
default_datetime = timeutils.utcnow()
image = {
'id': str(uuid.uuid4()),
'name': 'fake image #2',
'status': 'active',
'disk_format': 'vhd',
'container_format': 'ovf',
'is_public': True,
'created_at': default_datetime,
'updated_at': default_datetime,
'deleted_at': None,
'deleted': False,
'checksum': None,
'min_disk': 5,
'min_ram': 256,
'size': 19,
'locations': [{'url': "file:///tmp/glance-tests/2",
'metadata': {}, 'status': 'active'}],
'properties': {},
}
image.update(kwargs)
return image
def build_task_fixture(**kwargs):
default_datetime = timeutils.utcnow()
task = {
'id': str(uuid.uuid4()),
'type': 'import',
'status': 'pending',
'input': {'ping': 'pong'},
'owner': str(uuid.uuid4()),
'message': None,
'expires_at': None,
'created_at': default_datetime,
'updated_at': default_datetime
}
task.update(kwargs)
return task
class FunctionalInitWrapper(functional.FunctionalTest):
def setUp(self):
super(FunctionalInitWrapper, self).setUp()
self.config(policy_file=self.policy_file, group='oslo_policy')
class TestDriver(test_utils.BaseTestCase):
def setUp(self):
super(TestDriver, self).setUp()
context_cls = context.RequestContext
self.adm_context = context_cls(is_admin=True,
auth_token='user:user:admin')
self.context = context_cls(is_admin=False,
auth_token='user:user:user')
self.db_api = db_tests.get_db(self.config)
db_tests.reset_db(self.db_api)
self.fixtures = self.build_image_fixtures()
self.create_images(self.fixtures)
def build_image_fixtures(self):
dt1 = timeutils.utcnow()
dt2 = dt1 + datetime.timedelta(microseconds=5)
fixtures = [
{
'id': UUID1,
'created_at': dt1,
'updated_at': dt1,
'properties': {'foo': 'bar', 'far': 'boo'},
'size': 13,
},
{
'id': UUID2,
'created_at': dt1,
'updated_at': dt2,
'size': 17,
},
{
'id': UUID3,
'created_at': dt2,
'updated_at': dt2,
},
]
return [build_image_fixture(**fixture) for fixture in fixtures]
def create_images(self, images):
for fixture in images:
self.db_api.image_create(self.adm_context, fixture)
class DriverTests(object):
def test_image_create_requires_status(self):
fixture = {'name': 'mark', 'size': 12}
self.assertRaises(exception.Invalid,
self.db_api.image_create, self.context, fixture)
fixture = {'name': 'mark', 'size': 12, 'status': 'queued'}
self.db_api.image_create(self.context, fixture)
@mock.patch.object(timeutils, 'utcnow')
def test_image_create_defaults(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime.utcnow()
create_time = timeutils.utcnow()
values = {'status': 'queued',
'created_at': create_time,
'updated_at': create_time}
image = self.db_api.image_create(self.context, values)
self.assertIsNone(image['name'])
self.assertIsNone(image['container_format'])
self.assertEqual(0, image['min_ram'])
self.assertEqual(0, image['min_disk'])
self.assertIsNone(image['owner'])
self.assertFalse(image['is_public'])
self.assertIsNone(image['size'])
self.assertIsNone(image['checksum'])
self.assertIsNone(image['disk_format'])
self.assertEqual([], image['locations'])
self.assertFalse(image['protected'])
self.assertFalse(image['deleted'])
self.assertIsNone(image['deleted_at'])
self.assertEqual([], image['properties'])
self.assertEqual(create_time, image['created_at'])
self.assertEqual(create_time, image['updated_at'])
# Image IDs aren't predictable, but they should be populated
self.assertTrue(uuid.UUID(image['id']))
# NOTE(bcwaldon): the tags attribute should not be returned as a part
# of a core image entity
self.assertFalse('tags' in image)
def test_image_create_duplicate_id(self):
self.assertRaises(exception.Duplicate,
self.db_api.image_create,
self.context, {'id': UUID1, 'status': 'queued'})
def test_image_create_with_locations(self):
locations = [{'url': 'a', 'metadata': {}, 'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'status': 'queued',
'locations': locations}
image = self.db_api.image_create(self.context, fixture)
actual = [{'url': l['url'], 'metadata': l['metadata'],
'status': l['status']}
for l in image['locations']]
self.assertEqual(locations, actual)
def test_image_create_with_location_data(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
image = self.db_api.image_create(self.context, fixture)
actual = [{'url': l['url'], 'metadata': l['metadata'],
'status': l['status']}
for l in image['locations']]
self.assertEqual(location_data, actual)
def test_image_create_properties(self):
fixture = {'status': 'queued', 'properties': {'ping': 'pong'}}
image = self.db_api.image_create(self.context, fixture)
expected = [{'name': 'ping', 'value': 'pong'}]
actual = [{'name': p['name'], 'value': p['value']}
for p in image['properties']]
self.assertEqual(expected, actual)
def test_image_create_unknown_attributes(self):
fixture = {'ping': 'pong'}
self.assertRaises(exception.Invalid,
self.db_api.image_create, self.context, fixture)
def test_image_create_bad_name(self):
bad_name = u'A name with forbidden symbol \U0001f62a'
fixture = {'name': bad_name, 'size': 12, 'status': 'queued'}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_create_bad_property(self):
# bad value
fixture = {'status': 'queued',
'properties': {'bad': u'Bad \U0001f62a'}}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
# bad property names are also not allowed
fixture = {'status': 'queued', 'properties': {u'Bad \U0001f62a': 'ok'}}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_create_bad_location(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': u'Bad \U0001f60a', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_update_core_attribute(self):
fixture = {'status': 'queued'}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual('queued', image['status'])
self.assertNotEqual(image['created_at'], image['updated_at'])
def test_image_update_with_locations(self):
locations = [{'url': 'a', 'metadata': {}, 'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'locations': locations}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(locations, image['locations'])
def test_image_update_with_location_data(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'locations': location_data}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(location_data, image['locations'])
def test_image_update(self):
fixture = {'status': 'queued', 'properties': {'ping': 'pong'}}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
expected = [{'name': 'ping', 'value': 'pong'}]
actual = [{'name': p['name'], 'value': p['value']}
for p in image['properties']]
self.assertEqual(expected, actual)
self.assertEqual('queued', image['status'])
self.assertNotEqual(image['created_at'], image['updated_at'])
def test_image_update_properties(self):
fixture = {'properties': {'ping': 'pong'}}
image = self.db_api.image_update(self.adm_context, UUID1, fixture)
expected = {'ping': 'pong', 'foo': 'bar', 'far': 'boo'}
actual = dict((p['name'], p['value']) for p in image['properties'])
self.assertEqual(expected, actual)
self.assertNotEqual(image['created_at'], image['updated_at'])
def test_image_update_purge_properties(self):
fixture = {'properties': {'ping': 'pong'}}
image = self.db_api.image_update(self.adm_context, UUID1,
fixture, purge_props=True)
properties = dict((p['name'], p) for p in image['properties'])
# New properties are set
self.assertTrue('ping' in properties)
self.assertEqual('pong', properties['ping']['value'])
self.assertFalse(properties['ping']['deleted'])
# Original properties still show up, but with deleted=True
# TODO(markwash): db api should not return deleted properties
self.assertTrue('foo' in properties)
self.assertEqual('bar', properties['foo']['value'])
self.assertTrue(properties['foo']['deleted'])
def test_image_update_bad_name(self):
fixture = {'name': u'A new name with forbidden symbol \U0001f62a'}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
def test_image_update_bad_property(self):
# bad value
fixture = {'status': 'queued',
'properties': {'bad': u'Bad \U0001f62a'}}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
# bad property names are also not allowed
fixture = {'status': 'queued', 'properties': {u'Bad \U0001f62a': 'ok'}}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
def test_image_update_bad_location(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': u'Bad \U0001f60a', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
def test_update_locations_direct(self):
"""
        For some reason update_locations can be called directly
        (not via image_update), so it is better to check that everything is ok
        when it is passed 4 byte unicode characters
"""
# update locations correctly first to retrieve existing location id
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'}]
fixture = {'locations': location_data}
image = self.db_api.image_update(self.adm_context, UUID1, fixture)
self.assertEqual(1, len(image['locations']))
self.assertIn('id', image['locations'][0])
loc_id = image['locations'][0].pop('id')
bad_location = {'url': u'Bad \U0001f60a', 'metadata': {},
'status': 'active', 'id': loc_id}
self.assertRaises(exception.Invalid,
self.db_api.image_location_update,
self.adm_context, UUID1, bad_location)
def test_image_property_delete(self):
fixture = {'name': 'ping', 'value': 'pong', 'image_id': UUID1}
prop = self.db_api.image_property_create(self.context, fixture)
prop = self.db_api.image_property_delete(self.context,
prop['name'], UUID1)
self.assertIsNotNone(prop['deleted_at'])
self.assertTrue(prop['deleted'])
def test_image_get(self):
image = self.db_api.image_get(self.context, UUID1)
self.assertEqual(self.fixtures[0]['id'], image['id'])
def test_image_get_disallow_deleted(self):
self.db_api.image_destroy(self.adm_context, UUID1)
self.assertRaises(exception.NotFound, self.db_api.image_get,
self.context, UUID1)
def test_image_get_allow_deleted(self):
self.db_api.image_destroy(self.adm_context, UUID1)
image = self.db_api.image_get(self.adm_context, UUID1)
self.assertEqual(self.fixtures[0]['id'], image['id'])
self.assertTrue(image['deleted'])
def test_image_get_force_allow_deleted(self):
self.db_api.image_destroy(self.adm_context, UUID1)
image = self.db_api.image_get(self.context, UUID1,
force_show_deleted=True)
self.assertEqual(self.fixtures[0]['id'], image['id'])
def test_image_get_not_owned(self):
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
ctxt2 = context.RequestContext(is_admin=False, tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
image = self.db_api.image_create(
ctxt1, {'status': 'queued', 'owner': TENANT1})
self.assertRaises(exception.Forbidden,
self.db_api.image_get, ctxt2, image['id'])
def test_image_get_not_found(self):
UUID = str(uuid.uuid4())
self.assertRaises(exception.NotFound,
self.db_api.image_get, self.context, UUID)
def test_image_get_all(self):
images = self.db_api.image_get_all(self.context)
self.assertEqual(3, len(images))
def test_image_get_all_with_filter(self):
images = self.db_api.image_get_all(self.context,
filters={
'id': self.fixtures[0]['id'],
})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_with_filter_user_defined_property(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'bar'})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_with_filter_nonexistent_userdef_property(self):
images = self.db_api.image_get_all(self.context,
filters={'faz': 'boo'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_userdef_prop_nonexistent_value(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'baz'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_multiple_user_defined_properties(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'bar',
'far': 'boo'})
self.assertEqual(1, len(images))
self.assertEqual(images[0]['id'], self.fixtures[0]['id'])
def test_image_get_all_with_filter_nonexistent_user_defined_property(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'bar',
'faz': 'boo'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_user_deleted_property(self):
fixture = {'name': 'poo', 'value': 'bear', 'image_id': UUID1}
prop = self.db_api.image_property_create(self.context,
fixture)
images = self.db_api.image_get_all(self.context,
filters={
'properties': {'poo': 'bear'},
})
self.assertEqual(1, len(images))
self.db_api.image_property_delete(self.context,
prop['name'], images[0]['id'])
images = self.db_api.image_get_all(self.context,
filters={
'properties': {'poo': 'bear'},
})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_undefined_property(self):
images = self.db_api.image_get_all(self.context,
filters={'poo': 'bear'})
self.assertEqual(0, len(images))
def test_image_get_all_size_min_max(self):
images = self.db_api.image_get_all(self.context,
filters={
'size_min': 10,
'size_max': 15,
})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_size_min(self):
images = self.db_api.image_get_all(self.context,
filters={'size_min': 15})
self.assertEqual(2, len(images))
self.assertEqual(self.fixtures[2]['id'], images[0]['id'])
self.assertEqual(self.fixtures[1]['id'], images[1]['id'])
def test_image_get_all_size_range(self):
images = self.db_api.image_get_all(self.context,
filters={'size_max': 15,
'size_min': 20})
self.assertEqual(0, len(images))
def test_image_get_all_size_max(self):
images = self.db_api.image_get_all(self.context,
filters={'size_max': 15})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_with_filter_min_range_bad_value(self):
self.assertRaises(exception.InvalidFilterRangeValue,
self.db_api.image_get_all,
self.context, filters={'size_min': 'blah'})
def test_image_get_all_with_filter_max_range_bad_value(self):
self.assertRaises(exception.InvalidFilterRangeValue,
self.db_api.image_get_all,
self.context, filters={'size_max': 'blah'})
def test_image_get_all_marker(self):
images = self.db_api.image_get_all(self.context, marker=UUID3)
self.assertEqual(2, len(images))
def test_image_get_all_marker_deleted(self):
"""Cannot specify a deleted image as a marker."""
self.db_api.image_destroy(self.adm_context, UUID1)
filters = {'deleted': False}
self.assertRaises(exception.NotFound, self.db_api.image_get_all,
self.context, marker=UUID1, filters=filters)
def test_image_get_all_marker_deleted_showing_deleted_as_admin(self):
"""Specify a deleted image as a marker if showing deleted images."""
self.db_api.image_destroy(self.adm_context, UUID3)
images = self.db_api.image_get_all(self.adm_context, marker=UUID3)
# NOTE(bcwaldon): an admin should see all images (deleted or not)
self.assertEqual(2, len(images))
def test_image_get_all_marker_deleted_showing_deleted(self):
"""Specify a deleted image as a marker if showing deleted images.
A non-admin user has to explicitly ask for deleted
images, and should only see deleted images in the results
"""
self.db_api.image_destroy(self.adm_context, UUID3)
self.db_api.image_destroy(self.adm_context, UUID1)
filters = {'deleted': True}
images = self.db_api.image_get_all(self.context, marker=UUID3,
filters=filters)
self.assertEqual(1, len(images))
def test_image_get_all_marker_null_name_desc(self):
"""Check an image with name null is handled
        Check an image with name null is handled when
marker is specified and order is descending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'name': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['name'],
sort_dir=['desc'])
image_ids = [image['id'] for image in images]
expected = []
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_disk_format_desc(self):
"""Check an image with disk_format null is handled
Check an image with disk_format null is handled when
marker is specified and order is descending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'disk_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['disk_format'],
sort_dir=['desc'])
image_ids = [image['id'] for image in images]
expected = []
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_container_format_desc(self):
"""Check an image with container_format null is handled
Check an image with container_format null is handled when
marker is specified and order is descending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'container_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['container_format'],
sort_dir=['desc'])
image_ids = [image['id'] for image in images]
expected = []
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_name_asc(self):
"""Check an image with name null is handled
Check an image with name null is handled when
marker is specified and order is ascending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'name': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['name'],
sort_dir=['asc'])
image_ids = [image['id'] for image in images]
expected = [UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_disk_format_asc(self):
"""Check an image with disk_format null is handled
Check an image with disk_format null is handled when
marker is specified and order is ascending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'disk_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['disk_format'],
sort_dir=['asc'])
image_ids = [image['id'] for image in images]
expected = [UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_container_format_asc(self):
"""Check an image with container_format null is handled
Check an image with container_format null is handled when
marker is specified and order is ascending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'container_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['container_format'],
sort_dir=['asc'])
image_ids = [image['id'] for image in images]
expected = [UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_limit(self):
images = self.db_api.image_get_all(self.context, limit=2)
self.assertEqual(2, len(images))
# A limit of None should not equate to zero
images = self.db_api.image_get_all(self.context, limit=None)
self.assertEqual(3, len(images))
# A limit of zero should actually mean zero
images = self.db_api.image_get_all(self.context, limit=0)
self.assertEqual(0, len(images))
def test_image_get_all_owned(self):
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False,
tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
image_meta_data = {'id': UUIDX, 'status': 'queued', 'owner': TENANT1}
self.db_api.image_create(ctxt1, image_meta_data)
TENANT2 = str(uuid.uuid4())
ctxt2 = context.RequestContext(is_admin=False,
tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
UUIDY = str(uuid.uuid4())
image_meta_data = {'id': UUIDY, 'status': 'queued', 'owner': TENANT2}
self.db_api.image_create(ctxt2, image_meta_data)
images = self.db_api.image_get_all(ctxt1)
image_ids = [image['id'] for image in images]
expected = [UUIDX, UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_owned_checksum(self):
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False,
tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
CHECKSUM1 = '91264c3edf5972c9f1cb309543d38a5c'
image_meta_data = {
'id': UUIDX,
'status': 'queued',
'checksum': CHECKSUM1,
'owner': TENANT1
}
self.db_api.image_create(ctxt1, image_meta_data)
image_member_data = {
'image_id': UUIDX,
'member': TENANT1,
'can_share': False,
"status": "accepted",
}
self.db_api.image_member_create(ctxt1, image_member_data)
TENANT2 = str(uuid.uuid4())
ctxt2 = context.RequestContext(is_admin=False,
tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
UUIDY = str(uuid.uuid4())
CHECKSUM2 = '92264c3edf5972c9f1cb309543d38a5c'
image_meta_data = {
'id': UUIDY,
'status': 'queued',
'checksum': CHECKSUM2,
'owner': TENANT2
}
self.db_api.image_create(ctxt2, image_meta_data)
image_member_data = {
'image_id': UUIDY,
'member': TENANT2,
'can_share': False,
"status": "accepted",
}
self.db_api.image_member_create(ctxt2, image_member_data)
filters = {'visibility': 'shared', 'checksum': CHECKSUM2}
images = self.db_api.image_get_all(ctxt2, filters)
self.assertEqual(1, len(images))
self.assertEqual(UUIDY, images[0]['id'])
def test_image_get_all_with_filter_tags(self):
self.db_api.image_tag_create(self.context, UUID1, 'x86')
self.db_api.image_tag_create(self.context, UUID1, '64bit')
self.db_api.image_tag_create(self.context, UUID2, 'power')
self.db_api.image_tag_create(self.context, UUID2, '64bit')
images = self.db_api.image_get_all(self.context,
filters={'tags': ['64bit']})
self.assertEqual(2, len(images))
image_ids = [image['id'] for image in images]
expected = [UUID1, UUID2]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_with_filter_multi_tags(self):
self.db_api.image_tag_create(self.context, UUID1, 'x86')
self.db_api.image_tag_create(self.context, UUID1, '64bit')
self.db_api.image_tag_create(self.context, UUID2, 'power')
self.db_api.image_tag_create(self.context, UUID2, '64bit')
images = self.db_api.image_get_all(self.context,
filters={'tags': ['64bit', 'power']
})
self.assertEqual(1, len(images))
self.assertEqual(UUID2, images[0]['id'])
def test_image_get_all_with_filter_tags_and_nonexistent(self):
self.db_api.image_tag_create(self.context, UUID1, 'x86')
images = self.db_api.image_get_all(self.context,
filters={'tags': ['x86', 'fake']
})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_deleted_tags(self):
tag = self.db_api.image_tag_create(self.context, UUID1, 'AIX')
images = self.db_api.image_get_all(self.context,
filters={
'tags': [tag],
})
self.assertEqual(1, len(images))
self.db_api.image_tag_delete(self.context, UUID1, tag)
images = self.db_api.image_get_all(self.context,
filters={
'tags': [tag],
})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_undefined_tags(self):
images = self.db_api.image_get_all(self.context,
filters={'tags': ['fake']})
self.assertEqual(0, len(images))
def test_image_paginate(self):
"""Paginate through a list of images using limit and marker"""
now = timeutils.utcnow()
extra_uuids = [(str(uuid.uuid4()),
now + datetime.timedelta(seconds=i * 5))
for i in range(2)]
extra_images = [build_image_fixture(id=_id,
created_at=_dt,
updated_at=_dt)
for _id, _dt in extra_uuids]
self.create_images(extra_images)
# Reverse uuids to match default sort of created_at
extra_uuids.reverse()
page = self.db_api.image_get_all(self.context, limit=2)
self.assertEqual([i[0] for i in extra_uuids], [i['id'] for i in page])
last = page[-1]['id']
page = self.db_api.image_get_all(self.context, limit=2, marker=last)
self.assertEqual([UUID3, UUID2], [i['id'] for i in page])
page = self.db_api.image_get_all(self.context, limit=2, marker=UUID2)
self.assertEqual([UUID1], [i['id'] for i in page])
def test_image_get_all_invalid_sort_key(self):
self.assertRaises(exception.InvalidSortKey, self.db_api.image_get_all,
self.context, sort_key=['blah'])
def test_image_get_all_limit_marker(self):
images = self.db_api.image_get_all(self.context, limit=2)
self.assertEqual(2, len(images))
def test_image_get_all_with_tag_returning(self):
expected_tags = {UUID1: ['foo'], UUID2: ['bar'], UUID3: ['baz']}
self.db_api.image_tag_create(self.context, UUID1,
expected_tags[UUID1][0])
self.db_api.image_tag_create(self.context, UUID2,
expected_tags[UUID2][0])
self.db_api.image_tag_create(self.context, UUID3,
expected_tags[UUID3][0])
images = self.db_api.image_get_all(self.context, return_tag=True)
self.assertEqual(3, len(images))
for image in images:
self.assertIn('tags', image)
self.assertEqual(expected_tags[image['id']], image['tags'])
self.db_api.image_tag_delete(self.context, UUID1,
expected_tags[UUID1][0])
expected_tags[UUID1] = []
images = self.db_api.image_get_all(self.context, return_tag=True)
self.assertEqual(3, len(images))
for image in images:
self.assertIn('tags', image)
self.assertEqual(expected_tags[image['id']], image['tags'])
def test_image_destroy(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
image = self.db_api.image_create(self.context, fixture)
IMG_ID = image['id']
fixture = {'name': 'ping', 'value': 'pong', 'image_id': IMG_ID}
prop = self.db_api.image_property_create(self.context, fixture)
TENANT2 = str(uuid.uuid4())
fixture = {'image_id': IMG_ID, 'member': TENANT2, 'can_share': False}
member = self.db_api.image_member_create(self.context, fixture)
self.db_api.image_tag_create(self.context, IMG_ID, 'snarf')
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(location_data, image['locations'])
self.assertEqual(('ping', 'pong', IMG_ID, False),
(prop['name'], prop['value'],
prop['image_id'], prop['deleted']))
self.assertEqual((TENANT2, IMG_ID, False),
(member['member'], member['image_id'],
member['can_share']))
self.assertEqual(['snarf'],
self.db_api.image_tag_get_all(self.context, IMG_ID))
image = self.db_api.image_destroy(self.adm_context, IMG_ID)
self.assertTrue(image['deleted'])
self.assertTrue(image['deleted_at'])
self.assertRaises(exception.NotFound, self.db_api.image_get,
self.context, IMG_ID)
self.assertEqual([], image['locations'])
prop = image['properties'][0]
self.assertEqual(('ping', IMG_ID, True),
(prop['name'], prop['image_id'], prop['deleted']))
self.context.auth_token = 'user:%s:user' % TENANT2
members = self.db_api.image_member_find(self.context, IMG_ID)
self.assertEqual([], members)
tags = self.db_api.image_tag_get_all(self.context, IMG_ID)
self.assertEqual([], tags)
def test_image_destroy_with_delete_all(self):
"""Check the image child element's _image_delete_all methods.
        Checks that all the image_delete_all methods delete only the child
        elements of the image being deleted.
"""
TENANT2 = str(uuid.uuid4())
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
def _create_image_with_child_entries():
fixture = {'status': 'queued', 'locations': location_data}
image_id = self.db_api.image_create(self.context, fixture)['id']
fixture = {'name': 'ping', 'value': 'pong', 'image_id': image_id}
self.db_api.image_property_create(self.context, fixture)
fixture = {'image_id': image_id, 'member': TENANT2,
'can_share': False}
self.db_api.image_member_create(self.context, fixture)
self.db_api.image_tag_create(self.context, image_id, 'snarf')
return image_id
ACTIVE_IMG_ID = _create_image_with_child_entries()
DEL_IMG_ID = _create_image_with_child_entries()
deleted_image = self.db_api.image_destroy(self.adm_context, DEL_IMG_ID)
self.assertTrue(deleted_image['deleted'])
self.assertTrue(deleted_image['deleted_at'])
self.assertRaises(exception.NotFound, self.db_api.image_get,
self.context, DEL_IMG_ID)
active_image = self.db_api.image_get(self.context, ACTIVE_IMG_ID)
self.assertFalse(active_image['deleted'])
self.assertFalse(active_image['deleted_at'])
self.assertEqual(2, len(active_image['locations']))
self.assertTrue('id' in active_image['locations'][0])
self.assertTrue('id' in active_image['locations'][1])
active_image['locations'][0].pop('id')
active_image['locations'][1].pop('id')
self.assertEqual(location_data, active_image['locations'])
self.assertEqual(1, len(active_image['properties']))
prop = active_image['properties'][0]
self.assertEqual(('ping', 'pong', ACTIVE_IMG_ID),
(prop['name'], prop['value'],
prop['image_id']))
self.assertEqual((False, None),
(prop['deleted'], prop['deleted_at']))
self.context.auth_token = 'user:%s:user' % TENANT2
members = self.db_api.image_member_find(self.context, ACTIVE_IMG_ID)
self.assertEqual(1, len(members))
member = members[0]
self.assertEqual((TENANT2, ACTIVE_IMG_ID, False),
(member['member'], member['image_id'],
member['can_share']))
tags = self.db_api.image_tag_get_all(self.context, ACTIVE_IMG_ID)
self.assertEqual(['snarf'], tags)
def test_image_get_multiple_members(self):
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1,
owner_is_tenant=True)
ctxt2 = context.RequestContext(is_admin=False, user=TENANT2,
auth_token='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
# We need private image and context.owner should not match image
# owner
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
'owner': TENANT1})
values = {'image_id': UUIDX, 'member': TENANT2, 'can_share': False}
self.db_api.image_member_create(ctxt1, values)
image = self.db_api.image_get(ctxt2, UUIDX)
self.assertEqual(UUIDX, image['id'])
# by default get_all displays only images with status 'accepted'
images = self.db_api.image_get_all(ctxt2)
self.assertEqual(3, len(images))
# filter by rejected
images = self.db_api.image_get_all(ctxt2, member_status='rejected')
self.assertEqual(3, len(images))
# filter by visibility
images = self.db_api.image_get_all(ctxt2,
filters={'visibility': 'shared'})
self.assertEqual(0, len(images))
# filter by visibility
images = self.db_api.image_get_all(ctxt2, member_status='pending',
filters={'visibility': 'shared'})
self.assertEqual(1, len(images))
# filter by visibility
images = self.db_api.image_get_all(ctxt2, member_status='all',
filters={'visibility': 'shared'})
self.assertEqual(1, len(images))
# filter by status pending
images = self.db_api.image_get_all(ctxt2, member_status='pending')
self.assertEqual(4, len(images))
# filter by status all
images = self.db_api.image_get_all(ctxt2, member_status='all')
self.assertEqual(4, len(images))
def test_is_image_visible(self):
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1,
owner_is_tenant=True)
ctxt2 = context.RequestContext(is_admin=False, user=TENANT2,
auth_token='user:%s:user' % TENANT2,
owner_is_tenant=False)
UUIDX = str(uuid.uuid4())
# We need private image and context.owner should not match image
# owner
image = self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'is_public': False,
'owner': TENANT1})
values = {'image_id': UUIDX, 'member': TENANT2, 'can_share': False}
self.db_api.image_member_create(ctxt1, values)
result = self.db_api.is_image_visible(ctxt2, image)
self.assertTrue(result)
# image should not be visible for a deleted member
members = self.db_api.image_member_find(ctxt1, image_id=UUIDX)
self.db_api.image_member_delete(ctxt1, members[0]['id'])
result = self.db_api.is_image_visible(ctxt2, image)
self.assertFalse(result)
def test_image_tag_create(self):
tag = self.db_api.image_tag_create(self.context, UUID1, 'snap')
self.assertEqual('snap', tag)
def test_image_tag_create_bad_value(self):
self.assertRaises(exception.Invalid,
self.db_api.image_tag_create, self.context,
UUID1, u'Bad \U0001f62a')
def test_image_tag_set_all(self):
tags = self.db_api.image_tag_get_all(self.context, UUID1)
self.assertEqual([], tags)
self.db_api.image_tag_set_all(self.context, UUID1, ['ping', 'pong'])
tags = self.db_api.image_tag_get_all(self.context, UUID1)
# NOTE(bcwaldon): tag ordering should match exactly what was provided
self.assertEqual(['ping', 'pong'], tags)
def test_image_tag_get_all(self):
self.db_api.image_tag_create(self.context, UUID1, 'snap')
self.db_api.image_tag_create(self.context, UUID1, 'snarf')
self.db_api.image_tag_create(self.context, UUID2, 'snarf')
# Check the tags for the first image
tags = self.db_api.image_tag_get_all(self.context, UUID1)
expected = ['snap', 'snarf']
self.assertEqual(expected, tags)
# Check the tags for the second image
tags = self.db_api.image_tag_get_all(self.context, UUID2)
expected = ['snarf']
self.assertEqual(expected, tags)
def test_image_tag_get_all_no_tags(self):
actual = self.db_api.image_tag_get_all(self.context, UUID1)
self.assertEqual([], actual)
def test_image_tag_get_all_non_existent_image(self):
bad_image_id = str(uuid.uuid4())
actual = self.db_api.image_tag_get_all(self.context, bad_image_id)
self.assertEqual([], actual)
def test_image_tag_delete(self):
self.db_api.image_tag_create(self.context, UUID1, 'snap')
self.db_api.image_tag_delete(self.context, UUID1, 'snap')
self.assertRaises(exception.NotFound, self.db_api.image_tag_delete,
self.context, UUID1, 'snap')
@mock.patch.object(timeutils, 'utcnow')
def test_image_member_create(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime.utcnow()
memberships = self.db_api.image_member_find(self.context)
self.assertEqual([], memberships)
TENANT1 = str(uuid.uuid4())
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
self.db_api.image_member_create(self.context,
{'member': TENANT1, 'image_id': UUID1})
memberships = self.db_api.image_member_find(self.context)
self.assertEqual(1, len(memberships))
actual = memberships[0]
self.assertIsNotNone(actual['created_at'])
self.assertIsNotNone(actual['updated_at'])
actual.pop('id')
actual.pop('created_at')
actual.pop('updated_at')
expected = {
'member': TENANT1,
'image_id': UUID1,
'can_share': False,
'status': 'pending',
}
self.assertEqual(expected, actual)
def test_image_member_update(self):
TENANT1 = str(uuid.uuid4())
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
member = self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
member_id = member.pop('id')
member.pop('created_at')
member.pop('updated_at')
expected = {'member': TENANT1,
'image_id': UUID1,
'status': 'pending',
'can_share': False}
self.assertEqual(expected, member)
member = self.db_api.image_member_update(self.context,
member_id,
{'can_share': True})
self.assertNotEqual(member['created_at'], member['updated_at'])
member.pop('id')
member.pop('created_at')
member.pop('updated_at')
expected = {'member': TENANT1,
'image_id': UUID1,
'status': 'pending',
'can_share': True}
self.assertEqual(expected, member)
members = self.db_api.image_member_find(self.context,
member=TENANT1,
image_id=UUID1)
member = members[0]
member.pop('id')
member.pop('created_at')
member.pop('updated_at')
self.assertEqual(expected, member)
def test_image_member_update_status(self):
TENANT1 = str(uuid.uuid4())
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
member = self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
member_id = member.pop('id')
member.pop('created_at')
member.pop('updated_at')
expected = {'member': TENANT1,
'image_id': UUID1,
'status': 'pending',
'can_share': False}
self.assertEqual(expected, member)
member = self.db_api.image_member_update(self.context,
member_id,
{'status': 'accepted'})
self.assertNotEqual(member['created_at'], member['updated_at'])
member.pop('id')
member.pop('created_at')
member.pop('updated_at')
expected = {'member': TENANT1,
'image_id': UUID1,
'status': 'accepted',
'can_share': False}
self.assertEqual(expected, member)
members = self.db_api.image_member_find(self.context,
member=TENANT1,
image_id=UUID1)
member = members[0]
member.pop('id')
member.pop('created_at')
member.pop('updated_at')
self.assertEqual(expected, member)
def test_image_member_find(self):
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
fixtures = [
{'member': TENANT1, 'image_id': UUID1},
{'member': TENANT1, 'image_id': UUID2, 'status': 'rejected'},
{'member': TENANT2, 'image_id': UUID1, 'status': 'accepted'},
]
for f in fixtures:
self.db_api.image_member_create(self.context, copy.deepcopy(f))
def _simplify(output):
return
def _assertMemberListMatch(list1, list2):
_simple = lambda x: set([(o['member'], o['image_id']) for o in x])
self.assertEqual(_simple(list1), _simple(list2))
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
output = self.db_api.image_member_find(self.context, member=TENANT1)
_assertMemberListMatch([fixtures[0], fixtures[1]], output)
output = self.db_api.image_member_find(self.adm_context,
image_id=UUID1)
_assertMemberListMatch([fixtures[0], fixtures[2]], output)
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT2
output = self.db_api.image_member_find(self.context,
member=TENANT2,
image_id=UUID1)
_assertMemberListMatch([fixtures[2]], output)
output = self.db_api.image_member_find(self.context,
status='accepted')
_assertMemberListMatch([fixtures[2]], output)
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
output = self.db_api.image_member_find(self.context,
status='rejected')
_assertMemberListMatch([fixtures[1]], output)
output = self.db_api.image_member_find(self.context,
status='pending')
_assertMemberListMatch([fixtures[0]], output)
output = self.db_api.image_member_find(self.context,
status='pending',
image_id=UUID2)
_assertMemberListMatch([], output)
image_id = str(uuid.uuid4())
output = self.db_api.image_member_find(self.context,
member=TENANT2,
image_id=image_id)
_assertMemberListMatch([], output)
def test_image_member_count(self):
TENANT1 = str(uuid.uuid4())
self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
actual = self.db_api.image_member_count(self.context, UUID1)
self.assertEqual(1, actual)
def test_image_member_count_invalid_image_id(self):
TENANT1 = str(uuid.uuid4())
self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
self.assertRaises(exception.Invalid, self.db_api.image_member_count,
self.context, None)
def test_image_member_count_empty_image_id(self):
TENANT1 = str(uuid.uuid4())
self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
self.assertRaises(exception.Invalid, self.db_api.image_member_count,
self.context, "")
def test_image_member_delete(self):
TENANT1 = str(uuid.uuid4())
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
fixture = {'member': TENANT1, 'image_id': UUID1, 'can_share': True}
member = self.db_api.image_member_create(self.context, fixture)
self.assertEqual(1, len(self.db_api.image_member_find(self.context)))
member = self.db_api.image_member_delete(self.context, member['id'])
self.assertEqual(0, len(self.db_api.image_member_find(self.context)))
class DriverQuotaTests(test_utils.BaseTestCase):
def setUp(self):
super(DriverQuotaTests, self).setUp()
self.owner_id1 = str(uuid.uuid4())
self.context1 = context.RequestContext(
is_admin=False, user=self.owner_id1, tenant=self.owner_id1,
auth_token='%s:%s:user' % (self.owner_id1, self.owner_id1))
self.db_api = db_tests.get_db(self.config)
db_tests.reset_db(self.db_api)
dt1 = timeutils.utcnow()
dt2 = dt1 + datetime.timedelta(microseconds=5)
fixtures = [
{
'id': UUID1,
'created_at': dt1,
'updated_at': dt1,
'size': 13,
'owner': self.owner_id1,
},
{
'id': UUID2,
'created_at': dt1,
'updated_at': dt2,
'size': 17,
'owner': self.owner_id1,
},
{
'id': UUID3,
'created_at': dt2,
'updated_at': dt2,
'size': 7,
'owner': self.owner_id1,
},
]
self.owner1_fixtures = [
build_image_fixture(**fixture) for fixture in fixtures]
for fixture in self.owner1_fixtures:
self.db_api.image_create(self.context1, fixture)
def test_storage_quota(self):
total = reduce(lambda x, y: x + y,
[f['size'] for f in self.owner1_fixtures])
x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
self.assertEqual(total, x)
def test_storage_quota_without_image_id(self):
total = reduce(lambda x, y: x + y,
[f['size'] for f in self.owner1_fixtures])
total = total - self.owner1_fixtures[0]['size']
x = self.db_api.user_get_storage_usage(
self.context1, self.owner_id1,
image_id=self.owner1_fixtures[0]['id'])
self.assertEqual(total, x)
def test_storage_quota_multiple_locations(self):
dt1 = timeutils.utcnow()
sz = 53
new_fixture_dict = {'id': str(uuid.uuid4()), 'created_at': dt1,
'updated_at': dt1, 'size': sz,
'owner': self.owner_id1}
new_fixture = build_image_fixture(**new_fixture_dict)
new_fixture['locations'].append({'url': 'file:///some/path/file',
'metadata': {},
'status': 'active'})
self.db_api.image_create(self.context1, new_fixture)
total = reduce(lambda x, y: x + y,
[f['size'] for f in self.owner1_fixtures]) + (sz * 2)
x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
self.assertEqual(total, x)
def test_storage_quota_deleted_image(self):
# NOTE(flaper87): This needs to be tested for
# soft deleted images as well. Currently there's no
# good way to delete locations.
dt1 = timeutils.utcnow()
sz = 53
image_id = str(uuid.uuid4())
new_fixture_dict = {'id': image_id, 'created_at': dt1,
'updated_at': dt1, 'size': sz,
'owner': self.owner_id1}
new_fixture = build_image_fixture(**new_fixture_dict)
new_fixture['locations'].append({'url': 'file:///some/path/file',
'metadata': {},
'status': 'active'})
self.db_api.image_create(self.context1, new_fixture)
total = reduce(lambda x, y: x + y,
[f['size'] for f in self.owner1_fixtures])
x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
self.assertEqual(total + (sz * 2), x)
self.db_api.image_destroy(self.context1, image_id)
x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
self.assertEqual(total, x)
class TaskTests(test_utils.BaseTestCase):
def setUp(self):
super(TaskTests, self).setUp()
self.owner_id = str(uuid.uuid4())
self.adm_context = context.RequestContext(is_admin=True,
auth_token='user:user:admin')
self.context = context.RequestContext(
is_admin=False, auth_token='user:user:user', user=self.owner_id)
self.db_api = db_tests.get_db(self.config)
self.fixtures = self.build_task_fixtures()
db_tests.reset_db(self.db_api)
def build_task_fixtures(self):
self.context.tenant = str(uuid.uuid4())
fixtures = [
{
'owner': self.context.owner,
'type': 'import',
'input': {'import_from': 'file:///a.img',
'import_from_format': 'qcow2',
'image_properties': {
"name": "GreatStack 1.22",
"tags": ["lamp", "custom"]
}},
},
{
'owner': self.context.owner,
'type': 'import',
'input': {'import_from': 'file:///b.img',
'import_from_format': 'qcow2',
'image_properties': {
"name": "GreatStack 1.23",
"tags": ["lamp", "good"]
}},
},
{
'owner': self.context.owner,
"type": "export",
"input": {
"export_uuid": "deadbeef-dead-dead-dead-beefbeefbeef",
"export_to":
"swift://cloud.foo/myaccount/mycontainer/path",
"export_format": "qcow2"
}
},
]
return [build_task_fixture(**fixture) for fixture in fixtures]
def test_task_get_all_with_filter(self):
for fixture in self.fixtures:
self.db_api.task_create(self.adm_context,
build_task_fixture(**fixture))
import_tasks = self.db_api.task_get_all(self.adm_context,
filters={'type': 'import'})
self.assertTrue(import_tasks)
self.assertEqual(2, len(import_tasks))
for task in import_tasks:
self.assertEqual('import', task['type'])
self.assertEqual(self.context.owner, task['owner'])
def test_task_get_all_as_admin(self):
tasks = []
for fixture in self.fixtures:
task = self.db_api.task_create(self.adm_context,
build_task_fixture(**fixture))
tasks.append(task)
import_tasks = self.db_api.task_get_all(self.adm_context)
self.assertTrue(import_tasks)
self.assertEqual(3, len(import_tasks))
def test_task_get_all_marker(self):
for fixture in self.fixtures:
self.db_api.task_create(self.adm_context,
build_task_fixture(**fixture))
tasks = self.db_api.task_get_all(self.adm_context, sort_key='id')
task_ids = [t['id'] for t in tasks]
tasks = self.db_api.task_get_all(self.adm_context, sort_key='id',
marker=task_ids[0])
self.assertEqual(2, len(tasks))
def test_task_get_all_limit(self):
for fixture in self.fixtures:
self.db_api.task_create(self.adm_context,
build_task_fixture(**fixture))
tasks = self.db_api.task_get_all(self.adm_context, limit=2)
self.assertEqual(2, len(tasks))
# A limit of None should not equate to zero
tasks = self.db_api.task_get_all(self.adm_context, limit=None)
self.assertEqual(3, len(tasks))
# A limit of zero should actually mean zero
tasks = self.db_api.task_get_all(self.adm_context, limit=0)
self.assertEqual(0, len(tasks))
def test_task_get_all_owned(self):
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False,
tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
task_values = {'type': 'import', 'status': 'pending',
'input': '{"loc": "fake"}', 'owner': TENANT1}
self.db_api.task_create(ctxt1, task_values)
TENANT2 = str(uuid.uuid4())
ctxt2 = context.RequestContext(is_admin=False,
tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
task_values = {'type': 'export', 'status': 'pending',
'input': '{"loc": "fake"}', 'owner': TENANT2}
self.db_api.task_create(ctxt2, task_values)
tasks = self.db_api.task_get_all(ctxt1)
task_owners = set([task['owner'] for task in tasks])
expected = set([TENANT1])
self.assertEqual(sorted(expected), sorted(task_owners))
def test_task_get(self):
expires_at = timeutils.utcnow()
image_id = str(uuid.uuid4())
fixture = {
'owner': self.context.owner,
'type': 'import',
'status': 'pending',
'input': '{"loc": "fake"}',
'result': "{'image_id': %s}" % image_id,
'message': 'blah',
'expires_at': expires_at
}
task = self.db_api.task_create(self.adm_context, fixture)
self.assertIsNotNone(task)
self.assertIsNotNone(task['id'])
task_id = task['id']
task = self.db_api.task_get(self.adm_context, task_id)
self.assertIsNotNone(task)
self.assertEqual(task_id, task['id'])
self.assertEqual(self.context.owner, task['owner'])
self.assertEqual('import', task['type'])
self.assertEqual('pending', task['status'])
self.assertEqual(fixture['input'], task['input'])
self.assertEqual(fixture['result'], task['result'])
self.assertEqual(fixture['message'], task['message'])
self.assertEqual(expires_at, task['expires_at'])
def test_task_get_all(self):
now = timeutils.utcnow()
image_id = str(uuid.uuid4())
fixture1 = {
'owner': self.context.owner,
'type': 'import',
'status': 'pending',
'input': '{"loc": "fake_1"}',
'result': "{'image_id': %s}" % image_id,
'message': 'blah_1',
'expires_at': now,
'created_at': now,
'updated_at': now
}
fixture2 = {
'owner': self.context.owner,
'type': 'import',
'status': 'pending',
'input': '{"loc": "fake_2"}',
'result': "{'image_id': %s}" % image_id,
'message': 'blah_2',
'expires_at': now,
'created_at': now,
'updated_at': now
}
task1 = self.db_api.task_create(self.adm_context, fixture1)
task2 = self.db_api.task_create(self.adm_context, fixture2)
self.assertIsNotNone(task1)
self.assertIsNotNone(task2)
task1_id = task1['id']
task2_id = task2['id']
task_fixtures = {task1_id: fixture1, task2_id: fixture2}
tasks = self.db_api.task_get_all(self.adm_context)
self.assertEqual(2, len(tasks))
self.assertEqual(set((tasks[0]['id'], tasks[1]['id'])),
set((task1_id, task2_id)))
for task in tasks:
fixture = task_fixtures[task['id']]
self.assertEqual(self.context.owner, task['owner'])
self.assertEqual(fixture['type'], task['type'])
self.assertEqual(fixture['status'], task['status'])
self.assertEqual(fixture['expires_at'], task['expires_at'])
self.assertFalse(task['deleted'])
self.assertIsNone(task['deleted_at'])
self.assertEqual(fixture['created_at'], task['created_at'])
self.assertEqual(fixture['updated_at'], task['updated_at'])
task_details_keys = ['input', 'message', 'result']
for key in task_details_keys:
self.assertFalse(key in task)
def test_task_create(self):
task_id = str(uuid.uuid4())
self.context.tenant = self.context.owner
values = {
'id': task_id,
'owner': self.context.owner,
'type': 'export',
'status': 'pending',
}
task_values = build_task_fixture(**values)
task = self.db_api.task_create(self.adm_context, task_values)
self.assertIsNotNone(task)
self.assertEqual(task_id, task['id'])
self.assertEqual(self.context.owner, task['owner'])
self.assertEqual('export', task['type'])
self.assertEqual('pending', task['status'])
self.assertEqual({'ping': 'pong'}, task['input'])
def test_task_create_with_all_task_info_null(self):
task_id = str(uuid.uuid4())
self.context.tenant = str(uuid.uuid4())
values = {
'id': task_id,
'owner': self.context.owner,
'type': 'export',
'status': 'pending',
'input': None,
'result': None,
'message': None,
}
task_values = build_task_fixture(**values)
task = self.db_api.task_create(self.adm_context, task_values)
self.assertIsNotNone(task)
self.assertEqual(task_id, task['id'])
self.assertEqual(self.context.owner, task['owner'])
self.assertEqual('export', task['type'])
self.assertEqual('pending', task['status'])
self.assertIsNone(task['input'])
self.assertIsNone(task['result'])
self.assertIsNone(task['message'])
def test_task_update(self):
self.context.tenant = str(uuid.uuid4())
result = {'foo': 'bar'}
task_values = build_task_fixture(owner=self.context.owner,
result=result)
task = self.db_api.task_create(self.adm_context, task_values)
task_id = task['id']
fixture = {
'status': 'processing',
            'message': 'This is an error string',
}
task = self.db_api.task_update(self.adm_context, task_id, fixture)
self.assertEqual(task_id, task['id'])
self.assertEqual(self.context.owner, task['owner'])
self.assertEqual('import', task['type'])
self.assertEqual('processing', task['status'])
self.assertEqual({'ping': 'pong'}, task['input'])
self.assertEqual(result, task['result'])
        self.assertEqual('This is an error string', task['message'])
self.assertFalse(task['deleted'])
self.assertIsNone(task['deleted_at'])
self.assertIsNone(task['expires_at'])
self.assertEqual(task_values['created_at'], task['created_at'])
self.assertTrue(task['updated_at'] > task['created_at'])
def test_task_update_with_all_task_info_null(self):
self.context.tenant = str(uuid.uuid4())
task_values = build_task_fixture(owner=self.context.owner,
input=None,
result=None,
message=None)
task = self.db_api.task_create(self.adm_context, task_values)
task_id = task['id']
fixture = {'status': 'processing'}
task = self.db_api.task_update(self.adm_context, task_id, fixture)
self.assertEqual(task_id, task['id'])
self.assertEqual(self.context.owner, task['owner'])
self.assertEqual('import', task['type'])
self.assertEqual('processing', task['status'])
self.assertIsNone(task['input'])
self.assertIsNone(task['result'])
self.assertIsNone(task['message'])
self.assertFalse(task['deleted'])
self.assertIsNone(task['deleted_at'])
self.assertIsNone(task['expires_at'])
self.assertEqual(task_values['created_at'], task['created_at'])
self.assertTrue(task['updated_at'] > task['created_at'])
def test_task_delete(self):
task_values = build_task_fixture(owner=self.context.owner)
task = self.db_api.task_create(self.adm_context, task_values)
self.assertIsNotNone(task)
self.assertFalse(task['deleted'])
self.assertIsNone(task['deleted_at'])
task_id = task['id']
self.db_api.task_delete(self.adm_context, task_id)
self.assertRaises(exception.TaskNotFound, self.db_api.task_get,
self.context, task_id)
def test_task_delete_as_admin(self):
task_values = build_task_fixture(owner=self.context.owner)
task = self.db_api.task_create(self.adm_context, task_values)
self.assertIsNotNone(task)
self.assertFalse(task['deleted'])
self.assertIsNone(task['deleted_at'])
task_id = task['id']
self.db_api.task_delete(self.adm_context, task_id)
del_task = self.db_api.task_get(self.adm_context,
task_id,
force_show_deleted=True)
self.assertIsNotNone(del_task)
self.assertEqual(task_id, del_task['id'])
self.assertTrue(del_task['deleted'])
self.assertIsNotNone(del_task['deleted_at'])
class TestVisibility(test_utils.BaseTestCase):
def setUp(self):
super(TestVisibility, self).setUp()
self.db_api = db_tests.get_db(self.config)
db_tests.reset_db(self.db_api)
self.setup_tenants()
self.setup_contexts()
self.fixtures = self.build_image_fixtures()
self.create_images(self.fixtures)
def setup_tenants(self):
self.admin_tenant = str(uuid.uuid4())
self.tenant1 = str(uuid.uuid4())
self.tenant2 = str(uuid.uuid4())
def setup_contexts(self):
self.admin_context = context.RequestContext(
is_admin=True, tenant=self.admin_tenant)
self.admin_none_context = context.RequestContext(
is_admin=True, tenant=None)
self.tenant1_context = context.RequestContext(tenant=self.tenant1)
self.tenant2_context = context.RequestContext(tenant=self.tenant2)
self.none_context = context.RequestContext(tenant=None)
def build_image_fixtures(self):
fixtures = []
owners = {
'Unowned': None,
'Admin Tenant': self.admin_tenant,
'Tenant 1': self.tenant1,
'Tenant 2': self.tenant2,
}
visibilities = {'public': True, 'private': False}
for owner_label, owner in owners.items():
for visibility, is_public in visibilities.items():
fixture = {
'name': '%s, %s' % (owner_label, visibility),
'owner': owner,
'is_public': is_public,
}
fixtures.append(fixture)
return [build_image_fixture(**f) for f in fixtures]
def create_images(self, images):
for fixture in images:
self.db_api.image_create(self.admin_context, fixture)
class VisibilityTests(object):
def test_unknown_admin_sees_all(self):
images = self.db_api.image_get_all(self.admin_none_context)
self.assertEqual(8, len(images))
def test_unknown_admin_is_public_true(self):
images = self.db_api.image_get_all(self.admin_none_context,
is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_admin_is_public_false(self):
images = self.db_api.image_get_all(self.admin_none_context,
is_public=False)
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_unknown_admin_is_public_none(self):
images = self.db_api.image_get_all(self.admin_none_context)
self.assertEqual(8, len(images))
def test_unknown_admin_visibility_public(self):
images = self.db_api.image_get_all(self.admin_none_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_admin_visibility_private(self):
images = self.db_api.image_get_all(self.admin_none_context,
filters={'visibility': 'private'})
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_known_admin_sees_all(self):
images = self.db_api.image_get_all(self.admin_context)
self.assertEqual(8, len(images))
def test_known_admin_is_public_true(self):
images = self.db_api.image_get_all(self.admin_context, is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_known_admin_is_public_false(self):
images = self.db_api.image_get_all(self.admin_context,
is_public=False)
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_known_admin_is_public_none(self):
images = self.db_api.image_get_all(self.admin_context)
self.assertEqual(8, len(images))
def test_admin_as_user_true(self):
images = self.db_api.image_get_all(self.admin_context,
admin_as_user=True)
self.assertEqual(5, len(images))
for i in images:
self.assertTrue(i['is_public'] or i['owner'] == self.admin_tenant)
def test_known_admin_visibility_public(self):
images = self.db_api.image_get_all(self.admin_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_known_admin_visibility_private(self):
images = self.db_api.image_get_all(self.admin_context,
filters={'visibility': 'private'})
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_what_unknown_user_sees(self):
images = self.db_api.image_get_all(self.none_context)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_is_public_true(self):
images = self.db_api.image_get_all(self.none_context, is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_is_public_false(self):
images = self.db_api.image_get_all(self.none_context, is_public=False)
self.assertEqual(0, len(images))
def test_unknown_user_is_public_none(self):
images = self.db_api.image_get_all(self.none_context)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_visibility_public(self):
images = self.db_api.image_get_all(self.none_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_visibility_private(self):
images = self.db_api.image_get_all(self.none_context,
filters={'visibility': 'private'})
self.assertEqual(0, len(images))
def test_what_tenant1_sees(self):
images = self.db_api.image_get_all(self.tenant1_context)
self.assertEqual(5, len(images))
for i in images:
if not i['is_public']:
self.assertEqual(i['owner'], self.tenant1)
def test_tenant1_is_public_true(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_tenant1_is_public_false(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=False)
self.assertEqual(1, len(images))
self.assertFalse(images[0]['is_public'])
self.assertEqual(images[0]['owner'], self.tenant1)
def test_tenant1_is_public_none(self):
images = self.db_api.image_get_all(self.tenant1_context)
self.assertEqual(5, len(images))
for i in images:
if not i['is_public']:
self.assertEqual(self.tenant1, i['owner'])
def test_tenant1_visibility_public(self):
images = self.db_api.image_get_all(self.tenant1_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_tenant1_visibility_private(self):
images = self.db_api.image_get_all(self.tenant1_context,
filters={'visibility': 'private'})
self.assertEqual(1, len(images))
self.assertFalse(images[0]['is_public'])
self.assertEqual(self.tenant1, images[0]['owner'])
def _setup_is_public_red_herring(self):
values = {
'name': 'Red Herring',
'owner': self.tenant1,
'is_public': False,
'properties': {'is_public': 'silly'}
}
fixture = build_image_fixture(**values)
self.db_api.image_create(self.admin_context, fixture)
def test_is_public_is_a_normal_filter_for_admin(self):
self._setup_is_public_red_herring()
images = self.db_api.image_get_all(self.admin_context,
filters={'is_public': 'silly'})
self.assertEqual(1, len(images))
self.assertEqual('Red Herring', images[0]['name'])
def test_is_public_is_a_normal_filter_for_user(self):
self._setup_is_public_red_herring()
images = self.db_api.image_get_all(self.tenant1_context,
filters={'is_public': 'silly'})
self.assertEqual(1, len(images))
self.assertEqual('Red Herring', images[0]['name'])
# NOTE(markwash): the following tests are sanity checks to make sure
# visibility filtering and is_public=(True|False) do not interact in
# unexpected ways. However, using both of the filtering techniques
# simultaneously is not an anticipated use case.
def test_admin_is_public_true_and_visibility_public(self):
images = self.db_api.image_get_all(self.admin_context, is_public=True,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
def test_admin_is_public_false_and_visibility_public(self):
images = self.db_api.image_get_all(self.admin_context, is_public=False,
filters={'visibility': 'public'})
self.assertEqual(0, len(images))
def test_admin_is_public_true_and_visibility_private(self):
images = self.db_api.image_get_all(self.admin_context, is_public=True,
filters={'visibility': 'private'})
self.assertEqual(0, len(images))
def test_admin_is_public_false_and_visibility_private(self):
images = self.db_api.image_get_all(self.admin_context, is_public=False,
filters={'visibility': 'private'})
self.assertEqual(4, len(images))
def test_tenant1_is_public_true_and_visibility_public(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=True,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
def test_tenant1_is_public_false_and_visibility_public(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=False,
filters={'visibility': 'public'})
self.assertEqual(0, len(images))
def test_tenant1_is_public_true_and_visibility_private(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=True,
filters={'visibility': 'private'})
self.assertEqual(0, len(images))
def test_tenant1_is_public_false_and_visibility_private(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=False,
filters={'visibility': 'private'})
self.assertEqual(1, len(images))
class TestMembershipVisibility(test_utils.BaseTestCase):
def setUp(self):
super(TestMembershipVisibility, self).setUp()
self.db_api = db_tests.get_db(self.config)
db_tests.reset_db(self.db_api)
self._create_contexts()
self._create_images()
def _create_contexts(self):
self.owner1, self.owner1_ctx = self._user_fixture()
self.owner2, self.owner2_ctx = self._user_fixture()
self.tenant1, self.user1_ctx = self._user_fixture()
self.tenant2, self.user2_ctx = self._user_fixture()
self.tenant3, self.user3_ctx = self._user_fixture()
self.admin_tenant, self.admin_ctx = self._user_fixture(admin=True)
def _user_fixture(self, admin=False):
tenant_id = str(uuid.uuid4())
ctx = context.RequestContext(tenant=tenant_id, is_admin=admin)
return tenant_id, ctx
def _create_images(self):
self.image_ids = {}
for owner in [self.owner1, self.owner2]:
self._create_image('not_shared', owner)
self._create_image('shared-with-1', owner, members=[self.tenant1])
self._create_image('shared-with-2', owner, members=[self.tenant2])
self._create_image('shared-with-both', owner,
members=[self.tenant1, self.tenant2])
def _create_image(self, name, owner, members=None):
image = build_image_fixture(name=name, owner=owner, is_public=False)
self.image_ids[(owner, name)] = image['id']
self.db_api.image_create(self.admin_ctx, image)
for member in members or []:
member = {'image_id': image['id'], 'member': member}
self.db_api.image_member_create(self.admin_ctx, member)
class MembershipVisibilityTests(object):
def _check_by_member(self, ctx, member_id, expected):
members = self.db_api.image_member_find(ctx, member=member_id)
images = [self.db_api.image_get(self.admin_ctx, member['image_id'])
for member in members]
facets = [(image['owner'], image['name']) for image in images]
self.assertEqual(set(expected), set(facets))
def test_owner1_finding_user1_memberships(self):
"""Owner1 should see images it owns that are shared with User1."""
expected = [
(self.owner1, 'shared-with-1'),
(self.owner1, 'shared-with-both'),
]
self._check_by_member(self.owner1_ctx, self.tenant1, expected)
def test_user1_finding_user1_memberships(self):
"""User1 should see all images shared with User1 """
expected = [
(self.owner1, 'shared-with-1'),
(self.owner1, 'shared-with-both'),
(self.owner2, 'shared-with-1'),
(self.owner2, 'shared-with-both'),
]
self._check_by_member(self.user1_ctx, self.tenant1, expected)
def test_user2_finding_user1_memberships(self):
"""User2 should see no images shared with User1 """
expected = []
self._check_by_member(self.user2_ctx, self.tenant1, expected)
def test_admin_finding_user1_memberships(self):
"""Admin should see all images shared with User1 """
expected = [
(self.owner1, 'shared-with-1'),
(self.owner1, 'shared-with-both'),
(self.owner2, 'shared-with-1'),
(self.owner2, 'shared-with-both'),
]
self._check_by_member(self.admin_ctx, self.tenant1, expected)
def _check_by_image(self, context, image_id, expected):
members = self.db_api.image_member_find(context, image_id=image_id)
member_ids = [member['member'] for member in members]
self.assertEqual(set(expected), set(member_ids))
def test_owner1_finding_owner1s_image_members(self):
"""Owner1 should see all memberships of its image """
expected = [self.tenant1, self.tenant2]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.owner1_ctx, image_id, expected)
def test_admin_finding_owner1s_image_members(self):
"""Admin should see all memberships of owner1's image """
expected = [self.tenant1, self.tenant2]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.admin_ctx, image_id, expected)
def test_user1_finding_owner1s_image_members(self):
"""User1 should see its own membership of owner1's image """
expected = [self.tenant1]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.user1_ctx, image_id, expected)
def test_user2_finding_owner1s_image_members(self):
"""User2 should see its own membership of owner1's image """
expected = [self.tenant2]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.user2_ctx, image_id, expected)
def test_user3_finding_owner1s_image_members(self):
"""User3 should see no memberships of owner1's image """
expected = []
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.user3_ctx, image_id, expected)
| apache-2.0 |
acharguruprasad/team4 | contrib/jb2bz.py | 10 | 11004 | #!/usr/local/bin/python
# -*- mode: python -*-
"""
jb2bz.py - a nonce script to import bugs from JitterBug to Bugzilla
Written by Tom Emerson, tree@basistech.com
This script is provided in the hopes that it will be useful. No
rights reserved. No guarantees expressed or implied. Use at your own
risk. May be dangerous if swallowed. If it doesn't work for you, don't
blame me. It did what I needed it to do.
This code requires a recent version of Andy Dustman's MySQLdb interface,
http://sourceforge.net/projects/mysql-python
Share and enjoy.
"""
import rfc822, mimetools, multifile, mimetypes, email.utils
import sys, re, glob, StringIO, os, stat, time
import MySQLdb, getopt
# mimetypes doesn't include everything we might encounter, yet.
if not mimetypes.types_map.has_key('.doc'):
mimetypes.types_map['.doc'] = 'application/msword'
if not mimetypes.encodings_map.has_key('.bz2'):
mimetypes.encodings_map['.bz2'] = "bzip2"
bug_status='CONFIRMED'
component="default"
version=""
product="" # this is required, the rest of these are defaulted as above
"""
Each bug in JitterBug is stored as a text file named by the bug number.
Additions to the bug are indicated by suffixes to this:
<bug>
<bug>.followup.*
<bug>.reply.*
<bug>.notes
The dates on the files represent the respective dates they were created/added.
All <bug>s and <bug>.reply.*s include RFC 822 mail headers. These could include
MIME file attachments as well that would need to be extracted.
There are other additions to the file names, such as
<bug>.notify
which are ignored.
Bugs in JitterBug are organized into directories. At Basis we used the following
naming conventions:
<product>-bugs Open bugs
<product>-requests Open Feature Requests
<product>-resolved Bugs/Features marked fixed by engineering, but not verified
<product>-verified Resolved defects that have been verified by QA
where <product> is either:
<product-name>
or
<product-name>-<version>
"""
def process_notes_file(current, fname):
try:
new_note = {}
notes = open(fname, "r")
s = os.fstat(notes.fileno())
new_note['text'] = notes.read()
new_note['timestamp'] = time.gmtime(s[stat.ST_MTIME])
notes.close()
current['notes'].append(new_note)
except IOError:
pass
def process_reply_file(current, fname):
new_note = {}
reply = open(fname, "r")
msg = rfc822.Message(reply)
new_note['text'] = "%s\n%s" % (msg['From'], msg.fp.read())
new_note['timestamp'] = email.utils.parsedate_tz(msg['Date'])
current["notes"].append(new_note)
def add_notes(current):
"""Add any notes that have been recorded for the current bug."""
process_notes_file(current, "%d.notes" % current['number'])
for f in glob.glob("%d.reply.*" % current['number']):
process_reply_file(current, f)
for f in glob.glob("%d.followup.*" % current['number']):
process_reply_file(current, f)
def maybe_add_attachment(current, file, submsg):
"""Adds the attachment to the current record"""
cd = submsg["Content-Disposition"]
m = re.search(r'filename="([^"]+)"', cd)
    if m is None:
return
attachment_filename = m.group(1)
if (submsg.gettype() == 'application/octet-stream'):
# try get a more specific content-type for this attachment
type, encoding = mimetypes.guess_type(m.group(1))
        if type is None:
type = submsg.gettype()
else:
type = submsg.gettype()
try:
data = StringIO.StringIO()
mimetools.decode(file, data, submsg.getencoding())
except:
return
current['attachments'].append( ( attachment_filename, type, data.getvalue() ) )
def process_mime_body(current, file, submsg):
data = StringIO.StringIO()
try:
mimetools.decode(file, data, submsg.getencoding())
current['description'] = data.getvalue()
except:
return
def process_text_plain(msg, current):
current['description'] = msg.fp.read()
def process_multi_part(file, msg, current):
mf = multifile.MultiFile(file)
mf.push(msg.getparam("boundary"))
while mf.next():
submsg = mimetools.Message(file)
if submsg.has_key("Content-Disposition"):
maybe_add_attachment(current, mf, submsg)
else:
# This is the message body itself (always?), so process
# accordingly
process_mime_body(current, mf, submsg)
def process_jitterbug(filename):
current = {}
current['number'] = int(filename)
current['notes'] = []
current['attachments'] = []
current['description'] = ''
current['date-reported'] = ()
current['short-description'] = ''
print "Processing: %d" % current['number']
file = open(filename, "r")
create_date = os.fstat(file.fileno())
msg = mimetools.Message(file)
msgtype = msg.gettype()
add_notes(current)
current['date-reported'] = email.utils.parsedate_tz(msg['Date'])
if current['date-reported'] is None:
current['date-reported'] = time.gmtime(create_date[stat.ST_MTIME])
if current['date-reported'][0] < 1900:
current['date-reported'] = time.gmtime(create_date[stat.ST_MTIME])
if msg.getparam('Subject') is not None:
current['short-description'] = msg['Subject']
else:
current['short-description'] = "Unknown"
if msgtype[:5] == 'text/':
process_text_plain(msg, current)
elif msgtype[:5] == 'text':
process_text_plain(msg, current)
elif msgtype[:10] == "multipart/":
process_multi_part(file, msg, current)
else:
# Huh? This should never happen.
print "Unknown content-type: %s" % msgtype
sys.exit(1)
# At this point we have processed the message: we have all of the notes and
# attachments stored, so it's time to add things to the database.
# The schema for JitterBug 2.14 can be found at:
#
# http://www.trilobyte.net/barnsons/html/dbschema.html
#
# The following fields need to be provided by the user:
#
# bug_status
# product
# version
# reporter
# component
# resolution
# change this to the user_id of the Bugzilla user who is blessed with the
# imported defects
reporter=6
# the resolution will need to be set manually
resolution=""
db = MySQLdb.connect(db='bugs',user='root',host='localhost',passwd='password')
cursor = db.cursor()
try:
cursor.execute( "INSERT INTO bugs SET " \
"bug_id=%s," \
"bug_severity='normal'," \
"bug_status=%s," \
"creation_ts=%s," \
"delta_ts=%s," \
"short_desc=%s," \
"product_id=%s," \
"rep_platform='All'," \
"assigned_to=%s," \
"reporter=%s," \
"version=%s," \
"component_id=%s," \
"resolution=%s",
[ current['number'],
bug_status,
time.strftime("%Y-%m-%d %H:%M:%S", current['date-reported'][:9]),
time.strftime("%Y-%m-%d %H:%M:%S", current['date-reported'][:9]),
current['short-description'],
product,
reporter,
reporter,
version,
component,
resolution] )
# This is the initial long description associated with the bug report
cursor.execute( "INSERT INTO longdescs SET " \
"bug_id=%s," \
"who=%s," \
"bug_when=%s," \
"thetext=%s",
[ current['number'],
reporter,
time.strftime("%Y-%m-%d %H:%M:%S", current['date-reported'][:9]),
current['description'] ] )
# Add whatever notes are associated with this defect
for n in current['notes']:
cursor.execute( "INSERT INTO longdescs SET " \
"bug_id=%s," \
"who=%s," \
"bug_when=%s," \
"thetext=%s",
[current['number'],
reporter,
time.strftime("%Y-%m-%d %H:%M:%S", n['timestamp'][:9]),
n['text']])
# add attachments associated with this defect
for a in current['attachments']:
cursor.execute( "INSERT INTO attachments SET " \
"bug_id=%s, creation_ts=%s, description='', mimetype=%s," \
"filename=%s, submitter_id=%s",
[ current['number'],
time.strftime("%Y-%m-%d %H:%M:%S", current['date-reported'][:9]),
a[1], a[0], reporter ])
cursor.execute( "INSERT INTO attach_data SET " \
"id=LAST_INSERT_ID(), thedata=%s",
[ a[2] ])
except MySQLdb.IntegrityError, message:
errorcode = message[0]
if errorcode == 1062: # duplicate
return
else:
raise
cursor.execute("COMMIT")
cursor.close()
db.close()
def usage():
print """Usage: jb2bz.py [OPTIONS] Product
Where OPTIONS are one or more of the following:
-h This help information.
-s STATUS One of UNCONFIRMED, CONFIRMED, IN_PROGRESS, RESOLVED, VERIFIED
(default is CONFIRMED)
-c COMPONENT The component to attach to each bug as it is imported. This should be a
valid component for the Product.
-v VERSION Version to assign to these defects.
Product is the Product to assign these defects to.
All of the JitterBugs in the current directory are imported, including replies, notes,
attachments, and similar noise.
"""
sys.exit(1)
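# Example invocation (the status, component, version and product values are
# hypothetical), run from inside a JitterBug directory such as <product>-bugs:
#
#   python jb2bz.py -s CONFIRMED -c general -v 1.0 MyProduct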
def main():
global bug_status, component, version, product
opts, args = getopt.getopt(sys.argv[1:], "hs:c:v:")
for o,a in opts:
if o == "-s":
if a in ('UNCONFIRMED','CONFIRMED','IN_PROGRESS','RESOLVED','VERIFIED'):
bug_status = a
elif o == '-c':
component = a
elif o == '-v':
version = a
elif o == '-h':
usage()
if len(args) != 1:
sys.stderr.write("Must specify the Product.\n")
sys.exit(1)
product = args[0]
for bug in filter(lambda x: re.match(r"\d+$", x), glob.glob("*")):
process_jitterbug(bug)
if __name__ == "__main__":
main()
| mpl-2.0 |
robhudson/django | tests/template_tests/test_parser.py | 288 | 4109 | """
Testing some internals of the template processing. These are *not* examples to be copied into user code.
"""
from __future__ import unicode_literals
from unittest import TestCase
from django.template import Library, TemplateSyntaxError
from django.template.base import (
TOKEN_BLOCK, FilterExpression, Parser, Token, Variable,
)
from django.template.defaultfilters import register as filter_library
from django.utils import six
class ParserTests(TestCase):
def test_token_smart_split(self):
"""
#7027 -- _() syntax should work with spaces
"""
token = Token(TOKEN_BLOCK, 'sometag _("Page not found") value|yesno:_("yes,no")')
split = token.split_contents()
self.assertEqual(split, ["sometag", '_("Page not found")', 'value|yesno:_("yes,no")'])
def test_filter_parsing(self):
c = {"article": {"section": "News"}}
p = Parser("", builtins=[filter_library])
def fe_test(s, val):
self.assertEqual(FilterExpression(s, p).resolve(c), val)
fe_test("article.section", "News")
fe_test("article.section|upper", "NEWS")
fe_test('"News"', "News")
fe_test("'News'", "News")
fe_test(r'"Some \"Good\" News"', 'Some "Good" News')
fe_test(r'"Some \"Good\" News"', 'Some "Good" News')
fe_test(r"'Some \'Bad\' News'", "Some 'Bad' News")
fe = FilterExpression(r'"Some \"Good\" News"', p)
self.assertEqual(fe.filters, [])
self.assertEqual(fe.var, 'Some "Good" News')
# Filtered variables should reject access of attributes beginning with
# underscores.
self.assertRaises(TemplateSyntaxError, FilterExpression, "article._hidden|upper", p)
def test_variable_parsing(self):
c = {"article": {"section": "News"}}
self.assertEqual(Variable("article.section").resolve(c), "News")
self.assertEqual(Variable('"News"').resolve(c), "News")
self.assertEqual(Variable("'News'").resolve(c), "News")
# Translated strings are handled correctly.
self.assertEqual(Variable("_(article.section)").resolve(c), "News")
self.assertEqual(Variable('_("Good News")').resolve(c), "Good News")
self.assertEqual(Variable("_('Better News')").resolve(c), "Better News")
# Escaped quotes work correctly as well.
self.assertEqual(
Variable(r'"Some \"Good\" News"').resolve(c), 'Some "Good" News'
)
self.assertEqual(
Variable(r"'Some \'Better\' News'").resolve(c), "Some 'Better' News"
)
# Variables should reject access of attributes beginning with
# underscores.
self.assertRaises(TemplateSyntaxError, Variable, "article._hidden")
# Variables should raise on non string type
with six.assertRaisesRegex(self, TypeError, "Variable must be a string or number, got <(class|type) 'dict'>"):
Variable({})
def test_filter_args_count(self):
p = Parser("")
l = Library()
@l.filter
def no_arguments(value):
pass
@l.filter
def one_argument(value, arg):
pass
@l.filter
def one_opt_argument(value, arg=False):
pass
@l.filter
def two_arguments(value, arg, arg2):
pass
@l.filter
def two_one_opt_arg(value, arg, arg2=False):
pass
p.add_library(l)
for expr in (
'1|no_arguments:"1"',
'1|two_arguments',
'1|two_arguments:"1"',
'1|two_one_opt_arg',
):
with self.assertRaises(TemplateSyntaxError):
FilterExpression(expr, p)
for expr in (
# Correct number of arguments
'1|no_arguments',
'1|one_argument:"1"',
# One optional
'1|one_opt_argument',
'1|one_opt_argument:"1"',
# Not supplying all
'1|two_one_opt_arg:"1"',
):
FilterExpression(expr, p)
| bsd-3-clause |
Hot2-Kernel/Mystery-Kernel | tools/perf/util/setup.py | 989 | 1543 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPIKFS')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
UBERTC/binutils | gdb/testsuite/gdb.python/py-recurse-unwind.py | 6 | 2790 | # Copyright (C) 2016 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This unwinder never does any unwinding. It'll (pretend to) "sniff"
# the frame and ultimately return None, indicating that actual unwinding
# should be performed by some other unwinder.
#
# But, prior to returning None, it will attempt to obtain the value
# associated with a symbol via a call to gdb.parse_and_eval(). In
# the course of doing this evaluation, GDB will potentially access
# some frames, leading to the possibility of a recursive invocation of
# this unwinder. If that should happen, code contained herein detects
# that and prints a message which will cause some of the associated
# tests to FAIL.
import gdb
from gdb.unwinder import Unwinder
class TestUnwinder(Unwinder):
count = 0
@classmethod
def reset_count (cls):
cls.count = 0
@classmethod
def inc_count (cls):
cls.count += 1
test = 'check_undefined_symbol'
@classmethod
def set_test (cls, test) :
cls.test = test
def __init__(self):
Unwinder.__init__(self, "test unwinder")
self.recurse_level = 0
def __call__(self, pending_frame):
if self.recurse_level > 0:
gdb.write("TestUnwinder: Recursion detected - returning early.\n")
return None
self.recurse_level += 1
TestUnwinder.inc_count()
if TestUnwinder.test == 'check_user_reg_pc' :
pc = pending_frame.read_register('pc')
pc_as_int = int(pc.cast(gdb.lookup_type('int')))
# gdb.write("In unwinder: pc=%x\n" % pc_as_int)
elif TestUnwinder.test == 'check_pae_pc' :
pc = gdb.parse_and_eval('$pc')
pc_as_int = int(pc.cast(gdb.lookup_type('int')))
# gdb.write("In unwinder: pc=%x\n" % pc_as_int)
elif TestUnwinder.test == 'check_undefined_symbol' :
try:
val = gdb.parse_and_eval("undefined_symbol")
except Exception as arg:
pass
self.recurse_level -= 1
return None
gdb.unwinder.register_unwinder(None, TestUnwinder(), True)
gdb.write("Python script imported\n")
| gpl-2.0 |
BT-ojossen/odoo | addons/l10n_in_hr_payroll/report/payment_advice_report.py | 340 | 3967 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
class payment_advice_report(osv.osv):
_name = "payment.advice.report"
_description = "Payment Advice Analysis"
_auto = False
_columns = {
'name':fields.char('Name', readonly=True),
'date': fields.date('Date', readonly=True,),
'year': fields.char('Year', size=4, readonly=True),
'month': fields.selection([('01', 'January'), ('02', 'February'), ('03', 'March'), ('04', 'April'),
('05', 'May'), ('06', 'June'), ('07', 'July'), ('08', 'August'), ('09', 'September'),
('10', 'October'), ('11', 'November'), ('12', 'December')], 'Month', readonly=True),
'day': fields.char('Day', size=128, readonly=True),
'state':fields.selection([
('draft', 'Draft'),
('confirm', 'Confirmed'),
('cancel', 'Cancelled'),
], 'Status', select=True, readonly=True),
'employee_id': fields.many2one('hr.employee', 'Employee', readonly=True),
'nbr': fields.integer('# Payment Lines', readonly=True),
'number':fields.char('Number', readonly=True),
'bysal': fields.float('By Salary', readonly=True),
'bank_id':fields.many2one('res.bank', 'Bank', readonly=True),
'company_id':fields.many2one('res.company', 'Company', readonly=True),
'cheque_nos':fields.char('Cheque Numbers', readonly=True),
'neft': fields.boolean('NEFT Transaction', readonly=True),
'ifsc_code': fields.char('IFSC Code', size=32, readonly=True),
'employee_bank_no': fields.char('Employee Bank Account', required=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'payment_advice_report')
cr.execute("""
create or replace view payment_advice_report as (
select
min(l.id) as id,
sum(l.bysal) as bysal,
p.name,
p.state,
p.date,
p.number,
p.company_id,
p.bank_id,
p.chaque_nos as cheque_nos,
p.neft,
l.employee_id,
l.ifsc_code,
l.name as employee_bank_no,
to_char(p.date, 'YYYY') as year,
to_char(p.date, 'MM') as month,
to_char(p.date, 'YYYY-MM-DD') as day,
1 as nbr
from
hr_payroll_advice as p
left join hr_payroll_advice_line as l on (p.id=l.advice_id)
where
l.employee_id IS NOT NULL
group by
p.number,p.name,p.date,p.state,p.company_id,p.bank_id,p.chaque_nos,p.neft,
l.employee_id,l.advice_id,l.bysal,l.ifsc_code, l.name
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
spxiwh/pycbc-glue | test/segments_verify.py | 3 | 16031 | import doctest
import pickle
import random
import sys
import unittest
import verifyutils
#
# How many times to repeat the algebraic tests
#
algebra_repeats = 8000
algebra_listlength = 200
#
# Some useful code.
#
def set1():
return (
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2),
segments.segment(-2, 2)
)
def set2():
return (
segments.segment(-4, -3),
segments.segment(-4, -2),
segments.segment(-4, 0),
segments.segment(-4, 2),
segments.segment(-4, 4),
segments.segment(-2, 4),
segments.segment( 0, 4),
segments.segment( 2, 4),
segments.segment( 3, 4),
segments.segment(-2, 2),
segments.segment(-1, 1),
segments.segment(-segments.infinity(), segments.infinity()),
segments.segment(0, segments.infinity()),
segments.segment(-segments.infinity(), 0)
)
#
# Define the components of the test suite.
#
class test_infinity(unittest.TestCase):
def test__cmp__(self):
a = segments.infinity()
self.assertEqual( 0, cmp(-a, -a))
self.assertEqual(-1, cmp(-a, 0))
self.assertEqual(-1, cmp(-a, a))
self.assertEqual( 1, cmp( 0, -a))
self.assertEqual(-1, cmp( 0, a))
self.assertEqual( 1, cmp( a, -a))
self.assertEqual( 1, cmp( a, 0))
self.assertEqual( 0, cmp( a, a))
def test__add__(self):
a = segments.infinity()
b = segments.infinity()
self.assertEqual( b, ( a) + ( 10))
self.assertEqual( b, ( a) + (-10))
self.assertEqual(-b, ( -a) + ( 10))
self.assertEqual(-b, ( -a) + (-10))
self.assertEqual( b, ( 10) + ( a))
self.assertEqual( b, (-10) + ( a))
self.assertEqual(-b, ( 10) + ( -a))
self.assertEqual(-b, (-10) + ( -a))
self.assertEqual( b, ( a) + ( a))
self.assertEqual(-b, ( -a) + ( -a))
def test__sub__(self):
a = segments.infinity()
b = segments.infinity()
self.assertEqual( b, ( a) - ( 10))
self.assertEqual( b, ( a) - (-10))
self.assertEqual(-b, ( -a) - ( 10))
self.assertEqual(-b, ( -a) - (-10))
self.assertEqual(-b, ( 10) - ( a))
self.assertEqual(-b, (-10) - ( a))
self.assertEqual( b, ( 10) - ( -a))
self.assertEqual( b, (-10) - ( -a))
self.assertEqual( b, ( a) - ( a))
self.assertEqual(-b, ( -a) - ( -a))
self.assertEqual( b, ( a) - ( -a))
self.assertEqual(-b, ( -a) - ( a))
class test_segment(unittest.TestCase):
def test__new__(self):
self.assertEqual((-2, 2), tuple(segments.segment(-2, 2)))
self.assertEqual((-2, 2), tuple(segments.segment(2, -2)))
self.assertEqual((-segments.infinity(), 2), tuple(segments.segment(-segments.infinity(), 2)))
self.assertEqual((-segments.infinity(), 2), tuple(segments.segment(2, -segments.infinity())))
self.assertEqual((2, segments.infinity()), tuple(segments.segment(segments.infinity(), 2)))
self.assertEqual((2, segments.infinity()), tuple(segments.segment(2, segments.infinity())))
self.assertEqual((-segments.infinity(), segments.infinity()), tuple(segments.segment(-segments.infinity(), segments.infinity())))
def test__abs__(self):
results = (
1,
2,
4,
6,
8,
6,
4,
2,
1,
4,
2,
segments.infinity(),
segments.infinity(),
segments.infinity()
)
map(lambda i, r, a: self.assertEqual((i, r), (i, abs(a))), xrange(len(results)), results, set2())
def testintersects(self):
results = (
False,
False,
True,
True,
True,
True,
True,
False,
False,
True,
True,
True,
True,
True
)
map(lambda i, r, a, b: self.assertEqual((i, r), (i, a.intersects(b))), xrange(len(results)), results, set1(), set2())
def test__contains__(self):
results = (
False,
False,
False,
False,
False,
False,
False,
False,
False,
True,
True,
False,
False,
False
)
map(lambda i, r, a, b: self.assertEqual((i, r), (i, a.__contains__(b))), xrange(len(results)), results, set1(), set2())
def testdisjoint(self):
results = (
+1,
0,
0,
0,
0,
0,
0,
0,
-1,
0,
0,
0,
0,
0
)
map(lambda i, r, a, b: self.assertEqual((i, r), (i, a.disjoint(b))), xrange(len(results)), results, set1(), set2())
def testcontract(self):
results = (
segments.segment(-5, -2),
segments.segment(-4, -2),
segments.segment(-2, -2),
segments.segment(-2, 0),
segments.segment(-2, 2),
segments.segment( 0, 2),
segments.segment( 2, 2),
segments.segment( 2, 4),
segments.segment( 2, 5),
segments.segment( 0, 0),
segments.segment(-1, 1),
segments.segment(-segments.infinity(), segments.infinity()),
segments.segment(2, segments.infinity()),
segments.segment(-segments.infinity(), -2)
)
map(lambda i, r, a: self.assertEqual((i, r), (i, a.contract(2))), xrange(len(results)), results, set2())
def test_typesafety(self):
x = "segments.segment(10, 20)"
y = "(20, 30)"
z = "None"
for op in ("|", "&", "-", "^"):
for arg1, arg2 in (
(x, z), (z, x)
):
expr = "%s %s %s" % (arg1, op, arg2)
try:
eval(expr)
except TypeError:
pass
else:
raise AssertionError("%s did not raise TypeError" % expr)
# FIXME: this doesn't work, should it?
#self.assertEqual(eval("%s | %s" % (x, y)), segments.segmentlist([segments.segment(10, 30)]))
class test_segmentlist(unittest.TestCase):
def test__sub__(self):
self.assertEqual(segments.segmentlist([]), segments.segmentlist([]) - segments.segmentlist([]))
self.assertEqual(segments.segmentlist([]), segments.segmentlist([]) - segments.segmentlist([segments.segment(-1,1)]))
self.assertEqual(segments.segmentlist([segments.segment(-1,1)]) - segments.segmentlist([segments.segment(-1,1)]), segments.segmentlist([]))
self.assertEqual(segments.segmentlist([]), segments.segmentlist([segments.segment(-1,1)]) - segments.segmentlist([segments.segment(-1,1)]))
# This next test fails, but I don't know that that's not OK yet
#self.assertEqual(segments.segmentlist([]), segments.segmentlist([segments.segment(0,0)]) - segments.segmentlist([segments.segment(0,0)]))
self.assertEqual(segments.segmentlist([segments.segment(0,1)]), segments.segmentlist([segments.segment(0,1)]) - segments.segmentlist([segments.segment(2,3)]))
self.assertEqual(segments.segmentlist([segments.segment(0,1)]), segments.segmentlist([segments.segment(0,1)]) - segments.segmentlist([segments.segment(2,3), segments.segment(4,5)]))
self.assertEqual(segments.segmentlist([segments.segment(0,1)]), segments.segmentlist([segments.segment(0,1), segments.segment(2,3)]) - segments.segmentlist([segments.segment(2,3)]))
self.assertEqual(segments.segmentlist([segments.segment(2,3)]), segments.segmentlist([segments.segment(0,1), segments.segment(2,3)]) - segments.segmentlist([segments.segment(0,1)]))
self.assertEqual(segments.segmentlist([segments.segment(0,1), segments.segment(4,5)]), segments.segmentlist([segments.segment(0,1), segments.segment(2,3), segments.segment(4,5)]) - segments.segmentlist([segments.segment(2,3)]))
self.assertEqual(segments.segmentlist([segments.segment(0,1)]), segments.segmentlist([segments.segment(0,2)]) - segments.segmentlist([segments.segment(1,2)]))
self.assertEqual(segments.segmentlist([segments.segment(0.8, 0.9), segments.segment(1.0, 1.8)]), segments.segmentlist([segments.segment(0, 2)]) - segments.segmentlist([segments.segment(0, 0.8), segments.segment(0.9, 1.0), segments.segment(1.8, 2)]))
self.assertEqual(segments.segmentlist([segments.segment(-5, 10)]), segments.segmentlist([segments.segment(-10,10)]) - segments.segmentlist([segments.segment(-15,-5)]))
self.assertEqual(segments.segmentlist([segments.segment(-10, -5), segments.segment(5, 10)]), segments.segmentlist([segments.segment(-10,10)]) - segments.segmentlist([segments.segment(-5,5)]))
self.assertEqual(segments.segmentlist([segments.segment(-10, 5)]), segments.segmentlist([segments.segment(-10,10)]) - segments.segmentlist([segments.segment(5,15)]))
self.assertEqual(segments.segmentlist([segments.segment(0,5), segments.segment(45,50)]), segments.segmentlist([segments.segment(0,10), segments.segment(20,30), segments.segment(40,50)]) - segments.segmentlist([segments.segment(5, 45)]))
def test__invert__(self):
self.assertEqual(segments.segmentlist([segments.segment(-segments.infinity(), segments.infinity())]), ~segments.segmentlist([]))
self.assertEqual(segments.segmentlist([]), ~segments.segmentlist([segments.segment(-segments.infinity(), segments.infinity())]))
self.assertEqual(segments.segmentlist([segments.segment(-segments.infinity(), -5), segments.segment(5, segments.infinity())]), ~segments.segmentlist([segments.segment(-5,5)]))
def test__and__(self):
for i in xrange(algebra_repeats):
a = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
b = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
c = a & b
try:
# make sure __and__ and __sub__ have the
# correct relationship to one another
self.assertEqual(c, a - (a - b))
self.assertEqual(c, b - (b - a))
except AssertionError, e:
raise AssertionError, str(e) + "\na = " + str(a) + "\nb = " + str(b)
def test__or__(self):
for i in xrange(algebra_repeats):
a = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
b = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
c = a | b
try:
# make sure c is coalesced
self.assertTrue(verifyutils.iscoalesced(c))
# make sure c contains all of a
self.assertEqual(a, c & a)
# make sure c contains all of b
self.assertEqual(b, c & b)
# make sure c contains nothing except a and b
self.assertEqual(segments.segmentlist([]), c - a - b)
except AssertionError, e:
raise AssertionError, str(e) + "\na = " + str(a) + "\nb = " + str(b)
def test__xor__(self):
for i in xrange(algebra_repeats):
a = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
b = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
c = a ^ b
try:
# c contains nothing that can be found in
# the intersection of a and b
self.assertFalse(c.intersects(a & b))
# c contains nothing that cannot be found
# in either a or b
self.assertEqual(segments.segmentlist([]), c - a - b)
# make sure that c plus the intersection of a and b
# leaves no part of either a or b uncovered
self.assertEqual(segments.segmentlist([]), a - (c | a & b))
self.assertEqual(segments.segmentlist([]), b - (c | a & b))
except AssertionError, e:
raise AssertionError, str(e) + "\na = " + str(a) + "\nb = " + str(b)
def testprotract(self):
self.assertEqual(segments.segmentlist([segments.segment(0, 20)]), segments.segmentlist([segments.segment(3, 7), segments.segment(13, 17)]).protract(3))
def testcontract(self):
self.assertEqual(segments.segmentlist([segments.segment(0, 20)]), segments.segmentlist([segments.segment(3, 7), segments.segment(13, 17)]).contract(-3))
def testintersects(self):
for i in xrange(algebra_repeats):
a = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
b = verifyutils.random_coalesced_list(random.randint(1, algebra_listlength))
c = a - b
d = a & b
try:
if len(c):
self.assertFalse(c.intersects(b))
if len(d):
self.assertTrue(d.intersects(a))
self.assertTrue(d.intersects(b))
self.assertTrue(a.intersects(b))
except AssertionError, e:
raise AssertionError, str(e) + "\na = " + str(a) + "\nb = " + str(b)
def testextent(self):
self.assertEqual(segments.segmentlist([(1, 0)]).extent(), segments.segment(0, 1))
def testcoalesce(self):
# check that mixed-type coalescing works
x = segments.segmentlist([segments.segment(1, 2), segments.segment(3, 4), (2, 3)])
try:
self.assertEqual(x.coalesce(), segments.segmentlist([segments.segment(1, 4)]))
except AssertionError, e:
raise AssertionError, "mixed type coalesce failed: got %s" % str(x)
# try a bunch of random segment lists
for i in xrange(algebra_repeats):
a = verifyutils.random_uncoalesced_list(random.randint(1, algebra_listlength))
b = segments.segmentlist(a[:]).coalesce()
try:
self.assertTrue(verifyutils.iscoalesced(b))
for seg in a:
self.assertTrue(seg in b)
for seg in a:
b -= segments.segmentlist([seg])
self.assertEqual(b, segments.segmentlist([]))
except AssertionError, e:
raise AssertionError, str(e) + "\na = " + str(a) + "\nb = " + str(b)
def test_typesafety(self):
w = "segments.segmentlist([segments.segment(0, 10), segments.segment(20, 30)])"
x = "segments.segment(10, 20)"
y = "[(10, 20)]"
z = "None"
for op in ("|", "&", "-", "^"):
for arg1, arg2 in (
(w, x), (x, w),
(w, z), (z, w)
):
expr = "%s %s %s" % (arg1, op, arg2)
try:
eval(expr)
except TypeError:
pass
else:
raise AssertionError("%s did not raise TypeError" % expr)
self.assertEqual(eval("%s | %s" % (w, y)), segments.segmentlist([segments.segment(0, 30)]))
class test_segmentlistdict(unittest.TestCase):
def testextent_all(self):
a = segments.segmentlistdict({"H1": segments.segmentlist(), "L1": segments.segmentlist([segments.segment(25, 35)])})
self.assertEqual(a.extent_all(), segments.segment(25, 35))
def testintersects(self):
a = segments.segmentlistdict({"H1": segments.segmentlist([segments.segment(0, 10), segments.segment(20, 30)])})
b = segments.segmentlistdict({"H1": segments.segmentlist([segments.segment(5, 15)]), "L1": segments.segmentlist([segments.segment(25, 35)])})
c = segments.segmentlistdict({"V1": segments.segmentlist([segments.segment(7, 13), segments.segment(27, 40)])})
self.assertEqual(a.intersects(b), True)
self.assertEqual(b.intersects(a), True)
self.assertEqual(a.intersects(a), True)
self.assertEqual(a.intersects(c), False)
self.assertEqual(b.intersects(segments.segmentlistdict({})), False)
self.assertEqual(segments.segmentlistdict({}).intersects(segments.segmentlistdict({})), False)
self.assertEqual(a.intersects_all(b), False)
self.assertEqual(b.intersects_all(a), True)
self.assertEqual(a.all_intersects(b), True)
self.assertEqual(b.all_intersects(a), False)
self.assertEqual(a.all_intersects_all(b), False)
def testpickle(self):
a = segments.segmentlistdict({"H1": segments.segmentlist([segments.segment(0, 10), segments.segment(20, 30)])})
a.offsets["H1"] = 10.0
self.assertEqual(a, pickle.loads(pickle.dumps(a, protocol = 0)))
self.assertEqual(a, pickle.loads(pickle.dumps(a, protocol = 1)))
self.assertEqual(a, pickle.loads(pickle.dumps(a, protocol = 2)))
#
# Construct and run the test suite.
#
if __name__ == "__main__":
# first with the pure Python segments implementation
from glue import segments
verifyutils.segments = segments
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(test_infinity))
suite.addTest(unittest.makeSuite(test_segment))
suite.addTest(unittest.makeSuite(test_segmentlist))
suite.addTest(unittest.makeSuite(test_segmentlistdict))
if not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful():
sys.exit(1)
doctest.testmod(segments)
# then with C extension implementation
from glue import __segments
segments.infinity = __segments.infinity
segments.NegInfinity = __segments.NegInfinity
segments.PosInfinity = __segments.PosInfinity
segments.segment = __segments.segment
segments.segmentlist = __segments.segmentlist
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(test_infinity))
suite.addTest(unittest.makeSuite(test_segment))
suite.addTest(unittest.makeSuite(test_segmentlist))
suite.addTest(unittest.makeSuite(test_segmentlistdict))
if not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful():
sys.exit(1)
doctest.testmod(segments)
| gpl-3.0 |
namlook/mongokit | tests/test_helpers.py | 5 | 6576 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2011, Nicolas Clairon
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Berkeley nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from mongokit import *
from mongokit.schema_document import DotExpandedDict
class HelpersTestCase(unittest.TestCase):
def test_DotExpandedDict(self):
d = DotExpandedDict({'a.$int.c.d': 3, 'a.$int.e': 5, '_id': u'user', 'a.g': 2, 'f': 6})
assert d == {'_id': u'user', 'a':{int:{'c':{'d':3}, 'e':5}, "g":2}, 'f':6}, d
d = DotExpandedDict({'foo.bla.$unicode': [unicode], 'foo.bar': {}})
assert d == {'foo': {'bar': {}, 'bla': {unicode: [unicode]}}}, d
self.assertRaises(EvalException, DotExpandedDict, {'foo.bla.$arf': [unicode], 'foo.bar': {}})
d = DotExpandedDict({'person.1.firstname': ['Simon'],
'person.1.lastname': ['Willison'],
'person.2.firstname': ['Adrian'],
'person.2.lastname': ['Holovaty']})
assert d == {'person': {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}}
assert d['person'] == {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}
assert d['person']['1'] == {'lastname': ['Willison'], 'firstname': ['Simon']}
# Gotcha: Results are unpredictable if the dots are "uneven":
assert DotExpandedDict({'c.1': 2, 'c.2': 3, 'c': 1}) == {'c': 1}
def test_DotCollapsedDict(self):
dic = {'foo':{}}
d = DotCollapsedDict(dic)
assert d == {'foo':{}}, d
dic = {'bar':{'foo':{}}}
d = DotCollapsedDict(dic)
assert d == {'bar.foo':{}}, d
dic = {'_id': u'user', 'a':3, 'e':5, "g":2, 'f':6}
d = DotCollapsedDict(dic)
assert d == {'_id': u'user', 'a':3, 'e':5, "g":2, 'f':6}, d
dic = {'_id': u'user', 'a':{'b':{'c':{'d':3}, 'e':5}, "g":2}, 'f':6}
d = DotCollapsedDict(dic)
assert d == {'a.b.c.d': 3, '_id': u'user', 'a.b.e': 5, 'a.g': 2, 'f': 6}, d
dic = {'_id': u'user', 'a':{'b':1, 'd':3, 'e':5}, 'f':6}
d = DotCollapsedDict(dic)
assert d == {'_id': u'user', 'a.b': 1, 'a.d': 3, 'a.e': 5, 'f': 6}, d
dic = {'_id': u'user', 'a':{'b':1, 'd':3, 'e':{'g':5, 'h':0}}, 'f':6}
d = DotCollapsedDict(dic)
assert d == {'a.d': 3, 'a.e.h': 0, 'a.b': 1, 'f': 6, 'a.e.g': 5, '_id': u'user'}, d
def test_DotCollapsedDict_with_reference(self):
dic = {'foo':{}}
d = DotCollapsedDict(dic, reference={'foo':{}})
assert d == {'foo':{}}, d
dic = {'bar':{'foo':{}}}
d = DotCollapsedDict(dic, reference={'bar':{'foo':{}}})
assert d == {'bar':{'foo':{}}}, d
dic = {'bar':{'foo':3}, 'bla':{'g':2, 'h':3}}
d = DotCollapsedDict(dic, reference={'bar.foo':None, 'bla':{'g':None, 'h':None}})
assert d == {'bar.foo':3, 'bla':{'g':2, 'h':3}}, d
# # XXX TODO
# dic = {'bar':{'foo':3, 'bla':2}}
# d = DotCollapsedDict(dic, reference={'bar.foo':None, 'bar':{'bla':None}})
# assert d == {'bar.foo':3, 'bar':{'bla':2}}, d
dic = {'_id': u'user', 'a':3, 'e':5, "g":2, 'f':6}
d = DotCollapsedDict(dic, reference=dic)
assert d == {'_id': u'user', 'a':3, 'e':5, "g":2, 'f':6}, d
dic = {'_id': u'user', 'a':{'b':1, 'd':3, 'e':{'g':5, 'h':0}}, 'f':6}
d = DotCollapsedDict(dic, reference={'_id':None, 'a.b':1, 'a.d':3, 'a.e':{'g':5, 'h':0}, 'a.f':6})
assert d == {'a.d': 3, 'a.b': 1, 'f': 6, 'a.e':{'g': 5, 'h':0}, '_id': u'user'}, d
dic = {'_id': u'user', 'a':{'b':{'c':{'d':3}, 'e':5}, "g":2}, 'f':6}
d = DotCollapsedDict(dic, reference={'_id':None, 'a.b':{'c':{'d':3}, 'e':5}, 'a.g':2, 'f':6})
assert d == {'_id': u'user', 'a.b':{'c': {'d': 3}, 'e':5}, 'a.g': 2, 'f': 6}, d
def test_DotCollapsedDict_with_remove_under_type(self):
dic = {'_id': u'user', 'a':{int:{'c':{'d':3}, 'e':5}, "g":2}, 'f':6}
d = DotCollapsedDict(dic, remove_under_type=True)
assert d == {'a': {}, '_id': u'user', 'f': 6}, d
dic = {'bla':{'foo':{unicode:{"bla":int}}, 'bar':unicode}}
d = DotCollapsedDict(dic, remove_under_type=True)
assert d == {'bla.foo':{}, 'bla.bar':unicode}, d
dic = {'bla':{'foo':{unicode:[unicode]}, 'bar':"egg"}}
d = DotCollapsedDict(dic, remove_under_type=True)
assert d == {'bla.foo':{}, 'bla.bar':"egg"}, d
def test_DotCollapsedDict_with_type(self):
dic = {'_id': u'user', 'a':{int:{'c':{'d':3}, 'e':5}, "g":2}, 'f':6}
d = DotCollapsedDict(dic)
assert d == {'a.$int.c.d': 3, 'a.$int.e': 5, '_id': u'user', 'a.g': 2, 'f': 6}, d
dic = {'bla':{'foo':{unicode:{"bla":3}}, 'bar':'egg'}}
d = DotCollapsedDict(dic)
assert d == {'bla.foo.$unicode.bla': 3, 'bla.bar': "egg"}, d
dic = {'bla':{'foo':{unicode:['egg']}, 'bar':"egg"}}
d = DotCollapsedDict(dic)
assert d == {'bla.foo.$unicode': ['egg'], 'bla.bar': 'egg'}, d
| bsd-3-clause |
rahul67/hue | desktop/core/src/desktop/redaction/__init__.py | 35 | 1504 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from desktop.redaction import logfilter
from desktop.redaction.engine import RedactionEngine
global_redaction_engine = RedactionEngine()
def redact(string):
"""
Redact a string using the global redaction engine.
"""
return global_redaction_engine.redact(string)
def register_log_filtering(policy):
"""
`register_log_filtering` adds `policy` to the global redaction engine and
injects the redaction filter into all of the root logger's handlers. This
must be called after all of the handlers have been added to the root logger,
otherwise those handlers may expose unredacted strings.
"""
if policy:
global_redaction_engine.add_policy(policy)
logfilter.add_log_redaction_filter_to_logger(global_redaction_engine, logging.root)
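# Minimal usage sketch. The `policy` object is assumed to be a redaction
# policy built elsewhere (for example, parsed from a redaction rules file);
# the sample string is made up:
#
#   register_log_filtering(policy)
#   clean = redact("authenticating user=alice password=hunter2")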
| apache-2.0 |
agry/NGECore2 | scripts/mobiles/generic/faction/imperial/elite_sand_trooper_commando_75.py | 2 | 1453 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from resources.datatables import FactionStatus
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('crackdown_elite_sand_trooper_hard')
mobileTemplate.setLevel(75)
mobileTemplate.setDifficulty(Difficulty.ELITE)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("imperial")
mobileTemplate.setAssistRange(0)
mobileTemplate.setStalker(False)
mobileTemplate.setFaction("imperial")
mobileTemplate.setFactionStatus(FactionStatus.Combatant)
templates = Vector()
templates.add('object/mobile/shared_dressed_stormtrooper_sandtrooper_m.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/ranged/carbine/shared_carbine_e11.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShot')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('elite_sandtrooper_commando_75', mobileTemplate)
return | lgpl-3.0 |
jdsika/TUM_HOly | openrave/docs/preprocess_doxygen.py | 2 | 2409 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2011 Rosen Diankov (rosen.diankov@gmail.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from optparse import OptionParser
if __name__ == "__main__":
parser = OptionParser(description='Build the doxygen files', usage='%prog [options] latex_directory')
parser.add_option('--outdir',action="store",type='string',dest='outdir',default='build/en',
help='Doxygen output folder.')
(options, args) = parser.parse_args()
indexfilename = os.path.join(options.outdir,'coreapihtml','index.html')
indexhtml = open(indexfilename,'r').read()
indexhtml = re.sub('<li><a href="examples.html"><span>Examples</span></a></li>','<li><a href="cpp_examples.html"><span>C++ Examples</span></a></li>',indexhtml)
open(indexfilename,'w').write(indexhtml)
# for now only build for english pdf
# if lang == 'en':
# os.system('doxygen build/Doxyfile.latex.%s'%lang)
# refman_file = 'build/%s/coreapilatex/refman.tex'%lang
# doctext = open(refman_file,'r').read()
# lines = doctext.splitlines()
# # filedocindex = [i for i,s in enumerate(lines) if s.find('\\section{File Documentation}') >= 0][0]
# # if filedocindex >= 0:
# # lines.pop(filedocindex)
# # while filedocindex < len(lines):
# # if lines[filedocindex].find('\\input{') < 0:
# # break
# # lines.pop(filedocindex)
# doctext = '\n'.join(lines)
# doctext = re.sub('\\\\section\{','\\clearemptydoublepage\n\\section{',doctext)
# open(refman_file,'w').write(doctext)
#
# os.chdir('build/%s/coreapilatex'%lang)
# # yes, 3 times
# os.system('pdflatex refman.tex')
# os.system('pdflatex refman.tex')
# os.system('pdflatex refman.tex')
| mit |
Fkawala/gcloud-python | resource_manager/google/cloud/resource_manager/connection.py | 3 | 1594 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create / interact with Google Cloud Resource Manager connections."""
from google.cloud import _http
class Connection(_http.JSONConnection):
"""A connection to Google Cloud Resource Manager via the JSON REST API.
:type credentials: :class:`oauth2client.client.OAuth2Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
connection.
:type http: :class:`httplib2.Http` or class that defines ``request()``.
:param http: (Optional) HTTP object to make requests.
"""
API_BASE_URL = 'https://cloudresourcemanager.googleapis.com'
"""The base of the API call URL."""
API_VERSION = 'v1beta1'
"""The version of the API, used in building the API call's URL."""
API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
"""A template for the URL of a particular API call."""
SCOPE = ('https://www.googleapis.com/auth/cloud-platform',)
"""The scopes required for authenticating as a Resouce Manager consumer."""
| apache-2.0 |
robgolding/tasklib | docs/conf.py | 2 | 8139 | # -*- coding: utf-8 -*-
#
# tasklib documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 10 15:19:07 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'tasklib'
copyright = u'2014, Rob Golding'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.3.0'
# The full version, including alpha/beta/rc tags.
release = '2.3.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'tasklibdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'tasklib.tex', u'tasklib Documentation',
u'Rob Golding', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'tasklib', u'tasklib Documentation',
[u'Rob Golding'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'tasklib', u'tasklib Documentation',
u'Rob Golding', 'tasklib', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| bsd-3-clause |
Supernomad/quantum | docs/conf.py | 1 | 5259 | # -*- coding: utf-8 -*-
#
# quantum documentation build configuration file, created by
# sphinx-quickstart on Tue Sep 12 01:36:36 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['jinja_renderer']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'quantum'
copyright = u'2016-2018 Christian Saide <supernomad>'
author = u'Christian Saide <supernomad>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.15.0'
# The full version, including alpha/beta/rc tags.
release = u'0.15.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
suppress_warnings = ['']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'quantumdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'quantum.tex', u'quantum Documentation',
u'Christian Saide \\textless{}supernomad\\textgreater{}', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'quantum', u'quantum Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'quantum', u'quantum Documentation',
author, 'quantum', 'One line description of project.',
'Miscellaneous'),
]
| mpl-2.0 |
wedaly/django-rest-framework | tests/test_parsers.py | 81 | 3752 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.core.files.uploadhandler import MemoryFileUploadHandler
from django.test import TestCase
from django.utils.six.moves import StringIO
from rest_framework.exceptions import ParseError
from rest_framework.parsers import FileUploadParser, FormParser
class Form(forms.Form):
field1 = forms.CharField(max_length=3)
field2 = forms.CharField()
class TestFormParser(TestCase):
def setUp(self):
self.string = "field1=abc&field2=defghijk"
def test_parse(self):
""" Make sure the `QueryDict` works OK """
parser = FormParser()
stream = StringIO(self.string)
data = parser.parse(stream)
self.assertEqual(Form(data).is_valid(), True)
class TestFileUploadParser(TestCase):
def setUp(self):
class MockRequest(object):
pass
from io import BytesIO
self.stream = BytesIO(
"Test text file".encode('utf-8')
)
request = MockRequest()
request.upload_handlers = (MemoryFileUploadHandler(),)
request.META = {
'HTTP_CONTENT_DISPOSITION': 'Content-Disposition: inline; filename=file.txt',
'HTTP_CONTENT_LENGTH': 14,
}
self.parser_context = {'request': request, 'kwargs': {}}
def test_parse(self):
"""
Parse raw file upload.
"""
parser = FileUploadParser()
self.stream.seek(0)
data_and_files = parser.parse(self.stream, None, self.parser_context)
file_obj = data_and_files.files['file']
self.assertEqual(file_obj._size, 14)
def test_parse_missing_filename(self):
"""
Parse raw file upload when filename is missing.
"""
parser = FileUploadParser()
self.stream.seek(0)
self.parser_context['request'].META['HTTP_CONTENT_DISPOSITION'] = ''
with self.assertRaises(ParseError):
parser.parse(self.stream, None, self.parser_context)
def test_parse_missing_filename_multiple_upload_handlers(self):
"""
Parse raw file upload with multiple handlers when filename is missing.
Regression test for #2109.
"""
parser = FileUploadParser()
self.stream.seek(0)
self.parser_context['request'].upload_handlers = (
MemoryFileUploadHandler(),
MemoryFileUploadHandler()
)
self.parser_context['request'].META['HTTP_CONTENT_DISPOSITION'] = ''
with self.assertRaises(ParseError):
parser.parse(self.stream, None, self.parser_context)
def test_get_filename(self):
parser = FileUploadParser()
filename = parser.get_filename(self.stream, None, self.parser_context)
self.assertEqual(filename, 'file.txt')
def test_get_encoded_filename(self):
parser = FileUploadParser()
self.__replace_content_disposition('inline; filename*=utf-8\'\'ÀĥƦ.txt')
filename = parser.get_filename(self.stream, None, self.parser_context)
self.assertEqual(filename, 'ÀĥƦ.txt')
self.__replace_content_disposition('inline; filename=fallback.txt; filename*=utf-8\'\'ÀĥƦ.txt')
filename = parser.get_filename(self.stream, None, self.parser_context)
self.assertEqual(filename, 'ÀĥƦ.txt')
self.__replace_content_disposition('inline; filename=fallback.txt; filename*=utf-8\'en-us\'ÀĥƦ.txt')
filename = parser.get_filename(self.stream, None, self.parser_context)
self.assertEqual(filename, 'ÀĥƦ.txt')
def __replace_content_disposition(self, disposition):
self.parser_context['request'].META['HTTP_CONTENT_DISPOSITION'] = disposition
| bsd-2-clause |
roman-yepishev/mediafire-python-open-sdk | tests/api/test_error.py | 2 | 1379 | import unittest
import responses
from tests.api.base import MediaFireApiTestCase
from mediafire.api import MediaFireApiError
class TestErrorResponse(MediaFireApiTestCase):
def __init__(self, *args, **kwargs):
super(MediaFireApiTestCase, self).__init__(*args, **kwargs)
self.url = self.build_url('system/get_status')
@responses.activate
def test_json_error(self):
"""Test error handling for json response"""
body = """
{"response": {
"result": "Error",
"message": "This is a fun error",
"error": "102"}}
"""
responses.add(responses.POST, self.url, body=body, status=900,
content_type="application/json")
with self.assertRaises(MediaFireApiError):
self.api.system_get_status()
@responses.activate
def test_json_in_xml_error(self):
"""Test that json-in-xml (api bug) still detects error"""
body = """
{"response": {
"result": "Error",
"message": "This is even funnier",
"error": "103"}}
"""
responses.add(responses.POST, self.url, body=body, status=900,
content_type="text/xml")
with self.assertRaises(MediaFireApiError):
self.api.system_get_status()
if __name__ == "__main__":
unittest.main()
| bsd-2-clause |
janebeckman/gpdb | gpMgmt/bin/gppylib/operations/test/unit/test_unit_dump.py | 10 | 70055 | #
# Copyright (c) Greenplum Inc 2012. All Rights Reserved.
#
import unittest
from datetime import datetime
from gppylib.commands.base import Command, CommandResult
from gppylib.gparray import GpArray, GpDB
from gppylib.operations.backup_utils import *
from gppylib.operations.dump import *
from mock import patch, MagicMock, Mock, mock_open, call, ANY
from . import setup_fake_gparray
class DumpTestCase(unittest.TestCase):
@patch('gppylib.operations.backup_utils.Context.get_master_port', return_value = 5432)
def setUp(self, mock1):
with patch('gppylib.gparray.GpArray.initFromCatalog', return_value=setup_fake_gparray()):
context = Context()
context.target_db ='testdb'
context.dump_schema='testschema'
context.include_dump_tables_file='/tmp/table_list.txt'
context.master_datadir=context.backup_dir='/data/master'
context.batch_default=None
context.timestamp_key = '20160101010101'
context.generate_dump_timestamp()
context.schema_file = None
self.context = context
self.dumper = DumpDatabase(self.context)
self.dump_globals = DumpGlobal(self.context)
self.mailEvent = MailEvent(subject="test", message="Hello", to_addrs="example@pivotal.io")
@patch('gppylib.operations.dump.get_heap_partition_list', return_value=[['123', 'public', 't4'], ['123', 'public', 't5'], ['123', 'testschema', 't6']])
def test_get_dirty_heap_tables_default(self, mock1):
expected_output = set(['public.t4', 'public.t5', 'testschema.t6'])
dirty_table_list = get_dirty_heap_tables(self.context)
self.assertEqual(dirty_table_list, expected_output)
@patch('gppylib.operations.dump.get_heap_partition_list', return_value=[[], ['123', 'public', 't5'], ['123', 'public', 't6']])
def test_get_dirty_heap_tables_empty_arg(self, mock1):
with self.assertRaisesRegexp(Exception, 'Heap tables query returned rows with unexpected number of columns 0'):
dirty_table_list = get_dirty_heap_tables(self.context)
def test_write_dirty_file_default(self):
dirty_tables = ['t1', 't2', 't3']
m = mock_open()
with patch('__builtin__.open', m, create=True):
tmpfilename = write_dirty_file(self.context, dirty_tables)
result = m()
self.assertEqual(len(dirty_tables), len(result.write.call_args_list))
for i in range(len(dirty_tables)):
self.assertEqual(call(dirty_tables[i]+'\n'), result.write.call_args_list[i])
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='test_dirty_filename')
def test_write_dirty_file_timestamp(self, mock1):
dirty_tables = ['t1', 't2', 't3']
timestamp = '20160101010101'
m = mock_open()
with patch('__builtin__.open', m, create=True):
tmpfilename = write_dirty_file(self.context, dirty_tables, timestamp)
mock1.assert_called_with("dirty_table", timestamp=timestamp)
result = m()
self.assertEqual(len(dirty_tables), len(result.write.call_args_list))
for i in range(len(dirty_tables)):
self.assertEqual(call(dirty_tables[i]+'\n'), result.write.call_args_list[i])
def test_write_dirty_file_no_list(self):
dirty_tables = None
tmpfilename = write_dirty_file(self.context, dirty_tables)
self.assertEqual(tmpfilename, None)
def test_write_dirty_file_empty_list(self):
dirty_tables = []
m = mock_open()
with patch('__builtin__.open', m, create=True):
tmpfilename = write_dirty_file(self.context, dirty_tables)
result = m()
self.assertEqual(len(result.write.call_args_list), 0)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['20120330120102', '20120330120103'])
@patch('gppylib.operations.dump.get_incremental_ts_from_report_file', return_value='20120330120102')
def test_validate_increments_file_default(self, mock1, mock2):
        # expect no exception to be raised here
CreateIncrementsFile.validate_increments_file(self.context, '/tmp/fn')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['20120330120102', '20120330120103'])
@patch('gppylib.operations.dump.get_incremental_ts_from_report_file', side_effect=Exception('invalid timestamp'))
def test_validate_increments_file_bad_increment(self, mock1, mock2):
with self.assertRaisesRegexp(Exception, "Timestamp '20120330120102' from increments file '/tmp/fn' is not a valid increment"):
CreateIncrementsFile.validate_increments_file(self.context, '/tmp/fn')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['20120330120102', '20120330120103'])
@patch('gppylib.operations.dump.get_incremental_ts_from_report_file', return_value=None)
def test_validate_increments_file_empty_file(self, mock1, mock2):
with self.assertRaisesRegexp(Exception, "Timestamp '20120330120102' from increments file '/tmp/fn' is not a valid increment"):
CreateIncrementsFile.validate_increments_file(self.context, '/tmp/fn')
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.CreateIncrementsFile.validate_increments_file')
def test_CreateIncrementsFile_init(self, mock1, mock2, mock3):
obj = CreateIncrementsFile(self.context)
self.assertEquals(obj.increments_filename, '/data/master/db_dumps/20160101/gp_dump_20160101000000_increments')
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.CreateIncrementsFile.validate_increments_file')
@patch('gppylib.operations.dump.get_lines_from_file', side_effect=[ [], ['20160101010101'] ])
def test_CreateIncrementsFile_execute_no_file(self, mock1, mock2, mock3, mock4):
obj = CreateIncrementsFile(self.context)
with patch('__builtin__.open', mock_open(), create=True):
result = obj.execute()
self.assertEquals(1, result)
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.get_incremental_ts_from_report_file', return_value='')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['20160101010101'])
def test_CreateIncrementsFile_execute_invalid_timestamp(self, mock1, mock2, mock3, mock4):
obj = CreateIncrementsFile(self.context)
with self.assertRaisesRegexp(Exception, ".* is not a valid increment"):
obj.execute()
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.get_lines_from_file', side_effect=[ ['20160101010000'], ['20160101010000', '20160101010101'] ])
@patch('gppylib.operations.dump.CreateIncrementsFile.validate_increments_file')
def test_CreateIncrementsFile_execute_append(self, mock1, mock2, mock3, mock4):
obj = CreateIncrementsFile(self.context)
with patch('__builtin__.open', mock_open(), create=True):
result = obj.execute()
self.assertEquals(2, result)
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=[])
@patch('gppylib.operations.dump.CreateIncrementsFile.validate_increments_file')
def test_CreateIncrementsFile_execute_no_output(self, mock1, mock2, mock3, mock4):
obj = CreateIncrementsFile(self.context)
with patch('__builtin__.open', mock_open(), create=True):
with self.assertRaisesRegexp(Exception, 'File not written to'):
result = obj.execute()
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['20160101000000'])
@patch('gppylib.operations.dump.CreateIncrementsFile.validate_increments_file')
def test_CreateIncrementsFile_execute_wrong_timestamp(self, mock1, mock2, mock3, mock4):
obj = CreateIncrementsFile(self.context)
with patch('__builtin__.open', mock_open(), create=True):
with self.assertRaisesRegexp(Exception, 'Timestamp .* not written to'):
result = obj.execute()
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.get_lines_from_file', side_effect=[ ['20160101010000'], ['20160101010001', '20160101010101'] ])
@patch('gppylib.operations.dump.CreateIncrementsFile.validate_increments_file')
def test_CreateIncrementsFile_execute_modified_timestamp(self, mock1, mock2, mock3, mock4):
obj = CreateIncrementsFile(self.context)
with patch('__builtin__.open', mock_open(), create=True):
with self.assertRaisesRegexp(Exception, 'trouble adding timestamp'):
result = obj.execute()
@patch('gppylib.operations.dump.get_filter_file', return_value=None)
def test_write_partition_list_file_no_filter_file(self, mock1):
with patch('gppylib.operations.dump.get_partition_list') as p:
part_list = [[123, 'myschema', 't1'], [4444, 'otherschema', 't2'], [992313, 'public', 't3']]
p.return_value = part_list
m = mock_open()
with patch('__builtin__.open', m, create=True):
write_partition_list_file(self.context)
result = m()
self.assertEqual(len(part_list), len(result.write.call_args_list))
for i in range(len(part_list)):
expected = "%s.%s\n" % (part_list[i][1], part_list[i][2])
self.assertEqual(call(expected), result.write.call_args_list[i])
@patch('gppylib.operations.dump.get_partition_list', return_value=[['t1', 'foo', 'koo'], ['public', 't2'], ['public', 't3']])
@patch('gppylib.operations.dump.get_filter_file', return_value=None)
def test_write_partition_list_file_bad_query_return(self, mock1, mock2):
with self.assertRaisesRegexp(Exception, 'Invalid results from query to get all tables'):
write_partition_list_file(self.context)
def test_create_dump_outcome_default(self):
start = datetime(2012, 7, 31, 9, 30, 00)
end = datetime(2012, 8, 1, 12, 21, 11)
rc = 5
expected_outcome = {'timestamp_start': '20120731093000',
'time_start': '09:30:00',
'time_end': '12:21:11',
'exit_status': 5}
outcome = self.dumper.create_dump_outcome(start, end, rc)
self.assertTrue(expected_outcome == outcome)
@patch('gppylib.operations.dump.ValidateDumpDatabase.run')
@patch('gppylib.operations.dump.Command.run')
@patch('gppylib.operations.dump.Command.get_results', return_value=CommandResult(0, "", "", True, False))
@patch('gppylib.operations.dump.DumpDatabase.create_filter_file')
def test_execute_default(self, mock1, mock2, mock3, mock4):
self.context.include_dump_tables_file = ''
self.dumper.execute()
# should not raise any exceptions
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
@patch('gppylib.operations.dump.execSQLForSingleton', return_value='100')
def test_get_partition_state_default(self, mock1, mock2, mock3):
partition_info = [(123, 'testschema', 't1', 4444), (234, 'testschema', 't2', 5555)]
expected_output = ['testschema, t1, 100', 'testschema, t2, 100']
result = get_partition_state(self.context, 'pg_aoseg', partition_info)
self.assertEqual(result, expected_output)
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
def test_get_partition_state_empty(self, mock1, mock2):
partition_info = []
expected_output = []
result = get_partition_state(self.context, 'pg_aoseg', partition_info)
self.assertEqual(result, expected_output)
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
@patch('gppylib.operations.dump.execSQLForSingleton', return_value='10000000000000000')
def test_get_partition_state_exceeded_count(self, mock1, mock2, mock3):
partition_info = [(123, 'testschema', 't1', 4444), (234, 'testschema', 't2', 5555)]
expected_output = ['testschema, t1, 10000000000000000', 'testschema, t2, 10000000000000000']
with self.assertRaisesRegexp(Exception, 'Exceeded backup max tuple count of 1 quadrillion rows per table for:'):
get_partition_state(self.context, 'pg_aoseg', partition_info)
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
@patch('gppylib.operations.dump.execSQLForSingleton', return_value='100')
def test_get_partition_state_many_partition(self, mock1, mock2, mock3):
master_port=5432
dbname='testdb'
partition_info = [(123, 'testschema', 't1', 4444), (234, 'testschema', 't2', 5555)] * 1
expected_output = ['testschema, t1, 100', 'testschema, t2, 100'] * 1
result = get_partition_state(self.context, 'pg_aoseg', partition_info)
self.assertEqual(result, expected_output)
def test_get_filename_from_filetype_ao(self):
expected_output = '/data/master/db_dumps/20160101/gp_dump_20160101010101_ao_state_file'
result = get_filename_from_filetype(self.context, "ao", self.context.timestamp)
self.assertEqual(result, expected_output)
def test_get_filename_from_filetype_co(self):
expected_output = '/data/master/db_dumps/20160101/gp_dump_20160101010101_co_state_file'
result = get_filename_from_filetype(self.context, "co", self.context.timestamp)
self.assertEqual(result, expected_output)
def test_get_filename_from_filetype_bad_type(self):
with self.assertRaisesRegexp(Exception, 'Invalid table type *'):
result = get_filename_from_filetype(self.context, "schema", self.context.timestamp)
def test_write_state_file_bad_type(self):
table_type = 'foo'
partition_list = ['testschema, t1, 100', 'testschema, t2, 100']
with self.assertRaisesRegexp(Exception, 'Invalid table type *'):
write_state_file(self.context, table_type, partition_list)
@patch('gppylib.operations.dump.get_filename_from_filetype', return_value='/tmp/db_dumps/20160101/gp_dump_20160101010101')
def test_write_state_file_default(self, mock1):
table_type = 'ao'
part_list = ['testschema, t1, 100', 'testschema, t2, 100']
m = mock_open()
with patch('__builtin__.open', m, create=True):
write_state_file(self.context, table_type, part_list)
result = m()
self.assertEqual(len(part_list), len(result.write.call_args_list))
for i in range(len(part_list)):
self.assertEqual(call(part_list[i]+'\n'), result.write.call_args_list[i])
@patch('gppylib.operations.dump.get_filename_from_filetype', return_value='/tmp/db_dumps/20170413/gp_dump_20170413224743_ao_state_file')
def test_write_state_file_empty(self, mock1):
table_type = 'ao'
part_list = ['']
m = mock_open()
with patch('__builtin__.open', m, create=True):
write_state_file(self.context, table_type, part_list)
result = m()
self.assertEqual(1, len(result.write.call_args_list))
for i in range(len(part_list)):
self.assertEqual(call('\n'), result.write.call_args_list[i])
@patch('gppylib.operations.dump.execute_sql', return_value=[['public', 'ao_table', 123, 'CREATE', 'table', '2012: 1'], ['testschema', 'co_table', 333, 'TRUNCATE', '', '2033 :1 - 111']])
def test_get_last_operation_data_default(self, mock):
output = get_last_operation_data(self.context)
expected = ['public,ao_table,123,CREATE,table,2012: 1', 'testschema,co_table,333,TRUNCATE,,2033 :1 - 111']
self.assertEquals(output, expected)
@patch('gppylib.operations.dump.execute_sql', return_value=[])
def test_get_last_operation_data_empty(self, mock):
output = get_last_operation_data(self.context)
expected = []
self.assertEquals(output, expected)
@patch('gppylib.operations.dump.execute_sql', return_value=[[123, 'table', '2012: 1'], [333, 'TRUNCATE', '', '2033 :1 - 111']])
def test_get_last_operation_data_invalid(self, mock):
with self.assertRaisesRegexp(Exception, 'Invalid return from query'):
get_last_operation_data(self.context)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101121212')
@patch('gppylib.operations.dump.os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['testschema, t1, 100', 'testschema, t2, 100'])
def test_get_last_state_default(self, mock1, mock2, mock3):
table_type = 'ao'
expected_output = ['testschema, t1, 100', 'testschema, t2, 100']
output = get_last_state(self.context, table_type)
self.assertEqual(output, expected_output)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101121212')
@patch('gppylib.operations.dump.os.path.isfile', return_value=False)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='foo')
def test_get_last_state_no_file(self, mock1, mock2, mock3):
table_type = 'ao'
with self.assertRaisesRegexp(Exception, 'ao state file does not exist: foo'):
get_last_state(self.context, table_type)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101121212')
@patch('gppylib.operations.dump.os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=[])
def test_get_last_state_empty_file(self, mock1, mock2, mock3):
table_type = 'ao'
output = get_last_state(self.context, table_type)
self.assertEqual(output, [])
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101121212')
@patch('gppylib.operations.dump.os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=[])
@patch('gppylib.operations.dump.check_file_dumped_with_nbu', return_value=True)
@patch('gppylib.operations.dump.restore_file_with_nbu')
def test_get_last_state_nbu(self, mock1, mock2, mock3, mock4, mock5):
table_type = 'ao'
self.context.netbackup_service_host = "mdw"
self.context.netbackup_block_size = "1024"
output = get_last_state(self.context, table_type)
self.assertEqual(output, [])
def test_compare_dict_different(self):
last_dict = {'testschema.t1':'100', 'testschema.t2':'200'}
curr_dict = {'testschema.t1':'200', 'testschema.t2':'200'}
expected_output = set(['testschema.t1'])
result = compare_dict(last_dict, curr_dict)
self.assertEqual(result, expected_output)
def test_compare_dict_extra(self):
last_dict = {'testschema.t1':'100', 'testschema.t2':'200', 'testschema.t3':'300'}
curr_dict = {'testschema.t1':'100', 'testschema.t2':'100'}
expected_output = set(['testschema.t2'])
result = compare_dict(last_dict, curr_dict)
self.assertEqual(result, expected_output)
def test_compare_dict_missing(self):
last_dict = {'testschema.t1':'100', 'testschema.t2':'200'}
curr_dict = {'testschema.t1':'100', 'testschema.t2':'200', 'testschema.t3':'300'}
expected_output = set(['testschema.t3'])
result = compare_dict(last_dict, curr_dict)
self.assertEqual(result, expected_output)
def test_compare_dict_identical(self):
last_dict = {'testschema.t1':'100', 'testschema.t2':'200'}
curr_dict = {'testschema.t1':'100', 'testschema.t2':'200'}
expected_output = set([])
result = compare_dict(last_dict, curr_dict)
self.assertEqual(result, expected_output)
def test_create_partition_dict_default(self):
partition_list = ['testschema, t1, 100', 'testschema, t2, 200']
expected_output = {'testschema.t1':'100', 'testschema.t2':'200'}
result = create_partition_dict(partition_list)
self.assertEqual(result, expected_output)
def test_create_partition_dict_empty(self):
partition_list = ['']
expected_output = {}
result = create_partition_dict(partition_list)
self.assertEqual(result, expected_output)
def test_create_partition_dict_invalid_format(self):
partition_list = ['testschema t1 100']
with self.assertRaisesRegexp(Exception, 'Invalid state file format *'):
create_partition_dict(partition_list)
@patch('gppylib.operations.backup_utils.Context.generate_filename')
@patch('gppylib.operations.dump.os.path.isdir', return_value=False)
@patch('gppylib.operations.dump.os.path.isfile', return_value=False)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_last_dump_timestamp_default(self, mock1, mock2, mock3, mock4):
full_timestamp = '20160101000000'
result = get_last_dump_timestamp(self.context)
self.assertEqual(result, full_timestamp)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['20160101010000', '20160101010001'])
@patch('gppylib.operations.dump.os.path.isdir', return_value=True)
@patch('gppylib.operations.dump.os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_last_dump_timestamp_one_previous(self, mock1, mock2, mock3, mock4):
master_datadir = 'foo'
backup_dir = None
full_timestamp = '20160101000000'
expected_output = '20160101010001'
result = get_last_dump_timestamp(self.context)
self.assertEqual(result, expected_output)
@patch('gppylib.operations.dump.os.path.isdir', return_value=True)
@patch('gppylib.operations.dump.os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['2012093009300q'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_last_dump_timestamp_invalid_timestamp(self, mock1, mock2, mock3, mock4):
with self.assertRaisesRegexp(Exception, 'get_last_dump_timestamp found invalid ts in file'):
get_last_dump_timestamp(self.context)
@patch('gppylib.operations.dump.os.path.isdir', return_value=True)
@patch('gppylib.operations.dump.os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_lines_from_file', return_value=[' 20160101010101 \n \n '])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_last_dump_timestamp_extra_whitespace(self, mock1, mock2, mock3, mock4):
expected = '20160101010101'
result = get_last_dump_timestamp(self.context)
self.assertEqual(result, expected)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
@patch('gppylib.operations.dump.check_file_dumped_with_nbu', return_value=False)
def test_get_last_dump_timestamp_nbu(self, mock1, mock2):
netbackup_service_host = "mdw"
netbackup_block_size = "1024"
expected = '20160101000000'
result = get_last_dump_timestamp(self.context)
self.assertEqual(result, expected)
def test_get_pgstatlastoperations_dict_single_input(self):
last_operations = ['public,t1,1234,ALTER,,201601011212:101010']
last_operations_dict = get_pgstatlastoperations_dict(last_operations)
expected_output = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010'}
self.assertEqual(last_operations_dict, expected_output)
def test_get_pgstatlastoperations_dict_multiple_input(self):
last_operations = ['public,t1,1234,ALTER,,201601011212:101010', 'public,t2,1234,VACCUM,TRUNCATE,201601011212:101015']
last_operations_dict = get_pgstatlastoperations_dict(last_operations)
expected_output = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010',
('1234', 'VACCUM'): 'public,t2,1234,VACCUM,TRUNCATE,201601011212:101015'}
self.assertEqual(last_operations_dict, expected_output)
def test_get_pgstatlastoperations_dict_empty(self):
last_operations = ['']
last_operations_dict = get_pgstatlastoperations_dict(last_operations)
expected_output = {}
self.assertEqual(last_operations_dict, expected_output)
def test_get_pgstatlastoperations_dict_invalid_input(self):
last_operations = ['public,t1,1234,ALTER,,201601011212:101010', '2345,VACCUM,TRUNCATE,201601011212:101015']
with self.assertRaisesRegexp(Exception, 'Wrong number of tokens in last_operation data for last backup'):
get_pgstatlastoperations_dict(last_operations)
def test_compare_metadata_(self):
old_metadata = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010'}
cur_metadata = ['public,t1,1234,ALTER,,201601011212:101010']
dirty_tables = compare_metadata(old_metadata, cur_metadata)
self.assertEquals(dirty_tables, set())
def test_compare_metadata_different_keyword(self):
old_metadata = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010'}
cur_metadata = ['public,t1,1234,TRUNCATE,,201601011212:101010']
dirty_tables = compare_metadata(old_metadata, cur_metadata)
self.assertEquals(dirty_tables, set(['public.t1']))
def test_compare_metadata_different_timestamp(self):
old_metadata = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010'}
cur_metadata = ['public,t1,1234,ALTER,,201601011212:102510']
dirty_tables = compare_metadata(old_metadata, cur_metadata)
self.assertEquals(dirty_tables, set(['public.t1']))
def test_compare_metadata_duplicate_input(self):
old_metadata = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010'}
cur_metadata = ['public,t1,1234,ALTER,,201601011212:101010','public,t1,1234,TRUNCATE,,201601011212:101010']
dirty_tables = compare_metadata(old_metadata, cur_metadata)
self.assertEquals(dirty_tables, set(['public.t1']))
def test_compare_metadata_invalid_input(self):
old_metadata = {('1234', 'ALTER'): 'public,t1,1234,ALTER,,201601011212:101010'}
cur_metadata = ['public,t1,1234,ALTER,,201601011212:101010,']
with self.assertRaisesRegexp(Exception, 'Wrong number of tokens in last_operation data for current backup'):
compare_metadata(old_metadata, cur_metadata)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101010100')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=[])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_tables_with_dirty_metadata_empty(self, mock1, mock2, mock3):
expected_output = set()
full_timestamp = '20160101010101'
cur_pgstatoperations = []
dirty_tables = get_tables_with_dirty_metadata(self.context, cur_pgstatoperations)
self.assertEqual(dirty_tables, expected_output)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101010100')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['public,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_tables_with_dirty_metadata_default(self, mock1, mock2, mock3):
expected_output = set()
cur_pgstatoperations = ['public,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510']
dirty_tables = get_tables_with_dirty_metadata(self.context, cur_pgstatoperations)
self.assertEqual(dirty_tables, expected_output)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101010100')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['public,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102511'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_tables_with_dirty_metadata_changed_table(self, mock1, mock2, mock3):
expected_output = set(['testschema.t2'])
cur_pgstatoperations = ['public,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510']
dirty_tables = get_tables_with_dirty_metadata(self.context, cur_pgstatoperations)
self.assertEqual(dirty_tables, expected_output)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101010100')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['testschema,t1,2234,TRUNCATE,,201601011213:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_tables_with_dirty_metadata_extras(self, mock1, mock2, mock3):
expected_output = set(['testschema.t2', 'public.t3'])
full_timestamp = '20160101010101'
cur_pgstatoperations = ['testschema,t2,1234,ALTER,CHANGE COLUMN,201601011212:102510',
'testschema,t2,2234,TRUNCATE,,201601011213:102510',
'public,t3,2234,TRUNCATE,,201601011213:102510']
dirty_tables = get_tables_with_dirty_metadata(self.context, cur_pgstatoperations)
self.assertEqual(dirty_tables, expected_output)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101010100')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['testschema,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101000000')
def test_get_tables_with_dirty_metadata_different_schema(self, mock1, mock2, mock3):
expected_output = set(['public.t1'])
cur_pgstatoperations = ['public,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510']
dirty_tables = get_tables_with_dirty_metadata(self.context, cur_pgstatoperations)
self.assertEqual(dirty_tables, expected_output)
@patch('gppylib.operations.dump.get_last_dump_timestamp', return_value='20160101010100')
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['testschema,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510'])
@patch('gppylib.operations.dump.restore_file_with_nbu')
def test_get_tables_with_dirty_metadata_nbu(self, mock1, mock2, mock3):
expected_output = set(['public.t1'])
cur_pgstatoperations = ['public,t1,1234,ALTER,CHANGE COLUMN,201601011212:102510', 'testschema,t2,2234,TRUNCATE,,201601011213:102510']
self.context.netbackup_service_host = "mdw"
self.context.netbackup_block_size = "1024"
dirty_tables = get_tables_with_dirty_metadata(self.context, cur_pgstatoperations)
self.assertEqual(dirty_tables, expected_output)
@patch('gppylib.operations.dump.get_last_state', return_value=['testschema, t1, 100', 'testschema, t2, 200'])
def test_get_dirty_partition_tables_default(self, mock1):
table_type = 'ao'
curr_state_partition_list = ['testschema, t3, 300', 'testschema, t1, 200']
expected_output = set(['testschema.t3', 'testschema.t1'])
result = get_dirty_partition_tables(self.context, table_type, curr_state_partition_list)
self.assertEqual(result, expected_output)
@patch('gppylib.operations.dump.get_last_state', return_value=['testschema, t1, 100', 'testschema, t2, 200'])
def test_get_dirty_partition_tables_nbu(self, mock1):
table_type = 'ao'
curr_state_partition_list = ['testschema, t3, 300', 'testschema, t1, 200']
self.context.netbackup_service_host = "mdw"
self.context.netbackup_block_size = "1024"
expected_output = set(['testschema.t3', 'testschema.t1'])
result = get_dirty_partition_tables(self.context, table_type, curr_state_partition_list)
self.assertEqual(result, expected_output)
@patch('gppylib.operations.dump.get_dirty_heap_tables', return_value=set(['public.heap_table1']))
@patch('gppylib.operations.dump.get_dirty_partition_tables', side_effect=[set(['public,ao_t1,100', 'public,ao_t2,100']), set(['public,co_t1,100', 'public,co_t2,100'])])
@patch('gppylib.operations.dump.get_tables_with_dirty_metadata', return_value=set(['public,ao_t3,1234,CREATE,,20160101101010', 'public,co_t3,2345,VACCUM,,20160101101010', 'public,ao_t1,1234,CREATE,,20160101101010']))
def test_get_dirty_tables(self, mock1, mock2, mock3):
ao_partition_list = []
co_partition_list = []
last_operation_data = []
dirty_tables = get_dirty_tables(self.context, ao_partition_list, co_partition_list, last_operation_data)
expected_output = ['public.heap_table1', 'public.ao_t1', 'public.ao_t2', 'public.co_t1', 'public.co_t2', 'public.ao_t3', 'public.co_t3']
self.assertEqual(dirty_tables.sort(), expected_output.sort())
@patch('gppylib.operations.dump.get_latest_report_timestamp', return_value = '20160101010100')
def test_validate_current_timestamp_default(self, mock):
directory = '/foo'
#no exception
validate_current_timestamp(self.context, current='20160101010101')
@patch('gppylib.operations.dump.get_latest_report_timestamp', return_value = '20160101010101')
def test_validate_current_timestamp_same_timestamp(self, mock):
directory = '/foo'
with self.assertRaisesRegexp(Exception, 'There is a future dated backup on the system preventing new backups'):
validate_current_timestamp(self.context, current='20160101010101')
@patch('gppylib.operations.dump.get_latest_report_timestamp', return_value = '20170101010101')
def test_validate_current_timestamp_future_timestamp(self, mock):
directory = '/foo'
with self.assertRaisesRegexp(Exception, 'There is a future dated backup on the system preventing new backups'):
validate_current_timestamp(self.context, current='20160101010101')
def test_validate_modcount_default(self):
schemaname = 'public'
partitionname = 't1'
tuple_count = '999999999999999'
validate_modcount(schemaname, partitionname, tuple_count)
def test_validate_modcount_non_int(self):
schemaname = 'public'
partitionname = 't1'
tuple_count = '#########'
with self.assertRaisesRegexp(Exception, 'Can not convert modification count for table.'):
validate_modcount(schemaname, partitionname, tuple_count)
def test_validate_modcount_scientific_notation(self):
schemaname = 'public'
partitionname = 't1'
tuple_count = '1+e15'
with self.assertRaisesRegexp(Exception, 'Can not convert modification count for table.'):
validate_modcount(schemaname, partitionname, tuple_count)
def test_validate_modcount_exceeded_count(self):
schemaname = 'public'
partitionname = 't1'
tuple_count = '1000000000000000'
with self.assertRaisesRegexp(Exception, 'Exceeded backup max tuple count of 1 quadrillion rows per table for:'):
validate_modcount(schemaname, partitionname, tuple_count)
def test_generate_dump_timestamp_default(self):
ts_key = datetime(2013, 02, 04, 10, 10, 10, 10000).strftime("%Y%m%d%H%M%S")
self.context.timestamp_key = ts_key
self.context.generate_dump_timestamp()
self.assertEqual(ts_key, self.context.timestamp)
self.assertEqual(ts_key[0:8], self.context.db_date_dir)
def test_generate_dump_timestamp_no_timestamp(self):
self.context.timestamp_key = None
self.context.generate_dump_timestamp()
self.assertNotEqual(None, self.context.timestamp)
self.assertNotEqual(None, self.context.db_date_dir)
def test_generate_dump_timestamp_replace_timestamp(self):
ts1 = datetime(2013, 02, 04, 10, 10, 10, 10000)
ts2 = datetime(2013, 03, 04, 10, 10, 10, 10000)
self.context.timestamp_key = ts1.strftime("%Y%m%d%H%M%S")
self.context.generate_dump_timestamp()
self.context.timestamp_key = ts2.strftime("%Y%m%d%H%M%S")
self.context.generate_dump_timestamp()
ts_key = ts2.strftime("%Y%m%d%H%M%S")
self.assertEqual(ts_key, self.context.timestamp)
self.assertEqual(ts_key[0:8], self.context.db_date_dir)
def test_create_dump_string_with_prefix_schema_level_dump(self):
self.context.dump_prefix = 'foo_'
self.context.schema_file = '/tmp/schema_file '
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --prefix=foo_ --no-expand-children -n "\\"testschema\\"" "testdb" --schema-file=/tmp/schema_file """
self.assertEquals(output, expected_output)
def test_create_dump_string_default(self):
self.context.schema_file = '/tmp/schema_file'
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --no-expand-children -n "\\"testschema\\"" "testdb" --schema-file=/tmp/schema_file"""
self.assertEquals(output, expected_output)
def test_create_dump_string_without_incremental(self):
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --no-expand-children -n "\\"testschema\\"" "testdb" --table-file=/tmp/table_list.txt"""
self.assertEquals(output, expected_output)
def test_create_dump_string_with_prefix(self):
self.context.dump_prefix = 'foo_'
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --prefix=foo_ --no-expand-children -n "\\"testschema\\"" "testdb" --table-file=/tmp/table_list.txt"""
self.assertEquals(output, expected_output)
def test_create_dump_string_with_include_file(self):
self.context.dump_prefix = 'metro_'
self.context.include_dump_tables_file = 'bar'
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --prefix=metro_ --no-expand-children -n "\\"testschema\\"" "testdb" --table-file=%s""" % self.context.include_dump_tables_file
self.assertEquals(output, expected_output)
def test_create_dump_string_with_no_file_args(self):
self.context.dump_prefix = 'metro_'
self.context.include_dump_tables_file = None
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --prefix=metro_ --no-expand-children -n "\\"testschema\\"" "testdb\""""
self.assertEquals(output, expected_output)
def test_create_dump_string_with_netbackup_params(self):
self.context.include_dump_tables_file = None
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --no-expand-children -n "\\"testschema\\"" "testdb" --netbackup-service-host=mdw --netbackup-policy=test_policy --netbackup-schedule=test_schedule"""
self.assertEquals(output, expected_output)
def test_get_backup_dir_with_master_data_dir(self):
self.assertEquals('/data/master/db_dumps/20160101', self.context.get_backup_dir())
def test_get_backup_dir_with_backup_dir(self):
self.context.backup_dir = '/tmp'
self.assertEquals('/tmp/db_dumps/20160101', self.context.get_backup_dir())
@patch('gppylib.operations.backup_utils.Context.is_timestamp_in_old_format', return_value=False)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101010101')
@patch('os.path.isfile', return_value=True)
def test_get_filter_file_file_exists(self, mock1, mock2, mock3):
self.context.dump_prefix = 'foo_'
expected_output = '/data/master/db_dumps/20160101/foo_gp_dump_20160101010101_filter'
self.assertEquals(expected_output, get_filter_file(self.context))
@patch('os.path.isfile', return_value=False)
@patch('gppylib.operations.backup_utils.Context.is_timestamp_in_old_format', return_value=False)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101010101')
@patch('gppylib.operations.dump.get_latest_full_ts_with_nbu', return_value='20160101010101')
@patch('gppylib.operations.dump.check_file_dumped_with_nbu', return_value=True)
@patch('gppylib.operations.dump.restore_file_with_nbu')
def test_get_filter_file_file_exists_on_nbu(self, mock1, mock2, mock3, mock4, mock5, mock6):
self.context.dump_prefix = 'foo_'
self.context.netbackup_block_size = "1024"
self.context.netbackup_service_host = "mdw"
expected_output = '/data/master/db_dumps/20160101/foo_gp_dump_20160101010101_filter'
self.assertEquals(expected_output, get_filter_file(self.context))
@patch('gppylib.operations.backup_utils.Context.is_timestamp_in_old_format', return_value=False)
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20160101010101')
@patch('os.path.isfile', return_value=False)
def test_get_filter_file_file_does_not_exist(self, mock1, mock2, mock3):
self.assertEquals(None, get_filter_file(self.context))
def test_update_filter_file_with_dirty_list_default(self):
filter_file = '/tmp/foo'
dirty_tables = ['public.t1', 'public.t2']
expected_output = ['public.t1', 'public.t2']
m = mock_open()
with patch('__builtin__.open', m, create=True):
update_filter_file_with_dirty_list(filter_file, dirty_tables)
result = m()
self.assertEqual(len(dirty_tables), len(result.write.call_args_list))
for i in range(len(dirty_tables)):
self.assertEqual(call(dirty_tables[i]+'\n'), result.write.call_args_list[i])
@patch('gppylib.operations.backup_utils.get_lines_from_file', return_value=['public.t1', 'public.t2'])
def test_update_filter_file_with_dirty_list_duplicates(self, mock1):
filter_file = '/tmp/foo'
dirty_tables = ['public.t2']
expected_output = ['public.t1', 'public.t2']
m = mock_open()
with patch('__builtin__.open', m, create=True):
update_filter_file_with_dirty_list(filter_file, dirty_tables)
result = m()
self.assertEqual(len(dirty_tables), len(result.write.call_args_list))
for i in range(len(dirty_tables)):
self.assertEqual(call(dirty_tables[i]+'\n'), result.write.call_args_list[i])
def test_update_filter_file_with_dirty_list_empty_file(self):
filter_file = '/tmp/foo'
dirty_tables = ['public.t1', 'public.t2']
expected_output = ['public.t1', 'public.t2']
m = mock_open()
with patch('__builtin__.open', m, create=True):
update_filter_file_with_dirty_list(filter_file, dirty_tables)
result = m()
self.assertEqual(len(dirty_tables), len(result.write.call_args_list))
for i in range(len(dirty_tables)):
self.assertEqual(call(dirty_tables[i]+'\n'), result.write.call_args_list[i])
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['public.t1', 'testschema.t2'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20130101010101')
@patch('gppylib.operations.dump.get_filter_file', return_value='/foo/metro_gp_dump_20130101010101_filter')
@patch('gppylib.operations.dump.get_latest_full_ts_with_nbu', return_value='20130101010101')
def test_filter_dirty_tables_with_filter(self, mock1, mock2, mock3, mock4):
dirty_tables = ['public.t1', 'public.t2', 'testschema.t1', 'testschema.t2']
expected_output = ['public.t1', 'testschema.t2']
self.context.netbackup_service_host = 'mdw'
self.assertEquals(sorted(expected_output), sorted(filter_dirty_tables(self.context, dirty_tables)))
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['public.t1', 'testschema.t2'])
@patch('gppylib.operations.dump.get_filter_file', return_value='/foo/metro_gp_dump_20130101010101_filter')
@patch('gppylib.operations.dump.get_latest_full_ts_with_nbu', return_value='20130101010101')
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20130101010101')
def test_filter_dirty_tables_with_filter_with_nbu(self, mock1, mock2, mock3, mock4):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_block_size = "1024"
dirty_tables = ['public.t1', 'public.t2', 'testschema.t1', 'testschema.t2']
expected_output = ['public.t1', 'testschema.t2']
self.assertEquals(sorted(expected_output), sorted(filter_dirty_tables(self.context, dirty_tables)))
@patch('gppylib.operations.dump.get_lines_from_file', return_value=['public.t1', 'testschema.t2'])
@patch('gppylib.operations.dump.get_latest_full_dump_timestamp', return_value='20130101010101')
@patch('gppylib.operations.dump.get_filter_file', return_value=None)
def test_filter_dirty_tables_without_filter(self, mock1, mock2, mock3):
dirty_tables = ['public.t1', 'public.t2', 'testschema.t1', 'testschema.t2']
self.assertEquals(sorted(dirty_tables), sorted(filter_dirty_tables(self.context, dirty_tables)))
@patch('gppylib.operations.dump.get_filter_file', return_value='/tmp/db_dumps/20160101/foo_gp_dump_01234567891234_filter')
def test_create_filtered_dump_string(self, mock1):
self.context.dump_prefix = 'foo_'
with patch.dict(os.environ, {'LOGNAME':'gpadmin'}):
output = self.dumper.create_filtered_dump_string()
expected_output = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=20160101010101 --no-lock --gp-c --prefix=foo_ --no-expand-children -n "\\"testschema\\"" "testdb" --table-file=/tmp/table_list.txt --incremental-filter=/tmp/db_dumps/20160101/foo_gp_dump_01234567891234_filter"""
self.assertEquals(output, expected_output)
@patch('gppylib.operations.dump.Command.get_results', return_value=CommandResult(0, "", "", True, False))
@patch('gppylib.operations.dump.Command.run')
def test_perform_dump_normal(self, mock1, mock2):
self.context.dump_prefix = 'foo_'
title = 'Dump process'
dump_line = """gp_dump -p 5432 -U gpadmin --gp-d=/data/master/db_dumps/20160101 --gp-r=/data/master/db_dumps/20160101 --gp-s=p --gp-k=01234567891234 --no-lock --gp-c --prefix=foo_ --no-expand-children -n "\\"testschema\\"" "testdb" --table-file=/tmp/table_list.txt"""
(start, end, rc) = self.dumper.perform_dump(title, dump_line)
self.assertNotEqual(start, None)
self.assertNotEqual(end, None)
self.assertEquals(rc, 0)
def test_create_pgdump_command_line(self):
global_file_name = '/data/master/db_dumps/20160101/gp_global_-1_1_20160101010101'
expected_output = "pg_dumpall -p 5432 -g --gp-syntax > %s" % global_file_name
output = self.dump_globals.create_pgdump_command_line()
self.assertEquals(output, expected_output)
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_filter_file', return_value = '/tmp/update_test')
@patch('gppylib.operations.dump.get_lines_from_file', return_value = ['public.heap_table1','public.ao_part_table','public.ao_part_table_1_prt_p1'])
@patch('gppylib.operations.dump.execute_sql', side_effect = [ [['public.ao_part_table']], [['public.ao_part_table_1_prt_p1'], ['public.ao_part_table_1_prt_p2']] ])
def test_update_filter_file_default(self, mock1, mock2, mock3, mock4):
filter_filename = '/tmp/update_test'
contents = ['public.heap_table1','public.ao_part_table','public.ao_part_table_1_prt_p1']
expected_result = ['public.heap_table1','public.ao_part_table','public.ao_part_table_1_prt_p1', 'public.ao_part_table_1_prt_p2']
m = mock_open()
with patch('__builtin__.open', m, create=True):
update_filter_file(self.context)
result = m()
self.assertEqual(len(expected_result), len(result.write.call_args_list))
expected = sorted(expected_result)
output = sorted(result.write.call_args_list)
for i in range(len(expected)):
self.assertEqual(call(expected[i]+'\n'), output[i])
@patch('os.path.isfile', return_value=True)
@patch('gppylib.operations.dump.get_filter_file', return_value = '/tmp/update_test')
@patch('gppylib.operations.dump.get_lines_from_file', return_value = ['public.heap_table1','public.ao_part_table','public.ao_part_table_1_prt_p1'])
@patch('gppylib.operations.dump.execute_sql', side_effect = [ [['public.ao_part_table']], [['public.ao_part_table_1_prt_p1'], ['public.ao_part_table_1_prt_p2']] ])
@patch('gppylib.operations.dump.restore_file_with_nbu')
@patch('gppylib.operations.dump.backup_file_with_nbu')
def test_update_filter_file_default_with_nbu(self, mock1, mock2, mock3, mock4, mock5, mock6):
filter_filename = '/tmp/update_test'
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "nbu_policy"
self.context.netbackup_schedule = "nbu_schedule"
self.context.netbackup_block_size = "1024"
contents = ['public.heap_table1','public.ao_part_table','public.ao_part_table_1_prt_p1']
expected_result = ['public.heap_table1','public.ao_part_table','public.ao_part_table_1_prt_p1', 'public.ao_part_table_1_prt_p2']
m = mock_open()
with patch('__builtin__.open', m, create=True):
update_filter_file(self.context)
result = m()
self.assertEqual(len(expected_result), len(result.write.call_args_list))
expected = sorted(expected_result)
output = sorted(result.write.call_args_list)
for i in range(len(expected)):
self.assertEqual(call(expected[i]+'\n'), output[i])
@patch('gppylib.operations.dump.backup_file_with_nbu')
def test_backup_state_files_with_nbu_default(self, mock):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
backup_state_files_with_nbu(self.context)
self.assertEqual(mock.call_count, 3)
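    # Helper: a MagicMock-based stand-in for a GpArray-style segment list.
    # getSegmentList() returns num_segs fake segments, configuring each one's
    # get_active_primary()/get_primary_dbid() mocks (primary dbids start at 2).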
class MyMock(MagicMock):
def __init__(self, num_segs):
super(MagicMock, self).__init__()
self.mock_segs = []
for i in range(num_segs):
self.mock_segs.append(Mock())
def getSegmentList(self):
for id, seg in enumerate(self.mock_segs):
seg.get_active_primary.getSegmentHostName.return_value = Mock()
seg.get_primary_dbid.return_value = id + 2
return self.mock_segs
@patch('gppylib.gparray.GpDB.getSegmentHostName', return_value='sdw')
def test_backup_config_files_with_nbu_default(self, mock1):
with patch('gppylib.operations.dump.backup_file_with_nbu', side_effect=my_counter) as nbu_mock:
global i
i = 0
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
backup_config_files_with_nbu(self.context)
args, _ = nbu_mock.call_args_list[0]
self.assertEqual(args[1], "master_config")
for id, seg in enumerate(mock1.mock_segs):
self.assertEqual(seg.get_active_primary.call_count, 1)
self.assertEqual(seg.get_primary_dbid.call_count, 1)
args, _ = nbu_mock.call_args_list[id]
self.assertEqual(args, ("segment_config", id+2, "sdw"))
self.assertEqual(i, 3)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_ddboost_default(self, mock1, mock2):
self.context.backup_dir = None
self.context.dump_dir = 'backup/DCA-35'
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_ddboost(self.context, "schema")
cmd.assert_called_with("copy file foo_schema to DD machine", "gpddboost --copyToDDBoost --from-file=foo_schema --to-file=backup/DCA-35/20160101/foo_schema")
self.assertEqual(mock2.call_count, 1)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_ddboost_no_filetype(self, mock1, mock2):
self.context.backup_dir = None
self.context.dump_dir = 'backup/DCA-35'
with self.assertRaisesRegexp(Exception, 'Cannot call backup_file_with_ddboost without a filetype argument'):
backup_file_with_ddboost(self.context)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='/tmp/foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_default(self, mock1, mock2):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
self.context.netbackup_block_size = 100
cmdStr = "cat /tmp/foo_schema | gp_bsa_dump_agent --netbackup-service-host mdw --netbackup-policy test_policy --netbackup-schedule test_schedule --netbackup-filename /tmp/foo_schema --netbackup-block-size 100"
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_nbu(self.context, "schema")
cmd.assert_called_with("dumping metadata files from master", cmdStr)
self.assertEqual(mock2.call_count, 1)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_no_filetype(self, mock1, mock2):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
self.context.netbackup_block_size = 100
cmdStr = "cat /tmp/foo_schema | gp_bsa_dump_agent --netbackup-service-host mdw --netbackup-policy test_policy --netbackup-schedule test_schedule --netbackup-filename /tmp/foo_schema --netbackup-block-size 100"
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_nbu(self.context, path="/tmp/foo_schema")
cmd.assert_called_with("dumping metadata files from master", cmdStr)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='/tmp/foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_no_path(self, mock1, mock2):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
self.context.netbackup_block_size = 100
cmdStr = "cat /tmp/foo_schema | gp_bsa_dump_agent --netbackup-service-host mdw --netbackup-policy test_policy --netbackup-schedule test_schedule --netbackup-filename /tmp/foo_schema --netbackup-block-size 100"
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_nbu(self.context, "schema")
cmd.assert_called_with("dumping metadata files from master", cmdStr)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_both_args(self, mock1, mock2):
with self.assertRaisesRegexp(Exception, 'Cannot supply both a file type and a file path to backup_file_with_nbu'):
backup_file_with_nbu(self.context, "schema", "/tmp/foo_schema")
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_neither_arg(self, mock1, mock2):
with self.assertRaisesRegexp(Exception, 'Cannot call backup_file_with_nbu with no type or path argument'):
backup_file_with_nbu(self.context)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='/tmp/foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_block_size(self, mock1, mock2):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
self.context.netbackup_block_size = 1024
cmdStr = "cat /tmp/foo_schema | gp_bsa_dump_agent --netbackup-service-host mdw --netbackup-policy test_policy --netbackup-schedule test_schedule --netbackup-filename /tmp/foo_schema --netbackup-block-size 1024"
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_nbu(self.context, "schema")
cmd.assert_called_with("dumping metadata files from master", cmdStr)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='/tmp/foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_keyword(self, mock1, mock2):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
self.context.netbackup_block_size = 100
self.context.netbackup_keyword = "foo"
cmdStr = "cat /tmp/foo_schema | gp_bsa_dump_agent --netbackup-service-host mdw --netbackup-policy test_policy --netbackup-schedule test_schedule --netbackup-filename /tmp/foo_schema --netbackup-block-size 100 --netbackup-keyword foo"
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_nbu(self.context, "schema")
cmd.assert_called_with("dumping metadata files from master", cmdStr)
@patch('gppylib.operations.backup_utils.Context.generate_filename', return_value='/tmp/foo_schema')
@patch('gppylib.commands.base.Command.run')
def test_backup_file_with_nbu_segment(self, mock1, mock2):
self.context.netbackup_service_host = "mdw"
self.context.netbackup_policy = "test_policy"
self.context.netbackup_schedule = "test_schedule"
self.context.netbackup_block_size = 100
cmdStr = "cat /tmp/foo_schema | gp_bsa_dump_agent --netbackup-service-host mdw --netbackup-policy test_policy --netbackup-schedule test_schedule --netbackup-filename /tmp/foo_schema --netbackup-block-size 100"
with patch.object(Command, '__init__', return_value=None) as cmd:
backup_file_with_nbu(self.context, "schema", hostname="sdw")
from gppylib.commands.base import REMOTE
cmd.assert_called_with("dumping metadata files from segment", cmdStr, ctxt=REMOTE, remoteHost="sdw")
@patch('gppylib.operations.dump.execute_sql', return_value = [['gp_toolkit'], ['pg_aoseg'], ['pg_toast'], ['pg_bitmapindex'], ['bar'], ['foo'], ['pg_catalog'], ['public'], ['information_schema']])
def test_get_include_schema_list_from_exclude_schema_default(self, mock1):
exclude_schema_list = ['public', 'foo']
expected_result = ['bar']
output = get_include_schema_list_from_exclude_schema(self.context, exclude_schema_list)
self.assertEqual(expected_result.sort(), output.sort())
@patch('gppylib.operations.dump.execute_sql', return_value = [['gp_toolkit'], ['pg_aoseg'], ['pg_toast'], ['pg_bitmapindex'], ['bar'], ['foo'], ['pg_catalog'], ['public'], ['information_schema']])
def test_get_include_schema_list_from_exclude_schema_empty_list(self, mock1):
exclude_schema_list = []
expected_result = ['public', 'foo', 'bar']
output = get_include_schema_list_from_exclude_schema(self.context, exclude_schema_list)
self.assertEqual(expected_result.sort(), output.sort())
@patch('gppylib.operations.dump.Command.run')
@patch('gppylib.operations.dump.findCmdInPath', return_value='/bin/mail')
def test_mail_execute_default(self, mock1, mock2):
m = MailEvent(subject="test", message="Hello", to_addrs="example@pivotal.io")
m.execute()
@patch('gppylib.operations.dump.execute_sql', side_effect=[[['public', 'test'], ['public', 'foo']], [['public', 'foo']]])
def test_check_table_exists_table_list_changes(self, mock):
self.context.target_db = "gptest"
exists = CheckTableExists(self.context, "public", "test").run()
self.assertTrue(exists)
exists = CheckTableExists(self.context, "public", "test").run()
self.assertFalse(exists)
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
@patch('gppylib.operations.dump.CheckTableExists.run', return_value=True)
@patch('gppylib.operations.dump.execSQL', return_value='10000000000000000')
def test_update_history_table_with_existing_history_table(self, execSQL_mock, mock2, mock3, mock4):
self.context.history = True
time_start = datetime(2015, 7, 31, 9, 30, 00)
time_end = datetime(2015, 8, 1, 12, 21, 11)
timestamp = '121601010101'
options_list = '-x 1337 -a'
dump_exit_status = 0
pseudo_exit_status = 0
UpdateHistoryTable(self.context, time_start, time_end,
options_list, timestamp,
dump_exit_status,
pseudo_exit_status).execute()
expected_queries = " insert into public.gpcrondump_history values (now(), '2015-07-31 09:30:00', '2015-08-01 12:21:11', '-x 1337 -a', '121601010101', 0, 0, 'COMPLETED'); "
for exec_sql in execSQL_mock.call_args_list:
            # exec_sql[0] is the positional-args tuple of the recorded call;
            # index [1] picks out the sql string that was passed to execSQL
self.assertEquals(exec_sql[0][1], expected_queries)
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
@patch('gppylib.operations.dump.CheckTableExists.run', return_value=False)
@patch('gppylib.operations.dump.execSQL', return_value='10000000000000000')
def test_update_history_table_with_new_update_table(self, execSQL_mock, mock2, mock3, mock4):
self.context.history = True
time_start = datetime(2015, 7, 31, 9, 30, 00)
time_end = datetime(2015, 8, 1, 12, 21, 11)
timestamp = '121601010101'
options_list = '-x bkdb -a'
dump_exit_status = 0
pseudo_exit_status = 0
UpdateHistoryTable(self.context, time_start, time_end,
options_list, timestamp,
dump_exit_status,
pseudo_exit_status).execute()
expected_queries = []
expected_queries.append(' create table public.gpcrondump_history (rec_date timestamp, start_time char(8), end_time char(8), options text, dump_key varchar(20), dump_exit_status smallint, script_exit_status smallint, exit_text varchar(10)) distributed by (rec_date); ')
expected_queries.append(" insert into public.gpcrondump_history values (now(), '2015-07-31 09:30:00', '2015-08-01 12:21:11', '-x bkdb -a', '121601010101', 0, 0, 'COMPLETED'); ")
for i, exec_sql in enumerate(execSQL_mock.call_args_list):
            # exec_sql[0] is the positional-args tuple of the recorded call;
            # index [1] picks out the sql string that was passed to execSQL
            self.assertEquals(exec_sql[0][1], expected_queries[i])
@patch('gppylib.operations.dump.DumpStats.print_tuples')
@patch('gppylib.operations.dump.execute_sql_with_connection', return_value=[[1]*4, [2]*4, [3]*4])
def test_dump_stats_writes_tuples_to_file_when_dumping_tuples(self, execute_sql_with_connection, print_tuples):
dump_stats = DumpStats(Mock())
db_connection = Mock()
dump_stats.dump_tuples('select * from foo', db_connection)
execute_sql_with_connection.assert_called_with('select * from foo', db_connection)
print_tuples.assert_any_call([1,1,1,1])
print_tuples.assert_any_call([2,2,2,2])
print_tuples.assert_any_call([3,3,3,3])
@patch('gppylib.operations.dump.DumpStats.print_stats')
@patch('gppylib.operations.dump.execute_sql_with_connection', return_value=[[1]*25, [2]*25, [3]*25])
def test_dump_stats_writes_stats_to_file_when_dumping_stats(self, execute_sql_with_connection, print_stats):
dump_stats = DumpStats(Mock())
db_connection = Mock()
dump_stats.dump_stats('select * from foo', db_connection)
execute_sql_with_connection.assert_called_with('select * from foo', db_connection)
print_stats.assert_any_call([1]*25)
print_stats.assert_any_call([2]*25)
print_stats.assert_any_call([3]*25)
@patch('gppylib.operations.dump.DumpStats.dump_tuples')
@patch('gppylib.operations.dump.DumpStats.dump_stats')
def test_dump_stats_uses_db_connection_to_dump_tables(self, dump_stats, dump_tuples):
db_connection = Mock()
subject = DumpStats(Mock())
subject.dump_table('someSchema.someTable', db_connection)
dump_stats.assert_called_with(ANY, db_connection)
dump_tuples.assert_called_with(ANY, db_connection)
@patch('gppylib.operations.dump.dbconn.DbURL')
@patch('gppylib.operations.dump.dbconn.connect')
    def test_execute_uses_the_same_connection_for_all_queries(self, connect, DbURL):
DbURL.return_value = 'dburl'
db_connection = Mock()
connect.return_value = db_connection
fakeContext = Mock()
fakeContext.ddboost = False
fakeContext.master_port = 9999
fakeContext.target_db= 'db_name'
dump_stats = DumpStats(fakeContext)
dump_stats.get_include_tables_from_context = Mock(return_value=['schema1.table1', 'schema2.table2'])
dump_stats.write_stats_file_header = Mock()
dump_stats.dump_table = Mock()
dump_stats.execute()
dump_stats.dump_table.assert_any_call('schema1.table1', db_connection)
dump_stats.dump_table.assert_any_call('schema2.table2', db_connection)
connect.assert_called_with('dburl')
DbURL.assert_called_with(port=9999, dbname='db_name')
db_connection.close.assert_any_call()
if __name__ == '__main__':
unittest.main()
i=0
def my_counter(*args, **kwargs):
global i
i += 1
return Mock()
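# Note on my_counter: it is meant to be handed to @patch(..., side_effect=my_counter)
# so the tests above can count how many times a patched callable was invoked
# (e.g. the `self.assertEqual(i, 3)` check after iterating the mock segments).
# Illustrative hookup (decorator target assumed, not necessarily the exact one
# used in the tests above):
#
#     @patch('gppylib.operations.dump.backup_file_with_nbu', side_effect=my_counter)
#     def test_something(self, nbu_mock):
#         ...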
| apache-2.0 |
mammique/django | django/core/cache/backends/filebased.py | 112 | 4798 | "File-based cache backend"
import hashlib
import os
import shutil
import time
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
from django.core.cache.backends.base import BaseCache
from django.utils.encoding import force_bytes
class FileBasedCache(BaseCache):
def __init__(self, dir, params):
BaseCache.__init__(self, params)
self._dir = dir
if not os.path.exists(self._dir):
self._createdir()
def add(self, key, value, timeout=None, version=None):
if self.has_key(key, version=version):
return False
self.set(key, value, timeout, version=version)
return True
def get(self, key, default=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
fname = self._key_to_file(key)
try:
with open(fname, 'rb') as f:
exp = pickle.load(f)
now = time.time()
if exp < now:
self._delete(fname)
else:
return pickle.load(f)
except (IOError, OSError, EOFError, pickle.PickleError):
pass
return default
def set(self, key, value, timeout=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
fname = self._key_to_file(key)
dirname = os.path.dirname(fname)
if timeout is None:
timeout = self.default_timeout
self._cull()
try:
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(fname, 'wb') as f:
now = time.time()
pickle.dump(now + timeout, f, pickle.HIGHEST_PROTOCOL)
pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
except (IOError, OSError):
pass
def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
try:
self._delete(self._key_to_file(key))
except (IOError, OSError):
pass
def _delete(self, fname):
os.remove(fname)
try:
# Remove the 2 subdirs if they're empty
dirname = os.path.dirname(fname)
os.rmdir(dirname)
os.rmdir(os.path.dirname(dirname))
except (IOError, OSError):
pass
def has_key(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
fname = self._key_to_file(key)
try:
with open(fname, 'rb') as f:
exp = pickle.load(f)
now = time.time()
if exp < now:
self._delete(fname)
return False
else:
return True
except (IOError, OSError, EOFError, pickle.PickleError):
return False
def _cull(self):
if int(self._num_entries) < self._max_entries:
return
try:
filelist = sorted(os.listdir(self._dir))
except (IOError, OSError):
return
if self._cull_frequency == 0:
doomed = filelist
else:
doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]
for topdir in doomed:
try:
for root, _, files in os.walk(topdir):
for f in files:
self._delete(os.path.join(root, f))
except (IOError, OSError):
pass
def _createdir(self):
try:
os.makedirs(self._dir)
except OSError:
            raise EnvironmentError("Cache directory '%s' does not exist and could not be created" % self._dir)
def _key_to_file(self, key):
"""
        Convert the cache key into an md5 hex digest. We'll turn the first couple
        bits of the resulting path into directory prefixes to be nice to filesystems
        that have problems with large numbers of files in a directory.
        Thus, a cache key of "foo" gets turned into a file named
``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
"""
path = hashlib.md5(force_bytes(key)).hexdigest()
path = os.path.join(path[:2], path[2:4], path[4:])
return os.path.join(self._dir, path)
def _get_num_entries(self):
count = 0
for _,_,files in os.walk(self._dir):
count += len(files)
return count
_num_entries = property(_get_num_entries)
def clear(self):
try:
shutil.rmtree(self._dir)
except (IOError, OSError):
pass
# For backwards compatibility
class CacheClass(FileBasedCache):
pass
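# On-disk layout used by FileBasedCache (illustrative sketch, not part of the
# original backend): each cache file holds two consecutive pickles -- the
# expiry timestamp written by set(), then the cached value -- so a standalone
# reader would look roughly like this:
#
#     with open(fname, 'rb') as f:
#         expiry = pickle.load(f)   # float: time.time() + timeout
#         value = pickle.load(f)    # the cached object itself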
| bsd-3-clause |
analyseuc3m/ANALYSE-v1 | lms/djangoapps/course_blocks/transformers/tests/test_visibility.py | 9 | 1377 | """
Tests for VisibilityTransformer.
"""
import ddt
from ..visibility import VisibilityTransformer
from .helpers import BlockParentsMapTestCase, update_block
@ddt.ddt
class VisibilityTransformerTestCase(BlockParentsMapTestCase):
"""
VisibilityTransformer Test
"""
TRANSFORMER_CLASS_TO_TEST = VisibilityTransformer
# Following test cases are based on BlockParentsMapTestCase.parents_map
@ddt.data(
({}, {0, 1, 2, 3, 4, 5, 6}, {}),
({0}, {}, {1, 2, 3, 4, 5, 6}),
({1}, {0, 2, 5, 6}, {3, 4}),
({2}, {0, 1, 3, 4, 6}, {5}),
({3}, {0, 1, 2, 4, 5, 6}, {}),
({4}, {0, 1, 2, 3, 5, 6}, {}),
({5}, {0, 1, 2, 3, 4, 6}, {}),
({6}, {0, 1, 2, 3, 4, 5}, {}),
({1, 2}, {0}, {3, 4, 5, 6}),
({2, 4}, {0, 1, 3}, {5, 6}),
({1, 2, 3, 4, 5, 6}, {0}, {}),
)
@ddt.unpack
def test_block_visibility(
self, staff_only_blocks, expected_visible_blocks, blocks_with_differing_access
):
for idx, _ in enumerate(self.parents_map):
block = self.get_block(idx)
block.visible_to_staff_only = (idx in staff_only_blocks)
update_block(block)
self.assert_transform_results(
self.student,
expected_visible_blocks,
blocks_with_differing_access,
self.transformers,
)
| agpl-3.0 |
renatofb/weblate | weblate/accounts/captcha.py | 8 | 4889 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Simple mathematical captcha.
"""
from django.conf import settings
import hashlib
import binascii
import time
from random import randint, choice
import ast
import operator
TIMEDELTA = 600
# Supported operators
OPERATORS = {
ast.Add: operator.add,
ast.Sub: operator.sub,
ast.Mult: operator.mul,
}
class MathCaptcha(object):
'''
    Simple math captcha object.
'''
operators = ('+', '-', '*')
operators_display = {
'+': u'+',
'-': u'-',
'*': u'×',
}
interval = (1, 10)
def __init__(self, question=None, timestamp=None):
if question is None:
self.question = self.generate_question()
else:
self.question = question
if timestamp is None:
self.timestamp = time.time()
else:
self.timestamp = timestamp
def generate_question(self):
'''
Generates random question.
'''
operation = choice(self.operators)
first = randint(self.interval[0], self.interval[1])
second = randint(self.interval[0], self.interval[1])
# We don't want negative answers
if operation == '-':
first += self.interval[1]
return '{0} {1} {2}'.format(
first,
operation,
second
)
@staticmethod
def from_hash(hashed):
'''
Creates object from hash.
'''
question, timestamp = unhash_question(hashed)
return MathCaptcha(question, timestamp)
@property
def hashed(self):
'''
Returns hashed question.
'''
return hash_question(self.question, self.timestamp)
def validate(self, answer):
'''
Validates answer.
'''
return (
self.result == answer and
self.timestamp + TIMEDELTA > time.time()
)
@property
def result(self):
'''
Returns result.
'''
return eval_expr(self.question)
@property
def display(self):
'''
Gets unicode for display.
'''
parts = self.question.split()
return u'{0} {1} {2}'.format(
parts[0],
self.operators_display[parts[1]],
parts[2],
)
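# Typical usage (illustrative sketch, not part of the original module): a form
# renders `captcha.display`, stores `captcha.hashed` in a hidden field, and on
# submit rebuilds the captcha from that hash to check the answer within
# TIMEDELTA seconds.
#
#     captcha = MathCaptcha()
#     hidden_value = captcha.hashed
#     # ... later, when the form comes back ...
#     ok = MathCaptcha.from_hash(hidden_value).validate(int(submitted_answer))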
def format_timestamp(timestamp):
'''
Formats timestamp in a form usable in captcha.
'''
return '{0:>010x}'.format(int(timestamp))
def checksum_question(question, timestamp):
'''
Returns checksum for a question.
'''
sha = hashlib.sha1(settings.SECRET_KEY + question + timestamp)
return sha.hexdigest()
def hash_question(question, timestamp):
'''
Hashes question so that it can be later verified.
'''
timestamp = format_timestamp(timestamp)
hexsha = checksum_question(question, timestamp)
return '{0}{1}{2}'.format(
hexsha,
timestamp,
question.encode('base64')
)
def unhash_question(question):
'''
    Unhashes question, verifying its content.
'''
if len(question) < 40:
raise ValueError('Invalid data')
hexsha = question[:40]
timestamp = question[40:50]
try:
question = question[50:].decode('base64')
except binascii.Error:
raise ValueError('Invalid encoding')
if hexsha != checksum_question(question, timestamp):
raise ValueError('Tampered question!')
return question, int(timestamp, 16)
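# Layout of the value produced by hash_question() and consumed by
# unhash_question(), e.g. for the question "1 + 2":
#
#     [40 hex chars: sha1 checksum][10 hex chars: timestamp]["1 + 2" base64-encoded]
#
# unhash_question() slices on those fixed offsets, recomputes the checksum with
# SECRET_KEY and rejects anything that does not match.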
def eval_expr(expr):
"""
Evaluates arithmetic expression used in Captcha.
>>> eval_expr('2+6')
8
>>> eval_expr('2*6')
12
"""
return eval_node(ast.parse(expr).body[0].value)
def eval_node(node):
"""
Evaluates single AST node.
"""
if isinstance(node, ast.Num):
# number
return node.n
elif isinstance(node, ast.operator):
# operator
return OPERATORS[type(node)]
elif isinstance(node, ast.BinOp):
# binary operation
return eval_node(node.op)(
eval_node(node.left),
eval_node(node.right)
)
else:
raise ValueError(node)
| gpl-3.0 |
terbolous/SickRage | lib/github/GistComment.py | 72 | 5098 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.NamedUser
class GistComment(github.GithubObject.CompletableGithubObject):
"""
This class represents GistComments as returned for example by http://developer.github.com/v3/todo
"""
@property
def body(self):
"""
:type: string
"""
self._completeIfNotSet(self._body)
return self._body.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
@property
def user(self):
"""
:type: :class:`github.NamedUser.NamedUser`
"""
self._completeIfNotSet(self._user)
return self._user.value
def delete(self):
"""
:calls: `DELETE /gists/:gist_id/comments/:id <http://developer.github.com/v3/gists/comments>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck(
"DELETE",
self.url
)
def edit(self, body):
"""
:calls: `PATCH /gists/:gist_id/comments/:id <http://developer.github.com/v3/gists/comments>`_
:param body: string
:rtype: None
"""
assert isinstance(body, (str, unicode)), body
post_parameters = {
"body": body,
}
headers, data = self._requester.requestJsonAndCheck(
"PATCH",
self.url,
input=post_parameters
)
self._useAttributes(data)
def _initAttributes(self):
self._body = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
self._user = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "body" in attributes: # pragma no branch
self._body = self._makeStringAttribute(attributes["body"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
if "user" in attributes: # pragma no branch
self._user = self._makeClassAttribute(github.NamedUser.NamedUser, attributes["user"])
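# Example usage (illustrative sketch; assumes the usual PyGithub entry points,
# which are not defined in this file):
#
#     from github import Github
#     comment = Github("token").get_gist("gist_id").get_comment(42)
#     comment.edit("updated body")   # PATCH /gists/:gist_id/comments/:id
#     comment.delete()               # DELETE /gists/:gist_id/comments/:id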
| gpl-3.0 |
deltreey/ansible | v1/ansible/module_utils/basic.py | 81 | 63055 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# == BEGIN DYNAMICALLY INSERTED CODE ==
ANSIBLE_VERSION = "<<ANSIBLE_VERSION>>"
MODULE_ARGS = "<<INCLUDE_ANSIBLE_MODULE_ARGS>>"
MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"
BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here
# can be inserted in any module source automatically by including
# #<<INCLUDE_ANSIBLE_MODULE_COMMON>> on a blank line by itself inside
# of an ansible module. The source of this common code lives
# in lib/ansible/module_common.py
import locale
import os
import re
import pipes
import shlex
import subprocess
import sys
import syslog
import types
import time
import select
import shutil
import stat
import tempfile
import traceback
import grp
import pwd
import platform
import errno
import tempfile
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
sys.stderr.write('Error: ansible requires a json module, none found!')
sys.exit(1)
except SyntaxError:
sys.stderr.write('SyntaxError: probably due to json and python being for different versions')
sys.exit(1)
HAVE_SELINUX=False
try:
import selinux
HAVE_SELINUX=True
except ImportError:
pass
HAVE_HASHLIB=False
try:
from hashlib import sha1 as _sha1
HAVE_HASHLIB=True
except ImportError:
from sha import sha as _sha1
try:
from hashlib import md5 as _md5
except ImportError:
try:
from md5 import md5 as _md5
except ImportError:
# MD5 unavailable. Possibly FIPS mode
_md5 = None
try:
from hashlib import sha256 as _sha256
except ImportError:
pass
try:
from systemd import journal
has_journal = True
except ImportError:
import syslog
has_journal = False
try:
from ast import literal_eval as _literal_eval
except ImportError:
# a replacement for literal_eval that works with python 2.4. from:
# https://mail.python.org/pipermail/python-list/2009-September/551880.html
    # which is essentially a cut/paste from an earlier (2.6) version of python's
# ast.py
from compiler import parse
from compiler.ast import *
def _literal_eval(node_or_string):
"""
Safely evaluate an expression node or a string containing a Python
expression. The string or node provided may only consist of the following
Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
and None.
"""
_safe_names = {'None': None, 'True': True, 'False': False}
if isinstance(node_or_string, basestring):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
node_or_string = node_or_string.node
def _convert(node):
if isinstance(node, Const) and isinstance(node.value, (basestring, int, float, long, complex)):
return node.value
elif isinstance(node, Tuple):
return tuple(map(_convert, node.nodes))
elif isinstance(node, List):
return list(map(_convert, node.nodes))
elif isinstance(node, Dict):
return dict((_convert(k), _convert(v)) for k, v in node.items)
elif isinstance(node, Name):
if node.name in _safe_names:
return _safe_names[node.name]
elif isinstance(node, UnarySub):
return -_convert(node.expr)
raise ValueError('malformed string')
return _convert(node_or_string)
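# The fallback above mirrors ast.literal_eval for the literal subset it
# supports (illustrative values, assumed):
#
#     _literal_eval("{'a': [1, 2], 'b': True}")   ->   {'a': [1, 2], 'b': True}
#     _literal_eval("__import__('os')")           ->   raises ValueError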
FILE_COMMON_ARGUMENTS=dict(
src = dict(),
mode = dict(),
owner = dict(),
group = dict(),
seuser = dict(),
serole = dict(),
selevel = dict(),
setype = dict(),
follow = dict(type='bool', default=False),
# not taken by the file module, but other modules call file so it must ignore them.
content = dict(no_log=True),
backup = dict(),
force = dict(),
remote_src = dict(), # used by assemble
regexp = dict(), # used by assemble
delimiter = dict(), # used by assemble
directory_mode = dict(), # used by copy
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
def get_platform():
''' what's the platform? example: Linux is a platform. '''
return platform.system()
def get_distribution():
''' return the distribution name '''
if platform.system() == 'Linux':
try:
supported_dists = platform._supported_dists + ('arch',)
distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
if not distribution and os.path.isfile('/etc/system-release'):
distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
if 'Amazon' in distribution:
distribution = 'Amazon'
else:
distribution = 'OtherLinux'
except:
# FIXME: MethodMissing, I assume?
distribution = platform.dist()[0].capitalize()
else:
distribution = None
return distribution
def get_distribution_version():
''' return the distribution version '''
if platform.system() == 'Linux':
try:
distribution_version = platform.linux_distribution()[1]
if not distribution_version and os.path.isfile('/etc/system-release'):
distribution_version = platform.linux_distribution(supported_dists=['system'])[1]
except:
# FIXME: MethodMissing, I assume?
distribution_version = platform.dist()[1]
else:
distribution_version = None
return distribution_version
def load_platform_subclass(cls, *args, **kwargs):
'''
used by modules like User to have different implementations based on detected platform. See User
module for an example.
'''
this_platform = get_platform()
distribution = get_distribution()
subclass = None
# get the most specific superclass for this platform
if distribution is not None:
for sc in cls.__subclasses__():
if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform:
subclass = sc
if subclass is None:
for sc in cls.__subclasses__():
if sc.platform == this_platform and sc.distribution is None:
subclass = sc
if subclass is None:
subclass = cls
return super(cls, subclass).__new__(subclass)
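# Illustrative sketch (class names assumed, not defined here) of the pattern
# load_platform_subclass() expects: a generic base class whose __new__
# delegates to it, plus per-platform subclasses declaring platform/distribution.
#
#     class User(object):
#         platform = 'Generic'
#         distribution = None
#         def __new__(cls, *args, **kwargs):
#             return load_platform_subclass(User, *args, **kwargs)
#
#     class FreeBsdUser(User):
#         platform = 'FreeBSD'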
def json_dict_unicode_to_bytes(d):
''' Recursively convert dict keys and values to byte str
    Specialized for json return because this only handles lists, tuples,
and dict container types (the containers that the json module returns)
'''
if isinstance(d, unicode):
return d.encode('utf-8')
elif isinstance(d, dict):
return dict(map(json_dict_unicode_to_bytes, d.iteritems()))
elif isinstance(d, list):
return list(map(json_dict_unicode_to_bytes, d))
elif isinstance(d, tuple):
return tuple(map(json_dict_unicode_to_bytes, d))
else:
return d
def json_dict_bytes_to_unicode(d):
    ''' Recursively convert dict keys and values to unicode str
    Specialized for json return because this only handles lists, tuples,
and dict container types (the containers that the json module returns)
'''
if isinstance(d, str):
return unicode(d, 'utf-8')
elif isinstance(d, dict):
return dict(map(json_dict_bytes_to_unicode, d.iteritems()))
elif isinstance(d, list):
return list(map(json_dict_bytes_to_unicode, d))
elif isinstance(d, tuple):
return tuple(map(json_dict_bytes_to_unicode, d))
else:
return d
def heuristic_log_sanitize(data):
''' Remove strings that look like passwords from log messages '''
# Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are
# not passwds
# begin: start of a passwd containing string
# end: end of a passwd containing string
# sep: char between user and passwd
# prev_begin: where in the overall string to start a search for
# a passwd
# sep_search_end: where in the string to end a search for the sep
output = []
begin = len(data)
prev_begin = begin
sep = 1
while sep:
# Find the potential end of a passwd
try:
end = data.rindex('@', 0, begin)
except ValueError:
# No passwd in the rest of the data
output.insert(0, data[0:begin])
break
# Search for the beginning of a passwd
sep = None
sep_search_end = end
while not sep:
# URL-style username+password
try:
begin = data.rindex('://', 0, sep_search_end)
except ValueError:
# No url style in the data, check for ssh style in the
# rest of the string
begin = 0
# Search for separator
try:
sep = data.index(':', begin + 3, end)
except ValueError:
# No separator; choices:
if begin == 0:
# Searched the whole string so there's no password
# here. Return the remaining data
output.insert(0, data[0:begin])
break
# Search for a different beginning of the password field.
sep_search_end = begin
continue
if sep:
# Password was found; remove it.
output.insert(0, data[end:prev_begin])
output.insert(0, '********')
output.insert(0, data[begin:sep + 1])
prev_begin = begin
return ''.join(output)
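# Worked example (traced through the loop above):
#
#     heuristic_log_sanitize('https://user:secret@host/path')
#     ->  'https://user:********@host/path'
#
# ssh-style strings such as 'user:secret@host' are scrubbed the same way,
# becoming 'user:********@host'.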
class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
required_one_of=None, add_file_common_args=False, supports_check_mode=False,
required_if=None):
'''
common code for quickly building an ansible module in Python
(although you can write modules in anything that can return JSON)
see library/* for examples
'''
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
self.no_log = no_log
self.cleanup_files = []
self.aliases = {}
if add_file_common_args:
for k, v in FILE_COMMON_ARGUMENTS.iteritems():
if k not in self.argument_spec:
self.argument_spec[k] = v
# check the locale as set by the current environment, and
# reset to LANG=C if it's an invalid/unavailable locale
self._check_locale()
(self.params, self.args) = self._load_params()
self._legal_inputs = ['CHECKMODE', 'NO_LOG']
self.aliases = self._handle_aliases()
if check_invalid_arguments:
self._check_invalid_arguments()
self._check_for_check_mode()
self._check_for_no_log()
# check exclusive early
if not bypass_checks:
self._check_mutually_exclusive(mutually_exclusive)
self._set_defaults(pre=True)
if not bypass_checks:
self._check_required_arguments()
self._check_argument_values()
self._check_argument_types()
self._check_required_together(required_together)
self._check_required_one_of(required_one_of)
self._check_required_if(required_if)
self._set_defaults(pre=False)
if not self.no_log:
self._log_invocation()
# finally, make sure we're in a sane working dir
self._set_cwd()
def load_file_common_arguments(self, params):
'''
many modules deal with files, this encapsulates common
options that the file module accepts such that it is directly
available to all modules and they can share code.
'''
path = params.get('path', params.get('dest', None))
if path is None:
return {}
else:
path = os.path.expanduser(path)
# if the path is a symlink, and we're following links, get
# the target of the link instead for testing
if params.get('follow', False) and os.path.islink(path):
path = os.path.realpath(path)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
# selinux related options
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
secontext.append(selevel)
default_secontext = self.selinux_default_context(path)
for i in range(len(default_secontext)):
if i is not None and secontext[i] == '_default':
secontext[i] = default_secontext[i]
return dict(
path=path, mode=mode, owner=owner, group=group,
seuser=seuser, serole=serole, setype=setype,
selevel=selevel, secontext=secontext,
)
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if not HAVE_SELINUX:
return False
if selinux.is_selinux_mls_enabled() == 1:
return True
else:
return False
def selinux_enabled(self):
if not HAVE_SELINUX:
seenabled = self.get_bin_path('selinuxenabled')
if seenabled is not None:
(rc,out,err) = self.run_command(seenabled)
if rc == 0:
self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
return False
if selinux.is_selinux_enabled() == 1:
return True
else:
return False
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
context = [None, None, None]
if self.selinux_mls_enabled():
context.append(None)
return context
def _to_filesystem_str(self, path):
'''Returns filesystem path as a str, if it wasn't already.
Used in selinux interactions because it cannot accept unicode
instances, and specifying complex args in a playbook leaves
you with unicode instances. This method currently assumes
that your filesystem encoding is UTF-8.
'''
if isinstance(path, unicode):
path = path.encode("utf-8")
return path
# If selinux fails to find a default, return an array of None
def selinux_default_context(self, path, mode=0):
context = self.selinux_initial_context()
if not HAVE_SELINUX or not self.selinux_enabled():
return context
try:
ret = selinux.matchpathcon(self._to_filesystem_str(path), mode)
except OSError:
return context
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def selinux_context(self, path):
context = self.selinux_initial_context()
if not HAVE_SELINUX or not self.selinux_enabled():
return context
try:
ret = selinux.lgetfilecon_raw(self._to_filesystem_str(path))
except OSError, e:
if e.errno == errno.ENOENT:
self.fail_json(path=path, msg='path %s does not exist' % path)
else:
self.fail_json(path=path, msg='failed to retrieve selinux context')
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def user_and_group(self, filename):
filename = os.path.expanduser(filename)
st = os.lstat(filename)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
path = os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
while not os.path.ismount(path):
path = os.path.dirname(path)
return path
def is_special_selinux_path(self, path):
"""
        Returns a tuple containing (True, selinux_context) if the given path is on an
        NFS or other 'special' fs mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
mount_data = f.readlines()
f.close()
except:
return (False, None)
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
if path_mount_point == mount_point:
for fs in SELINUX_SPECIAL_FS.split(','):
if fs in fstype:
special_context = self.selinux_context(path_mount_point)
return (True, special_context)
return (False, None)
def set_default_selinux_context(self, path, changed):
if not HAVE_SELINUX or not self.selinux_enabled():
return changed
context = self.selinux_default_context(path)
return self.set_context_if_different(path, context, False)
def set_context_if_different(self, path, context, changed):
if not HAVE_SELINUX or not self.selinux_enabled():
return changed
cur_context = self.selinux_context(path)
new_context = list(cur_context)
# Iterate over the current context instead of the
# argument context, which may have selevel.
(is_special_se, sp_context) = self.is_special_selinux_path(path)
if is_special_se:
new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
if context[i] is not None and context[i] != cur_context[i]:
new_context[i] = context[i]
if context[i] is None:
new_context[i] = cur_context[i]
if cur_context != new_context:
try:
if self.check_mode:
return True
rc = selinux.lsetfilecon(self._to_filesystem_str(path),
str(':'.join(new_context)))
except OSError:
self.fail_json(path=path, msg='invalid selinux context', new_context=new_context, cur_context=cur_context, input_was=context)
if rc != 0:
self.fail_json(path=path, msg='set selinux context failed')
changed = True
return changed
def set_owner_if_different(self, path, owner, changed):
path = os.path.expanduser(path)
if owner is None:
return changed
orig_uid, orig_gid = self.user_and_group(path)
try:
uid = int(owner)
except ValueError:
try:
uid = pwd.getpwnam(owner).pw_uid
except KeyError:
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
if orig_uid != uid:
if self.check_mode:
return True
try:
os.lchown(path, uid, -1)
except OSError:
self.fail_json(path=path, msg='chown failed')
changed = True
return changed
def set_group_if_different(self, path, group, changed):
path = os.path.expanduser(path)
if group is None:
return changed
orig_uid, orig_gid = self.user_and_group(path)
try:
gid = int(group)
except ValueError:
try:
gid = grp.getgrnam(group).gr_gid
except KeyError:
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
if orig_gid != gid:
if self.check_mode:
return True
try:
os.lchown(path, -1, gid)
except OSError:
self.fail_json(path=path, msg='chgrp failed')
changed = True
return changed
def set_mode_if_different(self, path, mode, changed):
path = os.path.expanduser(path)
path_stat = os.lstat(path)
if mode is None:
return changed
if not isinstance(mode, int):
try:
mode = int(mode, 8)
except Exception:
try:
mode = self._symbolic_mode_to_octal(path_stat, mode)
except Exception, e:
self.fail_json(path=path,
msg="mode must be in octal or symbolic form",
details=str(e))
prev_mode = stat.S_IMODE(path_stat.st_mode)
if prev_mode != mode:
if self.check_mode:
return True
# FIXME: comparison against string above will cause this to be executed
# every time
try:
if hasattr(os, 'lchmod'):
os.lchmod(path, mode)
else:
if not os.path.islink(path):
os.chmod(path, mode)
else:
# Attempt to set the perms of the symlink but be
# careful not to change the perms of the underlying
# file while trying
underlying_stat = os.stat(path)
os.chmod(path, mode)
new_underlying_stat = os.stat(path)
if underlying_stat.st_mode != new_underlying_stat.st_mode:
os.chmod(path, stat.S_IMODE(underlying_stat.st_mode))
q_stat = os.stat(path)
except OSError, e:
if os.path.islink(path) and e.errno == errno.EPERM: # Can't set mode on symbolic links
pass
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise e
except Exception, e:
self.fail_json(path=path, msg='chmod failed', details=str(e))
path_stat = os.lstat(path)
new_mode = stat.S_IMODE(path_stat.st_mode)
if new_mode != prev_mode:
changed = True
return changed
def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
new_mode = stat.S_IMODE(path_stat.st_mode)
mode_re = re.compile(r'^(?P<users>[ugoa]+)(?P<operator>[-+=])(?P<perms>[rwxXst]*|[ugo])$')
for mode in symbolic_mode.split(','):
match = mode_re.match(mode)
if match:
users = match.group('users')
operator = match.group('operator')
perms = match.group('perms')
if users == 'a': users = 'ugo'
for user in users:
mode_to_apply = self._get_octal_mode_from_symbolic_perms(path_stat, user, perms)
new_mode = self._apply_operation_to_mode(user, operator, mode_to_apply, new_mode)
else:
raise ValueError("bad symbolic permission for mode: %s" % mode)
return new_mode
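    # Worked examples for _symbolic_mode_to_octal (illustrative, derived from
    # the permission mapping below): starting from a plain file whose mode is 0644,
    #     'u+x'   ->  0744
    #     'go-r'  ->  0600
    #     'a+rX'  ->  0644 (X only adds execute bits for directories or files
    #                 that already have an execute bit set)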
def _apply_operation_to_mode(self, user, operator, mode_to_apply, current_mode):
if operator == '=':
if user == 'u': mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g': mask = stat.S_IRWXG | stat.S_ISGID
elif user == 'o': mask = stat.S_IRWXO | stat.S_ISVTX
# mask out u, g, or o permissions from current_mode and apply new permissions
inverse_mask = mask ^ 07777
new_mode = (current_mode & inverse_mask) | mode_to_apply
elif operator == '+':
new_mode = current_mode | mode_to_apply
elif operator == '-':
new_mode = current_mode - (current_mode & mode_to_apply)
return new_mode
def _get_octal_mode_from_symbolic_perms(self, path_stat, user, perms):
prev_mode = stat.S_IMODE(path_stat.st_mode)
is_directory = stat.S_ISDIR(path_stat.st_mode)
has_x_permissions = (prev_mode & 00111) > 0
apply_X_permission = is_directory or has_x_permissions
# Permission bits constants documented at:
# http://docs.python.org/2/library/stat.html#stat.S_ISUID
if apply_X_permission:
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
'o': {'X': stat.S_IXOTH}
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
'o': {'X': 0}
}
user_perms_to_modes = {
'u': {
'r': stat.S_IRUSR,
'w': stat.S_IWUSR,
'x': stat.S_IXUSR,
's': stat.S_ISUID,
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
'o': (prev_mode & stat.S_IRWXO) << 6 },
'g': {
'r': stat.S_IRGRP,
'w': stat.S_IWGRP,
'x': stat.S_IXGRP,
's': stat.S_ISGID,
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
'o': (prev_mode & stat.S_IRWXO) << 3 },
'o': {
'r': stat.S_IROTH,
'w': stat.S_IWOTH,
'x': stat.S_IXOTH,
's': 0,
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
'o': prev_mode & stat.S_IRWXO }
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed):
# set modes owners and context as needed
changed = self.set_context_if_different(
file_args['path'], file_args['secontext'], changed
)
changed = self.set_owner_if_different(
file_args['path'], file_args['owner'], changed
)
changed = self.set_group_if_different(
file_args['path'], file_args['group'], changed
)
changed = self.set_mode_if_different(
file_args['path'], file_args['mode'], changed
)
return changed
def set_directory_attributes_if_different(self, file_args, changed):
return self.set_fs_attributes_if_different(file_args, changed)
def set_file_attributes_if_different(self, file_args, changed):
return self.set_fs_attributes_if_different(file_args, changed)
def add_path_info(self, kwargs):
'''
for results that are files, supplement the info about the file
in the return path with stats about the file path.
'''
path = kwargs.get('path', kwargs.get('dest', None))
if path is None:
return kwargs
if os.path.exists(path):
(uid, gid) = self.user_and_group(path)
kwargs['uid'] = uid
kwargs['gid'] = gid
try:
user = pwd.getpwuid(uid)[0]
except KeyError:
user = str(uid)
try:
group = grp.getgrgid(gid)[0]
except KeyError:
group = str(gid)
kwargs['owner'] = user
kwargs['group'] = group
st = os.lstat(path)
kwargs['mode'] = oct(stat.S_IMODE(st[stat.ST_MODE]))
# secontext not yet supported
if os.path.islink(path):
kwargs['state'] = 'link'
elif os.path.isdir(path):
kwargs['state'] = 'directory'
elif os.stat(path).st_nlink > 1:
kwargs['state'] = 'hard'
else:
kwargs['state'] = 'file'
if HAVE_SELINUX and self.selinux_enabled():
kwargs['secontext'] = ':'.join(self.selinux_context(path))
kwargs['size'] = st[stat.ST_SIZE]
else:
kwargs['state'] = 'absent'
return kwargs
def _check_locale(self):
'''
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '')
except locale.Error, e:
# fallback to the 'C' locale, which may cause unicode
# issues but is preferable to simply failing because
# of an unknown locale
locale.setlocale(locale.LC_ALL, 'C')
os.environ['LANG'] = 'C'
os.environ['LC_CTYPE'] = 'C'
os.environ['LC_MESSAGES'] = 'C'
except Exception, e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
def _handle_aliases(self):
aliases_results = {} #alias:canon
for (k,v) in self.argument_spec.iteritems():
self._legal_inputs.append(k)
aliases = v.get('aliases', None)
default = v.get('default', None)
required = v.get('required', False)
if default is not None and required:
# not alias specific but this is a good place to check this
self.fail_json(msg="internal error: required and default are mutually exclusive for %s" % k)
if aliases is None:
continue
if type(aliases) != list:
self.fail_json(msg='internal error: aliases must be a list')
for alias in aliases:
self._legal_inputs.append(alias)
aliases_results[alias] = k
if alias in self.params:
self.params[k] = self.params[alias]
return aliases_results
def _check_for_check_mode(self):
for (k,v) in self.params.iteritems():
if k == 'CHECKMODE':
if not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module does not support check mode")
if self.supports_check_mode:
self.check_mode = True
def _check_for_no_log(self):
for (k,v) in self.params.iteritems():
if k == 'NO_LOG':
self.no_log = self.boolean(v)
def _check_invalid_arguments(self):
for (k,v) in self.params.iteritems():
# these should be in legal inputs already
#if k in ('CHECKMODE', 'NO_LOG'):
# continue
if k not in self._legal_inputs:
self.fail_json(msg="unsupported parameter for module: %s" % k)
def _count_terms(self, check):
count = 0
for term in check:
if term in self.params:
count += 1
return count
def _check_mutually_exclusive(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count > 1:
self.fail_json(msg="parameters are mutually exclusive: %s" % check)
def _check_required_one_of(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count == 0:
self.fail_json(msg="one of the following is required: %s" % ','.join(check))
def _check_required_together(self, spec):
if spec is None:
return
for check in spec:
counts = [ self._count_terms([field]) for field in check ]
non_zero = [ c for c in counts if c > 0 ]
if len(non_zero) > 0:
if 0 in counts:
self.fail_json(msg="parameters are required together: %s" % check)
def _check_required_arguments(self):
''' ensure all required arguments are present '''
missing = []
for (k,v) in self.argument_spec.iteritems():
required = v.get('required', False)
if required and k not in self.params:
missing.append(k)
if len(missing) > 0:
self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
def _check_required_if(self, spec):
        ''' ensure that parameters which are conditionally required are present '''
if spec is None:
return
for (key, val, requirements) in spec:
missing = []
if key in self.params and self.params[key] == val:
for check in requirements:
count = self._count_terms(check)
if count == 0:
missing.append(check)
if len(missing) > 0:
self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
def _check_argument_values(self):
''' ensure all arguments have the requested values, and there are no stray arguments '''
for (k,v) in self.argument_spec.iteritems():
choices = v.get('choices',None)
if choices is None:
continue
if type(choices) == list:
if k in self.params:
if self.params[k] not in choices:
choices_str=",".join([str(c) for c in choices])
msg="value of %s must be one of: %s, got: %s" % (k, choices_str, self.params[k])
self.fail_json(msg=msg)
else:
self.fail_json(msg="internal error: do not know how to interpret argument_spec")
def safe_eval(self, str, locals=None, include_exceptions=False):
# do not allow method calls to modules
if not isinstance(str, basestring):
# already templated to a datastructure, perhaps?
if include_exceptions:
return (str, None)
return str
if re.search(r'\w\.\w+\(', str):
if include_exceptions:
return (str, None)
return str
# do not allow imports
if re.search(r'import \w+', str):
if include_exceptions:
return (str, None)
return str
try:
result = None
if not locals:
result = _literal_eval(str)
else:
result = _literal_eval(str, None, locals)
if include_exceptions:
return (result, None)
else:
return result
except Exception, e:
if include_exceptions:
return (str, e)
return str
def _check_argument_types(self):
''' ensure all arguments have the requested type '''
for (k, v) in self.argument_spec.iteritems():
wanted = v.get('type', None)
if wanted is None:
continue
if k not in self.params:
continue
value = self.params[k]
is_invalid = False
try:
if wanted == 'str':
if not isinstance(value, basestring):
self.params[k] = str(value)
elif wanted == 'list':
if not isinstance(value, list):
if isinstance(value, basestring):
self.params[k] = value.split(",")
elif isinstance(value, int) or isinstance(value, float):
self.params[k] = [ str(value) ]
else:
is_invalid = True
elif wanted == 'dict':
if not isinstance(value, dict):
if isinstance(value, basestring):
if value.startswith("{"):
try:
self.params[k] = json.loads(value)
except:
(result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
if exc is not None:
self.fail_json(msg="unable to evaluate dictionary for %s" % k)
self.params[k] = result
elif '=' in value:
self.params[k] = dict([x.strip().split("=", 1) for x in value.split(",")])
else:
self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
else:
is_invalid = True
elif wanted == 'bool':
if not isinstance(value, bool):
if isinstance(value, basestring):
self.params[k] = self.boolean(value)
else:
is_invalid = True
elif wanted == 'int':
if not isinstance(value, int):
if isinstance(value, basestring):
self.params[k] = int(value)
else:
is_invalid = True
elif wanted == 'float':
if not isinstance(value, float):
if isinstance(value, basestring):
self.params[k] = float(value)
else:
is_invalid = True
else:
self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
if is_invalid:
self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
except ValueError, e:
self.fail_json(msg="value of argument %s is not of type %s and we were unable to automatically convert" % (k, wanted))
def _set_defaults(self, pre=True):
for (k,v) in self.argument_spec.iteritems():
default = v.get('default', None)
if pre == True:
# this prevents setting defaults on required items
if default is not None and k not in self.params:
self.params[k] = default
else:
# make sure things without a default still get set None
if k not in self.params:
self.params[k] = default
def _load_params(self):
''' read the input and return a dictionary and the arguments string '''
args = MODULE_ARGS
items = shlex.split(args)
params = {}
for x in items:
try:
(k, v) = x.split("=",1)
except Exception, e:
self.fail_json(msg="this module requires key=value arguments (%s)" % (items))
if k in params:
self.fail_json(msg="duplicate parameter: %s (value=%s)" % (k, v))
params[k] = v
params2 = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
params2.update(params)
return (params2, args)
def _log_invocation(self):
''' log that ansible ran the module '''
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
passwd_keys = ['password', 'login_password']
for param in self.params:
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', False)
if self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
elif param in passwd_keys:
log_args[param] = 'NOT_LOGGING_PASSWORD'
else:
param_val = self.params[param]
if not isinstance(param_val, basestring):
param_val = str(param_val)
elif isinstance(param_val, unicode):
param_val = param_val.encode('utf-8')
log_args[param] = heuristic_log_sanitize(param_val)
module = 'ansible-%s' % os.path.basename(__file__)
msg = []
for arg in log_args:
arg_val = log_args[arg]
if not isinstance(arg_val, basestring):
arg_val = str(arg_val)
elif isinstance(arg_val, unicode):
arg_val = arg_val.encode('utf-8')
msg.append('%s=%s ' % (arg, arg_val))
if msg:
msg = 'Invoked with %s' % ''.join(msg)
else:
msg = 'Invoked'
# 6655 - allow for accented characters
if isinstance(msg, unicode):
# We should never get here as msg should be type str, not unicode
msg = msg.encode('utf-8')
if (has_journal):
journal_args = [("MODULE", os.path.basename(__file__))]
for arg in log_args:
journal_args.append((arg.upper(), str(log_args[arg])))
try:
journal.send("%s %s" % (module, msg), **dict(journal_args))
except IOError, e:
# fall back to syslog since logging to journal failed
syslog.openlog(str(module), 0, syslog.LOG_USER)
syslog.syslog(syslog.LOG_NOTICE, msg) #1
else:
syslog.openlog(str(module), 0, syslog.LOG_USER)
syslog.syslog(syslog.LOG_NOTICE, msg) #2
def _set_cwd(self):
try:
cwd = os.getcwd()
if not os.access(cwd, os.F_OK|os.R_OK):
raise
return cwd
except:
# we don't have access to the cwd, probably because of sudo.
# Try and move to a neutral location to prevent errors
for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK|os.R_OK):
os.chdir(cwd)
return cwd
except:
pass
# we won't error here, as it may *not* be a problem,
# and we don't want to break modules unnecessarily
return None
def get_bin_path(self, arg, required=False, opt_dirs=[]):
'''
find system executable in PATH.
Optional arguments:
- required: if executable is not found and required is true, fail_json
- opt_dirs: optional list of directories to search in addition to PATH
if found return full path; otherwise return None
'''
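        # Illustrative usage sketch (added; not part of the original module --
        # the binary name and directory below are examples only):
        #   tar_path = self.get_bin_path('tar', required=True,
        #                                opt_dirs=['/usr/local/bin'])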
sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
paths = []
for d in opt_dirs:
if d is not None and os.path.exists(d):
paths.append(d)
paths += os.environ.get('PATH', '').split(os.pathsep)
bin_path = None
# mangle PATH to include /sbin dirs
for p in sbin_paths:
if p not in paths and os.path.exists(p):
paths.append(p)
for d in paths:
path = os.path.join(d, arg)
if os.path.exists(path) and self.is_executable(path):
bin_path = path
break
if required and bin_path is None:
self.fail_json(msg='Failed to find required executable %s' % arg)
return bin_path
def boolean(self, arg):
''' return a bool for the arg '''
if arg is None or type(arg) == bool:
return arg
if type(arg) in types.StringTypes:
arg = arg.lower()
if arg in BOOLEANS_TRUE:
return True
elif arg in BOOLEANS_FALSE:
return False
else:
self.fail_json(msg='Boolean %s not in either boolean list' % arg)
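        # Examples (added for clarity; not in the original source). Assuming the
        # usual BOOLEANS_TRUE/BOOLEANS_FALSE lists defined earlier in this
        # module (which include 'yes' and 'no'):
        #   self.boolean('yes')  -> True
        #   self.boolean('NO')   -> False   (input is lower-cased first)
        #   self.boolean(None)   -> None    (bools and None pass through)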
def jsonify(self, data):
for encoding in ("utf-8", "latin-1", "unicode_escape"):
try:
return json.dumps(data, encoding=encoding)
            # Old systems using the simplejson module do not support the encoding keyword.
except TypeError, e:
return json.dumps(data)
except UnicodeDecodeError, e:
continue
self.fail_json(msg='Invalid unicode encoding encountered')
def from_json(self, data):
return json.loads(data)
def add_cleanup_file(self, path):
if path not in self.cleanup_files:
self.cleanup_files.append(path)
def do_cleanup_files(self):
for path in self.cleanup_files:
self.cleanup(path)
def exit_json(self, **kwargs):
''' return from the module, without error '''
self.add_path_info(kwargs)
if not 'changed' in kwargs:
kwargs['changed'] = False
self.do_cleanup_files()
print self.jsonify(kwargs)
sys.exit(0)
def fail_json(self, **kwargs):
''' return from the module, with an error message '''
self.add_path_info(kwargs)
assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
kwargs['failed'] = True
self.do_cleanup_files()
print self.jsonify(kwargs)
sys.exit(1)
def is_executable(self, path):
'''is the given path executable?'''
return (stat.S_IXUSR & os.stat(path)[stat.ST_MODE]
or stat.S_IXGRP & os.stat(path)[stat.ST_MODE]
or stat.S_IXOTH & os.stat(path)[stat.ST_MODE])
def digest_from_file(self, filename, digest_method):
''' Return hex digest of local file for a given digest_method, or None if file is not present. '''
if not os.path.exists(filename):
return None
if os.path.isdir(filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
digest = digest_method
blocksize = 64 * 1024
infile = open(filename, 'rb')
block = infile.read(blocksize)
while block:
digest.update(block)
block = infile.read(blocksize)
infile.close()
return digest.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for:
1) Optional backwards compatibility
2) Compatibility with a third party protocol
This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead.
'''
if not _md5:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, _md5())
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, _sha1())
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
if not HAVE_HASHLIB:
self.fail_json(msg="SHA-256 checksums require hashlib, which is available in Python 2.5 and higher")
return self.digest_from_file(filename, _sha256())
def backup_local(self, fn):
        '''make a date-marked backup of the specified file, return the backup file name (an empty string if the source file does not exist)'''
backupdest = ''
if os.path.exists(fn):
# backups named basename-YYYY-MM-DD@HH:MM:SS~
ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
backupdest = '%s.%s' % (fn, ext)
try:
shutil.copy2(fn, backupdest)
except (shutil.Error, IOError), e:
self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, e))
return backupdest
def cleanup(self, tmpfile):
if os.path.exists(tmpfile):
try:
os.unlink(tmpfile)
except OSError, e:
sys.stderr.write("could not cleanup %s: %s" % (tmpfile, e))
def atomic_move(self, src, dest):
        '''atomically move src to dest, copying attributes from dest.
        os.rename is used where possible since it is an atomic operation; the rest of the
        function works around limitations and corner cases and ensures the selinux
        context is saved if possible'''
context = None
dest_stat = None
if os.path.exists(dest):
try:
dest_stat = os.stat(dest)
os.chmod(src, dest_stat.st_mode & 07777)
os.chown(src, dest_stat.st_uid, dest_stat.st_gid)
except OSError, e:
if e.errno != errno.EPERM:
raise
if self.selinux_enabled():
context = self.selinux_context(dest)
else:
if self.selinux_enabled():
context = self.selinux_default_context(dest)
creating = not os.path.exists(dest)
try:
login_name = os.getlogin()
except OSError:
# not having a tty can cause the above to fail, so
# just get the LOGNAME environment variable instead
login_name = os.environ.get('LOGNAME', None)
# if the original login_name doesn't match the currently
# logged-in user, or if the SUDO_USER environment variable
# is set, then this user has switched their credentials
switched_user = login_name and login_name != pwd.getpwuid(os.getuid())[0] or os.environ.get('SUDO_USER')
try:
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(src, dest)
except (IOError,OSError), e:
# only try workarounds for errno 18 (cross device), 1 (not permitted) and 13 (permission denied)
if e.errno != errno.EPERM and e.errno != errno.EXDEV and e.errno != errno.EACCES:
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
dest_dir = os.path.dirname(dest)
dest_file = os.path.basename(dest)
try:
tmp_dest = tempfile.NamedTemporaryFile(
prefix=".ansible_tmp", dir=dest_dir, suffix=dest_file)
except (OSError, IOError), e:
self.fail_json(msg='The destination directory (%s) is not writable by the current user.' % dest_dir)
try: # leaves tmp file behind when sudo and not root
if switched_user and os.getuid() != 0:
# cleanup will happen by 'rm' of tempdir
# copy2 will preserve some metadata
shutil.copy2(src, tmp_dest.name)
else:
shutil.move(src, tmp_dest.name)
if self.selinux_enabled():
self.set_context_if_different(
tmp_dest.name, context, False)
try:
tmp_stat = os.stat(tmp_dest.name)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(tmp_dest.name, dest_stat.st_uid, dest_stat.st_gid)
except OSError, e:
if e.errno != errno.EPERM:
raise
os.rename(tmp_dest.name, dest)
except (shutil.Error, OSError, IOError), e:
self.cleanup(tmp_dest.name)
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
if creating:
# make sure the file has the correct permissions
# based on the current value of umask
umask = os.umask(0)
os.umask(umask)
os.chmod(dest, 0666 & ~umask)
if switched_user:
os.chown(dest, os.getuid(), os.getgid())
if self.selinux_enabled():
# rename might not preserve context
self.set_context_if_different(dest, context, False)
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None):
'''
Execute a command, returns rc, stdout, and stderr.
args is the command to run
If args is a list, the command will be run with shell=False.
If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
        If args is a string and use_unsafe_shell=True it is run with shell=True.
Other arguments:
- check_rc (boolean) Whether to call fail_json in case of
non zero RC. Default is False.
- close_fds (boolean) See documentation for subprocess.Popen().
Default is True.
- executable (string) See documentation for subprocess.Popen().
Default is None.
- prompt_regex (string) A regex string (not a compiled regex) which
can be used to detect prompts in the stdout
which would otherwise cause the execution
to hang (especially if no input data is
specified)
'''
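        # Illustrative calls (added; not part of the original source -- paths
        # and commands are examples only):
        #   rc, out, err = self.run_command(['/bin/ls', '-l', '/tmp'])
        #   rc, out, err = self.run_command('ls /tmp | wc -l',
        #                                   use_unsafe_shell=True)
        # The first form (a list) runs with shell=False; the second needs
        # use_unsafe_shell=True because the string contains shell syntax.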
shell = False
if isinstance(args, list):
if use_unsafe_shell:
args = " ".join([pipes.quote(x) for x in args])
shell = True
elif isinstance(args, basestring) and use_unsafe_shell:
shell = True
elif isinstance(args, basestring):
args = shlex.split(args.encode('utf-8'))
else:
msg = "Argument 'args' to run_command must be list or string"
self.fail_json(rc=257, cmd=args, msg=msg)
prompt_re = None
if prompt_regex:
try:
prompt_re = re.compile(prompt_regex, re.MULTILINE)
except re.error:
self.fail_json(msg="invalid prompt regular expression given to run_command")
# expand things like $HOME and ~
if not shell:
args = [ os.path.expandvars(os.path.expanduser(x)) for x in args ]
rc = 0
msg = None
st_in = None
        # Set a temporary env path if a prefix is passed
env=os.environ
if path_prefix:
env['PATH']="%s:%s" % (path_prefix, env['PATH'])
# create a printable version of the command for use
# in reporting later, which strips out things like
# passwords from the args list
if isinstance(args, basestring):
if isinstance(args, unicode):
b_args = args.encode('utf-8')
else:
b_args = args
to_clean_args = shlex.split(b_args)
del b_args
else:
to_clean_args = args
clean_args = []
is_passwd = False
for arg in to_clean_args:
if is_passwd:
is_passwd = False
clean_args.append('********')
continue
if PASSWD_ARG_RE.match(arg):
sep_idx = arg.find('=')
if sep_idx > -1:
clean_args.append('%s=********' % arg[:sep_idx])
continue
else:
is_passwd = True
clean_args.append(heuristic_log_sanitize(arg))
clean_args = ' '.join(pipes.quote(arg) for arg in clean_args)
if data:
st_in = subprocess.PIPE
kwargs = dict(
executable=executable,
shell=shell,
close_fds=close_fds,
stdin=st_in,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
if path_prefix:
kwargs['env'] = env
if cwd and os.path.isdir(cwd):
kwargs['cwd'] = cwd
# store the pwd
prev_dir = os.getcwd()
# make sure we're in the right working directory
if cwd and os.path.isdir(cwd):
try:
os.chdir(cwd)
except (OSError, IOError), e:
self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, str(e)))
try:
cmd = subprocess.Popen(args, **kwargs)
# the communication logic here is essentially taken from that
# of the _communicate() function in ssh.py
stdout = ''
stderr = ''
rpipes = [cmd.stdout, cmd.stderr]
if data:
if not binary_data:
data += '\n'
cmd.stdin.write(data)
cmd.stdin.close()
while True:
rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)
if cmd.stdout in rfd:
dat = os.read(cmd.stdout.fileno(), 9000)
stdout += dat
if dat == '':
rpipes.remove(cmd.stdout)
if cmd.stderr in rfd:
dat = os.read(cmd.stderr.fileno(), 9000)
stderr += dat
if dat == '':
rpipes.remove(cmd.stderr)
# if we're checking for prompts, do it now
if prompt_re:
if prompt_re.search(stdout) and not data:
return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
# only break out if no pipes are left to read or
# the pipes are completely read and
# the process is terminated
if (not rpipes or not rfd) and cmd.poll() is not None:
break
# No pipes are left to read but process is not yet terminated
# Only then it is safe to wait for the process to be finished
# NOTE: Actually cmd.poll() is always None here if rpipes is empty
elif not rpipes and cmd.poll() == None:
cmd.wait()
# The process is terminated. Since no pipes to read from are
# left, there is no need to call select() again.
break
cmd.stdout.close()
cmd.stderr.close()
rc = cmd.returncode
except (OSError, IOError), e:
self.fail_json(rc=e.errno, msg=str(e), cmd=clean_args)
except:
self.fail_json(rc=257, msg=traceback.format_exc(), cmd=clean_args)
if rc != 0 and check_rc:
msg = heuristic_log_sanitize(stderr.rstrip())
self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)
# reset the pwd
os.chdir(prev_dir)
return (rc, stdout, stderr)
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
def pretty_bytes(self,size):
ranges = (
(1<<70L, 'ZB'),
(1<<60L, 'EB'),
(1<<50L, 'PB'),
(1<<40L, 'TB'),
(1<<30L, 'GB'),
(1<<20L, 'MB'),
(1<<10L, 'KB'),
(1, 'Bytes')
)
for limit, suffix in ranges:
if size >= limit:
break
return '%.2f %s' % (float(size)/ limit, suffix)
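        # Worked examples (added for clarity; not in the original source). The
        # loop above picks the largest unit whose threshold the size reaches:
        #   self.pretty_bytes(1536)        -> '1.50 KB'   (1536 / 2**10)
        #   self.pretty_bytes(1073741824)  -> '1.00 GB'   (2**30 / 2**30)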
def get_module_path():
return os.path.dirname(os.path.realpath(__file__))
| gpl-3.0 |
windyuuy/opera | chromium/src/third_party/tlslite/tlslite/mathtls.py | 80 | 11862 | """Miscellaneous helper functions."""
from utils.compat import *
from utils.cryptomath import *
import hmac
# The sha module is deprecated in Python 2.6
try:
import sha
except ImportError:
from hashlib import sha1 as sha
# The md5 module is deprecated in Python 2.6
try:
import md5
except ImportError:
from hashlib import md5
#1024, 1536, 2048, 3072, 4096, 6144, and 8192 bit groups]
goodGroupParameters = [(2,0xEEAF0AB9ADB38DD69C33F80AFA8FC5E86072618775FF3C0B9EA2314C9C256576D674DF7496EA81D3383B4813D692C6E0E0D5D8E250B98BE48E495C1D6089DAD15DC7D7B46154D6B6CE8EF4AD69B15D4982559B297BCF1885C529F566660E57EC68EDBC3C05726CC02FD4CBF4976EAA9AFD5138FE8376435B9FC61D2FC0EB06E3),\
(2,0x9DEF3CAFB939277AB1F12A8617A47BBBDBA51DF499AC4C80BEEEA9614B19CC4D5F4F5F556E27CBDE51C6A94BE4607A291558903BA0D0F84380B655BB9A22E8DCDF028A7CEC67F0D08134B1C8B97989149B609E0BE3BAB63D47548381DBC5B1FC764E3F4B53DD9DA1158BFD3E2B9C8CF56EDF019539349627DB2FD53D24B7C48665772E437D6C7F8CE442734AF7CCB7AE837C264AE3A9BEB87F8A2FE9B8B5292E5A021FFF5E91479E8CE7A28C2442C6F315180F93499A234DCF76E3FED135F9BB),\
(2,0xAC6BDB41324A9A9BF166DE5E1389582FAF72B6651987EE07FC3192943DB56050A37329CBB4A099ED8193E0757767A13DD52312AB4B03310DCD7F48A9DA04FD50E8083969EDB767B0CF6095179A163AB3661A05FBD5FAAAE82918A9962F0B93B855F97993EC975EEAA80D740ADBF4FF747359D041D5C33EA71D281E446B14773BCA97B43A23FB801676BD207A436C6481F1D2B9078717461A5B9D32E688F87748544523B524B0D57D5EA77A2775D2ECFA032CFBDBF52FB3786160279004E57AE6AF874E7303CE53299CCC041C7BC308D82A5698F3A8D0C38271AE35F8E9DBFBB694B5C803D89F7AE435DE236D525F54759B65E372FCD68EF20FA7111F9E4AFF73),\
(2,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF),\
(5,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF),\
(5,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF),\
(5,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E438777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F5683423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD922222E04A4037C0713EB57A81A23F0C73473FC646CEA306B4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A364597E899A0255DC164F31CC50846851DF9AB48195DED7EA1B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F924009438B481C6CD7889A002ED5EE382BC9190DA6FC026E479558E4475677E9AA9E3050E2765694DFC81F56E880B96E7160C980DD98EDD3DFFFFFFFFFFFFFFFFF)]
def P_hash(hashModule, secret, seed, length):
bytes = createByteArrayZeros(length)
secret = bytesToString(secret)
seed = bytesToString(seed)
A = seed
index = 0
while 1:
A = hmac.HMAC(secret, A, hashModule).digest()
output = hmac.HMAC(secret, A+seed, hashModule).digest()
for c in output:
if index >= length:
return bytes
bytes[index] = ord(c)
index += 1
return bytes
def PRF(secret, label, seed, length):
#Split the secret into left and right halves
S1 = secret[ : int(math.ceil(len(secret)/2.0))]
S2 = secret[ int(math.floor(len(secret)/2.0)) : ]
#Run the left half through P_MD5 and the right half through P_SHA1
p_md5 = P_hash(md5, S1, concatArrays(stringToBytes(label), seed), length)
p_sha1 = P_hash(sha, S2, concatArrays(stringToBytes(label), seed), length)
#XOR the output values and return the result
for x in range(length):
p_md5[x] ^= p_sha1[x]
return p_md5
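# Worked example (added for clarity; not part of the original tlslite source):
# for a 5-byte secret the slicing above gives S1 = secret[:3] (bytes 0-2) and
# S2 = secret[2:] (bytes 2-4), i.e. the two halves share the middle byte when
# the length is odd. P_MD5(S1) and P_SHA1(S2) each produce `length` bytes,
# which the loop above XORs together to form the final PRF output.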
def PRF_SSL(secret, seed, length):
secretStr = bytesToString(secret)
seedStr = bytesToString(seed)
bytes = createByteArrayZeros(length)
index = 0
for x in range(26):
A = chr(ord('A')+x) * (x+1) # 'A', 'BB', 'CCC', etc..
input = secretStr + sha.sha(A + secretStr + seedStr).digest()
output = md5.md5(input).digest()
for c in output:
if index >= length:
return bytes
bytes[index] = ord(c)
index += 1
return bytes
def makeX(salt, username, password):
if len(username)>=256:
raise ValueError("username too long")
if len(salt)>=256:
raise ValueError("salt too long")
return stringToNumber(sha.sha(salt + sha.sha(username + ":" + password)\
.digest()).digest())
#This function is used by VerifierDB.makeVerifier
def makeVerifier(username, password, bits):
bitsIndex = {1024:0, 1536:1, 2048:2, 3072:3, 4096:4, 6144:5, 8192:6}[bits]
g,N = goodGroupParameters[bitsIndex]
salt = bytesToString(getRandomBytes(16))
x = makeX(salt, username, password)
verifier = powMod(g, x, N)
return N, g, salt, verifier
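# Note added for clarity (not part of the original tlslite source): this is the
# standard SRP verifier computation, v = g**x mod N, where
# x = SHA1(salt | SHA1(username + ":" + password)) as computed by makeX() above.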
def PAD(n, x):
nLength = len(numberToString(n))
s = numberToString(x)
if len(s) < nLength:
s = ("\0" * (nLength-len(s))) + s
return s
def makeU(N, A, B):
return stringToNumber(sha.sha(PAD(N, A) + PAD(N, B)).digest())
def makeK(N, g):
return stringToNumber(sha.sha(numberToString(N) + PAD(N, g)).digest())
"""
MAC_SSL
Modified from Python HMAC by Trevor
"""
class MAC_SSL:
"""MAC_SSL class.
This supports the API for Cryptographic Hash Functions (PEP 247).
"""
def __init__(self, key, msg = None, digestmod = None):
"""Create a new MAC_SSL object.
key: key for the keyed hash object.
msg: Initial input for the hash, if provided.
digestmod: A module supporting PEP 247. Defaults to the md5 module.
"""
if digestmod is None:
import md5
digestmod = md5
if key == None: #TREVNEW - for faster copying
return #TREVNEW
self.digestmod = digestmod
self.outer = digestmod.new()
self.inner = digestmod.new()
self.digest_size = digestmod.digest_size
ipad = "\x36" * 40
opad = "\x5C" * 40
self.inner.update(key)
self.inner.update(ipad)
self.outer.update(key)
self.outer.update(opad)
if msg is not None:
self.update(msg)
def update(self, msg):
"""Update this hashing object with the string msg.
"""
self.inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
An update to this copy won't affect the original object.
"""
other = MAC_SSL(None) #TREVNEW - for faster copying
other.digest_size = self.digest_size #TREVNEW
other.digestmod = self.digestmod
other.inner = self.inner.copy()
other.outer = self.outer.copy()
return other
def digest(self):
"""Return the hash value of this hashing object.
This returns a string containing 8-bit data. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
h = self.outer.copy()
h.update(self.inner.digest())
return h.digest()
def hexdigest(self):
"""Like digest(), but returns a string of hexadecimal digits instead.
"""
return "".join([hex(ord(x))[2:].zfill(2)
for x in tuple(self.digest())])
| bsd-3-clause |
prampey/servo | tests/wpt/css-tests/tools/pywebsocket/src/mod_pywebsocket/_stream_hixie75.py | 681 | 8842 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides a class for parsing/building frames of the WebSocket
protocol versions HyBi 00 and Hixie 75.
Specification:
- HyBi 00 http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-00
- Hixie 75 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-75
"""
from mod_pywebsocket import common
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import StreamBase
from mod_pywebsocket._stream_base import UnsupportedFrameException
from mod_pywebsocket import util
class StreamHixie75(StreamBase):
"""A class for parsing/building frames of the WebSocket protocol version
HyBi 00 and Hixie 75.
"""
def __init__(self, request, enable_closing_handshake=False):
"""Construct an instance.
Args:
request: mod_python request.
enable_closing_handshake: to let StreamHixie75 perform closing
handshake as specified in HyBi 00, set
this option to True.
"""
StreamBase.__init__(self, request)
self._logger = util.get_class_logger(self)
self._enable_closing_handshake = enable_closing_handshake
self._request.client_terminated = False
self._request.server_terminated = False
def send_message(self, message, end=True, binary=False):
"""Send message.
Args:
message: unicode string to send.
binary: not used in hixie75.
Raises:
BadOperationException: when called on a server-terminated
connection.
"""
if not end:
raise BadOperationException(
'StreamHixie75 doesn\'t support send_message with end=False')
if binary:
raise BadOperationException(
'StreamHixie75 doesn\'t support send_message with binary=True')
if self._request.server_terminated:
raise BadOperationException(
'Requested send_message after sending out a closing handshake')
self._write(''.join(['\x00', message.encode('utf-8'), '\xff']))
def _read_payload_length_hixie75(self):
"""Reads a length header in a Hixie75 version frame with length.
Raises:
ConnectionTerminatedException: when read returns empty string.
"""
length = 0
while True:
b_str = self._read(1)
b = ord(b_str)
length = length * 128 + (b & 0x7f)
if (b & 0x80) == 0:
break
return length
def receive_message(self):
"""Receive a WebSocket frame and return its payload an unicode string.
Returns:
payload unicode string in a WebSocket frame.
Raises:
ConnectionTerminatedException: when read returns empty
string.
BadOperationException: when called on a client-terminated
connection.
"""
if self._request.client_terminated:
raise BadOperationException(
'Requested receive_message after receiving a closing '
'handshake')
while True:
# Read 1 byte.
# mp_conn.read will block if no bytes are available.
# Timeout is controlled by TimeOut directive of Apache.
frame_type_str = self.receive_bytes(1)
frame_type = ord(frame_type_str)
if (frame_type & 0x80) == 0x80:
# The payload length is specified in the frame.
# Read and discard.
length = self._read_payload_length_hixie75()
if length > 0:
_ = self.receive_bytes(length)
# 5.3 3. 12. if /type/ is 0xFF and /length/ is 0, then set the
# /client terminated/ flag and abort these steps.
if not self._enable_closing_handshake:
continue
if frame_type == 0xFF and length == 0:
self._request.client_terminated = True
if self._request.server_terminated:
self._logger.debug(
'Received ack for server-initiated closing '
'handshake')
return None
self._logger.debug(
'Received client-initiated closing handshake')
self._send_closing_handshake()
self._logger.debug(
'Sent ack for client-initiated closing handshake')
return None
else:
# The payload is delimited with \xff.
bytes = self._read_until('\xff')
# The WebSocket protocol section 4.4 specifies that invalid
# characters must be replaced with U+fffd REPLACEMENT
# CHARACTER.
message = bytes.decode('utf-8', 'replace')
if frame_type == 0x00:
return message
# Discard data of other types.
def _send_closing_handshake(self):
if not self._enable_closing_handshake:
raise BadOperationException(
'Closing handshake is not supported in Hixie 75 protocol')
self._request.server_terminated = True
# 5.3 the server may decide to terminate the WebSocket connection by
# running through the following steps:
# 1. send a 0xFF byte and a 0x00 byte to the client to indicate the
# start of the closing handshake.
self._write('\xff\x00')
def close_connection(self, unused_code='', unused_reason=''):
"""Closes a WebSocket connection.
Raises:
ConnectionTerminatedException: when closing handshake was
                                           not successful.
"""
if self._request.server_terminated:
self._logger.debug(
'Requested close_connection but server is already terminated')
return
if not self._enable_closing_handshake:
self._request.server_terminated = True
self._logger.debug('Connection closed')
return
self._send_closing_handshake()
self._logger.debug('Sent server-initiated closing handshake')
# TODO(ukai): 2. wait until the /client terminated/ flag has been set,
# or until a server-defined timeout expires.
#
# For now, we expect receiving closing handshake right after sending
# out closing handshake, and if we couldn't receive non-handshake
# frame, we take it as ConnectionTerminatedException.
message = self.receive_message()
if message is not None:
raise ConnectionTerminatedException(
'Didn\'t receive valid ack for closing handshake')
# TODO: 3. close the WebSocket connection.
# note: mod_python Connection (mp_conn) doesn't have close method.
def send_ping(self, body):
raise BadOperationException(
'StreamHixie75 doesn\'t support send_ping')
# vi:sts=4 sw=4 et
| mpl-2.0 |
vlinhd11/vlinhd11-android-scripting | python/src/Lib/lib2to3/fixes/fix_has_key.py | 49 | 3209 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for has_key().
Calls to .has_key() methods are expressed in terms of the 'in'
operator:
d.has_key(k) -> k in d
CAVEATS:
1) While the primary target of this fixer is dict.has_key(), the
fixer will change any has_key() method call, regardless of its
class.
2) Cases like this will not be converted:
m = d.has_key
if m(k):
...
Only *calls* to has_key() are converted. While it is possible to
convert the above to something like
m = d.__contains__
if m(k):
...
this is currently not done.
"""
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, parenthesize
class FixHasKey(fixer_base.BaseFix):
PATTERN = """
anchor=power<
before=any+
trailer< '.' 'has_key' >
trailer<
'('
( not(arglist | argument<any '=' any>) arg=any
| arglist<(not argument<any '=' any>) arg=any ','>
)
')'
>
after=any*
>
|
negation=not_test<
'not'
anchor=power<
before=any+
trailer< '.' 'has_key' >
trailer<
'('
( not(arglist | argument<any '=' any>) arg=any
| arglist<(not argument<any '=' any>) arg=any ','>
)
')'
>
>
>
"""
def transform(self, node, results):
assert results
syms = self.syms
if (node.parent.type == syms.not_test and
self.pattern.match(node.parent)):
# Don't transform a node matching the first alternative of the
# pattern when its parent matches the second alternative
return None
negation = results.get("negation")
anchor = results["anchor"]
prefix = node.get_prefix()
before = [n.clone() for n in results["before"]]
arg = results["arg"].clone()
after = results.get("after")
if after:
after = [n.clone() for n in after]
if arg.type in (syms.comparison, syms.not_test, syms.and_test,
syms.or_test, syms.test, syms.lambdef, syms.argument):
arg = parenthesize(arg)
if len(before) == 1:
before = before[0]
else:
before = pytree.Node(syms.power, before)
before.set_prefix(" ")
n_op = Name("in", prefix=" ")
if negation:
n_not = Name("not", prefix=" ")
n_op = pytree.Node(syms.comp_op, (n_not, n_op))
new = pytree.Node(syms.comparison, (arg, n_op, before))
if after:
new = parenthesize(new)
new = pytree.Node(syms.power, (new,) + tuple(after))
if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr,
syms.and_expr, syms.shift_expr,
syms.arith_expr, syms.term,
syms.factor, syms.power):
new = parenthesize(new)
new.set_prefix(prefix)
return new
| apache-2.0 |
vegetableman/phantomjs | src/qt/qtwebkit/Tools/TestResultServer/main.py | 126 | 2186 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Request a modern Django
from google.appengine.dist import use_library
use_library('django', '1.1')
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from handlers import menu
from handlers import testfilehandler
routes = [
('/testfile/delete', testfilehandler.DeleteFile),
('/testfile/upload', testfilehandler.Upload),
('/testfile/uploadform', testfilehandler.UploadForm),
('/testfile/?', testfilehandler.GetFile),
('/*|/menu', menu.Menu),
]
application = webapp.WSGIApplication(routes, debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
| bsd-3-clause |
jtrobec/pants | src/python/pants/backend/android/tasks/unpack_libraries.py | 2 | 12554 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from hashlib import sha1
from pants.backend.android.targets.android_library import AndroidLibrary
from pants.backend.android.targets.android_resources import AndroidResources
from pants.backend.jvm.jar_dependency_utils import M2Coordinate
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.tasks.jar_import_products import JarImportProducts
from pants.base.build_environment import get_buildroot
from pants.base.fingerprint_strategy import DefaultFingerprintStrategy
from pants.build_graph.address import Address
from pants.fs.archive import ZIP
from pants.task.task import Task
class AndroidLibraryFingerprintStrategy(DefaultFingerprintStrategy):
def compute_fingerprint(self, target):
"""AndroidLibrary targets need to be re-unpacked if any of the imported jars have changed."""
# TODO(mateor) Create a utility function to add a block of fingerprints to a hasher with caller
# handing in list of items of the same type and a function to extract a fingerprint from each.
if isinstance(target, AndroidLibrary):
hasher = sha1()
for cache_key in sorted(jar.cache_key() for jar in target.imported_jars):
hasher.update(cache_key)
hasher.update(target.payload.fingerprint())
return hasher.hexdigest()
return None
class UnpackLibraries(Task):
"""Unpack AndroidDependency artifacts, including .jar and .aar libraries.
The UnpackLibraries task unpacks artifacts imported by AndroidLibraries, as .aar or .jar files,
through a 'libraries' attribute. The .aar files may contain components which require creation
of some synthetic targets, as well as a classes.jar. The classes.jar is packaged into a
  JarDependency target and sent to javac compilation. All jar files are then unpacked;
android_binaries repack the class files of all the android_libraries in their transitive
dependencies into a dex file.
All archives are unpacked only once, regardless of differing include/exclude patterns or how many
targets depend upon it. All targets that depend on a particular artifact will be passed the
unpack_libraries product, which is a directory containing the entire source of the unpacked jars.
These sources are filtered against the AndroidLibrary's include/exclude patterns during the
creation of the dex file.
"""
class MissingElementException(Exception):
"""Raised if an unpacked file or directory unexpectedly does not exist."""
class UnexpectedArchiveType(Exception):
"""Raised if an archive has an extension that is not explicitly handled by this class."""
@classmethod
def prepare(cls, options, round_manager):
super(UnpackLibraries, cls).prepare(options, round_manager)
round_manager.require_data(JarImportProducts)
@classmethod
def product_types(cls):
return ['unpacked_libraries']
@staticmethod
def is_library(target):
"""Return True for AndroidLibrary targets."""
# TODO(mateor) add AndroidBinary support. If include/exclude patterns aren't needed, an
# android_binary should be able to simply declare an android_dependency as a dep.
return isinstance(target, AndroidLibrary)
def __init__(self, *args, **kwargs):
super(UnpackLibraries, self).__init__(*args, **kwargs)
self._created_targets = {}
self._unpacked_archives = set()
def create_classes_jar_target(self, target, coordinate, jar_file):
"""Create a JarLibrary target containing the jar_file as a JarDependency.
:param target: The new JarLibrary will be derived from this AndroidLibrary.
:type target: :class:`pants.backend.android.targets.android_library.AndroidLibrary`
:param coordinate: Archive coordinate fetched by ivy, e.g. 'org.pantsbuild:example::1.0:aar'.
:type coordinate: :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
:param string jar_file: Full path of the classes.jar contained within unpacked aar files.
:returns: A new jar library target.
:rtype: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
"""
# TODO(mateor) add another JarDependency for every jar under 'libs'.
jar_url = 'file://{0}'.format(jar_file)
jar_dep = JarDependency(org=target.id, name=coordinate.artifact_filename, rev=coordinate.rev,
url=jar_url)
address = Address(self.workdir, '{}-classes.jar'.format(coordinate.artifact_filename))
new_target = self.context.add_new_target(address, JarLibrary, jars=[jar_dep],
derived_from=target)
return new_target
def create_resource_target(self, target, coordinate, manifest, resource_dir):
"""Create an AndroidResources target.
:param target: AndroidLibrary that the new AndroidResources target derives from.
:type target: :class:`pants.backend.android.targets.android_library.AndroidLibrary`
:param coordinate: Archive coordinate fetched by ivy, e.g. 'org.pantsbuild:example::1.0:aar'.
:type coordinate: :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
:param string manifest: The path of 'AndroidManifest.xml'
:param string resource_dir: Full path of the res directory contained within aar files.
:return: A new android resources target.
    :rtype: :class:`pants.backend.android.targets.AndroidResources`
"""
address = Address(self.workdir, '{}-resources'.format(coordinate.artifact_filename))
new_target = self.context.add_new_target(address, AndroidResources,
manifest=manifest, resource_dir=resource_dir,
derived_from=target)
return new_target
def create_android_library_target(self, target, coordinate, unpacked_aar_location):
"""Create an AndroidLibrary target.
The aar files are unpacked and the contents used to create a new AndroidLibrary target.
:param AndroidLibrary target: AndroidLibrary that the new AndroidLibrary target derives from.
:param coordinate: Archive coordinate fetched by ivy, e.g. 'org.pantsbuild:example::1.0:aar'.
:type coordinate: :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
:param string unpacked_aar_location: Full path of dir holding contents of an unpacked aar file.
:return: A new android library target.
:rtype::class:`pants.backend.android.targets.AndroidLibrary`
"""
# The following three elements of an aar file have names mandated by the aar spec:
# http://tools.android.com/tech-docs/new-build-system/aar-format
# They are said to be mandatory although in practice that assumption only holds for manifest.
manifest = os.path.join(unpacked_aar_location, 'AndroidManifest.xml')
jar_file = os.path.join(unpacked_aar_location, 'classes.jar')
resource_dir = os.path.join(unpacked_aar_location, 'res')
# Sanity check to make sure all .aar files we expect to be unpacked are actually unpacked.
if not os.path.isfile(manifest):
raise self.MissingElementException("An AndroidManifest.xml is expected in every unpacked "
".aar file but none was found in the {} archive "
"for the {} target".format(coordinate, target))
# Depending on the contents of the unpacked aar file, create the dependencies.
deps = []
if os.path.isdir(resource_dir):
deps.append(self.create_resource_target(target, coordinate, manifest, resource_dir))
if os.path.isfile(jar_file):
deps.append(self.create_classes_jar_target(target, coordinate, jar_file))
address = Address(self.workdir, '{}-android_library'.format(coordinate.artifact_filename))
new_target = self.context.add_new_target(address, AndroidLibrary,
manifest=manifest,
include_patterns=target.payload.include_patterns,
exclude_patterns=target.payload.exclude_patterns,
dependencies=deps,
derived_from=target)
return new_target
def _unpack_artifacts(self, jar_imports):
# Unpack the aar and jar library artifacts. If the aar files have a jar in the contents,
# unpack that jar as well.
for coordinate, aar_or_jar in jar_imports:
jar_outdir = self.unpacked_jar_location(coordinate)
if 'jar' == coordinate.ext:
jar_file = aar_or_jar
elif 'aar' == coordinate.ext:
unpacked_aar_destination = self.unpacked_aar_location(coordinate)
jar_file = os.path.join(unpacked_aar_destination, 'classes.jar')
# Unpack .aar files.
if coordinate not in self._unpacked_archives:
ZIP.extract(aar_or_jar, unpacked_aar_destination)
self._unpacked_archives.add(aar_or_jar)
# Create an .aar/classes.jar signature for self._unpacked_archives.
coordinate = M2Coordinate(org=coordinate.org,
name=coordinate.name,
rev=coordinate.rev,
classifier=coordinate.classifier,
ext='classes.jar')
else:
raise self.UnexpectedArchiveType('Android dependencies can be .aar or .jar archives '
'(was: {} at {})'.format(coordinate, aar_or_jar))
# Unpack the jar files.
if coordinate not in self._unpacked_archives and os.path.isfile(jar_file):
ZIP.extract(jar_file, jar_outdir)
self._unpacked_archives.add(aar_or_jar)
def _create_target(self, target, coordinates):
# Create a target for the components of an unpacked .aar file.
for coordinate in coordinates:
# The contents of the unpacked aar file must be made into an AndroidLibrary target.
if 'aar' == coordinate.ext:
if coordinate not in self._created_targets:
unpacked_location = self.unpacked_aar_location(coordinate)
if not os.path.isdir(unpacked_location):
raise self.MissingElementException('{}: Expected to unpack {} at {} but did not!'
.format(target, coordinate, unpacked_location))
new_target = self.create_android_library_target(target,
coordinate,
unpacked_location)
self._created_targets[coordinate] = new_target
target.inject_dependency(self._created_targets[coordinate].address)
# The unpacked_libraries product is a dir containing the full unpacked source. The files
# that match the include/exclude patterns are calculated during DxCompile.
unpacked_products = self.context.products.get('unpacked_libraries')
unpacked_products.add(target, get_buildroot()).append(self.unpacked_jar_location(coordinate))
def execute(self):
jar_import_products = self.context.products.get_data(JarImportProducts)
library_targets = self.context.targets(predicate=self.is_library)
with self.invalidated(library_targets,
fingerprint_strategy=AndroidLibraryFingerprintStrategy(),
invalidate_dependents=True) as invalidation_check:
for vt in invalidation_check.invalid_vts:
jar_imports = jar_import_products.imports(vt.target)
if jar_imports:
self._unpack_artifacts(jar_imports)
# Create the new targets from the contents of unpacked aar files.
for target in library_targets:
jar_imports = jar_import_products.imports(target)
if jar_imports:
self._create_target(target, (jar_import.coordinate for jar_import in jar_imports))
def unpacked_jar_location(self, coordinate):
"""Location for unpacked jar files, whether imported as-is or found inside an aar file."""
return os.path.join(self.workdir, 'explode-jars', coordinate.artifact_filename)
def unpacked_aar_location(self, coordinate):
"""Output location for unpacking .aar archives."""
return os.path.join(self.workdir, coordinate.artifact_filename)
| apache-2.0 |
xtenex/raft | core/network/InMemoryCache.py | 11 | 3444 | #
# Implementation of in-memory cache
#
# Author: Gregory Fleischer (gfleischer@gmail.com)
#
# Copyright (c) 2011 RAFT Team
#
# This file is part of RAFT.
#
# RAFT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RAFT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RAFT. If not, see <http://www.gnu.org/licenses/>.
#
from PyQt4.QtCore import *
from PyQt4 import QtNetwork
import time
class InMemoryCache(QtNetwork.QAbstractNetworkCache):
def __init__(self, framework, parent = None):
QtNetwork.QAbstractNetworkCache.__init__(self, parent)
self.framework = framework
self.framework.subscribe_responses_cleared(self.responses_cleared)
self.size = 0
self.cache = {} # list of [metaData, device, ctime] entries by url
self.outstanding = {}
def responses_cleared(self):
self.clear()
def cacheSize(self):
return self.size
def clear(self):
for k in list(self.cache.keys()):
metaData, buf, mtime = self.cache.pop(k)
if buf:
self.size -= buf.length()
buf.clear()
metaData, buf = None, None
def data(self, url):
k = url.toEncoded()
if k in self.cache:
buf = self.cache[k][1]
device = QBuffer(buf)
device.open(QIODevice.ReadOnly|QIODevice.Unbuffered)
return device
return None
def insert(self, device):
# TODO: implement max size of cache using LRU approach
for k in list(self.outstanding.keys()):
if self.outstanding[k] == device:
self.size += device.size()
self.cache[k][1] = device.data()
device = None
return
else:
raise Exception('Failed to find outstanding entry on cache insert')
def metaData(self, url):
k = url.toEncoded()
if k in self.cache:
metaData, buf, mtime = self.cache[k]
if buf:
return metaData
# return non-valid
metaData = QtNetwork.QNetworkCacheMetaData()
return metaData
def prepare(self, metaData):
k = metaData.url().toEncoded()
self.cache[k] = [metaData, None, time.time()]
device = QBuffer()
device.open(QIODevice.ReadWrite|QIODevice.Unbuffered)
self.outstanding[k] = device
return device
def remove(self, url):
k = url.toEncoded()
if k in self.outstanding:
device = self.outstanding.pop(k)
device = None
if k in self.cache:
metaData, buf, mtime = self.cache.pop(k)
if buf:
self.size -= buf.length()
buf.clear()
metaData, buf = None, None
return True
return False
def updateMetaData(self, metaData):
url = metaData.url().toEncoded()
if url in self.cache:
self.cache[url][0] = metaData
| gpl-3.0 |
ramezquitao/pyoptools | pyoptools/all.py | 2 | 2955 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2007, 2008, 2009,2010 Ricardo Amézquita Orozco
# <ramezquitao@unal.edu.co>,
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
Package containing modules and submodules defining an *API* for optical
raytracing and wave propagation calculations.
'''
# Import all pyoptools packages
from pyoptools.misc.cmisc import *
from pyoptools.misc.definitions import *
#from misc.frft import *
from pyoptools.misc.lsq import *
from pyoptools.misc.pmisc import *
from pyoptools.misc.plist import *
from pyoptools.misc.Poly2D import *
from pyoptools.misc.resources import *
from pyoptools.raytrace.calc import *
from pyoptools.raytrace.comp_lib import *
from pyoptools.raytrace.component import *
from pyoptools.raytrace.library import *
from pyoptools.raytrace.mat_lib import *
from pyoptools.raytrace.ray import *
from pyoptools.raytrace.shape import *
from pyoptools.raytrace.surface import *
from pyoptools.raytrace.system import *
from pyoptools.wavefront.field import *
from pyoptools.wavefront.calc import *
from pyoptools.wavefront.psurfrep import *
from pyoptools.wavefront.zernike import *
#
#
# Import graphic packages This should be imported somewhere else
from pyoptools.gui.plotutils import *
# This module has problems with MESA in buster is disabled for the moment
#from pyoptools.gui.ipynbplotutils import *
# Module implemented using pythreejs
from pyoptools.gui.ipywidgets import *
| gpl-3.0 |
dparks1134/UniteM | unitem/tree_common_bases.py | 1 | 7409 | ###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import math
import logging
from biolib.common import alphanumeric_sort
import svgwrite
class TreeCommonBases():
"""Create dendrogram showing common bases between bins."""
def __init__(self):
"""Initialization."""
self.logger = logging.getLogger('timestamp')
# size of labels
self.font_size = 8
self.scale_font_size = 6
# width and height of each gene cell
self.row_height = 1.3*self.font_size
self.scale_interval_width = 15
self.label_offset = 2
def _render_scale(self, dwg):
"""Render scale."""
scale_group = svgwrite.container.Group(id='scale')
dwg.add(scale_group)
scale_text_group = svgwrite.container.Group(id='scale_text')
dwg.add(scale_text_group)
# draw baseline
scale_y_pos = self.fig_size_y + 0.5*self.row_height
scale = dwg.path("M%f,%f" % (0, scale_y_pos + 0.25*self.row_height))
scale.push("L%f,%f" % (0, scale_y_pos))
scale.push("L%f,%f" % (self.fig_size_x, scale_y_pos))
scale.push("L%f,%f" % (self.fig_size_x, scale_y_pos + 0.25*self.row_height))
scale.fill(color='none')
scale.stroke(color='black', width=0.5)
scale_group.add(scale)
for s in [50, 60, 70, 80, 90, 100]:
xpos = self.fig_size_x - ((100-s)/10)*self.scale_interval_width
t = dwg.text(int(s),
x=[(xpos)],
y=[(scale_y_pos + 0.75*self.row_height + self.label_offset)],
font_size="%fpt" % self.scale_font_size,
text_anchor='middle',
fill='rgb(0,0,0)')
scale_text_group.add(t)
if s != 50 and s != 100:
tick = dwg.line(start=(xpos, scale_y_pos),
end=(xpos, scale_y_pos + 0.25*self.row_height),
fill='black',
stroke_width=0.5)
tick.stroke(color='black')
scale_group.add(tick)
def _render_labels(self, dwg, bin_id, common_bases):
"""Render labels."""
label_group = svgwrite.container.Group(id='labels')
dwg.add(label_group)
t = dwg.text(bin_id,
x=[(self.fig_size_x + self.label_offset)],
y=[(self.fig_size_y + 0.5*self.font_size)],
font_size="%fpt" % self.font_size,
direction='ltr',
fill='rgb(0,0,0)')
label_group.add(t)
y = self.fig_size_y
for bm, bid, cb in common_bases:
y -= self.row_height
t = dwg.text(bm,
x=[(self.fig_size_x + self.label_offset)],
y=[(y + 0.5*self.font_size)],
font_size="%fpt" % self.font_size,
direction='ltr',
fill='rgb(0,0,0)')
label_group.add(t)
def _render_tree(self, dwg, common_bases):
"""Render tree."""
tree_group = svgwrite.container.Group(id='tree')
dwg.add(tree_group)
x_start = self.fig_size_x
y_start = self.fig_size_y
width_start = 0
for r, (bm, bid, cb) in enumerate(common_bases):
width = ((100-cb)/10)*self.scale_interval_width
delta_width = width - width_start
branch = dwg.path("M%f,%f" % (x_start, y_start))
branch.push("L%f,%f" % (x_start-delta_width, y_start))
branch.push("L%f,%f" % (x_start-delta_width, self.fig_size_y - (r+1)*self.row_height))
branch.push("L%f,%f" % (self.fig_size_x, self.fig_size_y - (r+1)*self.row_height))
branch.fill(color='none')
branch.stroke(color='black', width=1)
tree_group.add(branch)
x_start = self.fig_size_x - width
y_start = self.fig_size_y - r*self.row_height - 0.5*self.row_height
width_start = width
# render root branch
root = dwg.line(start=(x_start, y_start),
end=(0, y_start),
fill='black',
stroke_width=1)
root.stroke(color='black')
tree_group.add(root)
def plot(self, bin_id, common_bases, output_plot):
"""Create plot.
Parameters
----------
bin_id : str
Identifier of bin compared to other bins.
        common_bases : list of (binning method, bin id, percent common bases) tuples
            Percentage of common bases with the indicated bin from each binning method.
output_plot : str
Desired output file.
"""
# setup SVG drawing
start_x = 0
start_y = 0
if not output_plot.endswith('.svg'):
output_plot += '.svg'
self.fig_size_x = 5*self.scale_interval_width # 50, 60, 70, 80, 90, 100
self.fig_size_y = start_y + len(common_bases)*self.row_height
dwg = svgwrite.Drawing(filename=output_plot,
size=(self.fig_size_x, self.fig_size_y),
profile='full')
dwg.set_desc(title='UniteM shared base pair tree.')
self._render_scale(dwg)
self._render_labels(dwg, bin_id, common_bases)
self._render_tree(dwg, common_bases)
dwg.save()
| gpl-3.0 |
ttm/oscEmRede | venv/share/doc/networkx-1.8.1/examples/graph/football.py | 17 | 1366 | #!/usr/bin/env python
"""
Load football network in GML format and compute some network statistics.
Shows how to download GML graph in a zipped file, unpack it, and load
into a NetworkX graph.
Requires Internet connection to download the URL
http://www-personal.umich.edu/~mejn/netdata/football.zip
"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
# Copyright (C) 2007 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
try:
import pyparsing
except ImportError as e:
raise ImportError(str(e)+". Check http://pyparsing.wikispaces.com/")
from networkx import *
url="http://www-personal.umich.edu/~mejn/netdata/football.zip"
try: # Python 3.x
import urllib.request as urllib
except ImportError: # Python 2.x
import urllib
import io
import zipfile
sock = urllib.urlopen(url) # open URL
s=io.BytesIO(sock.read()) # read into BytesIO "file"
sock.close()
zf = zipfile.ZipFile(s) # zipfile object
txt=zf.read('football.txt').decode() # read info file
gml=zf.read('football.gml').decode() # read gml data
# throw away bogus first line with # from mejn files
gml=gml.split('\n')[1:]
G=parse_gml(gml) # parse gml data
print(txt)
# print degree for each team - number of games
for n,d in G.degree_iter():
print('%s %d' % (n, d))
| gpl-3.0 |
miketry0313/test | Tools/autotest/pysim/sim_multicopter.py | 54 | 5967 | #!/usr/bin/env python
from multicopter import MultiCopter
import util, time, os, sys, math
import socket, struct
import select, errno
from pymavlink import fgFDM
def sim_send(m, a):
'''send flight information to mavproxy and flightgear'''
global fdm
from math import degrees
earth_rates = util.BodyRatesToEarthRates(a.dcm, a.gyro)
(roll, pitch, yaw) = a.dcm.to_euler()
fdm.set('latitude', a.latitude, units='degrees')
fdm.set('longitude', a.longitude, units='degrees')
fdm.set('altitude', a.altitude, units='meters')
fdm.set('phi', roll, units='radians')
fdm.set('theta', pitch, units='radians')
fdm.set('psi', yaw, units='radians')
fdm.set('phidot', earth_rates.x, units='rps')
fdm.set('thetadot', earth_rates.y, units='rps')
fdm.set('psidot', earth_rates.z, units='rps')
fdm.set('vcas', math.sqrt(a.velocity.x*a.velocity.x + a.velocity.y*a.velocity.y), units='mps')
fdm.set('v_north', a.velocity.x, units='mps')
fdm.set('v_east', a.velocity.y, units='mps')
# FG FDM protocol only supports 4 motors for display :(
fdm.set('num_engines', 4)
for i in range(4):
fdm.set('rpm', 1000*m[i], idx=i)
try:
fg_out.send(fdm.pack())
except socket.error as e:
if not e.errno in [ errno.ECONNREFUSED ]:
raise
buf = struct.pack('<17dI',
a.latitude, a.longitude, a.altitude, degrees(yaw),
a.velocity.x, a.velocity.y, a.velocity.z,
a.accelerometer.x, a.accelerometer.y, a.accelerometer.z,
degrees(earth_rates.x), degrees(earth_rates.y), degrees(earth_rates.z),
degrees(roll), degrees(pitch), degrees(yaw),
math.sqrt(a.velocity.x*a.velocity.x + a.velocity.y*a.velocity.y),
0x4c56414f)
try:
sim_out.send(buf)
except socket.error as e:
if not e.errno in [ errno.ECONNREFUSED ]:
raise
def sim_recv(m):
'''receive control information from SITL'''
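    # The SITL control packet is 28 bytes: 14 little-endian uint16 values.
    # The first 11 are motor PWM values; the last 3 encode wind speed,
    # direction and turbulence, each scaled by 100 (see the unpacking below).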
try:
buf = sim_in.recv(28)
except socket.error as e:
if not e.errno in [ errno.EAGAIN, errno.EWOULDBLOCK ]:
raise
return
if len(buf) != 28:
return
control = list(struct.unpack('<14H', buf))
pwm = control[0:11]
# update motors
for i in range(11):
m[i] = (pwm[i]-1000)/1000.0
# update wind
global a
(speed, direction, turbulance) = control[11:]
a.wind.speed = speed*0.01
a.wind.direction = direction*0.01
a.wind.turbulance = turbulance*0.01
def interpret_address(addrstr):
    '''interpret an IP:port string'''
a = addrstr.split(':')
a[1] = int(a[1])
return tuple(a)
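# e.g. interpret_address("127.0.0.1:5501") returns ('127.0.0.1', 5501)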
##################
# main program
from optparse import OptionParser
parser = OptionParser("sim_multicopter.py [options]")
parser.add_option("--fgout", dest="fgout", help="flightgear output (IP:port)", default="127.0.0.1:5503")
parser.add_option("--simin", dest="simin", help="SIM input (IP:port)", default="127.0.0.1:5502")
parser.add_option("--simout", dest="simout", help="SIM output (IP:port)", default="127.0.0.1:5501")
parser.add_option("--home", dest="home", type='string', default=None, help="home lat,lng,alt,hdg (required)")
parser.add_option("--rate", dest="rate", type='int', help="SIM update rate", default=400)
parser.add_option("--wind", dest="wind", help="Simulate wind (speed,direction,turbulence)", default='0,0,0')
parser.add_option("--frame", dest="frame", help="frame type (+,X,octo)", default='+')
(opts, args) = parser.parse_args()
for m in [ 'home' ]:
if not opts.__dict__[m]:
print("Missing required option '%s'" % m)
parser.print_help()
sys.exit(1)
# UDP socket addresses
fg_out_address = interpret_address(opts.fgout)
sim_out_address = interpret_address(opts.simout)
sim_in_address = interpret_address(opts.simin)
# setup output to flightgear
fg_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
fg_out.connect(fg_out_address)
fg_out.setblocking(0)
# setup input from SITL
sim_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_in.bind(sim_in_address)
sim_in.setblocking(0)
# setup output to SITL
sim_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_out.connect(sim_out_address)
sim_out.setblocking(0)
# FG FDM object
fdm = fgFDM.fgFDM()
# create the quadcopter model
a = MultiCopter(frame=opts.frame)
print("Simulating %u motors for frame %s" % (len(a.motors), opts.frame))
# motors initially off
m = [0.0] * 11
lastt = time.time()
frame_count = 0
# parse home
v = opts.home.split(',')
if len(v) != 4:
print("home should be lat,lng,alt,hdg")
sys.exit(1)
a.home_latitude = float(v[0])
a.home_longitude = float(v[1])
a.home_altitude = float(v[2])
a.latitude = a.home_latitude
a.longitude = a.home_longitude
a.altitude = a.home_altitude
a.yaw = float(v[3])
a.ground_level = a.home_altitude
a.position.z = 0
a.wind = util.Wind(opts.wind)
a.set_yaw_degrees(a.yaw)
print("Starting at lat=%f lon=%f alt=%.1f heading=%.1f" % (
a.home_latitude,
a.home_longitude,
a.home_altitude,
a.yaw))
frame_time = 1.0/opts.rate
sleep_overhead = 0
while True:
frame_start = time.time()
sim_recv(m)
m2 = m[:]
a.update(m2)
sim_send(m, a)
frame_count += 1
t = time.time()
if t - lastt > 1.0:
# print("%.2f fps sleepOverhead=%f zspeed=%.2f zaccel=%.2f h=%.1f a=%.1f yaw=%.1f" % (
# frame_count/(t-lastt),
# sleep_overhead,
# a.velocity.z, a.accelerometer.z, a.position.z, a.altitude,
# a.yaw))
lastt = t
frame_count = 0
frame_end = time.time()
if frame_end - frame_start < frame_time:
dt = frame_time - (frame_end - frame_start)
dt -= sleep_overhead
if dt > 0:
time.sleep(dt)
sleep_overhead = 0.99*sleep_overhead + 0.01*(time.time() - frame_end - dt)
| gpl-3.0 |
B-Stefan/prov-simple-rest-service | api/document.py | 1 | 1855 | from logging import Logger
from connexion import NoContent, problem
from provdbconnector.provapi import ProvApiException
from provdbconnector import ProvApi
from provdbconnector.db_adapters import SimpleInMemoryAdapter
prov_api = ProvApi(adapter=SimpleInMemoryAdapter, auth_info=None)
pets = {}
logger = Logger(__name__)
def problem_exception(e):
"""
Returns a problem description for the http response
:param e:
:type Exception
:return: problem instance for http response
"""
    return problem(500, "Prov-Connector Error", str(e), str(type(e)))
def post(document):
"""
Convert a string to prov and save this prov document
    :param document: the serialized PROV document to store
    :type document: str
    :return: id of the created document
    :rtype: str
"""
try:
id = prov_api.create_document(document)
except ProvApiException as e:
logger.debug(e)
return problem_exception(e)
except NotImplementedError as e:
logger.debug(e)
return problem_exception(e)
return id, 201
def get(id):
"""
Get method for prov documents
    :param id: id of the document to retrieve
    :type id: str
    :return: the document serialized as PROV-N
    :rtype: str
"""
try:
provn = prov_api.get_document_as_provn(id)
except ProvApiException as e:
logger.debug(e)
return problem_exception(e)
except NotImplementedError as e:
logger.debug(e)
return problem_exception(e)
return provn, 200
###UNSUPPORTED METHODS###
def put(id, document):
"""
Updated a prov document
:param id:
:param document:
:return:
"""
return NoContent,501
def delete(id):
"""
Delete command for prov documents
:param id:
:return:
"""
return NoContent, 501
def search():
# NOTE: we need to wrap it with list for Python 3 as dict_values is not JSON serializable
return list(pets.values()) | apache-2.0 |
lgarren/spack | var/spack/repos/builtin/packages/xrandr/package.py | 3 | 1826 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xrandr(AutotoolsPackage):
"""xrandr - primitive command line interface to X11 Resize, Rotate, and
Reflect (RandR) extension."""
homepage = "http://cgit.freedesktop.org/xorg/app/xrandr"
url = "https://www.x.org/archive/individual/app/xrandr-1.5.0.tar.gz"
version('1.5.0', 'fe9cf76033fe5d973131eac67b6a3118')
depends_on('libxrandr@1.5:')
depends_on('libxrender')
depends_on('libx11')
depends_on('xproto@7.0.17:', type='build')
depends_on('pkg-config@0.9.0:', type='build')
depends_on('util-macros', type='build')
| lgpl-2.1 |
pshen/ansible | lib/ansible/modules/network/aos/aos_logical_device_map.py | 78 | 8922 | #!/usr/bin/python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_logical_device_map
author: Damien Garros (@dgarros)
version_added: "2.3"
short_description: Manage AOS Logical Device Map
description:
    - Apstra AOS Logical Device Map module lets you manage your Logical Device Maps easily. You can
      create and delete Logical Device Maps by name, ID or by using a JSON file. This module
      is idempotent and supports the I(check) mode. It uses the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
name:
description:
- Name of the Logical Device Map to manage.
Only one of I(name), I(id) or I(content) can be set.
id:
description:
- AOS Id of the Logical Device Map to manage (can't be used to create a new Logical Device Map),
Only one of I(name), I(id) or I(content) can be set.
content:
description:
- Datastructure of the Logical Device Map to manage. The data can be in YAML / JSON or
directly a variable. It's the same datastructure that is returned
on success in I(value). Only one of I(name), I(id) or I(content) can be set.
state:
description:
- Indicate what is the expected state of the Logical Device Map (present or not).
default: present
choices: ['present', 'absent']
'''
EXAMPLES = '''
- name: "Create an Logical Device Map with one subnet"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: present
- name: "Create an Logical Device Map with multiple subnets"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-other-logical-device-map"
state: present
- name: "Check if an Logical Device Map exist with same subnets by ID"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "45ab26fc-c2ed-4307-b330-0870488fa13e"
state: present
- name: "Delete an Logical Device Map by name"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: absent
- name: "Delete an Logical Device Map by id"
aos_logical_device_map:
session: "{{ aos_session }}"
id: "45ab26fc-c2ed-4307-b330-0870488fa13e"
state: absent
# Save an Logical Device Map to a file
- name: "Access Logical Device Map 1/3"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: present
register: logical_device_map
- name: "Save Logical Device Map into a file in JSON 2/3"
copy:
content: "{{ logical_device_map.value | to_nice_json }}"
dest: logical_device_map_saved.json
- name: "Save Logical Device Map into a file in YAML 3/3"
copy:
content: "{{ logical_device_map.value | to_nice_yaml }}"
dest: logical_device_map_saved.yaml
- name: "Load Logical Device Map from a JSON file"
aos_logical_device_map:
session: "{{ aos_session }}"
content: "{{ lookup('file', 'resources/logical_device_map_saved.json') }}"
state: present
- name: "Load Logical Device Map from a YAML file"
aos_logical_device_map:
session: "{{ aos_session }}"
content: "{{ lookup('file', 'resources/logical_device_map_saved.yaml') }}"
state: present
'''
RETURNS = '''
name:
description: Name of the Logical Device Map
returned: always
type: str
sample: Server-IpAddrs
id:
description: AOS unique ID assigned to the Logical Device Map
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Value of the object as returned by the AOS Server
returned: always
type: dict
sample: {'...'}
'''
import json
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.aos import get_aos_session, find_collection_item, do_load_resource, check_aos_version, content_to_dict
#########################################################
# State Processing
#########################################################
def logical_device_map_absent(module, aos, my_log_dev_map):
margs = module.params
    # If the logical device map does not exist, return directly
if my_log_dev_map.exists is False:
module.exit_json(changed=False, name=margs['name'], id='', value={})
# If not in check mode, delete Logical Device Map
if not module.check_mode:
try:
            # Need to wait for 1sec before a delete to work around a current
# limitation in AOS
time.sleep(1)
my_log_dev_map.delete()
except:
            module.fail_json(msg="An error occurred while trying to delete the Logical Device Map")
module.exit_json( changed=True,
name=my_log_dev_map.name,
id=my_log_dev_map.id,
value={} )
def logical_device_map_present(module, aos, my_log_dev_map):
margs = module.params
# if content is defined, create object from Content
if margs['content'] is not None:
if 'display_name' in module.params['content'].keys():
do_load_resource(module, aos.LogicalDeviceMaps, module.params['content']['display_name'])
else:
module.fail_json(msg="Unable to find display_name in 'content', Mandatory")
    # fail if my_log_dev_map doesn't exist and no 'content' was provided to create it
if my_log_dev_map.exists is False and 'content' not in margs.keys():
        module.fail_json(msg="'content' is mandatory when the object does not already exist")
module.exit_json( changed=False,
name=my_log_dev_map.name,
id=my_log_dev_map.id,
value=my_log_dev_map.value )
#########################################################
# Main Function
#########################################################
def logical_device_map(module):
margs = module.params
try:
aos = get_aos_session(module, margs['session'])
except:
module.fail_json(msg="Unable to login to the AOS server")
item_name = False
item_id = False
if margs['content'] is not None:
content = content_to_dict(module, margs['content'] )
if 'display_name' in content.keys():
item_name = content['display_name']
else:
module.fail_json(msg="Unable to extract 'display_name' from 'content'")
elif margs['name'] is not None:
item_name = margs['name']
elif margs['id'] is not None:
item_id = margs['id']
#----------------------------------------------------
# Find Object if available based on ID or Name
#----------------------------------------------------
try:
my_log_dev_map = find_collection_item(aos.LogicalDeviceMaps,
item_name=item_name,
item_id=item_id)
except:
module.fail_json(msg="Unable to find the Logical Device Map based on name or ID, something went wrong")
#----------------------------------------------------
# Proceed based on State value
#----------------------------------------------------
if margs['state'] == 'absent':
logical_device_map_absent(module, aos, my_log_dev_map)
elif margs['state'] == 'present':
logical_device_map_present(module, aos, my_log_dev_map)
def main():
module = AnsibleModule(
argument_spec=dict(
session=dict(required=True, type="dict"),
name=dict(required=False ),
id=dict(required=False ),
content=dict(required=False, type="json"),
state=dict( required=False,
choices=['present', 'absent'],
default="present")
),
mutually_exclusive = [('name', 'id', 'content')],
required_one_of=[('name', 'id', 'content')],
supports_check_mode=True
)
# Check if aos-pyez is present and match the minimum version
check_aos_version(module, '0.6.0')
logical_device_map(module)
if __name__ == "__main__":
main()
| gpl-3.0 |
mcgachey/edx-platform | common/djangoapps/util/disable_rate_limit.py | 164 | 2640 | """Utilities for disabling Django Rest Framework rate limiting.
This is useful for performance tests in which we need to generate
a lot of traffic from a particular IP address. By default,
Django Rest Framework uses the IP address to throttle traffic
for users who are not authenticated.
To disable rate limiting:
1) Decorate the Django Rest Framework APIView with `@can_disable_rate_limit`
2) In Django's admin interface, set `RateLimitConfiguration.enabled` to False.
Note: You should NEVER disable rate limiting in production.
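
Illustrative shell snippet for step 2 (assumes the usual ConfigurationModel
API; the exact admin workflow may differ per deployment):

    from util.models import RateLimitConfiguration
    RateLimitConfiguration.objects.create(enabled=False)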
"""
from functools import wraps
import logging
from rest_framework.views import APIView
from util.models import RateLimitConfiguration
LOGGER = logging.getLogger(__name__)
def _check_throttles_decorator(func):
"""Decorator for `APIView.check_throttles`.
The decorated function will first check model-based config
to see if rate limiting is disabled; if so, it skips
the throttle check. Otherwise, it calls the original
function to enforce rate-limiting.
Arguments:
func (function): The function to decorate.
Returns:
The decorated function.
"""
@wraps(func)
def _decorated(*args, **kwargs):
# Skip the throttle check entirely if we've disabled rate limiting.
# Otherwise, perform the checks (as usual)
if RateLimitConfiguration.current().enabled:
return func(*args, **kwargs)
else:
msg = "Rate limiting is disabled because `RateLimitConfiguration` is not enabled."
LOGGER.info(msg)
return
return _decorated
def can_disable_rate_limit(clz):
"""Class decorator that allows rate limiting to be disabled.
Arguments:
clz (class): The APIView subclass to decorate.
Returns:
class: the decorated class.
Example Usage:
>>> from rest_framework.views import APIView
>>> @can_disable_rate_limit
>>> class MyApiView(APIView):
>>> pass
"""
# No-op if the class isn't a Django Rest Framework view.
if not issubclass(clz, APIView):
msg = (
u"{clz} is not a Django Rest Framework APIView subclass."
).format(clz=clz)
LOGGER.warning(msg)
return clz
# If we ARE explicitly disabling rate limiting,
# modify the class to always allow requests.
# Note that this overrides both rate limiting applied
# for the particular view, as well as global rate limits
# configured in Django settings.
if hasattr(clz, 'check_throttles'):
clz.check_throttles = _check_throttles_decorator(clz.check_throttles)
return clz
| agpl-3.0 |
sintefmath/Splipy | splipy/utils/nutils.py | 1 | 1605 | __doc__ = 'Implementation of convenience methods with respect to nutils integration.'
from splipy import Curve, Surface, Volume
import numpy as np
def controlpoints(spline):
""" Return controlpoints according to nutils ordering """
n = len(spline)
dim = spline.dimension
if isinstance(spline, Curve):
return np.reshape(spline[:,:] , (n, dim), order='F')
elif isinstance(spline, Surface):
return np.reshape(spline[:,:,:].swapaxes(0,1) , (n, dim), order='F')
elif isinstance(spline, Volume):
return np.reshape(spline[:,:,:,:].swapaxes(0,2), (n, dim), order='F')
raise RuntimeError('Non-spline argument detected')
def multiplicities(spline):
""" Returns the multiplicity of the knots at all knot values as a 2D array for
    all parametric directions, with one multiplicity per knot value """
return [[spline.order(d) - spline.bases[d].continuity(k) - 1 for k in spline.knots(d)] for d in range(spline.pardim)]
def degree(spline):
""" Returns polynomial degree (splipy order - 1) for all parametric directions """
return [p-1 for p in spline.order()]
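# Illustrative use of the helpers in this module (assumes `spline` is a
# splipy Curve, Surface or Volume created elsewhere):
#
#     cps = controlpoints(spline)              # (n, dim) array, nutils ordering
#     degs = degree(spline)                    # polynomial degree per direction
#     domain, geom = splipy_to_nutils(spline)  # nutils domain and geometry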
def splipy_to_nutils(spline):
""" Returns nutils domain and geometry object for spline mapping given by the argument """
from nutils import mesh, function
domain, geom = mesh.rectilinear(spline.knots())
cp = controlpoints(spline)
basis = domain.basis('spline', degree=degree(spline), knotmultiplicities=multiplicities(spline))
geom = function.matmat(basis, cp)
#TODO: add correct behaviour for rational and/or periodic geometries
return domain, geom
| gpl-3.0 |
szezso/T.E.S.C.O-kernel_vivo | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
nugget/home-assistant | homeassistant/components/mqtt/cover.py | 2 | 20645 | """
Support for MQTT cover devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/cover.mqtt/
"""
import logging
import voluptuous as vol
from homeassistant.components import cover, mqtt
from homeassistant.components.cover import (
ATTR_POSITION, ATTR_TILT_POSITION, DEVICE_CLASSES_SCHEMA, SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT, SUPPORT_OPEN, SUPPORT_OPEN_TILT, SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION, SUPPORT_STOP, SUPPORT_STOP_TILT, CoverDevice)
from homeassistant.components.mqtt import (
ATTR_DISCOVERY_HASH, CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN,
CONF_STATE_TOPIC, CONF_UNIQUE_ID, MqttAttributes, MqttAvailability,
MqttDiscoveryUpdate, MqttEntityDeviceInfo, subscription)
from homeassistant.components.mqtt.discovery import (
MQTT_DISCOVERY_NEW, clear_discovery_hash)
from homeassistant.const import (
CONF_DEVICE, CONF_DEVICE_CLASS, CONF_NAME, CONF_OPTIMISTIC,
CONF_VALUE_TEMPLATE, STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['mqtt']
CONF_GET_POSITION_TOPIC = 'position_topic'
CONF_SET_POSITION_TEMPLATE = 'set_position_template'
CONF_SET_POSITION_TOPIC = 'set_position_topic'
CONF_TILT_COMMAND_TOPIC = 'tilt_command_topic'
CONF_TILT_STATUS_TOPIC = 'tilt_status_topic'
CONF_PAYLOAD_CLOSE = 'payload_close'
CONF_PAYLOAD_OPEN = 'payload_open'
CONF_PAYLOAD_STOP = 'payload_stop'
CONF_POSITION_CLOSED = 'position_closed'
CONF_POSITION_OPEN = 'position_open'
CONF_STATE_CLOSED = 'state_closed'
CONF_STATE_OPEN = 'state_open'
CONF_TILT_CLOSED_POSITION = 'tilt_closed_value'
CONF_TILT_INVERT_STATE = 'tilt_invert_state'
CONF_TILT_MAX = 'tilt_max'
CONF_TILT_MIN = 'tilt_min'
CONF_TILT_OPEN_POSITION = 'tilt_opened_value'
CONF_TILT_STATE_OPTIMISTIC = 'tilt_optimistic'
TILT_PAYLOAD = 'tilt'
COVER_PAYLOAD = 'cover'
DEFAULT_NAME = 'MQTT Cover'
DEFAULT_OPTIMISTIC = False
DEFAULT_PAYLOAD_CLOSE = 'CLOSE'
DEFAULT_PAYLOAD_OPEN = 'OPEN'
DEFAULT_PAYLOAD_STOP = 'STOP'
DEFAULT_POSITION_CLOSED = 0
DEFAULT_POSITION_OPEN = 100
DEFAULT_RETAIN = False
DEFAULT_TILT_CLOSED_POSITION = 0
DEFAULT_TILT_INVERT_STATE = False
DEFAULT_TILT_MAX = 100
DEFAULT_TILT_MIN = 0
DEFAULT_TILT_OPEN_POSITION = 100
DEFAULT_TILT_OPTIMISTIC = False
OPEN_CLOSE_FEATURES = (SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP)
TILT_FEATURES = (SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT |
SUPPORT_SET_TILT_POSITION)
def validate_options(value):
"""Validate options.
    If the set position topic is set, the get position topic must be set as well.
"""
if (CONF_SET_POSITION_TOPIC in value and
CONF_GET_POSITION_TOPIC not in value):
raise vol.Invalid(
"set_position_topic must be set together with position_topic.")
return value
PLATFORM_SCHEMA = vol.All(mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_POSITION_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_POSITION_TEMPLATE): cv.template,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_GET_POSITION_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_OPEN, default=DEFAULT_PAYLOAD_OPEN): cv.string,
vol.Optional(CONF_PAYLOAD_CLOSE, default=DEFAULT_PAYLOAD_CLOSE): cv.string,
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): cv.string,
vol.Optional(CONF_STATE_OPEN, default=STATE_OPEN): cv.string,
vol.Optional(CONF_STATE_CLOSED, default=STATE_CLOSED): cv.string,
vol.Optional(CONF_POSITION_OPEN,
default=DEFAULT_POSITION_OPEN): int,
vol.Optional(CONF_POSITION_CLOSED,
default=DEFAULT_POSITION_CLOSED): int,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_TILT_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_TILT_STATUS_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TILT_CLOSED_POSITION,
default=DEFAULT_TILT_CLOSED_POSITION): int,
vol.Optional(CONF_TILT_OPEN_POSITION,
default=DEFAULT_TILT_OPEN_POSITION): int,
vol.Optional(CONF_TILT_MIN, default=DEFAULT_TILT_MIN): int,
vol.Optional(CONF_TILT_MAX, default=DEFAULT_TILT_MAX): int,
vol.Optional(CONF_TILT_STATE_OPTIMISTIC,
default=DEFAULT_TILT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_TILT_INVERT_STATE,
default=DEFAULT_TILT_INVERT_STATE): cv.boolean,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema).extend(
mqtt.MQTT_JSON_ATTRS_SCHEMA.schema), validate_options)
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
async_add_entities, discovery_info=None):
"""Set up MQTT cover through configuration.yaml."""
await _async_setup_entity(config, async_add_entities)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT cover dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add an MQTT cover."""
try:
discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH)
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(config, async_add_entities, config_entry,
discovery_hash)
except Exception:
if discovery_hash:
clear_discovery_hash(hass, discovery_hash)
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(cover.DOMAIN, 'mqtt'),
async_discover)
async def _async_setup_entity(config, async_add_entities, config_entry=None,
discovery_hash=None):
"""Set up the MQTT Cover."""
async_add_entities([MqttCover(config, config_entry, discovery_hash)])
class MqttCover(MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
MqttEntityDeviceInfo, CoverDevice):
"""Representation of a cover that can be controlled using MQTT."""
def __init__(self, config, config_entry, discovery_hash):
"""Initialize the cover."""
self._unique_id = config.get(CONF_UNIQUE_ID)
self._position = None
self._state = None
self._sub_state = None
self._optimistic = None
self._tilt_value = None
self._tilt_optimistic = None
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(
self, discovery_hash, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_schedule_update_ha_state()
def _setup_from_config(self, config):
self._config = config
self._optimistic = (config.get(CONF_OPTIMISTIC) or
(config.get(CONF_STATE_TOPIC) is None and
config.get(CONF_GET_POSITION_TOPIC) is None))
self._tilt_optimistic = config.get(CONF_TILT_STATE_OPTIMISTIC)
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
template.hass = self.hass
set_position_template = self._config.get(CONF_SET_POSITION_TEMPLATE)
if set_position_template is not None:
set_position_template.hass = self.hass
topics = {}
@callback
def tilt_updated(topic, payload, qos):
"""Handle tilt updates."""
if (payload.isnumeric() and
(self._config.get(CONF_TILT_MIN) <= int(payload) <=
self._config.get(CONF_TILT_MAX))):
level = self.find_percentage_in_range(float(payload))
self._tilt_value = level
self.async_schedule_update_ha_state()
@callback
def state_message_received(topic, payload, qos):
"""Handle new MQTT state messages."""
if template is not None:
payload = template.async_render_with_possible_json_value(
payload)
if payload == self._config.get(CONF_STATE_OPEN):
self._state = False
elif payload == self._config.get(CONF_STATE_CLOSED):
self._state = True
else:
_LOGGER.warning("Payload is not True or False: %s", payload)
return
self.async_schedule_update_ha_state()
@callback
def position_message_received(topic, payload, qos):
"""Handle new MQTT state messages."""
if template is not None:
payload = template.async_render_with_possible_json_value(
payload)
if payload.isnumeric():
percentage_payload = self.find_percentage_in_range(
float(payload), COVER_PAYLOAD)
self._position = percentage_payload
self._state = percentage_payload == DEFAULT_POSITION_CLOSED
else:
_LOGGER.warning(
"Payload is not integer within range: %s",
payload)
return
self.async_schedule_update_ha_state()
if self._config.get(CONF_GET_POSITION_TOPIC):
topics['get_position_topic'] = {
'topic': self._config.get(CONF_GET_POSITION_TOPIC),
'msg_callback': position_message_received,
'qos': self._config.get(CONF_QOS)}
elif self._config.get(CONF_STATE_TOPIC):
topics['state_topic'] = {
'topic': self._config.get(CONF_STATE_TOPIC),
'msg_callback': state_message_received,
'qos': self._config.get(CONF_QOS)}
else:
# Force into optimistic mode.
self._optimistic = True
if self._config.get(CONF_TILT_STATUS_TOPIC) is None:
self._tilt_optimistic = True
else:
self._tilt_optimistic = False
self._tilt_value = STATE_UNKNOWN
topics['tilt_status_topic'] = {
'topic': self._config.get(CONF_TILT_STATUS_TOPIC),
'msg_callback': tilt_updated,
'qos': self._config.get(CONF_QOS)}
self._sub_state = await subscription.async_subscribe_topics(
self.hass, self._sub_state,
topics)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def name(self):
"""Return the name of the cover."""
return self._config.get(CONF_NAME)
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._state
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._position
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt."""
return self._tilt_value
@property
def device_class(self):
"""Return the class of this sensor."""
return self._config.get(CONF_DEVICE_CLASS)
@property
def supported_features(self):
"""Flag supported features."""
supported_features = 0
if self._config.get(CONF_COMMAND_TOPIC) is not None:
supported_features = OPEN_CLOSE_FEATURES
if self._config.get(CONF_SET_POSITION_TOPIC) is not None:
supported_features |= SUPPORT_SET_POSITION
if self._config.get(CONF_TILT_COMMAND_TOPIC) is not None:
supported_features |= TILT_FEATURES
return supported_features
async def async_open_cover(self, **kwargs):
"""Move the cover up.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._config.get(CONF_COMMAND_TOPIC),
self._config.get(CONF_PAYLOAD_OPEN), self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = False
if self._config.get(CONF_GET_POSITION_TOPIC):
self._position = self.find_percentage_in_range(
self._config.get(CONF_POSITION_OPEN), COVER_PAYLOAD)
self.async_schedule_update_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._config.get(CONF_COMMAND_TOPIC),
self._config.get(CONF_PAYLOAD_CLOSE), self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = True
if self._config.get(CONF_GET_POSITION_TOPIC):
self._position = self.find_percentage_in_range(
self._config.get(CONF_POSITION_CLOSED), COVER_PAYLOAD)
self.async_schedule_update_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the device.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._config.get(CONF_COMMAND_TOPIC),
self._config.get(CONF_PAYLOAD_STOP), self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
async def async_open_cover_tilt(self, **kwargs):
"""Tilt the cover open."""
mqtt.async_publish(self.hass,
self._config.get(CONF_TILT_COMMAND_TOPIC),
self._config.get(CONF_TILT_OPEN_POSITION),
self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
if self._tilt_optimistic:
self._tilt_value = self._config.get(CONF_TILT_OPEN_POSITION)
self.async_schedule_update_ha_state()
async def async_close_cover_tilt(self, **kwargs):
"""Tilt the cover closed."""
mqtt.async_publish(self.hass,
self._config.get(CONF_TILT_COMMAND_TOPIC),
self._config.get(CONF_TILT_CLOSED_POSITION),
self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
if self._tilt_optimistic:
self._tilt_value = self._config.get(CONF_TILT_CLOSED_POSITION)
self.async_schedule_update_ha_state()
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
if ATTR_TILT_POSITION not in kwargs:
return
position = float(kwargs[ATTR_TILT_POSITION])
# The position needs to be between min and max
level = self.find_in_range_from_percent(position)
mqtt.async_publish(self.hass,
self._config.get(CONF_TILT_COMMAND_TOPIC),
level,
self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
set_position_template = self._config.get(CONF_SET_POSITION_TEMPLATE)
if ATTR_POSITION in kwargs:
position = kwargs[ATTR_POSITION]
percentage_position = position
if set_position_template is not None:
try:
position = set_position_template.async_render(
**kwargs)
except TemplateError as ex:
_LOGGER.error(ex)
self._state = None
elif (self._config.get(CONF_POSITION_OPEN) != 100 and
self._config.get(CONF_POSITION_CLOSED) != 0):
position = self.find_in_range_from_percent(
position, COVER_PAYLOAD)
mqtt.async_publish(self.hass,
self._config.get(CONF_SET_POSITION_TOPIC),
position,
self._config.get(CONF_QOS),
self._config.get(CONF_RETAIN))
if self._optimistic:
self._state = percentage_position == \
self._config.get(CONF_POSITION_CLOSED)
self._position = percentage_position
self.async_schedule_update_ha_state()
def find_percentage_in_range(self, position, range_type=TILT_PAYLOAD):
"""Find the 0-100% value within the specified range."""
# the range of motion as defined by the min max values
if range_type == COVER_PAYLOAD:
max_range = self._config.get(CONF_POSITION_OPEN)
min_range = self._config.get(CONF_POSITION_CLOSED)
else:
max_range = self._config.get(CONF_TILT_MAX)
min_range = self._config.get(CONF_TILT_MIN)
current_range = max_range - min_range
# offset to be zero based
offset_position = position - min_range
position_percentage = round(
float(offset_position) / current_range * 100.0)
max_percent = 100
min_percent = 0
position_percentage = min(max(position_percentage, min_percent),
max_percent)
if range_type == TILT_PAYLOAD and \
self._config.get(CONF_TILT_INVERT_STATE):
return 100 - position_percentage
return position_percentage
def find_in_range_from_percent(self, percentage, range_type=TILT_PAYLOAD):
"""
Find the adjusted value for 0-100% within the specified range.
        For example, if the range is 80-180 and the percentage is 90,
        this method determines the value to send on the topic by
        offsetting the min and max, applying the percentage to the range
        and adding the offset back.
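
        Worked example (illustrative numbers): with tilt_min=80, tilt_max=180
        and percentage=90, offset=80 and current_range=100, so
        position = round(100 * 0.90) + 80 = 170; with tilt_invert_state the
        result becomes 180 - 170 + 80 = 90.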
"""
if range_type == COVER_PAYLOAD:
max_range = self._config.get(CONF_POSITION_OPEN)
min_range = self._config.get(CONF_POSITION_CLOSED)
else:
max_range = self._config.get(CONF_TILT_MAX)
min_range = self._config.get(CONF_TILT_MIN)
offset = min_range
current_range = max_range - min_range
position = round(current_range * (percentage / 100.0))
position += offset
if range_type == TILT_PAYLOAD and \
self._config.get(CONF_TILT_INVERT_STATE):
position = max_range - position + offset
return position
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
| apache-2.0 |
jckarter/swift | benchmark/scripts/test_utils.py | 16 | 3878 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# ===--- test_utils.py ---------------------------------------------------===//
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ===---------------------------------------------------------------------===//
"""
Homegrown unit testing helpers.
The issue is that the unittest.mock was added in Python 3.3 and we need to run
on Python 2.7.
`Stub` and `Mock` are very rudimentary and support only the limited subset of
common unit testing patterns that is used in this project.
"""
import logging
import sys
try:
from StringIO import StringIO # for Python 2
except ImportError:
from io import StringIO # for Python 3
from contextlib import contextmanager
@contextmanager
def captured_output():
"""Capture stdout and stderr and return their output as string buffers."""
new_out, new_err = StringIO(), StringIO()
old_out, old_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = new_out, new_err
yield sys.stdout, sys.stderr
finally:
sys.stdout, sys.stderr = old_out, old_err
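# Typical use of captured_output (illustrative):
#
#     with captured_output() as (out, err):
#         print("hello")
#     assert out.getvalue() == "hello\n"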
class Stub:
"""Object literal stub for value objects."""
def __init__(self, **attributes):
"""All named parameters will create properties on this object."""
self.__dict__.update(attributes)
class Mock(object):
"""Minimal infrastructure for manually mocking calls to single method.
Classes inheriting from Mock are expected to create their own mock of the
tested method with appropriate signature, which appends the call arguments
as tuple to the `calls` list and returns the canned response retrieved from
the `respond` dictionary.
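
    A minimal illustration of the pattern (hypothetical subclass and canned
    response, not part of the original tests):

        class RunnerMock(Mock):
            def run(self, cmd):
                self.calls.append((cmd,))
                return self.respond[(cmd,)]

        runner = RunnerMock(responses=[(('compile',), 'ok')])
        assert runner.run('compile') == 'ok'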
"""
    def __init__(self, responses=None):
        """Optionally initialized with a list of expected calls. See expect."""
self.calls = []
self.expected = []
self.respond = dict()
responses = responses or []
for call_args, response in responses:
self.expect(call_args, response)
def expect(self, call_args, response):
"""Expect invocation of tested method with given arguments.
        Stores the canned response in the `respond` dictionary.
"""
call_args = tuple(call_args)
self.expected.append(call_args)
self.respond[call_args] = response
def assert_called_with(self, expected_args):
"""Verify that the tested method was called with provided arguments."""
expected_args = tuple(expected_args)
assert expected_args in self.calls, "Expected: {0} in Called: {1}".format(
expected_args, self.calls
)
    def assert_called_all_expected(self):
        """Verify that all expected invocations of the tested method were called."""
assert self.calls == self.expected, "\nExpected: {0}, \n Called: {1}".format(
self.expected, self.calls
)
class MockLoggingHandler(logging.Handler):
"""Mock logging handler to check for expected logs."""
def __init__(self, *args, **kwargs):
"""Prepare the logger for recording messages for each log level."""
self.reset()
super(MockLoggingHandler, self).__init__(*args, **kwargs)
def emit(self, record):
"""Store the message in list for the given log level."""
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
"""Clear all log messages."""
self.messages = {
"debug": [],
"info": [],
"warning": [],
"error": [],
"critical": [],
}
| apache-2.0 |
vipul-sharma20/oh-mainline | vendor/packages/gdata/src/atom/http.py | 23 | 12907 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""HttpClients in this module use httplib to make HTTP requests.
This module makes HTTP requests based on httplib, but there are environments
in which an httplib based approach will not work (if running in Google App
Engine for example). In those cases, higher level classes (like AtomService
and GDataService) can swap out the HttpClient to transparently use a
different mechanism for making HTTP requests.
HttpClient: Contains a request method which performs an HTTP call to the
server.
ProxiedHttpClient: Contains a request method which connects to a proxy using
settings stored in operating system environment variables then
performs an HTTP call to the endpoint server.
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import types
import os
import httplib
import atom.url
import atom.http_interface
import socket
import base64
import atom.http_core
ssl_imported = False
ssl = None
try:
import ssl
ssl_imported = True
except ImportError:
pass
class ProxyError(atom.http_interface.Error):
pass
class TestConfigurationError(Exception):
pass
DEFAULT_CONTENT_TYPE = 'application/atom+xml'
class HttpClient(atom.http_interface.GenericHttpClient):
# Added to allow old v1 HttpClient objects to use the new
# http_code.HttpClient. Used in unit tests to inject a mock client.
v2_http_client = None
def __init__(self, headers=None):
self.debug = False
self.headers = headers or {}
def request(self, operation, url, data=None, headers=None):
"""Performs an HTTP call to the server, supports GET, POST, PUT, and
DELETE.
Usage example, perform and HTTP GET on http://www.google.com/:
import atom.http
client = atom.http.HttpClient()
http_response = client.request('GET', 'http://www.google.com/')
Args:
operation: str The HTTP operation to be performed. This is usually one
of 'GET', 'POST', 'PUT', or 'DELETE'
data: filestream, list of parts, or other object which can be converted
to a string. Should be set to None when performing a GET or DELETE.
If data is a file-like object which can be read, this method will
read a chunk of 100K bytes at a time and send them.
If the data is a list of parts to be sent, each part will be
evaluated and sent.
url: The full URL to which the request should be sent. Can be a string
or atom.url.Url.
headers: dict of strings. HTTP headers which should be sent
in the request.
"""
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
# If the list of headers does not include a Content-Length, attempt to
# calculate it based on the data object.
if data and 'Content-Length' not in all_headers:
if isinstance(data, types.StringTypes):
all_headers['Content-Length'] = str(len(data))
else:
raise atom.http_interface.ContentLengthRequired('Unable to calculate '
'the length of the data parameter. Specify a value for '
'Content-Length')
# Set the content type to the default value if none was set.
if 'Content-Type' not in all_headers:
all_headers['Content-Type'] = DEFAULT_CONTENT_TYPE
if self.v2_http_client is not None:
http_request = atom.http_core.HttpRequest(method=operation)
atom.http_core.Uri.parse_uri(str(url)).modify_request(http_request)
http_request.headers = all_headers
if data:
http_request._body_parts.append(data)
return self.v2_http_client.request(http_request=http_request)
if not isinstance(url, atom.url.Url):
if isinstance(url, types.StringTypes):
url = atom.url.parse_url(url)
else:
raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
'parameter because it was not a string or atom.url.Url')
connection = self._prepare_connection(url, all_headers)
if self.debug:
connection.debuglevel = 1
connection.putrequest(operation, self._get_access_url(url),
skip_host=True)
if url.port is not None:
connection.putheader('Host', '%s:%s' % (url.host, url.port))
else:
connection.putheader('Host', url.host)
# Overcome a bug in Python 2.4 and 2.5
# httplib.HTTPConnection.putrequest adding
# HTTP request header 'Host: www.google.com:443' instead of
    # 'Host: www.google.com', thus resulting in the error message
# 'Token invalid - AuthSub token has wrong scope' in the HTTP response.
if (url.protocol == 'https' and int(url.port or 443) == 443 and
hasattr(connection, '_buffer') and
isinstance(connection._buffer, list)):
header_line = 'Host: %s:443' % url.host
replacement_header_line = 'Host: %s' % url.host
try:
connection._buffer[connection._buffer.index(header_line)] = (
replacement_header_line)
except ValueError: # header_line missing from connection._buffer
pass
# Send the HTTP headers.
for header_name in all_headers:
connection.putheader(header_name, all_headers[header_name])
connection.endheaders()
# If there is data, send it in the request.
if data:
if isinstance(data, list):
for data_part in data:
_send_data_part(data_part, connection)
else:
_send_data_part(data, connection)
# Return the HTTP Response from the server.
return connection.getresponse()
def _prepare_connection(self, url, headers):
if not isinstance(url, atom.url.Url):
if isinstance(url, types.StringTypes):
url = atom.url.parse_url(url)
else:
raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
'parameter because it was not a string or atom.url.Url')
if url.protocol == 'https':
if not url.port:
return httplib.HTTPSConnection(url.host)
return httplib.HTTPSConnection(url.host, int(url.port))
else:
if not url.port:
return httplib.HTTPConnection(url.host)
return httplib.HTTPConnection(url.host, int(url.port))
def _get_access_url(self, url):
return url.to_string()
class ProxiedHttpClient(HttpClient):
"""Performs an HTTP request through a proxy.
The proxy settings are obtained from enviroment variables. The URL of the
proxy server is assumed to be stored in the environment variables
'https_proxy' and 'http_proxy' respectively. If the proxy server requires
a Basic Auth authorization header, the username and password are expected to
be in the 'proxy-username' or 'proxy_username' variable and the
'proxy-password' or 'proxy_password' variable, or in 'http_proxy' or
'https_proxy' as "protocol://[username:password@]host:port".
After connecting to the proxy server, the request is completed as in
HttpClient.request.
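
  Illustrative settings (hypothetical host and credentials):
    http_proxy="http://proxy.example.com:8080"
    https_proxy="https://alice:secret@proxy.example.com:8080"
  or, equivalently, plain host:port URLs combined with the separate
  proxy-username/proxy_username and proxy-password/proxy_password variables.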
"""
def _prepare_connection(self, url, headers):
proxy_settings = os.environ.get('%s_proxy' % url.protocol)
if not proxy_settings:
# The request was HTTP or HTTPS, but there was no appropriate proxy set.
return HttpClient._prepare_connection(self, url, headers)
else:
print '!!!!%s' % proxy_settings
proxy_auth = _get_proxy_auth(proxy_settings)
proxy_netloc = _get_proxy_net_location(proxy_settings)
print '!!!!%s' % proxy_auth
print '!!!!%s' % proxy_netloc
if url.protocol == 'https':
# Set any proxy auth headers
if proxy_auth:
proxy_auth = 'Proxy-authorization: %s' % proxy_auth
# Construct the proxy connect command.
port = url.port
if not port:
port = '443'
proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (url.host, port)
# Set the user agent to send to the proxy
if headers and 'User-Agent' in headers:
user_agent = 'User-Agent: %s\r\n' % (headers['User-Agent'])
else:
user_agent = 'User-Agent: python\r\n'
proxy_pieces = '%s%s%s\r\n' % (proxy_connect, proxy_auth, user_agent)
# Find the proxy host and port.
proxy_url = atom.url.parse_url(proxy_netloc)
if not proxy_url.port:
proxy_url.port = '80'
# Connect to the proxy server, very simple recv and error checking
p_sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
p_sock.connect((proxy_url.host, int(proxy_url.port)))
p_sock.sendall(proxy_pieces)
response = ''
# Wait for the full response.
while response.find("\r\n\r\n") == -1:
response += p_sock.recv(8192)
p_status = response.split()[1]
if p_status != str(200):
raise ProxyError('Error status=%s' % str(p_status))
# Trivial setup for ssl socket.
sslobj = None
if ssl_imported:
sslobj = ssl.wrap_socket(p_sock, None, None)
else:
sock_ssl = socket.ssl(p_sock, None, None)
sslobj = httplib.FakeSocket(p_sock, sock_ssl)
        # Initialize httplib and replace with the proxy socket.
connection = httplib.HTTPConnection(proxy_url.host)
connection.sock = sslobj
return connection
else:
# If protocol was not https.
# Find the proxy host and port.
proxy_url = atom.url.parse_url(proxy_netloc)
if not proxy_url.port:
proxy_url.port = '80'
if proxy_auth:
headers['Proxy-Authorization'] = proxy_auth.strip()
return httplib.HTTPConnection(proxy_url.host, int(proxy_url.port))
def _get_access_url(self, url):
return url.to_string()
def _get_proxy_auth(proxy_settings):
"""Returns proxy authentication string for header.
Will check environment variables for proxy authentication info, starting with
proxy(_/-)username and proxy(_/-)password before checking the given
proxy_settings for a [protocol://]username:password@host[:port] string.
Args:
proxy_settings: String from http_proxy or https_proxy environment variable.
Returns:
Authentication string for proxy, or empty string if no proxy username was
found.
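
  Illustrative behaviour (hypothetical settings): for
  proxy_settings = 'http://alice:secret@proxy.example.com:8080' and no
  proxy username/password environment variables set, this returns
  'Basic YWxpY2U6c2VjcmV0\r\n' (base64 of 'alice:secret').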
"""
proxy_username = None
proxy_password = None
proxy_username = os.environ.get('proxy-username')
if not proxy_username:
proxy_username = os.environ.get('proxy_username')
proxy_password = os.environ.get('proxy-password')
if not proxy_password:
proxy_password = os.environ.get('proxy_password')
if not proxy_username:
if '@' in proxy_settings:
protocol_and_proxy_auth = proxy_settings.split('@')[0].split(':')
if len(protocol_and_proxy_auth) == 3:
# 3 elements means we have [<protocol>, //<user>, <password>]
proxy_username = protocol_and_proxy_auth[1].lstrip('/')
proxy_password = protocol_and_proxy_auth[2]
elif len(protocol_and_proxy_auth) == 2:
# 2 elements means we have [<user>, <password>]
proxy_username = protocol_and_proxy_auth[0]
proxy_password = protocol_and_proxy_auth[1]
if proxy_username:
user_auth = base64.encodestring('%s:%s' % (proxy_username,
proxy_password))
return 'Basic %s\r\n' % (user_auth.strip())
else:
return ''
def _get_proxy_net_location(proxy_settings):
"""Returns proxy host and port.
Args:
proxy_settings: String from http_proxy or https_proxy environment variable.
Must be in the form of protocol://[username:password@]host:port
Returns:
String in the form of protocol://host:port
"""
if '@' in proxy_settings:
protocol = proxy_settings.split(':')[0]
netloc = proxy_settings.split('@')[1]
return '%s://%s' % (protocol, netloc)
else:
return proxy_settings
def _send_data_part(data, connection):
if isinstance(data, types.StringTypes):
connection.send(data)
return
# Check to see if data is a file-like object that has a read method.
elif hasattr(data, 'read'):
# Read the file and send it a chunk at a time.
while 1:
binarydata = data.read(100000)
if binarydata == '': break
connection.send(binarydata)
return
else:
# The data object was not a file.
# Try to convert to a string and send the data.
connection.send(str(data))
return
| agpl-3.0 |
Nikea/VisTrails | vistrails/core/repository/poster/streaminghttp.py | 2 | 11131 | ###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
# Copyright (c) 2010 Chris AtLee
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Streaming HTTP uploads module.
This module extends the standard httplib and urllib2 objects so that
iterable objects can be used in the body of HTTP requests.
In most cases all one should have to do is call :func:`register_openers()`
to register the new streaming http handlers which will take priority over
the default handlers, and then you can use iterable objects in the body
of HTTP requests.
**N.B.** You must specify a Content-Length header if using an iterable object
since there is no way to determine in advance the total size that will be
yielded, and there is no way to reset an iterator.
Example usage:
>>> from StringIO import StringIO
>>> import urllib2, poster.streaminghttp
>>> opener = poster.streaminghttp.register_openers()
>>> s = "Test file data"
>>> f = StringIO(s)
>>> req = urllib2.Request("http://localhost:5000", f,
... {'Content-Length': str(len(s))})
"""
import httplib, urllib2, socket
from httplib import NotConnected
__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
'StreamingHTTPHandler', 'register_openers']
if hasattr(httplib, 'HTTPS'):
__all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
class _StreamingHTTPMixin(object):
"""Mixin class for HTTP and HTTPS connections that implements a streaming
send method."""
def send(self, value):
"""Send ``value`` to the server.
``value`` can be a string object, a file-like object that supports
a .read() method, or an iterable object that supports a .next()
method.
"""
# Based on python 2.6's httplib.HTTPConnection.send()
if self.sock is None:
if self.auto_open:
self.connect()
else:
raise NotConnected()
# send the data to the server. if we get a broken pipe, then close
# the socket. we want to reconnect when somebody tries to send again.
#
# NOTE: we DO propagate the error, though, because we cannot simply
# ignore the error... the caller will know if they can retry.
if self.debuglevel > 0:
print "send:", repr(value)
try:
blocksize = 8192
if hasattr(value, 'read') :
if hasattr(value, 'seek'):
value.seek(0)
if self.debuglevel > 0:
print "sendIng a read()able"
data = value.read(blocksize)
while data:
self.sock.sendall(data)
data = value.read(blocksize)
elif hasattr(value, 'next'):
if hasattr(value, 'reset'):
value.reset()
if self.debuglevel > 0:
print "sendIng an iterable"
for data in value:
self.sock.sendall(data)
else:
self.sock.sendall(value)
except socket.error, v:
if v[0] == 32: # Broken pipe
self.close()
raise
class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
"""Subclass of `httplib.HTTPConnection` that overrides the `send()` method
to support iterable body objects"""
class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
"""Subclass of `urllib2.HTTPRedirectHandler` that overrides the
`redirect_request` method to properly handle redirected POST requests
This class is required because python 2.5's HTTPRedirectHandler does
not remove the Content-Type or Content-Length headers when requesting
    the new resource, even though the body of the original request is not preserved.
"""
handler_order = urllib2.HTTPRedirectHandler.handler_order - 1
# From python2.6 urllib2's HTTPRedirectHandler
def redirect_request(self, req, fp, code, msg, headers, newurl):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a
redirection response is received. If a redirection should
take place, return a new Request to allow http_error_30x to
perform the redirect. Otherwise, raise HTTPError if no-one
else should try to handle this url. Return None if you can't
but another Handler might.
"""
m = req.get_method()
if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
or code in (301, 302, 303) and m == "POST"):
# Strictly (according to RFC 2616), 301 or 302 in response
# to a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we
# do the same.
# be conciliant with URIs containing a space
newurl = newurl.replace(' ', '%20')
newheaders = dict((k, v) for k, v in req.headers.items()
if k.lower() not in (
"content-length", "content-type")
)
return urllib2.Request(newurl,
headers=newheaders,
origin_req_host=req.get_origin_req_host(),
unverifiable=True)
else:
raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
class StreamingHTTPHandler(urllib2.HTTPHandler):
"""Subclass of `urllib2.HTTPHandler` that uses
StreamingHTTPConnection as its http connection class."""
handler_order = urllib2.HTTPHandler.handler_order - 1
def http_open(self, req):
"""Open a StreamingHTTPConnection for the given request"""
return self.do_open(StreamingHTTPConnection, req)
def http_request(self, req):
"""Handle a HTTP request. Make sure that Content-Length is specified
if we're using an interable value"""
# Make sure that if we're using an iterable object as the request
# body, that we've also specified Content-Length
if req.has_data():
data = req.get_data()
if hasattr(data, 'read') or hasattr(data, 'next'):
if not req.has_header('Content-length'):
raise ValueError(
"No Content-Length specified for iterable body")
return urllib2.HTTPHandler.do_request_(self, req)
if hasattr(httplib, 'HTTPS'):
class StreamingHTTPSConnection(_StreamingHTTPMixin,
httplib.HTTPSConnection):
"""Subclass of `httplib.HTTSConnection` that overrides the `send()`
method to support iterable body objects"""
class StreamingHTTPSHandler(urllib2.HTTPSHandler):
"""Subclass of `urllib2.HTTPSHandler` that uses
StreamingHTTPSConnection as its http connection class."""
handler_order = urllib2.HTTPSHandler.handler_order - 1
def https_open(self, req):
return self.do_open(StreamingHTTPSConnection, req)
def https_request(self, req):
# Make sure that if we're using an iterable object as the request
# body, that we've also specified Content-Length
if req.has_data():
data = req.get_data()
if hasattr(data, 'read') or hasattr(data, 'next'):
if not req.has_header('Content-length'):
raise ValueError(
"No Content-Length specified for iterable body")
return urllib2.HTTPSHandler.do_request_(self, req)
def register_openers(cookiejar=None):
"""Register the streaming http handlers in the global urllib2 default
opener object.
Returns the created OpenerDirector object."""
handlers = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
if hasattr(httplib, "HTTPS"):
handlers.append(StreamingHTTPSHandler)
if cookiejar:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar), *handlers)
else:
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
return opener
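# Added usage sketch (not part of the upstream poster module): the calling
# pattern for streaming an iterable request body.  The URL and payload are
# hypothetical; note the explicit Content-Length required by the module
# docstring above.
def _example_streaming_post():
    opener = register_openers()
    chunks = ['part one ', 'part two']
    headers = {'Content-Length': str(sum(len(c) for c in chunks))}
    request = urllib2.Request('http://localhost:5000/upload',
                              iter(chunks), headers)
    return opener.open(request)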
| bsd-3-clause |
dvitme/odoo-addons | account_analytic_and_plans/__openerp__.py | 1 | 1492 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Account Analytic and Plans",
"version": "1.0",
"description": """
Account Analytic And Plans
==========================
""",
"author": "ADHOC SA",
"website": "www.adhoc.com.ar",
"category": "Account",
"depends": [
'sale_analytic_plans', 'purchase_analytic_plans'
],
"data": [
'invoice_view.xml',
'account_view.xml',
],
"demo_xml": [],
"active": False,
"installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
furf/pledge_service | lib/validictory/tests/test_schema_schema.py | 40 | 3783 | from unittest import TestCase
import validictory
schema = {
"$schema": "http://json-schema.org/draft-03/schema#",
"id": "http://json-schema.org/draft-03/schema#",
"type": "object",
"properties": {
"type": {
"type": ["string", "array"],
"items": {
"type": ["string", {"$ref": "#"}]
},
"uniqueItems": True,
"default": "any"
},
"properties": {
"type": "object",
"additionalProperties": {"$ref": "#"},
"default": {}
},
"patternProperties": {
"type": "object",
"additionalProperties": {"$ref": "#"},
"default": {}
},
"additionalProperties": {
"type": [{"$ref": "#"}, "boolean"],
"default": {}
},
"items": {
"type": [{"$ref": "#"}, "array"],
"items": {"$ref": "#"},
"default": {}
},
"additionalItems": {
"type": [{"$ref": "#"}, "boolean"],
"default": {}
},
"required": {
"type": "boolean",
"default": False
},
"dependencies": {
"type": "object",
"additionalProperties": {
"type": ["string", "array", {"$ref": "#"}],
"items": {
"type": "string"
}
},
"default": {}
},
"minimum": {
"type": "number"
},
"maximum": {
"type": "number"
},
"exclusiveMinimum": {
"type": "boolean",
"default": False
},
"exclusiveMaximum": {
"type": "boolean",
"default": False
},
"minItems": {
"type": "integer",
"minimum": 0,
"default": 0
},
"maxItems": {
"type": "integer",
"minimum": 0
},
"uniqueItems": {
"type": "boolean",
"default": False
},
"pattern": {
"type": "string",
"format": "regex"
},
"minLength": {
"type": "integer",
"minimum": 0,
"default": 0
},
"maxLength": {
"type": "integer"
},
"enum": {
"type": "array",
"minItems": 1,
"uniqueItems": True
},
"default": {
"type": "any"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"format": {
"type": "string"
},
"divisibleBy": {
"type": "number",
"minimum": 0,
"exclusiveMinimum": True,
"default": 1
},
"disallow": {
"type": ["string", "array"],
"items": {
"type": ["string", {"$ref": "#"}]
},
"uniqueItems": True
},
"extends": {
"type": [{"$ref": "#"}, "array"],
"items": {"$ref": "#"},
"default": {}
},
"id": {
"type": "string",
"format": "uri"
},
"$ref": {
"type": "string",
"format": "uri"
},
"$schema": {
"type": "string",
"format": "uri"
}
},
"dependencies": {
"exclusiveMinimum": "minimum",
"exclusiveMaximum": "maximum"
},
"default": {}
}
class TestSchemaSchema(TestCase):
def test_schema(self):
validictory.validate(schema, schema, required_by_default=False)
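    def test_small_schema_against_meta_schema(self):
        # Added illustration (not part of the upstream test suite): the same
        # meta-schema can be used to vet a hand-written schema.  The small
        # schema below is hypothetical and only shows the calling pattern.
        small_schema = {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer", "minimum": 0},
            },
        }
        validictory.validate(small_schema, schema, required_by_default=False)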
| apache-2.0 |
nick-thompson/servo | components/script/dom/bindings/codegen/parser/tests/test_enum.py | 134 | 2866 | import WebIDL
def WebIDLTest(parser, harness):
parser.parse("""
enum TestEnum {
"",
"foo",
"bar"
};
interface TestEnumInterface {
TestEnum doFoo(boolean arg);
readonly attribute TestEnum foo;
};
""")
results = parser.finish()
harness.ok(True, "TestEnumInterfaces interface parsed without error.")
    harness.check(len(results), 2, "Should be two productions")
harness.ok(isinstance(results[0], WebIDL.IDLEnum),
"Should be an IDLEnum")
harness.ok(isinstance(results[1], WebIDL.IDLInterface),
"Should be an IDLInterface")
enum = results[0]
harness.check(enum.identifier.QName(), "::TestEnum", "Enum has the right QName")
harness.check(enum.identifier.name, "TestEnum", "Enum has the right name")
harness.check(enum.values(), ["", "foo", "bar"], "Enum has the right values")
iface = results[1]
harness.check(iface.identifier.QName(), "::TestEnumInterface", "Interface has the right QName")
harness.check(iface.identifier.name, "TestEnumInterface", "Interface has the right name")
harness.check(iface.parent, None, "Interface has no parent")
members = iface.members
    harness.check(len(members), 2, "Should be two members")
harness.ok(isinstance(members[0], WebIDL.IDLMethod),
"Should be an IDLMethod")
method = members[0]
harness.check(method.identifier.QName(), "::TestEnumInterface::doFoo",
"Method has correct QName")
harness.check(method.identifier.name, "doFoo", "Method has correct name")
signatures = method.signatures()
harness.check(len(signatures), 1, "Expect one signature")
(returnType, arguments) = signatures[0]
harness.check(str(returnType), "TestEnum (Wrapper)", "Method type is the correct name")
harness.check(len(arguments), 1, "Method has the right number of arguments")
arg = arguments[0]
harness.ok(isinstance(arg, WebIDL.IDLArgument), "Should be an IDLArgument")
harness.check(str(arg.type), "Boolean", "Argument has the right type")
attr = members[1]
harness.check(attr.identifier.QName(), "::TestEnumInterface::foo",
"Attr has correct QName")
harness.check(attr.identifier.name, "foo", "Attr has correct name")
harness.check(str(attr.type), "TestEnum (Wrapper)", "Attr type is the correct name")
# Now reset our parser
parser = parser.reset()
threw = False
try:
parser.parse("""
enum Enum {
"a",
"b",
"c"
};
interface TestInterface {
void foo(optional Enum e = "d");
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow a bogus default value for an enum")
| mpl-2.0 |
tafaRU/odoo | addons/l10n_ar/__init__.py | 2120 | 1456 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
manipopopo/tensorflow | tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py | 3 | 33533 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(josh11b): Forked from contrib/eager/python to test OptimizerV2 the same way
# OptimizerV1 is tested. This file should be removed once the fork is resolved.
import functools
import os
import six
from tensorflow.contrib.optimizer_v2 import adam
from tensorflow.python.client import session as session_lib
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import template
from tensorflow.python.ops import variable_scope
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import saver as core_saver
from tensorflow.python.training import training_util
from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util
class NonLayerCheckpointable(tracking.Checkpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
self.a_variable = util.add_variable(
self, name="a_variable", shape=[])
# pylint: disable=not-callable
class MyModel(training.Model):
"""A concrete Model for testing."""
def __init__(self):
super(MyModel, self).__init__()
self._named_dense = core.Dense(1, use_bias=True)
self._second = core.Dense(1, use_bias=False)
# We can still track Checkpointables which aren't Layers.
self._non_layer = NonLayerCheckpointable()
def call(self, values):
ret = self._second(self._named_dense(values))
return ret
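# Added sketch (not part of the original test file): the core save/restore
# pattern exercised by the tests below, reduced to an eager-mode round trip.
# The checkpoint prefix is supplied by the caller.
def _example_checkpoint_round_trip(checkpoint_prefix):
  model = MyModel()
  optimizer = adam.AdamOptimizer(0.001)
  root = util.Checkpoint(optimizer=optimizer, model=model)
  # Running one step creates the model and slot variables before saving.
  optimizer.minimize(lambda: model(constant_op.constant([[3.]])))
  save_path = root.save(file_prefix=checkpoint_prefix)
  return root.restore(save_path=save_path).assert_consumed()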
class _MirroringSaveable(
core_saver.BaseSaverBuilder.ResourceVariableSaveable):
def __init__(self, primary_variable, mirrored_variable, name):
self._primary_variable = primary_variable
self._mirrored_variable = mirrored_variable
super(_MirroringSaveable, self).__init__(
self._primary_variable, "", name)
def restore(self, restored_tensors, restored_shapes):
"""Restore the same value into both variables."""
tensor, = restored_tensors
return control_flow_ops.group(
self._primary_variable.assign(tensor),
self._mirrored_variable.assign(tensor))
class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testNamingWithOptimizer(self):
input_value = constant_op.constant([[3.]])
model = MyModel()
# A nuisance Model using the same optimizer. Its slot variables should not
# go in the checkpoint, since it is never depended on.
other_model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
optimizer_step = training_util.get_or_create_global_step()
root_checkpointable = util.Checkpoint(
optimizer=optimizer, model=model, optimizer_step=optimizer_step)
if context.executing_eagerly():
optimizer.minimize(
lambda: model(input_value),
global_step=optimizer_step)
optimizer.minimize(
lambda: other_model(input_value),
global_step=optimizer_step)
else:
train_op = optimizer.minimize(
model(input_value), global_step=optimizer_step)
optimizer.minimize(
other_model(input_value),
global_step=optimizer_step)
self.evaluate(util.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
named_variables, serialized_graph, _ = (
util._serialize_object_graph(
root_checkpointable, saveables_cache=None))
expected_checkpoint_names = (
# Created in the root node, so no prefix.
"optimizer_step",
"model/_second/kernel",
"model/_named_dense/kernel",
"model/_named_dense/bias",
# non-Layer dependency of the model
"model/_non_layer/a_variable",
# The optimizer creates two non-slot variables
"optimizer/beta1_power",
"optimizer/beta2_power",
# Slot variables
"model/_second/kernel/.OPTIMIZER_SLOT/optimizer/m",
"model/_second/kernel/.OPTIMIZER_SLOT/optimizer/v",
"model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m",
"model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/v",
"model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/m",
"model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/v",
)
suffix = "/.ATTRIBUTES/VARIABLE_VALUE"
expected_checkpoint_names = [
name + suffix for name in expected_checkpoint_names]
# The Dense layers also save get_config() JSON
expected_checkpoint_names.extend(
["model/_second/.ATTRIBUTES/OBJECT_CONFIG_JSON",
"model/_named_dense/.ATTRIBUTES/OBJECT_CONFIG_JSON"])
named_variables = {v.name: v for v in named_variables}
six.assertCountEqual(self, expected_checkpoint_names,
named_variables.keys())
# Check that we've mapped to the right variable objects (not exhaustive)
self.assertEqual(
"global_step",
named_variables["optimizer_step" + suffix].full_name)
self.assertEqual(
"my_model/dense_1/kernel",
named_variables["model/_second/kernel" + suffix].full_name)
self.assertEqual(
"my_model/dense/kernel",
named_variables["model/_named_dense/kernel" + suffix].full_name)
self.assertEqual(
"beta1_power",
named_variables["optimizer/beta1_power" + suffix].full_name)
self.assertEqual(
"beta2_power",
named_variables["optimizer/beta2_power" + suffix].full_name)
# Spot check the generated protocol buffers.
self.assertEqual("optimizer",
serialized_graph.nodes[0].children[1].local_name)
optimizer_node = serialized_graph.nodes[serialized_graph.nodes[0].children[
1].node_id]
self.assertEqual("beta1_power",
optimizer_node.children[0].local_name)
self.assertEqual("beta1_power",
serialized_graph.nodes[optimizer_node.children[0].node_id]
.attributes[0].full_name)
self.assertEqual(
"my_model/dense/kernel",
serialized_graph.nodes[optimizer_node.slot_variables[0]
.original_variable_node_id]
.attributes[0].full_name)
# We strip off the :0 suffix, as variable.name-based saving does.
self.assertEqual(
"my_model/dense/kernel/Adam",
serialized_graph.nodes[optimizer_node.slot_variables[0]
.slot_variable_node_id]
.attributes[0].full_name)
self.assertEqual(
"my_model/dense/kernel/Adam:0",
optimizer.get_slot(
var=model._named_dense.kernel,
name="m").name)
self.assertEqual(
"model/_named_dense/kernel" + suffix,
serialized_graph.nodes[
optimizer_node.slot_variables[0]
.original_variable_node_id].attributes[0].checkpoint_key)
self.assertEqual("m", optimizer_node.slot_variables[0].slot_name)
self.assertEqual(
"model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m" + suffix,
serialized_graph.nodes[
optimizer_node.slot_variables[0]
.slot_variable_node_id].attributes[0].checkpoint_key)
@test_util.run_in_graph_and_eager_modes
def testSaveRestore(self):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root_checkpointable = util.Checkpoint(
optimizer=optimizer, model=model)
input_value = constant_op.constant([[3.]])
if context.executing_eagerly():
optimizer.minimize(
lambda: model(input_value))
else:
train_op = optimizer.minimize(model(input_value))
# TODO(allenl): Make initialization more pleasant when graph building.
root_checkpointable.save_counter # pylint: disable=pointless-statement
self.evaluate(util.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
prefix = os.path.join(self.get_temp_dir(), "ckpt")
self.evaluate(state_ops.assign(model._named_dense.variables[1], [42.]))
m_bias_slot = optimizer.get_slot(model._named_dense.variables[1], "m")
self.evaluate(state_ops.assign(m_bias_slot, [1.5]))
save_path = root_checkpointable.save(file_prefix=prefix)
self.evaluate(state_ops.assign(model._named_dense.variables[1], [43.]))
self.evaluate(state_ops.assign(root_checkpointable.save_counter, 3))
optimizer_variables = self.evaluate(optimizer.variables())
self.evaluate(state_ops.assign(m_bias_slot, [-2.]))
# Immediate restoration
status = root_checkpointable.restore(save_path=save_path).assert_consumed()
status.run_restore_ops()
self.assertAllEqual([42.], self.evaluate(model._named_dense.variables[1]))
self.assertAllEqual(1, self.evaluate(root_checkpointable.save_counter))
self.assertAllEqual([1.5], self.evaluate(m_bias_slot))
if not context.executing_eagerly():
return # Restore-on-create is only supported when executing eagerly
on_create_model = MyModel()
on_create_optimizer = adam.AdamOptimizer(
0.001,
        # Preserve beta1_power and beta2_power when applying gradients so we can
# test that they've been restored correctly.
beta1=1.0, beta2=1.0)
on_create_root = util.Checkpoint(
optimizer=on_create_optimizer, model=on_create_model)
# Deferred restoration
status = on_create_root.restore(save_path=save_path)
on_create_model(constant_op.constant([[3.]])) # create variables
self.assertAllEqual(1, self.evaluate(on_create_root.save_counter))
self.assertAllEqual([42.],
self.evaluate(
on_create_model._named_dense.variables[1]))
on_create_m_bias_slot = on_create_optimizer.get_slot(
on_create_model._named_dense.variables[1], "m")
# Optimizer slot variables are created when the original variable is
# restored.
self.assertAllEqual([1.5], self.evaluate(on_create_m_bias_slot))
self.assertAllEqual(optimizer_variables[2:],
self.evaluate(on_create_optimizer.variables()))
dummy_var = resource_variable_ops.ResourceVariable([1.])
on_create_optimizer.minimize(loss=dummy_var.read_value)
status.assert_consumed()
beta1_power, beta2_power = on_create_optimizer._get_beta_accumulators()
self.assertAllEqual(optimizer_variables[0], self.evaluate(beta1_power))
self.assertAllEqual(optimizer_variables[1], self.evaluate(beta2_power))
# TODO(allenl): Debug garbage created by this test in python3.
def testDeferredRestorationUsageEager(self):
"""An idiomatic eager execution example."""
num_training_steps = 10
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = util.Checkpoint(
optimizer=optimizer, model=model,
optimizer_step=training_util.get_or_create_global_step())
root.restore(checkpoint_management.latest_checkpoint(
checkpoint_directory))
for _ in range(num_training_steps):
# TODO(allenl): Use a Dataset and serialize/checkpoint it.
input_value = constant_op.constant([[3.]])
optimizer.minimize(
lambda: model(input_value), # pylint: disable=cell-var-from-loop
global_step=root.optimizer_step)
root.save(file_prefix=checkpoint_prefix)
self.assertEqual((training_continuation + 1) * num_training_steps,
root.optimizer_step.numpy())
def testUsageGraph(self):
"""Expected usage when graph building."""
with context.graph_mode():
num_training_steps = 10
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
with ops.Graph().as_default():
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = util.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
input_value = constant_op.constant([[3.]])
train_op = optimizer.minimize(
model(input_value),
global_step=root.global_step)
checkpoint_path = checkpoint_management.latest_checkpoint(
checkpoint_directory)
with self.test_session(graph=ops.get_default_graph()) as session:
status = root.restore(save_path=checkpoint_path)
status.initialize_or_restore(session=session)
if checkpoint_path is None:
self.assertEqual(0, training_continuation)
with self.assertRaises(AssertionError):
status.assert_consumed()
else:
status.assert_consumed()
for _ in range(num_training_steps):
session.run(train_op)
root.save(file_prefix=checkpoint_prefix, session=session)
self.assertEqual((training_continuation + 1) * num_training_steps,
session.run(root.global_step))
self.assertEqual(training_continuation + 1,
session.run(root.save_counter))
@test_util.run_in_graph_and_eager_modes
def testAgnosticUsage(self):
"""Graph/eager agnostic usage."""
# Does create garbage when executing eagerly due to ops.Graph() creation.
num_training_steps = 10
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
with ops.Graph().as_default(), self.test_session(
graph=ops.get_default_graph()), test_util.device(use_gpu=True):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = util.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
checkpoint_path = checkpoint_management.latest_checkpoint(
checkpoint_directory)
status = root.restore(save_path=checkpoint_path)
input_value = constant_op.constant([[3.]])
train_fn = functools.partial(
optimizer.minimize,
functools.partial(model, input_value),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(self.evaluate, train_fn())
status.initialize_or_restore()
for _ in range(num_training_steps):
train_fn()
root.save(file_prefix=checkpoint_prefix)
self.assertEqual((training_continuation + 1) * num_training_steps,
self.evaluate(root.global_step))
self.assertEqual(training_continuation + 1,
self.evaluate(root.save_counter))
# pylint: disable=cell-var-from-loop
@test_util.run_in_graph_and_eager_modes
def testWithDefun(self):
num_training_steps = 2
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
with ops.Graph().as_default(), self.test_session(
graph=ops.get_default_graph()), test_util.device(use_gpu=True):
model = MyModel()
# Don't actually train so we can test variable values
optimizer = adam.AdamOptimizer(0.)
root = util.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
checkpoint_path = checkpoint_management.latest_checkpoint(
checkpoint_directory)
status = root.restore(save_path=checkpoint_path)
def train_fn():
@function.defun
def _call_model(x):
return model(x)
with backprop.GradientTape() as tape:
loss = _call_model(constant_op.constant([[3.]]))
gradients = tape.gradient(loss, model.variables)
return optimizer.apply_gradients(zip(gradients, model.variables),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(
self.evaluate, train_fn())
status.initialize_or_restore()
for _ in range(num_training_steps):
train_fn()
if training_continuation > 0:
status.assert_consumed()
self.assertAllClose([[42.]], self.evaluate(model.variables[0]))
else:
self.evaluate(model.variables[0].assign([[42.]]))
root.save(file_prefix=checkpoint_prefix)
self.assertEqual((training_continuation + 1) * num_training_steps,
self.evaluate(root.global_step))
self.assertEqual(training_continuation + 1,
self.evaluate(root.save_counter))
# pylint: enable=cell-var-from-loop
def testAnonymousVarsInInit(self):
class Model(training.Model):
def __init__(self):
super(Model, self).__init__()
self.w = resource_variable_ops.ResourceVariable(0.0)
self.b = resource_variable_ops.ResourceVariable(0.0)
self.vars = [self.w, self.b]
def call(self, x):
return x * self.w + self.b
with context.eager_mode():
model = Model()
optimizer = adam.AdamOptimizer(learning_rate=0.05)
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
checkpoint = util.Checkpoint(
model=model, optimizer=optimizer)
for _ in range(2):
checkpoint.save(checkpoint_prefix)
with backprop.GradientTape() as tape:
loss = (constant_op.constant(1.)
- model(constant_op.constant(1.))) ** 2
grad = tape.gradient(loss, model.vars)
optimizer.apply_gradients(
[(g, v) for g, v in zip(grad, model.vars)])
@test_util.run_in_graph_and_eager_modes
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
root = tracking.Checkpointable()
root.var = util.add_variable(
root, name="var", initializer=0.)
optimizer = adam.AdamOptimizer(0.1)
if context.executing_eagerly():
optimizer.minimize(root.var.read_value)
else:
train_op = optimizer.minimize(root.var)
# Note that `optimizer` has not been added as a dependency of
# `root`. Create a one-off grouping so that slot variables for `root.var`
# get initialized too.
self.evaluate(util.gather_initializers(
util.Checkpoint(root=root, optimizer=optimizer)))
self.evaluate(train_op)
self.evaluate(state_ops.assign(root.var, 12.))
no_slots_path = util.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "no_slots"))
root.optimizer = optimizer
self.evaluate(state_ops.assign(root.var, 13.))
self.evaluate(state_ops.assign(optimizer.get_slot(name="m", var=root.var),
14.))
slots_path = util.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
new_root = tracking.Checkpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
slot_status = util.CheckpointableSaver(
new_root).restore(slots_path)
no_slot_status = util.CheckpointableSaver(
new_root).restore(no_slots_path)
with self.assertRaises(AssertionError):
no_slot_status.assert_consumed()
new_root.var = util.add_variable(
new_root, name="var", shape=[])
no_slot_status.assert_consumed()
no_slot_status.run_restore_ops()
self.assertEqual(12., self.evaluate(new_root.var))
new_root.optimizer = adam.AdamOptimizer(0.1)
with self.assertRaisesRegexp(AssertionError, "beta1_power"):
slot_status.assert_consumed()
self.assertEqual(12., self.evaluate(new_root.var))
if context.executing_eagerly():
# Slot variables are only created with restoring initializers when
# executing eagerly.
self.assertEqual(14., self.evaluate(
new_root.optimizer.get_slot(name="m", var=new_root.var)))
else:
self.assertIs(new_root.optimizer.get_slot(name="m", var=new_root.var),
None)
if context.executing_eagerly():
new_root.optimizer.minimize(new_root.var.read_value)
else:
train_op = new_root.optimizer.minimize(new_root.var)
# The slot variable now exists; restore() didn't create it, but we should
# now have a restore op for it.
slot_status.run_restore_ops()
self.assertEqual(14., self.evaluate(
new_root.optimizer.get_slot(name="m", var=new_root.var)))
self.evaluate(train_op)
slot_status.assert_consumed()
def testManySavesGraph(self):
"""Saves after the first should not modify the graph."""
with context.graph_mode():
graph = ops.Graph()
with graph.as_default(), self.test_session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
self.evaluate(util.gather_initializers(obj))
saver = util.CheckpointableSaver(obj)
saver.save(checkpoint_prefix)
before_ops = graph.get_operations()
saver.save(checkpoint_prefix)
self.assertEqual(before_ops, graph.get_operations())
def testManyRestoresGraph(self):
"""Restores after the first should not modify the graph."""
with context.graph_mode():
graph = ops.Graph()
with graph.as_default(), self.test_session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
self.evaluate(util.gather_initializers(obj))
saver = util.CheckpointableSaver(obj)
save_path = saver.save(checkpoint_prefix)
saver.restore(save_path)
before_ops = graph.get_operations()
saver.restore(save_path)
self.assertEqual(before_ops, graph.get_operations())
def testMultipleGraphsNonSlotVariables(self):
with context.graph_mode():
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
optimizer = adam.AdamOptimizer(0.001)
# Construct a model in one graph
first_graph = ops.Graph()
first_session = session_lib.Session(graph=first_graph)
with first_graph.as_default(), first_session.as_default():
first_variable = resource_variable_ops.ResourceVariable([1.])
first_root_checkpointable = util.Checkpoint(
optimizer=optimizer, variable=first_variable)
train_op = optimizer.minimize(first_variable.read_value)
self.evaluate(util.gather_initializers(
first_root_checkpointable))
self.evaluate(train_op)
self.evaluate(first_variable.assign([1.]))
self.evaluate(optimizer.get_slot(
var=first_variable, name="m").assign([2.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(3.))
# Save and load in a second graph
second_graph = ops.Graph()
with second_graph.as_default(), session_lib.Session(graph=second_graph):
second_variable = resource_variable_ops.ResourceVariable([1.])
second_root_checkpointable = util.Checkpoint(
optimizer=optimizer, variable=second_variable)
train_op = optimizer.minimize(second_variable.read_value)
second_root_checkpointable.restore(None).initialize_or_restore()
self.evaluate(train_op)
self.evaluate(second_variable.assign([4.]))
self.evaluate(optimizer.get_slot(
var=second_variable, name="m").assign([5.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(6.))
save_path = second_root_checkpointable.save(checkpoint_prefix)
self.evaluate(second_variable.assign([7.]))
self.evaluate(optimizer.get_slot(
var=second_variable, name="m").assign([8.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.assertAllEqual(6., self.evaluate(beta1_power))
status = second_root_checkpointable.restore(save_path)
status.assert_consumed().run_restore_ops()
self.assertAllEqual([4.], self.evaluate(second_variable))
self.assertAllEqual([5.], self.evaluate(optimizer.get_slot(
var=second_variable, name="m")))
beta1_power, _ = optimizer._get_beta_accumulators()
self.assertAllEqual(6., self.evaluate(beta1_power))
# Check that the first graph is unmolested
with first_graph.as_default(), first_session.as_default():
self.assertAllEqual([1.], self.evaluate(first_variable))
self.assertAllEqual([2.], self.evaluate(optimizer.get_slot(
var=first_variable, name="m")))
beta1_power, _ = optimizer._get_beta_accumulators()
self.assertAllEqual(3., self.evaluate(beta1_power))
class TemplateTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_checkpointable_save_restore(self):
def _templated():
v = variable_scope.get_variable(
"v", shape=[1], initializer=init_ops.zeros_initializer(),
use_resource=True)
v2 = variable_scope.get_variable(
"v2", shape=[1], initializer=init_ops.zeros_initializer(),
use_resource=True)
return v, v + 1., v2
save_template = template.make_template("s1", _templated)
v1_save, _, v2_save = save_template()
optimizer = adam.AdamOptimizer(0.0)
save_root = util.Checkpoint(
my_template=save_template, optimizer=optimizer)
optimizer.minimize(v1_save.read_value)
self.evaluate([v.initializer for v in optimizer.variables()])
self.evaluate(v1_save.assign([12.]))
self.evaluate(v2_save.assign([14.]))
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = save_root.save(checkpoint_prefix)
load_template = template.make_template("s2", _templated)
load_optimizer = adam.AdamOptimizer(0.0)
load_root = util.Checkpoint(
my_template=load_template, optimizer=load_optimizer)
status = load_root.restore(save_path)
var, var_plus_one, var2 = load_template()
load_optimizer.minimize(var.read_value)
self.assertEqual(2, len(load_template._checkpoint_dependencies))
self.assertEqual("v", load_template._checkpoint_dependencies[0].name)
self.assertEqual("v2", load_template._checkpoint_dependencies[1].name)
status.assert_consumed().run_restore_ops()
self.assertAllEqual([12.], self.evaluate(var))
self.assertAllEqual([13.], self.evaluate(var_plus_one))
self.assertAllEqual([14.], self.evaluate(var2))
class CheckpointCompatibilityTests(test.TestCase):
def _initialized_model(self):
input_value = constant_op.constant([[3.]])
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
optimizer_step = training_util.get_or_create_global_step()
root_checkpointable = util.Checkpoint(
optimizer=optimizer, model=model, optimizer_step=optimizer_step)
train_op = optimizer.minimize(
functools.partial(model, input_value),
global_step=optimizer_step)
self.evaluate(util.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
# A regular variable, a slot variable, and a non-slot Optimizer variable
# with known values to check when loading.
self.evaluate(model._named_dense.bias.assign([1.]))
self.evaluate(optimizer.get_slot(
var=model._named_dense.bias, name="m").assign([2.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(3.))
return root_checkpointable
def _set_sentinels(self, root_checkpointable):
self.evaluate(root_checkpointable.model._named_dense.bias.assign([101.]))
self.evaluate(
root_checkpointable.optimizer.get_slot(
var=root_checkpointable.model._named_dense.bias, name="m")
.assign([102.]))
beta1_power, _ = root_checkpointable.optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(103.))
def _check_sentinels(self, root_checkpointable):
self.assertAllEqual(
[1.], self.evaluate(root_checkpointable.model._named_dense.bias))
self.assertAllEqual([2.], self.evaluate(
root_checkpointable.optimizer.get_slot(
var=root_checkpointable.model._named_dense.bias, name="m")))
beta1_power, _ = root_checkpointable.optimizer._get_beta_accumulators()
self.assertAllEqual(3., self.evaluate(beta1_power))
def _write_name_based_checkpoint(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
with context.graph_mode():
save_graph = ops.Graph()
with save_graph.as_default(), self.test_session(
graph=save_graph) as session:
root = self._initialized_model()
name_saver = core_saver.Saver()
return name_saver.save(
sess=session, save_path=checkpoint_prefix,
global_step=root.optimizer_step)
@test_util.run_in_graph_and_eager_modes
def testLoadFromNameBasedSaver(self):
"""Save a name-based checkpoint, load it using the object-based API."""
with test_util.device(use_gpu=True):
save_path = self._write_name_based_checkpoint()
root = self._initialized_model()
self._set_sentinels(root)
with self.assertRaises(AssertionError):
self._check_sentinels(root)
object_saver = util.CheckpointableSaver(root)
self._set_sentinels(root)
status = object_saver.restore(save_path)
if context.executing_eagerly():
self._check_sentinels(root)
if context.executing_eagerly():
with self.assertRaisesRegexp(AssertionError, "OBJECT_CONFIG_JSON"):
status.assert_consumed()
else:
# When graph building, we haven't read any keys, so we don't know
# whether the restore will be complete.
with self.assertRaisesRegexp(AssertionError, "not restored"):
status.assert_consumed()
status.run_restore_ops()
self._check_sentinels(root)
self._set_sentinels(root)
status = object_saver.restore(save_path)
status.initialize_or_restore()
self._check_sentinels(root)
# TODO(allenl): Test for the core name-based saver loading object-based
# checkpoints once object-based checkpointing is in core.
def testSaveGraphLoadEager(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
with context.graph_mode():
save_graph = ops.Graph()
with save_graph.as_default(), self.test_session(
graph=save_graph) as session:
root = self._initialized_model()
save_path = root.save(
session=session, file_prefix=checkpoint_prefix)
with context.eager_mode():
root = self._initialized_model()
self._set_sentinels(root)
root.restore(save_path).assert_consumed()
self._check_sentinels(root)
def testSaveEagerLoadGraph(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
with context.eager_mode():
root = self._initialized_model()
save_path = root.save(file_prefix=checkpoint_prefix)
with context.graph_mode():
save_graph = ops.Graph()
with save_graph.as_default(), self.test_session(
graph=save_graph):
root = self._initialized_model()
self._set_sentinels(root)
root.restore(save_path).assert_consumed().run_restore_ops()
self._check_sentinels(root)
if __name__ == "__main__":
test.main()
| apache-2.0 |
LLNL/spack | var/spack/repos/builtin/packages/py-maestrowf/package.py | 2 | 2065 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyMaestrowf(PythonPackage):
"""A general purpose workflow conductor for running multi-step
simulation studies."""
homepage = "https://github.com/LLNL/maestrowf/"
url = "https://github.com/LLNL/maestrowf/archive/v1.1.6.tar.gz"
git = "https://github.com/LLNL/maestrowf/"
maintainers = ['FrankD412']
# git branches
version('develop', branch='develop')
version('master', branch='master')
# Pre-release candidates
version('1.1.5dev', sha256='eb3d6f31c233e2cde3b84e15c657002b83ff43d4d6b218b33d023a4f527b9e08')
version('1.1.4dev1.0', sha256='67f59eed6fa69fc71b88a0a769de9f080300497d3c30d3a0893eabd0702bc48e')
version('1.1.4dev1.1', sha256='c8612b5423b44f11e2a7c4fbc31eb741013245870512ee2dbf7367024517528f')
# pypi releases
version('1.1.6', sha256='27a4ab9072c5b5e2edf91c192d9fe67f040dd45be7f3e44fd9a998ce4cb1e92d', preferred=True)
version('1.1.4', sha256='2cb0fa6f6281d8618ac79217ea5f4fd8cb24955c4315e873657f96b815f171d5')
version('1.1.2', sha256='ebb45bff54625435bc9f2462e1bdc3b5bdc4d943378c53e7810c11836794c5e0')
version('1.1.1', sha256='a476ad4b40846d7b7f9540d6413df1b42eb655735e8d3c6c07e0baa68e20a8bb')
version('1.1.0', sha256='14e701d6a10ab758215aab6b6809817d9a39416a4f477cd2f2551883fc68477b')
version('1.0.1', sha256='cdd503f0b11db9114405132274b28766044402d1183b5836406ed91d558fd06c')
depends_on('py-setuptools', type='build')
depends_on('py-pyyaml@4.2b1:', type=('build', 'run'))
depends_on('py-six', type=('build', 'run'))
depends_on('py-enum34', type=('build', 'run'), when='^python@:3.3')
depends_on('py-enum34', type=('build', 'run'), when='@:1.1.3')
depends_on('py-tabulate', type=('build', 'run'), when='@1.1.0:')
depends_on('py-filelock', type=('build', 'run'), when='@1.1.0:')
| lgpl-2.1 |
sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/pymongo/__init__.py | 10 | 2997 | # Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python driver for MongoDB."""
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`_.
.. _geospatial index: http://docs.mongodb.org/manual/core/2d/
"""
GEOHAYSTACK = "geoHaystack"
"""Index specifier for a 2-dimensional `haystack index`_.
.. versionadded:: 2.1
.. _haystack index: http://docs.mongodb.org/manual/core/geohaystack/
"""
GEOSPHERE = "2dsphere"
"""Index specifier for a `spherical geospatial index`_.
.. versionadded:: 2.5
.. note:: 2dsphere indexing requires server version **>= 2.4.0**.
.. _spherical geospatial index: http://docs.mongodb.org/manual/core/2dsphere/
"""
HASHED = "hashed"
"""Index specifier for a `hashed index`_.
.. versionadded:: 2.5
.. note:: hashed indexing requires server version **>= 2.4.0**.
.. _hashed index: http://docs.mongodb.org/manual/core/index-hashed/
"""
TEXT = "text"
"""Index specifier for a `text index`_.
.. versionadded:: 2.7.1
.. note:: text search requires server version **>= 2.4.0**.
.. _text index: http://docs.mongodb.org/manual/core/index-text/
"""
OFF = 0
"""No database profiling."""
SLOW_ONLY = 1
"""Only profile slow operations."""
ALL = 2
"""Profile all operations."""
version_tuple = (3, 0, 3)
def get_version_string():
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
"""Current version of PyMongo."""
from pymongo.collection import ReturnDocument
from pymongo.common import (MIN_SUPPORTED_WIRE_VERSION,
MAX_SUPPORTED_WIRE_VERSION)
from pymongo.cursor import CursorType
from pymongo.mongo_client import MongoClient
from pymongo.mongo_replica_set_client import MongoReplicaSetClient
from pymongo.operations import (IndexModel,
InsertOne,
DeleteOne,
DeleteMany,
UpdateOne,
UpdateMany,
ReplaceOne)
from pymongo.read_preferences import ReadPreference
from pymongo.write_concern import WriteConcern
def has_c():
"""Is the C extension installed?"""
try:
from pymongo import _cmessage
return True
except ImportError:
return False
| mit |
yasserglez/tagfs | packages/tagfs/contrib/django/core/urlresolvers.py | 4 | 14911 | """
This module converts requested URLs to callback view functions.
RegexURLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a tuple in this format:
(view_function, function_args, function_kwargs)
"""
import re
from django.http import Http404
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import iri_to_uri, force_unicode, smart_str
from django.utils.functional import memoize
from django.utils.importlib import import_module
from django.utils.regex_helper import normalize
from django.utils.thread_support import currentThread
try:
reversed
except NameError:
from django.utils.itercompat import reversed # Python 2.3 fallback
from sets import Set as set
_resolver_cache = {} # Maps URLconf modules to RegexURLResolver instances.
_callable_cache = {} # Maps view and url pattern names to their view functions.
# SCRIPT_NAME prefixes for each thread are stored here. If there's no entry for
# the current thread (which is the only one we ever access), it is assumed to
# be empty.
_prefixes = {}
class Resolver404(Http404):
pass
class NoReverseMatch(Exception):
# Don't make this raise an error when used in a template.
silent_variable_failure = True
def get_callable(lookup_view, can_fail=False):
"""
Convert a string version of a function name to the callable object.
If the lookup_view is not an import path, it is assumed to be a URL pattern
label and the original string is returned.
    If can_fail is True, lookup_view might be a URL pattern label, so errors
    during the import are silently swallowed and the original string is returned.
"""
if not callable(lookup_view):
try:
# Bail early for non-ASCII strings (they can't be functions).
lookup_view = lookup_view.encode('ascii')
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(import_module(mod_name), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
except (ImportError, AttributeError):
if not can_fail:
raise
except UnicodeEncodeError:
pass
return lookup_view
get_callable = memoize(get_callable, _callable_cache, 1)
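# Added illustration (not part of upstream Django): get_callable() imports
# dotted paths and passes other strings through.  The module path below is
# hypothetical.
#
#   get_callable('myapp.views.article_detail')      # the imported view function
#   get_callable('article-detail', can_fail=True)   # 'article-detail' unchanged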
def get_resolver(urlconf):
if urlconf is None:
from django.conf import settings
urlconf = settings.ROOT_URLCONF
return RegexURLResolver(r'^/', urlconf)
get_resolver = memoize(get_resolver, _resolver_cache, 1)
def get_mod_func(callback):
# Converts 'django.views.news.stories.story_detail' to
# ['django.views.news.stories', 'story_detail']
try:
dot = callback.rindex('.')
except ValueError:
return callback, ''
return callback[:dot], callback[dot+1:]
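# Added illustration (not part of upstream Django): with no dot in the string
# there is nothing to split, so the whole string becomes the "module" part:
#
#   get_mod_func('story_detail')  ->  ('story_detail', '')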
class RegexURLPattern(object):
def __init__(self, regex, callback, default_args=None, name=None):
# regex is a string representing a regular expression.
# callback is either a string like 'foo.views.news.stories.story_detail'
# which represents the path to a module and a view function name, or a
# callable object (view).
self.regex = re.compile(regex, re.UNICODE)
if callable(callback):
self._callback = callback
else:
self._callback = None
self._callback_str = callback
self.default_args = default_args or {}
self.name = name
def __repr__(self):
return '<%s %s %s>' % (self.__class__.__name__, self.name, self.regex.pattern)
def add_prefix(self, prefix):
"""
Adds the prefix string to a string-based callback.
"""
if not prefix or not hasattr(self, '_callback_str'):
return
self._callback_str = prefix + '.' + self._callback_str
def resolve(self, path):
match = self.regex.search(path)
if match:
# If there are any named groups, use those as kwargs, ignoring
# non-named groups. Otherwise, pass all non-named arguments as
# positional arguments.
kwargs = match.groupdict()
if kwargs:
args = ()
else:
args = match.groups()
# In both cases, pass any extra_kwargs as **kwargs.
kwargs.update(self.default_args)
return self.callback, args, kwargs
def _get_callback(self):
if self._callback is not None:
return self._callback
try:
self._callback = get_callable(self._callback_str)
except ImportError, e:
mod_name, _ = get_mod_func(self._callback_str)
raise ViewDoesNotExist, "Could not import %s. Error was: %s" % (mod_name, str(e))
except AttributeError, e:
mod_name, func_name = get_mod_func(self._callback_str)
raise ViewDoesNotExist, "Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e))
return self._callback
callback = property(_get_callback)
class RegexURLResolver(object):
def __init__(self, regex, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
# regex is a string representing a regular expression.
# urlconf_name is a string representing the module containing URLconfs.
self.regex = re.compile(regex, re.UNICODE)
self.urlconf_name = urlconf_name
if not isinstance(urlconf_name, basestring):
self._urlconf_module = self.urlconf_name
self.callback = None
self.default_kwargs = default_kwargs or {}
self.namespace = namespace
self.app_name = app_name
self._reverse_dict = None
self._namespace_dict = None
self._app_dict = None
def __repr__(self):
return '<%s %s (%s:%s) %s>' % (self.__class__.__name__, self.urlconf_name, self.app_name, self.namespace, self.regex.pattern)
def _populate(self):
lookups = MultiValueDict()
namespaces = {}
apps = {}
for pattern in reversed(self.url_patterns):
p_pattern = pattern.regex.pattern
if p_pattern.startswith('^'):
p_pattern = p_pattern[1:]
if isinstance(pattern, RegexURLResolver):
if pattern.namespace:
namespaces[pattern.namespace] = (p_pattern, pattern)
if pattern.app_name:
apps.setdefault(pattern.app_name, []).append(pattern.namespace)
else:
parent = normalize(pattern.regex.pattern)
for name in pattern.reverse_dict:
for matches, pat in pattern.reverse_dict.getlist(name):
new_matches = []
for piece, p_args in parent:
new_matches.extend([(piece + suffix, p_args + args) for (suffix, args) in matches])
lookups.appendlist(name, (new_matches, p_pattern + pat))
for namespace, (prefix, sub_pattern) in pattern.namespace_dict.items():
namespaces[namespace] = (p_pattern + prefix, sub_pattern)
for app_name, namespace_list in pattern.app_dict.items():
apps.setdefault(app_name, []).extend(namespace_list)
else:
bits = normalize(p_pattern)
lookups.appendlist(pattern.callback, (bits, p_pattern))
lookups.appendlist(pattern.name, (bits, p_pattern))
self._reverse_dict = lookups
self._namespace_dict = namespaces
self._app_dict = apps
def _get_reverse_dict(self):
if self._reverse_dict is None:
self._populate()
return self._reverse_dict
reverse_dict = property(_get_reverse_dict)
def _get_namespace_dict(self):
if self._namespace_dict is None:
self._populate()
return self._namespace_dict
namespace_dict = property(_get_namespace_dict)
def _get_app_dict(self):
if self._app_dict is None:
self._populate()
return self._app_dict
app_dict = property(_get_app_dict)
def resolve(self, path):
tried = []
match = self.regex.search(path)
if match:
new_path = path[match.end():]
for pattern in self.url_patterns:
try:
sub_match = pattern.resolve(new_path)
except Resolver404, e:
sub_tried = e.args[0].get('tried')
if sub_tried is not None:
tried.extend([(pattern.regex.pattern + ' ' + t) for t in sub_tried])
else:
tried.append(pattern.regex.pattern)
else:
if sub_match:
sub_match_dict = dict([(smart_str(k), v) for k, v in match.groupdict().items()])
sub_match_dict.update(self.default_kwargs)
for k, v in sub_match[2].iteritems():
sub_match_dict[smart_str(k)] = v
return sub_match[0], sub_match[1], sub_match_dict
tried.append(pattern.regex.pattern)
raise Resolver404, {'tried': tried, 'path': new_path}
raise Resolver404, {'path' : path}
def _get_urlconf_module(self):
try:
return self._urlconf_module
except AttributeError:
self._urlconf_module = import_module(self.urlconf_name)
return self._urlconf_module
urlconf_module = property(_get_urlconf_module)
def _get_url_patterns(self):
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
try:
iter(patterns)
except TypeError:
raise ImproperlyConfigured("The included urlconf %s doesn't have any "
"patterns in it" % self.urlconf_name)
return patterns
url_patterns = property(_get_url_patterns)
def _resolve_special(self, view_type):
callback = getattr(self.urlconf_module, 'handler%s' % view_type)
mod_name, func_name = get_mod_func(callback)
try:
return getattr(import_module(mod_name), func_name), {}
except (ImportError, AttributeError), e:
raise ViewDoesNotExist, "Tried %s. Error was: %s" % (callback, str(e))
def resolve404(self):
return self._resolve_special('404')
def resolve500(self):
return self._resolve_special('500')
def reverse(self, lookup_view, *args, **kwargs):
if args and kwargs:
raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
try:
lookup_view = get_callable(lookup_view, True)
except (ImportError, AttributeError), e:
raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e))
possibilities = self.reverse_dict.getlist(lookup_view)
for possibility, pattern in possibilities:
for result, params in possibility:
if args:
if len(args) != len(params):
continue
unicode_args = [force_unicode(val) for val in args]
candidate = result % dict(zip(params, unicode_args))
else:
if set(kwargs.keys()) != set(params):
continue
unicode_kwargs = dict([(k, force_unicode(v)) for (k, v) in kwargs.items()])
candidate = result % unicode_kwargs
if re.search(u'^%s' % pattern, candidate, re.UNICODE):
return candidate
        # lookup_view can be a URL label, a dotted path, or a callable. Any of
        # these can be passed in at the top, but callables are not friendly in
        # error messages.
m = getattr(lookup_view, '__module__', None)
n = getattr(lookup_view, '__name__', None)
if m is not None and n is not None:
lookup_view_s = "%s.%s" % (m, n)
else:
lookup_view_s = lookup_view
raise NoReverseMatch("Reverse for '%s' with arguments '%s' and keyword "
"arguments '%s' not found." % (lookup_view_s, args, kwargs))
def resolve(path, urlconf=None):
return get_resolver(urlconf).resolve(path)
def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None):
resolver = get_resolver(urlconf)
args = args or []
kwargs = kwargs or {}
if prefix is None:
prefix = get_script_prefix()
if not isinstance(viewname, basestring):
view = viewname
else:
parts = viewname.split(':')
parts.reverse()
view = parts[0]
path = parts[1:]
resolved_path = []
while path:
ns = path.pop()
            # Look up the name to see if it could be an app identifier
try:
app_list = resolver.app_dict[ns]
# Yes! Path part matches an app in the current Resolver
if current_app and current_app in app_list:
# If we are reversing for a particular app, use that namespace
ns = current_app
elif ns not in app_list:
# The name isn't shared by one of the instances (i.e., the default)
# so just pick the first instance as the default.
ns = app_list[0]
except KeyError:
pass
try:
extra, resolver = resolver.namespace_dict[ns]
resolved_path.append(ns)
prefix = prefix + extra
except KeyError, key:
if resolved_path:
raise NoReverseMatch("%s is not a registered namespace inside '%s'" % (key, ':'.join(resolved_path)))
else:
raise NoReverseMatch("%s is not a registered namespace" % key)
return iri_to_uri(u'%s%s' % (prefix, resolver.reverse(view,
*args, **kwargs)))
def clear_url_caches():
global _resolver_cache
global _callable_cache
_resolver_cache.clear()
_callable_cache.clear()
def set_script_prefix(prefix):
"""
Sets the script prefix for the current thread.
"""
if not prefix.endswith('/'):
prefix += '/'
_prefixes[currentThread()] = prefix
def get_script_prefix():
"""
Returns the currently active script prefix. Useful for client code that
wishes to construct their own URLs manually (although accessing the request
instance is normally going to be a lot cleaner).
"""
return _prefixes.get(currentThread(), u'/')
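# Illustrative usage of the public helpers above (URL names and paths are
# hypothetical, not part of this module):
#
#     resolve('/articles/2003/')          # -> (view_func, args, kwargs), or raises Resolver404
#     reverse('news-archive')             # -> e.g. '/articles/archive/'
#     reverse('polls:detail', args=[5])   # namespaced lookup: 'polls' is resolved via
#                                         # namespace_dict before the view name itself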
| mit |
rimbalinux/LMD3 | django/db/models/expressions.py | 3 | 5146 | import datetime
from django.utils import tree
from django.utils.copycompat import deepcopy
class ExpressionNode(tree.Node):
"""
Base class for all query expressions.
"""
# Arithmetic connectors
ADD = '+'
SUB = '-'
MUL = '*'
DIV = '/'
MOD = '%%' # This is a quoted % operator - it is quoted
# because it can be used in strings that also
# have parameter substitution.
# Bitwise operators
AND = '&'
OR = '|'
def __init__(self, children=None, connector=None, negated=False):
if children is not None and len(children) > 1 and connector is None:
raise TypeError('You have to specify a connector.')
super(ExpressionNode, self).__init__(children, connector, negated)
def _combine(self, other, connector, reversed, node=None):
if isinstance(other, datetime.timedelta):
return DateModifierNode([self, other], connector)
if reversed:
obj = ExpressionNode([other], connector)
obj.add(node or self, connector)
else:
obj = node or ExpressionNode([self], connector)
obj.add(other, connector)
return obj
###################
# VISITOR METHODS #
###################
def prepare(self, evaluator, query, allow_joins):
return evaluator.prepare_node(self, query, allow_joins)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_node(self, qn, connection)
#############
# OPERATORS #
#############
def __add__(self, other):
return self._combine(other, self.ADD, False)
def __sub__(self, other):
return self._combine(other, self.SUB, False)
def __mul__(self, other):
return self._combine(other, self.MUL, False)
def __div__(self, other):
return self._combine(other, self.DIV, False)
def __mod__(self, other):
return self._combine(other, self.MOD, False)
def __and__(self, other):
return self._combine(other, self.AND, False)
def __or__(self, other):
return self._combine(other, self.OR, False)
def __radd__(self, other):
return self._combine(other, self.ADD, True)
def __rsub__(self, other):
return self._combine(other, self.SUB, True)
def __rmul__(self, other):
return self._combine(other, self.MUL, True)
def __rdiv__(self, other):
return self._combine(other, self.DIV, True)
def __rmod__(self, other):
return self._combine(other, self.MOD, True)
def __rand__(self, other):
return self._combine(other, self.AND, True)
def __ror__(self, other):
return self._combine(other, self.OR, True)
def prepare_database_save(self, unused):
return self
class F(ExpressionNode):
"""
An expression representing the value of the given field.
"""
def __init__(self, name):
super(F, self).__init__(None, None, False)
self.name = name
def __deepcopy__(self, memodict):
obj = super(F, self).__deepcopy__(memodict)
obj.name = self.name
return obj
def prepare(self, evaluator, query, allow_joins):
return evaluator.prepare_leaf(self, query, allow_joins)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_leaf(self, qn, connection)
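# A rough usage sketch for F (model and field names below are made up):
#
#     from django.db.models import F
#     Entry.objects.filter(n_comments__gt=F('n_pingbacks'))   # compare two columns
#     Entry.objects.update(rating=F('rating') + 1)            # arithmetic builds an
#                                                             # ExpressionNode tree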
class DateModifierNode(ExpressionNode):
"""
Node that implements the following syntax:
filter(end_date__gt=F('start_date') + datetime.timedelta(days=3, seconds=200))
which translates into:
POSTGRES:
WHERE end_date > (start_date + INTERVAL '3 days 200 seconds')
MYSQL:
WHERE end_date > (start_date + INTERVAL '3 0:0:200:0' DAY_MICROSECOND)
ORACLE:
WHERE end_date > (start_date + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6))
SQLITE:
WHERE end_date > django_format_dtdelta(start_date, "+" "3", "200", "0")
(A custom function is used in order to preserve six digits of fractional
second information on sqlite, and to format both date and datetime values.)
Note that microsecond comparisons are not well supported with MySQL, since
MySQL does not store microsecond information.
Only adding and subtracting timedeltas is supported, attempts to use other
operations raise a TypeError.
"""
def __init__(self, children, connector, negated=False):
if len(children) != 2:
raise TypeError('Must specify a node and a timedelta.')
if not isinstance(children[1], datetime.timedelta):
raise TypeError('Second child must be a timedelta.')
if connector not in (self.ADD, self.SUB):
raise TypeError('Connector must be + or -, not %s' % connector)
super(DateModifierNode, self).__init__(children, connector, negated)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_date_modifier_node(self, qn, connection)
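# DateModifierNode is what an expression like
#
#     Entry.objects.filter(end_date__gt=F('start_date') + datetime.timedelta(days=3))
#
# collapses to: adding or subtracting a timedelta from an F() node routes through
# _combine(), which detects the timedelta operand and builds this node instead of a
# plain ExpressionNode ("Entry", "end_date" and "start_date" are illustrative names).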
| bsd-3-clause |
abstract-open-solutions/l10n-italy | account_fiscal_year_closing/fyc.py | 13 | 29800 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Pexego Sistemas Informáticos. All Rights Reserved
# Copyright (C) 2011 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Fiscal Year Closing
"""
__author__ = "Borja López Soilán (Pexego)"
from osv import fields, osv
from tools.translate import _
from datetime import datetime
import netsvc
#-------------------------------------------------------------------------------
# Predeclaration of the FYC object
#-------------------------------------------------------------------------------
class fiscal_year_closing_init(osv.osv):
"""
Fiscal Year Closing Wizard
"""
_name = "account_fiscal_year_closing.fyc"
_description = "Fiscal Year Closing Wizard"
_columns = {
'name': fields.char('Description', size=60, required=True),
}
fiscal_year_closing_init()
#-------------------------------------------------------------------------------
# Account mapping objects (to be used on the fyc configuration)
#-------------------------------------------------------------------------------
class fiscal_year_closing_lp_account_mapping(osv.osv):
"""
Loss & Profit Account Mapping
"""
_name = "account_fiscal_year_closing.fyc_lp_account_map"
_description = "SFYC Loss & Profit Account Mapping"
_columns = {
'name': fields.char('Description', size=60, required=False),
# Parent eoy
'fyc_id': fields.many2one('account_fiscal_year_closing.fyc', 'Fiscal Year Closing', ondelete='cascade', required=True, select=1),
# Accounts
'source_account_id':fields.many2one('account.account', 'Source account', required=True, ondelete='cascade'),
'dest_account_id':fields.many2one('account.account', 'Dest account', required=False, ondelete='cascade'),
}
fiscal_year_closing_lp_account_mapping()
class fiscal_year_closing_nlp_account_mapping(osv.osv):
"""
Net Loss & Profit Account Mapping
"""
_name = "account_fiscal_year_closing.fyc_nlp_account_map"
_description = "SFYC Net Loss & Profit Account Mapping"
_columns = {
'name': fields.char('Description', size=60, required=False),
# Parent eoy
'fyc_id': fields.many2one('account_fiscal_year_closing.fyc', 'Fiscal Year Closing', ondelete='cascade', required=True, select=1),
# Accounts
'source_account_id':fields.many2one('account.account', 'Source account', required=True, ondelete='cascade'),
'dest_account_id':fields.many2one('account.account', 'Dest account', required=False, ondelete='cascade'),
}
fiscal_year_closing_nlp_account_mapping()
class fiscal_year_closing_c_account_mapping(osv.osv):
"""
Closing Account Mapping
"""
_name = "account_fiscal_year_closing.fyc_c_account_map"
_description = "SFYC Closing Account Mapping"
_columns = {
'name': fields.char('Description', size=60, required=False),
# Parent eoy
'fyc_id': fields.many2one('account_fiscal_year_closing.fyc', 'Fiscal Year Closing', ondelete='cascade', required=True, select=1),
# Accounts
'source_account_id':fields.many2one('account.account', 'Account', required=True, ondelete='cascade'),
'dest_account_id':fields.many2one('account.account', 'Dest account', ondelete='cascade'),
}
fiscal_year_closing_c_account_mapping()
#-------------------------------------------------------------------------------
# Fiscal Year Closing Wizard
#-------------------------------------------------------------------------------
class fiscal_year_closing(osv.osv):
"""
Fiscal Year Closing Wizard
"""
_inherit = "account_fiscal_year_closing.fyc"
#
# Fields -------------------------------------------------------------------
#
_columns = {
# Company
'company_id': fields.many2one('res.company', 'Company', ondelete='cascade', readonly=True, required=True),
# Fiscal years
'closing_fiscalyear_id':fields.many2one('account.fiscalyear', 'Fiscal year to close', required=True, ondelete='cascade', select=1),
'opening_fiscalyear_id':fields.many2one('account.fiscalyear', 'Fiscal year to open', required=True, ondelete='cascade', select=2),
#
# Operations (to do), and their account moves (when done)
#
'create_loss_and_profit': fields.boolean('Create Loss & Profit move'),
'loss_and_profit_move_id': fields.many2one('account.move', 'L&P Move', ondelete='set null', readonly=True),
'create_net_loss_and_profit': fields.boolean('Create Net Loss & Profit'),
'net_loss_and_profit_move_id': fields.many2one('account.move', 'Net L&P Move', ondelete='set null', readonly=True),
'create_closing': fields.boolean('Close fiscal year'),
'closing_move_id': fields.many2one('account.move', 'Closing Move', ondelete='set null', readonly=True),
'create_opening': fields.boolean('Open next fiscal year'),
'opening_move_id': fields.many2one('account.move', 'Opening Move', ondelete='set null', readonly=True),
#
# Extra operations
#
'check_invalid_period_moves': fields.boolean('Check invalid period or date moves', help="Checks that there are no moves, on the fiscal year that is being closed, with dates or periods outside that fiscal year."),
'check_draft_moves': fields.boolean('Check draft moves', help="Checks that there are no draft moves on the fiscal year that is being closed. Non-confirmed moves won't be taken in account on the closing operations."),
'check_unbalanced_moves': fields.boolean('Check unbalanced moves', help="Checks that there are no unbalanced moves on the fiscal year that is being closed."),
# State
'state': fields.selection([
('new', 'New'),
('draft', 'Draft'),
('in_progress', 'In Progress'),
('done', 'Done'),
('canceled', 'Canceled'),
], 'Status'),
#
# Loss and Profit options
#
'lp_description': fields.char('Description', size=60),
'lp_journal_id': fields.many2one('account.journal', 'Journal'),
'lp_period_id': fields.many2one('account.period', 'Period'),
'lp_date': fields.date('Date'),
'lp_account_mapping_ids': fields.one2many('account_fiscal_year_closing.fyc_lp_account_map', 'fyc_id', 'Account mappings'),
#
# Net Loss and Profit options
#
'nlp_description': fields.char('Description', size=60),
'nlp_journal_id': fields.many2one('account.journal', 'Journal'),
'nlp_period_id': fields.many2one('account.period', 'Period'),
'nlp_date': fields.date('Date'),
'nlp_account_mapping_ids': fields.one2many('account_fiscal_year_closing.fyc_nlp_account_map', 'fyc_id', 'Account mappings'),
#
# Closing options
#
'c_description': fields.char('Description', size=60),
'c_journal_id': fields.many2one('account.journal', 'Journal'),
'c_period_id': fields.many2one('account.period', 'Period'),
'c_date': fields.date('Date'),
'c_account_mapping_ids': fields.one2many('account_fiscal_year_closing.fyc_c_account_map', 'fyc_id', 'Accounts'),
#
# Opening options
#
'o_description': fields.char('Description', size=60),
'o_journal_id': fields.many2one('account.journal', 'Journal'),
'o_period_id': fields.many2one('account.period', 'Period'),
'o_date': fields.date('Date'),
}
#
# Default values -----------------------------------------------------------
#
def _get_closing_fiscalyear_id(self, cr, uid, context):
"""
Gets the last (previous) fiscal year
"""
company = self.pool.get('res.users').browse(cr, uid, uid, context).company_id
str_date = '%s-06-01' % (datetime.now().year - 1)
fiscalyear_ids = self.pool.get('account.fiscalyear').search(cr, uid, [
('company_id', '=', company.id),
('date_start', '<=', str_date),
('date_stop', '>=', str_date),
])
if not fiscalyear_ids:
fiscalyear_ids = self.pool.get('account.fiscalyear').search(cr, uid, [
('company_id', '=', False),
('date_start', '<=', str_date),
('date_stop', '>=', str_date),
])
return fiscalyear_ids and fiscalyear_ids[0]
def _get_opening_fiscalyear_id(self, cr, uid, context):
"""
Gets the current fiscal year
"""
company = self.pool.get('res.users').browse(cr, uid, uid, context).company_id
str_date = '%s-06-01' % datetime.now().year
fiscalyear_ids = self.pool.get('account.fiscalyear').search(cr, uid, [
('company_id', '=', company.id),
('date_start', '<=', str_date),
('date_stop', '>=', str_date),
])
if not fiscalyear_ids:
fiscalyear_ids = self.pool.get('account.fiscalyear').search(cr, uid, [
('company_id', '=', False),
('date_start', '<=', str_date),
('date_stop', '>=', str_date),
])
return fiscalyear_ids and fiscalyear_ids[0]
_defaults = {
# Current company by default:
'company_id': lambda self, cr, uid, context: self.pool.get('res.users').browse(cr, uid, uid, context).company_id.id,
# Draft state by default:
'state': lambda *a: 'new',
# Name
'name': lambda self, cr, uid, context: _("%s Fiscal Year Closing") % (datetime.now().year - 1),
# Fiscal years
'closing_fiscalyear_id': _get_closing_fiscalyear_id,
'opening_fiscalyear_id': _get_opening_fiscalyear_id,
}
#
# Workflow actions ---------------------------------------------------------
#
def _get_journal_id(self, cr, uid, fyc, context):
"""
Gets the journal to use.
(It will search for a 'GRAL' or 'General' journal)
"""
assert fyc.company_id, "A company should have been selected"
journal_ids = self.pool.get('account.journal').search(cr, uid, [
('company_id', '=', fyc.company_id.id),
('code', '=', 'GRAL'),
])
if not journal_ids:
journal_ids = self.pool.get('account.journal').search(cr, uid, [
('company_id', '=', False),
('code', '=', 'GRAL'),
])
if not journal_ids:
journal_ids = self.pool.get('account.journal').search(cr, uid, [
('company_id', '=', fyc.company_id.id),
('name', 'ilike', 'General'),
])
if not journal_ids:
journal_ids = self.pool.get('account.journal').search(cr, uid, [
('company_id', '=', False),
('name', 'ilike', 'General'),
])
return journal_ids and journal_ids[0]
def _get_lp_period_id(self, cr, uid, fyc, context):
"""
Gets the period for the L&P entry
(It searches for a "PG%" special period on the previous fiscal year)
"""
period_ids = self.pool.get('account.period').search(cr, uid, [
('fiscalyear_id', '=', fyc.closing_fiscalyear_id.id),
('special', '=', True),
('date_start', '=', fyc.closing_fiscalyear_id.date_stop),
('code', 'ilike', 'PG'),
])
if not period_ids:
period_ids = self.pool.get('account.period').search(cr, uid, [
('fiscalyear_id', '=', fyc.closing_fiscalyear_id.id),
('special', '=', True),
('date_start', '=', fyc.closing_fiscalyear_id.date_stop),
])
return period_ids and period_ids[0]
def _get_c_period_id(self, cr, uid, fyc, context):
"""
Gets the period for the Closing entry
(It searches for a "C%" special period on the previous fiscal year)
"""
period_ids = self.pool.get('account.period').search(cr, uid, [
('fiscalyear_id', '=', fyc.closing_fiscalyear_id.id),
('special', '=', True),
('date_start', '=', fyc.closing_fiscalyear_id.date_stop),
('code', 'ilike', 'C'),
])
if not period_ids:
period_ids = self.pool.get('account.period').search(cr, uid, [
('fiscalyear_id', '=', fyc.closing_fiscalyear_id.id),
('special', '=', True),
('date_start', '=', fyc.closing_fiscalyear_id.date_stop),
])
return period_ids and period_ids[0]
def _get_o_period_id(self, cr, uid, fyc, context):
"""
Gets the period for the Opening entry
        (It searches for an "A%" special period on the fiscal year being opened)
"""
period_ids = self.pool.get('account.period').search(cr, uid, [
('fiscalyear_id', '=', fyc.opening_fiscalyear_id.id),
('special', '=', True),
('date_stop', '=', fyc.opening_fiscalyear_id.date_start),
('code', 'ilike', 'A'),
])
if not period_ids:
period_ids = self.pool.get('account.period').search(cr, uid, [
('fiscalyear_id', '=', fyc.opening_fiscalyear_id.id),
('special', '=', True),
('date_stop', '=', fyc.opening_fiscalyear_id.date_start),
])
return period_ids and period_ids[0]
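    # Note on the three period lookups above: they assume the chart of accounts
    # defines special periods whose codes start with "PG" (profit & loss), "C"
    # (closing) and "A" (opening); if no such coded period exists they fall back
    # to any special period bordering the fiscal year boundary.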
def _get_account_mappings(self, cr, uid, fyc, mapping, context):
"""
        Transforms the account mapping tuples into a list of mapping lines.
"""
account_mappings = []
for source, dest, description in mapping:
#
# Find the source account
#
account_ids = self.pool.get('account.account').search(cr, uid, [
('company_id', '=', fyc.company_id.id),
('code', '=like', source),
])
source_account_id = account_ids and account_ids[0] or None
#
# Find the dest account
#
account_ids = self.pool.get('account.account').search(cr, uid, [
('company_id', '=', fyc.company_id.id),
('code', '=like', dest),
('type', '!=', 'view'),
])
dest_account_id = account_ids and account_ids[0] or None
#
# Use a default description if not provided
#
if not description:
if source_account_id:
description = self.pool.get('account.account').read(cr, uid, source_account_id, ['name'])['name']
#
# If the mapping is valid for this chart of accounts
#
if source_account_id:
#
# Make sure that the dest account is valid
#
if dest_account_id:
# Add the line to the result
account_mappings.append({
'name': description,
'source_account_id': source_account_id,
'dest_account_id': dest_account_id,
})
else:
# Add the line to the result
account_mappings.append({
'name': _('No destination account %s found for account %s.') % (dest, source),
'source_account_id': source_account_id,
'dest_account_id': None,
})
return [(0, 0, acc_map) for acc_map in account_mappings]
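    # Sketch of the expected "mapping" argument and how the result is used
    # (account codes below are invented):
    #
    #     mapping = [('129%', '890', None), ('6%', '129000000', 'Expenses')]
    #     vals['lp_account_mapping_ids'] = self._get_account_mappings(
    #         cr, uid, fyc, mapping, context)
    #
    # Each returned item is an OpenERP one2many "create" triple (0, 0, values),
    # so it can be written straight into an *_account_mapping_ids field.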
def action_draft(self, cr, uid, ids, context=None):
"""
Called when the user clicks the confirm button.
"""
if context is None:
context = {}
#
# Make sure the lang is defined on the context
#
user = self.pool.get('res.users').browse(cr, uid, uid, context)
context['lang'] = context.get('lang') or user.lang
for fyc in self.browse(cr, uid, ids, context):
#
# Check for duplicated entries
#
fyc_ids = self.search(cr, uid, [('name', '=', fyc.name)])
if len(fyc_ids) > 1:
raise osv.except_osv(_('Error'), _('There is already a fiscal year closing with this name.'))
assert fyc.closing_fiscalyear_id and fyc.closing_fiscalyear_id.id
fyc_ids = self.search(cr, uid, [('closing_fiscalyear_id', '=', fyc.closing_fiscalyear_id.id)])
if len(fyc_ids) > 1:
raise osv.except_osv(_('Error'), _('There is already a fiscal year closing for the fiscal year to close.'))
assert fyc.opening_fiscalyear_id and fyc.opening_fiscalyear_id.id
fyc_ids = self.search(cr, uid, [('opening_fiscalyear_id', '=', fyc.opening_fiscalyear_id.id)])
if len(fyc_ids) > 1:
raise osv.except_osv(_('Error'), _('There is already a fiscal year closing for the fiscal year to open.'))
#
# Check whether the default values of the fyc object have to be computed
# or they have already been computed (restarted workflow)
#
if fyc.c_account_mapping_ids:
# Fyc wizard reverted to 'new' after canceled
self.write(cr, uid, [fyc.id], { 'state': 'draft' })
else:
# New fyc wizard object
vals = {
#
# Perform all the operations by default
#
'create_loss_and_profit': True,
'create_net_loss_and_profit': False,
'create_closing': True,
'create_opening': True,
'check_invalid_period_moves': True,
'check_draft_moves': True,
'check_unbalanced_moves': True,
#
# L&P options
#
'lp_description': _("Loss & Profit"),
'lp_journal_id': self._get_journal_id(cr, uid, fyc, context),
'lp_period_id': self._get_lp_period_id(cr, uid, fyc, context),
'lp_date': fyc.closing_fiscalyear_id.date_stop,
# 'lp_account_mapping_ids': self._get_account_mappings(cr, uid, fyc, _LP_ACCOUNT_MAPPING, context),
#
# Net L&P options
#
'nlp_description': _("Net Loss & Profit"),
'nlp_journal_id': self._get_journal_id(cr, uid, fyc, context),
'nlp_period_id': self._get_lp_period_id(cr, uid, fyc, context),
'nlp_date': fyc.closing_fiscalyear_id.date_stop,
# 'nlp_account_mapping_ids': self._get_account_mappings(cr, uid, fyc, _NLP_ACCOUNT_MAPPING, context),
#
# Closing options
#
'c_description': _("Fiscal Year Closing"),
'c_journal_id': self._get_journal_id(cr, uid, fyc, context),
'c_period_id': self._get_c_period_id(cr, uid, fyc, context),
'c_date': fyc.closing_fiscalyear_id.date_stop,
# 'c_account_mapping_ids': self._get_account_mappings(cr, uid, fyc, _C_ACCOUNT_MAPPING, context),
#
# Opening options
#
'o_description': _("Fiscal Year Opening"),
'o_journal_id': self._get_journal_id(cr, uid, fyc, context),
'o_period_id': self._get_o_period_id(cr, uid, fyc, context),
'o_date': fyc.opening_fiscalyear_id.date_start,
# *** New state ***
'state': 'draft',
}
self.write(cr, uid, [fyc.id], vals)
return True
def action_run(self, cr, uid, ids, context=None):
"""
Called when the create entries button is used.
"""
# Note: Just change the state, everything else is done on the run wizard
# *before* this action is called.
self.write(cr, uid, ids, {'state': 'in_progress'})
return True
def action_confirm(self, cr, uid, ids, context=None):
"""
Called when the user clicks the confirm button.
"""
if context is None:
context = {}
#
# Make sure the lang is defined on the context
#
user = self.pool.get('res.users').browse(cr, uid, uid, context)
context['lang'] = context.get('lang') or user.lang
for fyc in self.browse(cr, uid, ids, context):
#
# Require the L&P, closing, and opening moves to exist (NL&P is optional)
#
if not fyc.loss_and_profit_move_id:
raise osv.except_osv(_("Not all the operations have been performed!"), _("The Loss & Profit move is required"))
if not fyc.closing_move_id:
raise osv.except_osv(_("Not all the operations have been performed!"), _("The Closing move is required"))
if not fyc.opening_move_id:
raise osv.except_osv(_("Not all the operations have been performed!"), _("The Opening move is required"))
''' needed ?
#
# Calculate the moves to check
#
moves = []
moves.append(fyc.loss_and_profit_move_id)
if fyc.net_loss_and_profit_move_id:
moves.append(fyc.net_loss_and_profit_move_id)
moves.append(fyc.closing_move_id)
moves.append(fyc.opening_move_id)
#
# Check and reconcile each of the moves
#
for move in moves:
netsvc.Logger().notifyChannel('fyc', netsvc.LOG_DEBUG, "Checking %s" % move.ref)
#
# Check if it has been confirmed
#
if move.state == 'draft':
raise osv.except_osv(_("Some moves are in draft state!"), _("You have to review and confirm each of the moves before continuing"))
#
# Check the balance
#
amount = 0
for line in move.line_id:
amount += (line.debit - line.credit)
if abs(amount) > 0.5 * 10 ** -int(self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')):
raise osv.except_osv(_("Some moves are unbalanced!"), _("All the moves should be balanced before continuing"))
#
# Reconcile the move
#
# Note: We will reconcile all the lines, even the 'not reconcile' ones,
# to prevent future problems (the user may change the
# reconcile option of an account in the future)
#
netsvc.Logger().notifyChannel('fyc', netsvc.LOG_DEBUG, "Reconcile %s" % move.ref)
tmp_context = context.copy()
tmp_context['fy_closing'] = True # Fiscal year closing = reconcile everything
line_ids = [line.id for line in move.line_id]
self.pool.get('account.move.line').reconcile(cr, uid, line_ids, context=tmp_context)
#
# Close the fiscal year and it's periods
#
# Note: We can not just do a write, cause it would raise a
# "You can not modify/delete a journal with entries for this period!"
# so we have to do it on SQL level :(
# This is based on the "account.fiscalyear.close.state" wizard.
#
netsvc.Logger().notifyChannel('fyc', netsvc.LOG_DEBUG, "Closing fiscal year")
query = """
UPDATE account_journal_period
SET state = 'done'
WHERE period_id IN (SELECT id FROM account_period WHERE fiscalyear_id = %d)
"""
cr.execute(query % fyc.closing_fiscalyear_id.id)
query = """
UPDATE account_period
SET state = 'done'
WHERE fiscalyear_id = %d
"""
cr.execute(query % fyc.closing_fiscalyear_id.id)
query = """
UPDATE account_fiscalyear
SET state = 'done'
WHERE id = %d
"""
cr.execute(query % fyc.closing_fiscalyear_id.id)
'''
# Done
self.write(cr, uid, ids, {'state': 'done'})
return True
def action_cancel(self, cr, uid, ids, context=None):
"""
Called when the user clicks the cancel button.
"""
if context is None:
context = {}
#
# Make sure the lang is defined on the context
#
user = self.pool.get('res.users').browse(cr, uid, uid, context)
context['lang'] = context.get('lang') or user.lang
#
# Uncheck all the operations
#
self.pool.get('account_fiscal_year_closing.fyc').write(cr, uid, ids, {
'create_loss_and_profit': False,
'create_net_loss_and_profit': False,
'create_closing': False,
'create_opening': False,
'check_invalid_period_moves': False,
'check_draft_moves': False,
'check_unbalanced_moves': False,
}, context=context)
''' needed?
#
# Open the fiscal year and it's periods
#
# Note: We can not just do a write, cause it would raise a
# "You can not modify/delete a journal with entries for this period!"
# so we have to do it on SQL level :(
# This is based on the "account.fiscalyear.close.state" wizard.
#
# TODO check this for 6.1
for fyc in self.browse(cr, uid, ids, context):
query = """
UPDATE account_journal_period
SET state = 'draft'
WHERE period_id IN (SELECT id FROM account_period WHERE fiscalyear_id = %d)
"""
cr.execute(query % fyc.closing_fiscalyear_id.id)
query = """
UPDATE account_period
SET state = 'draft'
WHERE fiscalyear_id = %d
"""
cr.execute(query % fyc.closing_fiscalyear_id.id)
query = """
UPDATE account_fiscalyear
SET state = 'draft'
WHERE id = %d
"""
cr.execute(query % fyc.closing_fiscalyear_id.id)
'''
for fyc in self.browse(cr, uid, ids, context):
if fyc.loss_and_profit_move_id:
fyc.loss_and_profit_move_id.unlink()
if fyc.net_loss_and_profit_move_id:
fyc.net_loss_and_profit_move_id.unlink()
if fyc.closing_move_id:
fyc.closing_move_id.unlink()
if fyc.opening_move_id:
fyc.opening_move_id.unlink()
# Canceled
self.write(cr, uid, ids, {'state': 'canceled'})
return True
def action_recover(self, cr, uid, ids, context=None):
"""
Called when the user clicks the draft button to create
a new workflow instance.
"""
self.write(cr, uid, ids, {'state': 'new'})
wf_service = netsvc.LocalService("workflow")
for item_id in ids:
wf_service.trg_create(uid, 'account_fiscal_year_closing.fyc', item_id, cr)
return True
fiscal_year_closing()
| agpl-3.0 |
broodplank/glass-omap-xrr02 | tools/perf/scripts/python/sctop.py | 11180 | 1924 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
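# Illustrative invocations, matching the usage string above:
#   perf script -s sctop.py              # all processes, refresh every 3 seconds
#   perf script -s sctop.py firefox 5    # only syscalls made by "firefox", 5s refresh
#   perf script -s sctop.py 10           # all processes, 10s refresh
# ("firefox" is just an example comm name.)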
| gpl-2.0 |
gsehub/edx-platform | openedx/core/djangoapps/schedules/tests/test_resolvers.py | 18 | 2866 | import datetime
from unittest import skipUnless
import ddt
from django.conf import settings
from mock import Mock
from openedx.core.djangoapps.schedules.resolvers import BinnedSchedulesBaseResolver
from openedx.core.djangoapps.schedules.tests.factories import ScheduleConfigFactory
from openedx.core.djangoapps.site_configuration.tests.factories import SiteFactory, SiteConfigurationFactory
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
@ddt.ddt
@skip_unless_lms
@skipUnless('openedx.core.djangoapps.schedules.apps.SchedulesConfig' in settings.INSTALLED_APPS,
"Can't test schedules if the app isn't installed")
class TestBinnedSchedulesBaseResolver(CacheIsolationTestCase):
def setUp(self):
super(TestBinnedSchedulesBaseResolver, self).setUp()
self.site = SiteFactory.create()
self.site_config = SiteConfigurationFactory(site=self.site)
self.schedule_config = ScheduleConfigFactory.create(site=self.site)
self.resolver = BinnedSchedulesBaseResolver(
async_send_task=Mock(name='async_send_task'),
site=self.site,
target_datetime=datetime.datetime.now(),
day_offset=3,
bin_num=2,
)
@ddt.data(
'course1'
)
def test_get_course_org_filter_equal(self, course_org_filter):
self.site_config.values['course_org_filter'] = course_org_filter
self.site_config.save()
mock_query = Mock()
result = self.resolver.filter_by_org(mock_query)
self.assertEqual(result, mock_query.filter.return_value)
mock_query.filter.assert_called_once_with(enrollment__course__org=course_org_filter)
@ddt.unpack
@ddt.data(
(['course1', 'course2'], ['course1', 'course2'])
)
def test_get_course_org_filter_include__in(self, course_org_filter, expected_org_list):
self.site_config.values['course_org_filter'] = course_org_filter
self.site_config.save()
mock_query = Mock()
result = self.resolver.filter_by_org(mock_query)
self.assertEqual(result, mock_query.filter.return_value)
mock_query.filter.assert_called_once_with(enrollment__course__org__in=expected_org_list)
@ddt.unpack
@ddt.data(
(None, set([])),
('course1', set([u'course1'])),
(['course1', 'course2'], set([u'course1', u'course2']))
)
def test_get_course_org_filter_exclude__in(self, course_org_filter, expected_org_list):
SiteConfigurationFactory.create(
values={'course_org_filter': course_org_filter},
)
mock_query = Mock()
result = self.resolver.filter_by_org(mock_query)
mock_query.exclude.assert_called_once_with(enrollment__course__org__in=expected_org_list)
self.assertEqual(result, mock_query.exclude.return_value)
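    # Taken together, these cases pin down filter_by_org(): a site whose
    # configuration names a single org filters schedules with
    # enrollment__course__org=<org>, a list of orgs uses __in=<orgs>, and a site
    # with no filter of its own excludes every org claimed by other sites'
    # configurations (a summary of the assertions above, not extra behaviour).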
| agpl-3.0 |
sideffect0/django-tastypie | tastypie/utils/formatting.py | 47 | 1142 | from __future__ import unicode_literals
import email
import datetime
import time
from django.utils import dateformat
from tastypie.utils.timezone import make_aware, make_naive, aware_datetime
# Try to use dateutil for maximum date-parsing niceness. Fall back to
# hard-coded RFC2822 parsing if that's not possible.
try:
from dateutil.parser import parse as mk_datetime
except ImportError:
def mk_datetime(string):
return make_aware(datetime.datetime.fromtimestamp(time.mktime(email.utils.parsedate(string))))
def format_datetime(dt):
"""
RFC 2822 datetime formatter
"""
return dateformat.format(make_naive(dt), 'r')
def format_date(d):
"""
RFC 2822 date formatter
"""
# workaround because Django's dateformat utility requires a datetime
# object (not just date)
dt = aware_datetime(d.year, d.month, d.day, 0, 0, 0)
return dateformat.format(dt, 'j M Y')
def format_time(t):
"""
RFC 2822 time formatter
"""
# again, workaround dateformat input requirement
dt = aware_datetime(2000, 1, 1, t.hour, t.minute, t.second)
return dateformat.format(dt, 'H:i:s O')
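# Rough examples of the output shapes (the UTC offset depends on the active
# Django TIME_ZONE setting, so the exact values below are illustrative):
#
#     format_datetime(aware_datetime(2015, 3, 2, 14, 30, 5))  # 'Mon, 2 Mar 2015 14:30:05 +0000'
#     format_date(datetime.date(2015, 3, 2))                  # '2 Mar 2015'
#     format_time(datetime.time(14, 30, 5))                   # '14:30:05 +0000'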
| bsd-3-clause |
shrimpboyho/git.js | emscript/python/2.7.5.1_32bit/Lib/test/test_minidom.py | 45 | 62440 | # test for xml.dom.minidom
import pickle
from StringIO import StringIO
from test.test_support import verbose, run_unittest, findfile
import unittest
import xml.dom
import xml.dom.minidom
import xml.parsers.expat
from xml.dom.minidom import parse, Node, Document, parseString
from xml.dom.minidom import getDOMImplementation
tstfile = findfile("test.xml", subdir="xmltestdata")
# The tests of DocumentType importing use these helpers to construct
# the documents to work with, since not all DOM builders actually
# create the DocumentType nodes.
def create_doc_without_doctype(doctype=None):
return getDOMImplementation().createDocument(None, "doc", doctype)
def create_nonempty_doctype():
doctype = getDOMImplementation().createDocumentType("doc", None, None)
doctype.entities._seq = []
doctype.notations._seq = []
notation = xml.dom.minidom.Notation("my-notation", None,
"http://xml.python.org/notations/my")
doctype.notations._seq.append(notation)
entity = xml.dom.minidom.Entity("my-entity", None,
"http://xml.python.org/entities/my",
"my-notation")
entity.version = "1.0"
entity.encoding = "utf-8"
entity.actualEncoding = "us-ascii"
doctype.entities._seq.append(entity)
return doctype
def create_doc_with_doctype():
doctype = create_nonempty_doctype()
doc = create_doc_without_doctype(doctype)
doctype.entities.item(0).ownerDocument = doc
doctype.notations.item(0).ownerDocument = doc
return doc
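# For orientation: create_doc_without_doctype() yields a document that serializes
# roughly as '<?xml version="1.0" ?><doc/>', while create_doc_with_doctype() adds a
# DocumentType carrying one entity ("my-entity") and one notation ("my-notation")
# for the import/clone tests below (a paraphrase of the helpers above).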
class MinidomTest(unittest.TestCase):
def confirm(self, test, testname = "Test"):
self.assertTrue(test, testname)
def checkWholeText(self, node, s):
t = node.wholeText
self.confirm(t == s, "looking for %s, found %s" % (repr(s), repr(t)))
def testParseFromFile(self):
dom = parse(StringIO(open(tstfile).read()))
dom.unlink()
self.confirm(isinstance(dom,Document))
def testGetElementsByTagName(self):
dom = parse(tstfile)
self.confirm(dom.getElementsByTagName("LI") == \
dom.documentElement.getElementsByTagName("LI"))
dom.unlink()
def testInsertBefore(self):
dom = parseString("<doc><foo/></doc>")
root = dom.documentElement
elem = root.childNodes[0]
nelem = dom.createElement("element")
root.insertBefore(nelem, elem)
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2
and root.childNodes[0] is nelem
and root.childNodes.item(0) is nelem
and root.childNodes[1] is elem
and root.childNodes.item(1) is elem
and root.firstChild is nelem
and root.lastChild is elem
and root.toxml() == "<doc><element/><foo/></doc>"
, "testInsertBefore -- node properly placed in tree")
nelem = dom.createElement("element")
root.insertBefore(nelem, None)
self.confirm(len(root.childNodes) == 3
and root.childNodes.length == 3
and root.childNodes[1] is elem
and root.childNodes.item(1) is elem
and root.childNodes[2] is nelem
and root.childNodes.item(2) is nelem
and root.lastChild is nelem
and nelem.previousSibling is elem
and root.toxml() == "<doc><element/><foo/><element/></doc>"
, "testInsertBefore -- node properly placed in tree")
nelem2 = dom.createElement("bar")
root.insertBefore(nelem2, nelem)
self.confirm(len(root.childNodes) == 4
and root.childNodes.length == 4
and root.childNodes[2] is nelem2
and root.childNodes.item(2) is nelem2
and root.childNodes[3] is nelem
and root.childNodes.item(3) is nelem
and nelem2.nextSibling is nelem
and nelem.previousSibling is nelem2
and root.toxml() ==
"<doc><element/><foo/><bar/><element/></doc>"
, "testInsertBefore -- node properly placed in tree")
dom.unlink()
def _create_fragment_test_nodes(self):
dom = parseString("<doc/>")
orig = dom.createTextNode("original")
c1 = dom.createTextNode("foo")
c2 = dom.createTextNode("bar")
c3 = dom.createTextNode("bat")
dom.documentElement.appendChild(orig)
frag = dom.createDocumentFragment()
frag.appendChild(c1)
frag.appendChild(c2)
frag.appendChild(c3)
return dom, orig, c1, c2, c3, frag
def testInsertBeforeFragment(self):
dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
dom.documentElement.insertBefore(frag, None)
self.confirm(tuple(dom.documentElement.childNodes) ==
(orig, c1, c2, c3),
"insertBefore(<fragment>, None)")
frag.unlink()
dom.unlink()
dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
dom.documentElement.insertBefore(frag, orig)
self.confirm(tuple(dom.documentElement.childNodes) ==
(c1, c2, c3, orig),
"insertBefore(<fragment>, orig)")
frag.unlink()
dom.unlink()
def testAppendChild(self):
dom = parse(tstfile)
dom.documentElement.appendChild(dom.createComment(u"Hello"))
self.confirm(dom.documentElement.childNodes[-1].nodeName == "#comment")
self.confirm(dom.documentElement.childNodes[-1].data == "Hello")
dom.unlink()
def testAppendChildFragment(self):
dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
dom.documentElement.appendChild(frag)
self.confirm(tuple(dom.documentElement.childNodes) ==
(orig, c1, c2, c3),
"appendChild(<fragment>)")
frag.unlink()
dom.unlink()
def testReplaceChildFragment(self):
dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
dom.documentElement.replaceChild(frag, orig)
orig.unlink()
self.confirm(tuple(dom.documentElement.childNodes) == (c1, c2, c3),
"replaceChild(<fragment>)")
frag.unlink()
dom.unlink()
def testLegalChildren(self):
dom = Document()
elem = dom.createElement('element')
text = dom.createTextNode('text')
self.assertRaises(xml.dom.HierarchyRequestErr, dom.appendChild, text)
dom.appendChild(elem)
self.assertRaises(xml.dom.HierarchyRequestErr, dom.insertBefore, text,
elem)
self.assertRaises(xml.dom.HierarchyRequestErr, dom.replaceChild, text,
elem)
nodemap = elem.attributes
self.assertRaises(xml.dom.HierarchyRequestErr, nodemap.setNamedItem,
text)
self.assertRaises(xml.dom.HierarchyRequestErr, nodemap.setNamedItemNS,
text)
elem.appendChild(text)
dom.unlink()
def testNamedNodeMapSetItem(self):
dom = Document()
elem = dom.createElement('element')
attrs = elem.attributes
attrs["foo"] = "bar"
a = attrs.item(0)
self.confirm(a.ownerDocument is dom,
"NamedNodeMap.__setitem__() sets ownerDocument")
self.confirm(a.ownerElement is elem,
"NamedNodeMap.__setitem__() sets ownerElement")
self.confirm(a.value == "bar",
"NamedNodeMap.__setitem__() sets value")
self.confirm(a.nodeValue == "bar",
"NamedNodeMap.__setitem__() sets nodeValue")
elem.unlink()
dom.unlink()
def testNonZero(self):
dom = parse(tstfile)
self.confirm(dom)# should not be zero
dom.appendChild(dom.createComment("foo"))
self.confirm(not dom.childNodes[-1].childNodes)
dom.unlink()
def testUnlink(self):
dom = parse(tstfile)
dom.unlink()
def testElement(self):
dom = Document()
dom.appendChild(dom.createElement("abc"))
self.confirm(dom.documentElement)
dom.unlink()
def testAAA(self):
dom = parseString("<abc/>")
el = dom.documentElement
el.setAttribute("spam", "jam2")
self.confirm(el.toxml() == '<abc spam="jam2"/>', "testAAA")
a = el.getAttributeNode("spam")
self.confirm(a.ownerDocument is dom,
"setAttribute() sets ownerDocument")
self.confirm(a.ownerElement is dom.documentElement,
"setAttribute() sets ownerElement")
dom.unlink()
def testAAB(self):
dom = parseString("<abc/>")
el = dom.documentElement
el.setAttribute("spam", "jam")
el.setAttribute("spam", "jam2")
self.confirm(el.toxml() == '<abc spam="jam2"/>', "testAAB")
dom.unlink()
def testAddAttr(self):
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
child.setAttribute("def", "ghi")
self.confirm(child.getAttribute("def") == "ghi")
self.confirm(child.attributes["def"].value == "ghi")
child.setAttribute("jkl", "mno")
self.confirm(child.getAttribute("jkl") == "mno")
self.confirm(child.attributes["jkl"].value == "mno")
self.confirm(len(child.attributes) == 2)
child.setAttribute("def", "newval")
self.confirm(child.getAttribute("def") == "newval")
self.confirm(child.attributes["def"].value == "newval")
self.confirm(len(child.attributes) == 2)
dom.unlink()
def testDeleteAttr(self):
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
self.confirm(len(child.attributes) == 0)
child.setAttribute("def", "ghi")
self.confirm(len(child.attributes) == 1)
del child.attributes["def"]
self.confirm(len(child.attributes) == 0)
dom.unlink()
def testRemoveAttr(self):
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
child.setAttribute("def", "ghi")
self.confirm(len(child.attributes) == 1)
child.removeAttribute("def")
self.confirm(len(child.attributes) == 0)
dom.unlink()
def testRemoveAttrNS(self):
dom = Document()
child = dom.appendChild(
dom.createElementNS("http://www.python.org", "python:abc"))
child.setAttributeNS("http://www.w3.org", "xmlns:python",
"http://www.python.org")
child.setAttributeNS("http://www.python.org", "python:abcattr", "foo")
self.confirm(len(child.attributes) == 2)
child.removeAttributeNS("http://www.python.org", "abcattr")
self.confirm(len(child.attributes) == 1)
dom.unlink()
def testRemoveAttributeNode(self):
dom = Document()
child = dom.appendChild(dom.createElement("foo"))
child.setAttribute("spam", "jam")
self.confirm(len(child.attributes) == 1)
node = child.getAttributeNode("spam")
child.removeAttributeNode(node)
self.confirm(len(child.attributes) == 0
and child.getAttributeNode("spam") is None)
dom.unlink()
def testChangeAttr(self):
dom = parseString("<abc/>")
el = dom.documentElement
el.setAttribute("spam", "jam")
self.confirm(len(el.attributes) == 1)
el.setAttribute("spam", "bam")
# Set this attribute to be an ID and make sure that doesn't change
# when changing the value:
el.setIdAttribute("spam")
self.confirm(len(el.attributes) == 1
and el.attributes["spam"].value == "bam"
and el.attributes["spam"].nodeValue == "bam"
and el.getAttribute("spam") == "bam"
and el.getAttributeNode("spam").isId)
el.attributes["spam"] = "ham"
self.confirm(len(el.attributes) == 1
and el.attributes["spam"].value == "ham"
and el.attributes["spam"].nodeValue == "ham"
and el.getAttribute("spam") == "ham"
and el.attributes["spam"].isId)
el.setAttribute("spam2", "bam")
self.confirm(len(el.attributes) == 2
and el.attributes["spam"].value == "ham"
and el.attributes["spam"].nodeValue == "ham"
and el.getAttribute("spam") == "ham"
and el.attributes["spam2"].value == "bam"
and el.attributes["spam2"].nodeValue == "bam"
and el.getAttribute("spam2") == "bam")
el.attributes["spam2"] = "bam2"
self.confirm(len(el.attributes) == 2
and el.attributes["spam"].value == "ham"
and el.attributes["spam"].nodeValue == "ham"
and el.getAttribute("spam") == "ham"
and el.attributes["spam2"].value == "bam2"
and el.attributes["spam2"].nodeValue == "bam2"
and el.getAttribute("spam2") == "bam2")
dom.unlink()
def testGetAttrList(self):
pass
def testGetAttrValues(self): pass
def testGetAttrLength(self): pass
def testGetAttribute(self): pass
def testGetAttributeNS(self): pass
def testGetAttributeNode(self): pass
def testGetElementsByTagNameNS(self):
d="""<foo xmlns:minidom='http://pyxml.sf.net/minidom'>
<minidom:myelem/>
</foo>"""
dom = parseString(d)
elems = dom.getElementsByTagNameNS("http://pyxml.sf.net/minidom",
"myelem")
self.confirm(len(elems) == 1
and elems[0].namespaceURI == "http://pyxml.sf.net/minidom"
and elems[0].localName == "myelem"
and elems[0].prefix == "minidom"
and elems[0].tagName == "minidom:myelem"
and elems[0].nodeName == "minidom:myelem")
dom.unlink()
def get_empty_nodelist_from_elements_by_tagName_ns_helper(self, doc, nsuri,
lname):
nodelist = doc.getElementsByTagNameNS(nsuri, lname)
self.confirm(len(nodelist) == 0)
def testGetEmptyNodeListFromElementsByTagNameNS(self):
doc = parseString('<doc/>')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, 'http://xml.python.org/namespaces/a', 'localname')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, '*', 'splat')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, 'http://xml.python.org/namespaces/a', '*')
doc = parseString('<doc xmlns="http://xml.python.org/splat"><e/></doc>')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, "http://xml.python.org/splat", "not-there")
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, "*", "not-there")
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, "http://somewhere.else.net/not-there", "e")
def testElementReprAndStr(self):
dom = Document()
el = dom.appendChild(dom.createElement("abc"))
string1 = repr(el)
string2 = str(el)
self.confirm(string1 == string2)
dom.unlink()
def testElementReprAndStrUnicode(self):
dom = Document()
el = dom.appendChild(dom.createElement(u"abc"))
string1 = repr(el)
string2 = str(el)
self.confirm(string1 == string2)
dom.unlink()
def testElementReprAndStrUnicodeNS(self):
dom = Document()
el = dom.appendChild(
dom.createElementNS(u"http://www.slashdot.org", u"slash:abc"))
string1 = repr(el)
string2 = str(el)
self.confirm(string1 == string2)
self.confirm("slash:abc" in string1)
dom.unlink()
def testAttributeRepr(self):
dom = Document()
el = dom.appendChild(dom.createElement(u"abc"))
node = el.setAttribute("abc", "def")
self.confirm(str(node) == repr(node))
dom.unlink()
def testTextNodeRepr(self): pass
def testWriteXML(self):
str = '<?xml version="1.0" ?><a b="c"/>'
dom = parseString(str)
domstr = dom.toxml()
dom.unlink()
self.confirm(str == domstr)
def testAltNewline(self):
str = '<?xml version="1.0" ?>\n<a b="c"/>\n'
dom = parseString(str)
domstr = dom.toprettyxml(newl="\r\n")
dom.unlink()
self.confirm(domstr == str.replace("\n", "\r\n"))
def test_toprettyxml_with_text_nodes(self):
# see issue #4147, text nodes are not indented
decl = '<?xml version="1.0" ?>\n'
self.assertEqual(parseString('<B>A</B>').toprettyxml(),
decl + '<B>A</B>\n')
self.assertEqual(parseString('<C>A<B>A</B></C>').toprettyxml(),
decl + '<C>\n\tA\n\t<B>A</B>\n</C>\n')
self.assertEqual(parseString('<C><B>A</B>A</C>').toprettyxml(),
decl + '<C>\n\t<B>A</B>\n\tA\n</C>\n')
self.assertEqual(parseString('<C><B>A</B><B>A</B></C>').toprettyxml(),
decl + '<C>\n\t<B>A</B>\n\t<B>A</B>\n</C>\n')
self.assertEqual(parseString('<C><B>A</B>A<B>A</B></C>').toprettyxml(),
decl + '<C>\n\t<B>A</B>\n\tA\n\t<B>A</B>\n</C>\n')
def test_toprettyxml_with_adjacent_text_nodes(self):
# see issue #4147, adjacent text nodes are indented normally
dom = Document()
elem = dom.createElement(u'elem')
elem.appendChild(dom.createTextNode(u'TEXT'))
elem.appendChild(dom.createTextNode(u'TEXT'))
dom.appendChild(elem)
decl = '<?xml version="1.0" ?>\n'
self.assertEqual(dom.toprettyxml(),
decl + '<elem>\n\tTEXT\n\tTEXT\n</elem>\n')
def test_toprettyxml_preserves_content_of_text_node(self):
# see issue #4147
for str in ('<B>A</B>', '<A><B>C</B></A>'):
dom = parseString(str)
dom2 = parseString(dom.toprettyxml())
self.assertEqual(
dom.getElementsByTagName('B')[0].childNodes[0].toxml(),
dom2.getElementsByTagName('B')[0].childNodes[0].toxml())
def testProcessingInstruction(self):
dom = parseString('<e><?mypi \t\n data \t\n ?></e>')
pi = dom.documentElement.firstChild
self.confirm(pi.target == "mypi"
and pi.data == "data \t\n "
and pi.nodeName == "mypi"
and pi.nodeType == Node.PROCESSING_INSTRUCTION_NODE
and pi.attributes is None
and not pi.hasChildNodes()
and len(pi.childNodes) == 0
and pi.firstChild is None
and pi.lastChild is None
and pi.localName is None
and pi.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testProcessingInstructionRepr(self): pass
def testTextRepr(self): pass
def testWriteText(self): pass
def testDocumentElement(self): pass
def testTooManyDocumentElements(self):
doc = parseString("<doc/>")
elem = doc.createElement("extra")
# Should raise an exception when adding an extra document element.
self.assertRaises(xml.dom.HierarchyRequestErr, doc.appendChild, elem)
elem.unlink()
doc.unlink()
def testCreateElementNS(self): pass
def testCreateAttributeNS(self): pass
def testParse(self): pass
def testParseString(self): pass
def testComment(self): pass
def testAttrListItem(self): pass
def testAttrListItems(self): pass
def testAttrListItemNS(self): pass
def testAttrListKeys(self): pass
def testAttrListKeysNS(self): pass
def testRemoveNamedItem(self):
doc = parseString("<doc a=''/>")
e = doc.documentElement
attrs = e.attributes
a1 = e.getAttributeNode("a")
a2 = attrs.removeNamedItem("a")
self.confirm(a1.isSameNode(a2))
self.assertRaises(xml.dom.NotFoundErr, attrs.removeNamedItem, "a")
def testRemoveNamedItemNS(self):
doc = parseString("<doc xmlns:a='http://xml.python.org/' a:b=''/>")
e = doc.documentElement
attrs = e.attributes
a1 = e.getAttributeNodeNS("http://xml.python.org/", "b")
a2 = attrs.removeNamedItemNS("http://xml.python.org/", "b")
self.confirm(a1.isSameNode(a2))
self.assertRaises(xml.dom.NotFoundErr, attrs.removeNamedItemNS,
"http://xml.python.org/", "b")
def testAttrListValues(self): pass
def testAttrListLength(self): pass
def testAttrList__getitem__(self): pass
def testAttrList__setitem__(self): pass
def testSetAttrValueandNodeValue(self): pass
def testParseElement(self): pass
def testParseAttributes(self): pass
def testParseElementNamespaces(self): pass
def testParseAttributeNamespaces(self): pass
def testParseProcessingInstructions(self): pass
def testChildNodes(self): pass
def testFirstChild(self): pass
def testHasChildNodes(self): pass
def _testCloneElementCopiesAttributes(self, e1, e2, test):
attrs1 = e1.attributes
attrs2 = e2.attributes
keys1 = attrs1.keys()
keys2 = attrs2.keys()
keys1.sort()
keys2.sort()
self.confirm(keys1 == keys2, "clone of element has same attribute keys")
for i in range(len(keys1)):
a1 = attrs1.item(i)
a2 = attrs2.item(i)
self.confirm(a1 is not a2
and a1.value == a2.value
and a1.nodeValue == a2.nodeValue
and a1.namespaceURI == a2.namespaceURI
and a1.localName == a2.localName
, "clone of attribute node has proper attribute values")
self.confirm(a2.ownerElement is e2,
"clone of attribute node correctly owned")
def _setupCloneElement(self, deep):
dom = parseString("<doc attr='value'><foo/></doc>")
root = dom.documentElement
clone = root.cloneNode(deep)
self._testCloneElementCopiesAttributes(
root, clone, "testCloneElement" + (deep and "Deep" or "Shallow"))
# mutilate the original so shared data is detected
root.tagName = root.nodeName = "MODIFIED"
root.setAttribute("attr", "NEW VALUE")
root.setAttribute("added", "VALUE")
return dom, clone
def testCloneElementShallow(self):
dom, clone = self._setupCloneElement(0)
self.confirm(len(clone.childNodes) == 0
and clone.childNodes.length == 0
and clone.parentNode is None
and clone.toxml() == '<doc attr="value"/>'
, "testCloneElementShallow")
dom.unlink()
def testCloneElementDeep(self):
dom, clone = self._setupCloneElement(1)
self.confirm(len(clone.childNodes) == 1
and clone.childNodes.length == 1
and clone.parentNode is None
and clone.toxml() == '<doc attr="value"><foo/></doc>'
, "testCloneElementDeep")
dom.unlink()
def testCloneDocumentShallow(self):
doc = parseString("<?xml version='1.0'?>\n"
"<!-- comment -->"
"<!DOCTYPE doc [\n"
"<!NOTATION notation SYSTEM 'http://xml.python.org/'>\n"
"]>\n"
"<doc attr='value'/>")
doc2 = doc.cloneNode(0)
self.confirm(doc2 is None,
"testCloneDocumentShallow:"
" shallow cloning of documents makes no sense!")
def testCloneDocumentDeep(self):
doc = parseString("<?xml version='1.0'?>\n"
"<!-- comment -->"
"<!DOCTYPE doc [\n"
"<!NOTATION notation SYSTEM 'http://xml.python.org/'>\n"
"]>\n"
"<doc attr='value'/>")
doc2 = doc.cloneNode(1)
self.confirm(not (doc.isSameNode(doc2) or doc2.isSameNode(doc)),
"testCloneDocumentDeep: document objects not distinct")
self.confirm(len(doc.childNodes) == len(doc2.childNodes),
"testCloneDocumentDeep: wrong number of Document children")
self.confirm(doc2.documentElement.nodeType == Node.ELEMENT_NODE,
"testCloneDocumentDeep: documentElement not an ELEMENT_NODE")
self.confirm(doc2.documentElement.ownerDocument.isSameNode(doc2),
"testCloneDocumentDeep: documentElement owner is not new document")
self.confirm(not doc.documentElement.isSameNode(doc2.documentElement),
"testCloneDocumentDeep: documentElement should not be shared")
if doc.doctype is not None:
# check the doctype iff the original DOM maintained it
self.confirm(doc2.doctype.nodeType == Node.DOCUMENT_TYPE_NODE,
"testCloneDocumentDeep: doctype not a DOCUMENT_TYPE_NODE")
self.confirm(doc2.doctype.ownerDocument.isSameNode(doc2))
self.confirm(not doc.doctype.isSameNode(doc2.doctype))
def testCloneDocumentTypeDeepOk(self):
doctype = create_nonempty_doctype()
clone = doctype.cloneNode(1)
self.confirm(clone is not None
and clone.nodeName == doctype.nodeName
and clone.name == doctype.name
and clone.publicId == doctype.publicId
and clone.systemId == doctype.systemId
and len(clone.entities) == len(doctype.entities)
and clone.entities.item(len(clone.entities)) is None
and len(clone.notations) == len(doctype.notations)
and clone.notations.item(len(clone.notations)) is None
and len(clone.childNodes) == 0)
for i in range(len(doctype.entities)):
se = doctype.entities.item(i)
ce = clone.entities.item(i)
self.confirm((not se.isSameNode(ce))
and (not ce.isSameNode(se))
and ce.nodeName == se.nodeName
and ce.notationName == se.notationName
and ce.publicId == se.publicId
and ce.systemId == se.systemId
and ce.encoding == se.encoding
and ce.actualEncoding == se.actualEncoding
and ce.version == se.version)
for i in range(len(doctype.notations)):
sn = doctype.notations.item(i)
cn = clone.notations.item(i)
self.confirm((not sn.isSameNode(cn))
and (not cn.isSameNode(sn))
and cn.nodeName == sn.nodeName
and cn.publicId == sn.publicId
and cn.systemId == sn.systemId)
def testCloneDocumentTypeDeepNotOk(self):
doc = create_doc_with_doctype()
clone = doc.doctype.cloneNode(1)
self.confirm(clone is None, "testCloneDocumentTypeDeepNotOk")
def testCloneDocumentTypeShallowOk(self):
doctype = create_nonempty_doctype()
clone = doctype.cloneNode(0)
self.confirm(clone is not None
and clone.nodeName == doctype.nodeName
and clone.name == doctype.name
and clone.publicId == doctype.publicId
and clone.systemId == doctype.systemId
and len(clone.entities) == 0
and clone.entities.item(0) is None
and len(clone.notations) == 0
and clone.notations.item(0) is None
and len(clone.childNodes) == 0)
def testCloneDocumentTypeShallowNotOk(self):
doc = create_doc_with_doctype()
clone = doc.doctype.cloneNode(0)
self.confirm(clone is None, "testCloneDocumentTypeShallowNotOk")
def check_import_document(self, deep, testName):
doc1 = parseString("<doc/>")
doc2 = parseString("<doc/>")
self.assertRaises(xml.dom.NotSupportedErr, doc1.importNode, doc2, deep)
def testImportDocumentShallow(self):
self.check_import_document(0, "testImportDocumentShallow")
def testImportDocumentDeep(self):
self.check_import_document(1, "testImportDocumentDeep")
def testImportDocumentTypeShallow(self):
src = create_doc_with_doctype()
target = create_doc_without_doctype()
self.assertRaises(xml.dom.NotSupportedErr, target.importNode,
src.doctype, 0)
def testImportDocumentTypeDeep(self):
src = create_doc_with_doctype()
target = create_doc_without_doctype()
self.assertRaises(xml.dom.NotSupportedErr, target.importNode,
src.doctype, 1)
# Testing attribute clones uses a helper, and should always be deep,
# even if the argument to cloneNode is false.
def check_clone_attribute(self, deep, testName):
doc = parseString("<doc attr='value'/>")
attr = doc.documentElement.getAttributeNode("attr")
self.assertNotEqual(attr, None)
clone = attr.cloneNode(deep)
self.confirm(not clone.isSameNode(attr))
self.confirm(not attr.isSameNode(clone))
self.confirm(clone.ownerElement is None,
testName + ": ownerElement should be None")
self.confirm(clone.ownerDocument.isSameNode(attr.ownerDocument),
testName + ": ownerDocument does not match")
self.confirm(clone.specified,
testName + ": cloned attribute must have specified == True")
def testCloneAttributeShallow(self):
self.check_clone_attribute(0, "testCloneAttributeShallow")
def testCloneAttributeDeep(self):
self.check_clone_attribute(1, "testCloneAttributeDeep")
def check_clone_pi(self, deep, testName):
doc = parseString("<?target data?><doc/>")
pi = doc.firstChild
self.assertEqual(pi.nodeType, Node.PROCESSING_INSTRUCTION_NODE)
clone = pi.cloneNode(deep)
self.confirm(clone.target == pi.target
and clone.data == pi.data)
def testClonePIShallow(self):
self.check_clone_pi(0, "testClonePIShallow")
def testClonePIDeep(self):
self.check_clone_pi(1, "testClonePIDeep")
def testNormalize(self):
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createTextNode("first"))
root.appendChild(doc.createTextNode("second"))
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2,
"testNormalize -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 1
and root.childNodes.length == 1
and root.firstChild is root.lastChild
and root.firstChild.data == "firstsecond"
, "testNormalize -- result")
doc.unlink()
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createTextNode(""))
doc.normalize()
self.confirm(len(root.childNodes) == 0
and root.childNodes.length == 0,
"testNormalize -- single empty node removed")
doc.unlink()
def testNormalizeCombineAndNextSibling(self):
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createTextNode("first"))
root.appendChild(doc.createTextNode("second"))
root.appendChild(doc.createElement("i"))
self.confirm(len(root.childNodes) == 3
and root.childNodes.length == 3,
"testNormalizeCombineAndNextSibling -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2
and root.firstChild.data == "firstsecond"
and root.firstChild is not root.lastChild
and root.firstChild.nextSibling is root.lastChild
and root.firstChild.previousSibling is None
and root.lastChild.previousSibling is root.firstChild
and root.lastChild.nextSibling is None
, "testNormalizeCombinedAndNextSibling -- result")
doc.unlink()
def testNormalizeDeleteWithPrevSibling(self):
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createTextNode("first"))
root.appendChild(doc.createTextNode(""))
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2,
"testNormalizeDeleteWithPrevSibling -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 1
and root.childNodes.length == 1
and root.firstChild.data == "first"
and root.firstChild is root.lastChild
and root.firstChild.nextSibling is None
and root.firstChild.previousSibling is None
, "testNormalizeDeleteWithPrevSibling -- result")
doc.unlink()
def testNormalizeDeleteWithNextSibling(self):
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createTextNode(""))
root.appendChild(doc.createTextNode("second"))
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2,
"testNormalizeDeleteWithNextSibling -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 1
and root.childNodes.length == 1
and root.firstChild.data == "second"
and root.firstChild is root.lastChild
and root.firstChild.nextSibling is None
and root.firstChild.previousSibling is None
, "testNormalizeDeleteWithNextSibling -- result")
doc.unlink()
def testNormalizeDeleteWithTwoNonTextSiblings(self):
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createElement("i"))
root.appendChild(doc.createTextNode(""))
root.appendChild(doc.createElement("i"))
self.confirm(len(root.childNodes) == 3
and root.childNodes.length == 3,
"testNormalizeDeleteWithTwoSiblings -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2
and root.firstChild is not root.lastChild
and root.firstChild.nextSibling is root.lastChild
and root.firstChild.previousSibling is None
and root.lastChild.previousSibling is root.firstChild
and root.lastChild.nextSibling is None
, "testNormalizeDeleteWithTwoSiblings -- result")
doc.unlink()
def testNormalizeDeleteAndCombine(self):
doc = parseString("<doc/>")
root = doc.documentElement
root.appendChild(doc.createTextNode(""))
root.appendChild(doc.createTextNode("second"))
root.appendChild(doc.createTextNode(""))
root.appendChild(doc.createTextNode("fourth"))
root.appendChild(doc.createTextNode(""))
self.confirm(len(root.childNodes) == 5
and root.childNodes.length == 5,
"testNormalizeDeleteAndCombine -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 1
and root.childNodes.length == 1
and root.firstChild is root.lastChild
and root.firstChild.data == "secondfourth"
and root.firstChild.previousSibling is None
and root.firstChild.nextSibling is None
, "testNormalizeDeleteAndCombine -- result")
doc.unlink()
def testNormalizeRecursion(self):
doc = parseString("<doc>"
"<o>"
"<i/>"
"t"
#
#x
"</o>"
"<o>"
"<o>"
"t2"
#x2
"</o>"
"t3"
#x3
"</o>"
#
"</doc>")
root = doc.documentElement
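# The bare '#' and '#x*' comments in the literal above mark where empty and
# 'x*' text nodes are appended below, before normalize() merges and strips them.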
root.childNodes[0].appendChild(doc.createTextNode(""))
root.childNodes[0].appendChild(doc.createTextNode("x"))
root.childNodes[1].childNodes[0].appendChild(doc.createTextNode("x2"))
root.childNodes[1].appendChild(doc.createTextNode("x3"))
root.appendChild(doc.createTextNode(""))
self.confirm(len(root.childNodes) == 3
and root.childNodes.length == 3
and len(root.childNodes[0].childNodes) == 4
and root.childNodes[0].childNodes.length == 4
and len(root.childNodes[1].childNodes) == 3
and root.childNodes[1].childNodes.length == 3
and len(root.childNodes[1].childNodes[0].childNodes) == 2
and root.childNodes[1].childNodes[0].childNodes.length == 2
, "testNormalize2 -- preparation")
doc.normalize()
self.confirm(len(root.childNodes) == 2
and root.childNodes.length == 2
and len(root.childNodes[0].childNodes) == 2
and root.childNodes[0].childNodes.length == 2
and len(root.childNodes[1].childNodes) == 2
and root.childNodes[1].childNodes.length == 2
and len(root.childNodes[1].childNodes[0].childNodes) == 1
and root.childNodes[1].childNodes[0].childNodes.length == 1
, "testNormalize2 -- childNodes lengths")
self.confirm(root.childNodes[0].childNodes[1].data == "tx"
and root.childNodes[1].childNodes[0].childNodes[0].data == "t2x2"
and root.childNodes[1].childNodes[1].data == "t3x3"
, "testNormalize2 -- joined text fields")
self.confirm(root.childNodes[0].childNodes[1].nextSibling is None
and root.childNodes[0].childNodes[1].previousSibling
is root.childNodes[0].childNodes[0]
and root.childNodes[0].childNodes[0].previousSibling is None
and root.childNodes[0].childNodes[0].nextSibling
is root.childNodes[0].childNodes[1]
and root.childNodes[1].childNodes[1].nextSibling is None
and root.childNodes[1].childNodes[1].previousSibling
is root.childNodes[1].childNodes[0]
and root.childNodes[1].childNodes[0].previousSibling is None
and root.childNodes[1].childNodes[0].nextSibling
is root.childNodes[1].childNodes[1]
, "testNormalize2 -- sibling pointers")
doc.unlink()
def testBug0777884(self):
doc = parseString("<o>text</o>")
text = doc.documentElement.childNodes[0]
self.assertEqual(text.nodeType, Node.TEXT_NODE)
# Should run quietly, doing nothing.
text.normalize()
doc.unlink()
def testBug1433694(self):
doc = parseString("<o><i/>t</o>")
node = doc.documentElement
node.childNodes[1].nodeValue = ""
node.normalize()
self.confirm(node.childNodes[-1].nextSibling is None,
"Final child's .nextSibling should be None")
def testSiblings(self):
doc = parseString("<doc><?pi?>text?<elm/></doc>")
root = doc.documentElement
(pi, text, elm) = root.childNodes
self.confirm(pi.nextSibling is text and
pi.previousSibling is None and
text.nextSibling is elm and
text.previousSibling is pi and
elm.nextSibling is None and
elm.previousSibling is text, "testSiblings")
doc.unlink()
def testParents(self):
doc = parseString(
"<doc><elm1><elm2/><elm2><elm3/></elm2></elm1></doc>")
root = doc.documentElement
elm1 = root.childNodes[0]
(elm2a, elm2b) = elm1.childNodes
elm3 = elm2b.childNodes[0]
self.confirm(root.parentNode is doc and
elm1.parentNode is root and
elm2a.parentNode is elm1 and
elm2b.parentNode is elm1 and
elm3.parentNode is elm2b, "testParents")
doc.unlink()
def testNodeListItem(self):
doc = parseString("<doc><e/><e/></doc>")
children = doc.childNodes
docelem = children[0]
self.confirm(children[0] is children.item(0)
and children.item(1) is None
and docelem.childNodes.item(0) is docelem.childNodes[0]
and docelem.childNodes.item(1) is docelem.childNodes[1]
and docelem.childNodes.item(0).childNodes.item(0) is None,
"test NodeList.item()")
doc.unlink()
def testSAX2DOM(self):
from xml.dom import pulldom
sax2dom = pulldom.SAX2DOM()
sax2dom.startDocument()
sax2dom.startElement("doc", {})
sax2dom.characters("text")
sax2dom.startElement("subelm", {})
sax2dom.characters("text")
sax2dom.endElement("subelm")
sax2dom.characters("text")
sax2dom.endElement("doc")
sax2dom.endDocument()
doc = sax2dom.document
root = doc.documentElement
(text1, elm1, text2) = root.childNodes
text3 = elm1.childNodes[0]
self.confirm(text1.previousSibling is None and
text1.nextSibling is elm1 and
elm1.previousSibling is text1 and
elm1.nextSibling is text2 and
text2.previousSibling is elm1 and
text2.nextSibling is None and
text3.previousSibling is None and
text3.nextSibling is None, "testSAX2DOM - siblings")
self.confirm(root.parentNode is doc and
text1.parentNode is root and
elm1.parentNode is root and
text2.parentNode is root and
text3.parentNode is elm1, "testSAX2DOM - parents")
doc.unlink()
def testEncodings(self):
doc = parseString('<foo>&#x20ac;</foo>')
self.confirm(doc.toxml() == u'<?xml version="1.0" ?><foo>\u20ac</foo>'
and doc.toxml('utf-8') ==
'<?xml version="1.0" encoding="utf-8"?><foo>\xe2\x82\xac</foo>'
and doc.toxml('iso-8859-15') ==
'<?xml version="1.0" encoding="iso-8859-15"?><foo>\xa4</foo>',
"testEncodings - encoding EURO SIGN")
# Verify that character decoding errors raise exceptions instead
# of crashing
self.assertRaises(UnicodeDecodeError, parseString,
'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
doc.unlink()
class UserDataHandler:
called = 0
def handle(self, operation, key, data, src, dst):
dst.setUserData(key, data + 1, self)
src.setUserData(key, None, None)
self.called = 1
def testUserData(self):
dom = Document()
n = dom.createElement('e')
self.confirm(n.getUserData("foo") is None)
n.setUserData("foo", None, None)
self.confirm(n.getUserData("foo") is None)
n.setUserData("foo", 12, 12)
n.setUserData("bar", 13, 13)
self.confirm(n.getUserData("foo") == 12)
self.confirm(n.getUserData("bar") == 13)
n.setUserData("foo", None, None)
self.confirm(n.getUserData("foo") is None)
self.confirm(n.getUserData("bar") == 13)
handler = self.UserDataHandler()
n.setUserData("bar", 12, handler)
c = n.cloneNode(1)
self.confirm(handler.called
and n.getUserData("bar") is None
and c.getUserData("bar") == 13)
n.unlink()
c.unlink()
dom.unlink()
def checkRenameNodeSharedConstraints(self, doc, node):
# Make sure illegal NS usage is detected:
self.assertRaises(xml.dom.NamespaceErr, doc.renameNode, node,
"http://xml.python.org/ns", "xmlns:foo")
doc2 = parseString("<doc/>")
self.assertRaises(xml.dom.WrongDocumentErr, doc2.renameNode, node,
xml.dom.EMPTY_NAMESPACE, "foo")
def testRenameAttribute(self):
doc = parseString("<doc a='v'/>")
elem = doc.documentElement
attrmap = elem.attributes
attr = elem.attributes['a']
# Simple renaming
attr = doc.renameNode(attr, xml.dom.EMPTY_NAMESPACE, "b")
self.confirm(attr.name == "b"
and attr.nodeName == "b"
and attr.localName is None
and attr.namespaceURI == xml.dom.EMPTY_NAMESPACE
and attr.prefix is None
and attr.value == "v"
and elem.getAttributeNode("a") is None
and elem.getAttributeNode("b").isSameNode(attr)
and attrmap["b"].isSameNode(attr)
and attr.ownerDocument.isSameNode(doc)
and attr.ownerElement.isSameNode(elem))
# Rename to have a namespace, no prefix
attr = doc.renameNode(attr, "http://xml.python.org/ns", "c")
self.confirm(attr.name == "c"
and attr.nodeName == "c"
and attr.localName == "c"
and attr.namespaceURI == "http://xml.python.org/ns"
and attr.prefix is None
and attr.value == "v"
and elem.getAttributeNode("a") is None
and elem.getAttributeNode("b") is None
and elem.getAttributeNode("c").isSameNode(attr)
and elem.getAttributeNodeNS(
"http://xml.python.org/ns", "c").isSameNode(attr)
and attrmap["c"].isSameNode(attr)
and attrmap[("http://xml.python.org/ns", "c")].isSameNode(attr))
# Rename to have a namespace, with prefix
attr = doc.renameNode(attr, "http://xml.python.org/ns2", "p:d")
self.confirm(attr.name == "p:d"
and attr.nodeName == "p:d"
and attr.localName == "d"
and attr.namespaceURI == "http://xml.python.org/ns2"
and attr.prefix == "p"
and attr.value == "v"
and elem.getAttributeNode("a") is None
and elem.getAttributeNode("b") is None
and elem.getAttributeNode("c") is None
and elem.getAttributeNodeNS(
"http://xml.python.org/ns", "c") is None
and elem.getAttributeNode("p:d").isSameNode(attr)
and elem.getAttributeNodeNS(
"http://xml.python.org/ns2", "d").isSameNode(attr)
and attrmap["p:d"].isSameNode(attr)
and attrmap[("http://xml.python.org/ns2", "d")].isSameNode(attr))
# Rename back to a simple non-NS node
attr = doc.renameNode(attr, xml.dom.EMPTY_NAMESPACE, "e")
self.confirm(attr.name == "e"
and attr.nodeName == "e"
and attr.localName is None
and attr.namespaceURI == xml.dom.EMPTY_NAMESPACE
and attr.prefix is None
and attr.value == "v"
and elem.getAttributeNode("a") is None
and elem.getAttributeNode("b") is None
and elem.getAttributeNode("c") is None
and elem.getAttributeNode("p:d") is None
and elem.getAttributeNodeNS(
"http://xml.python.org/ns", "c") is None
and elem.getAttributeNode("e").isSameNode(attr)
and attrmap["e"].isSameNode(attr))
self.assertRaises(xml.dom.NamespaceErr, doc.renameNode, attr,
"http://xml.python.org/ns", "xmlns")
self.checkRenameNodeSharedConstraints(doc, attr)
doc.unlink()
def testRenameElement(self):
doc = parseString("<doc/>")
elem = doc.documentElement
# Simple renaming
elem = doc.renameNode(elem, xml.dom.EMPTY_NAMESPACE, "a")
self.confirm(elem.tagName == "a"
and elem.nodeName == "a"
and elem.localName is None
and elem.namespaceURI == xml.dom.EMPTY_NAMESPACE
and elem.prefix is None
and elem.ownerDocument.isSameNode(doc))
# Rename to have a namespace, no prefix
elem = doc.renameNode(elem, "http://xml.python.org/ns", "b")
self.confirm(elem.tagName == "b"
and elem.nodeName == "b"
and elem.localName == "b"
and elem.namespaceURI == "http://xml.python.org/ns"
and elem.prefix is None
and elem.ownerDocument.isSameNode(doc))
# Rename to have a namespace, with prefix
elem = doc.renameNode(elem, "http://xml.python.org/ns2", "p:c")
self.confirm(elem.tagName == "p:c"
and elem.nodeName == "p:c"
and elem.localName == "c"
and elem.namespaceURI == "http://xml.python.org/ns2"
and elem.prefix == "p"
and elem.ownerDocument.isSameNode(doc))
# Rename back to a simple non-NS node
elem = doc.renameNode(elem, xml.dom.EMPTY_NAMESPACE, "d")
self.confirm(elem.tagName == "d"
and elem.nodeName == "d"
and elem.localName is None
and elem.namespaceURI == xml.dom.EMPTY_NAMESPACE
and elem.prefix is None
and elem.ownerDocument.isSameNode(doc))
self.checkRenameNodeSharedConstraints(doc, elem)
doc.unlink()
def testRenameOther(self):
# We have to create a comment node explicitly since not all DOM
# builders used with minidom add comments to the DOM.
doc = xml.dom.minidom.getDOMImplementation().createDocument(
xml.dom.EMPTY_NAMESPACE, "e", None)
node = doc.createComment("comment")
self.assertRaises(xml.dom.NotSupportedErr, doc.renameNode, node,
xml.dom.EMPTY_NAMESPACE, "foo")
doc.unlink()
def testWholeText(self):
doc = parseString("<doc>a</doc>")
elem = doc.documentElement
text = elem.childNodes[0]
self.assertEqual(text.nodeType, Node.TEXT_NODE)
self.checkWholeText(text, "a")
elem.appendChild(doc.createTextNode("b"))
self.checkWholeText(text, "ab")
elem.insertBefore(doc.createCDATASection("c"), text)
self.checkWholeText(text, "cab")
# make sure we don't cross other nodes
splitter = doc.createComment("comment")
elem.appendChild(splitter)
text2 = doc.createTextNode("d")
elem.appendChild(text2)
self.checkWholeText(text, "cab")
self.checkWholeText(text2, "d")
x = doc.createElement("x")
elem.replaceChild(x, splitter)
splitter = x
self.checkWholeText(text, "cab")
self.checkWholeText(text2, "d")
x = doc.createProcessingInstruction("y", "z")
elem.replaceChild(x, splitter)
splitter = x
self.checkWholeText(text, "cab")
self.checkWholeText(text2, "d")
elem.removeChild(splitter)
self.checkWholeText(text, "cabd")
self.checkWholeText(text2, "cabd")
def testPatch1094164(self):
doc = parseString("<doc><e/></doc>")
elem = doc.documentElement
e = elem.firstChild
self.confirm(e.parentNode is elem, "Before replaceChild()")
# Check that replacing a child with itself leaves the tree unchanged
elem.replaceChild(e, e)
self.confirm(e.parentNode is elem, "After replaceChild()")
def testReplaceWholeText(self):
def setup():
doc = parseString("<doc>a<e/>d</doc>")
elem = doc.documentElement
text1 = elem.firstChild
text2 = elem.lastChild
splitter = text1.nextSibling
elem.insertBefore(doc.createTextNode("b"), splitter)
elem.insertBefore(doc.createCDATASection("c"), text1)
return doc, elem, text1, splitter, text2
doc, elem, text1, splitter, text2 = setup()
text = text1.replaceWholeText("new content")
self.checkWholeText(text, "new content")
self.checkWholeText(text2, "d")
self.confirm(len(elem.childNodes) == 3)
doc, elem, text1, splitter, text2 = setup()
text = text2.replaceWholeText("new content")
self.checkWholeText(text, "new content")
self.checkWholeText(text1, "cab")
self.confirm(len(elem.childNodes) == 5)
doc, elem, text1, splitter, text2 = setup()
text = text1.replaceWholeText("")
self.checkWholeText(text2, "d")
self.confirm(text is None
and len(elem.childNodes) == 2)
def testSchemaType(self):
doc = parseString(
"<!DOCTYPE doc [\n"
" <!ENTITY e1 SYSTEM 'http://xml.python.org/e1'>\n"
" <!ENTITY e2 SYSTEM 'http://xml.python.org/e2'>\n"
" <!ATTLIST doc id ID #IMPLIED \n"
" ref IDREF #IMPLIED \n"
" refs IDREFS #IMPLIED \n"
" enum (a|b) #IMPLIED \n"
" ent ENTITY #IMPLIED \n"
" ents ENTITIES #IMPLIED \n"
" nm NMTOKEN #IMPLIED \n"
" nms NMTOKENS #IMPLIED \n"
" text CDATA #IMPLIED \n"
" >\n"
"]><doc id='name' notid='name' text='splat!' enum='b'"
" ref='name' refs='name name' ent='e1' ents='e1 e2'"
" nm='123' nms='123 abc' />")
elem = doc.documentElement
# We don't want to rely on any specific loader at this point, so
# just make sure we can get to all the names, and that the
# DTD-based namespace is right. The names can vary by loader
# since each supports a different level of DTD information.
t = elem.schemaType
self.confirm(t.name is None
and t.namespace == xml.dom.EMPTY_NAMESPACE)
names = "id notid text enum ref refs ent ents nm nms".split()
for name in names:
a = elem.getAttributeNode(name)
t = a.schemaType
self.confirm(hasattr(t, "name")
and t.namespace == xml.dom.EMPTY_NAMESPACE)
def testSetIdAttribute(self):
doc = parseString("<doc a1='v' a2='w'/>")
e = doc.documentElement
a1 = e.getAttributeNode("a1")
a2 = e.getAttributeNode("a2")
self.confirm(doc.getElementById("v") is None
and not a1.isId
and not a2.isId)
e.setIdAttribute("a1")
self.confirm(e.isSameNode(doc.getElementById("v"))
and a1.isId
and not a2.isId)
e.setIdAttribute("a2")
self.confirm(e.isSameNode(doc.getElementById("v"))
and e.isSameNode(doc.getElementById("w"))
and a1.isId
and a2.isId)
# replace the a1 node; the new node should *not* be an ID
a3 = doc.createAttribute("a1")
a3.value = "v"
e.setAttributeNode(a3)
self.confirm(doc.getElementById("v") is None
and e.isSameNode(doc.getElementById("w"))
and not a1.isId
and a2.isId
and not a3.isId)
# renaming an attribute should not affect its ID-ness:
doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
self.confirm(e.isSameNode(doc.getElementById("w"))
and a2.isId)
def testSetIdAttributeNS(self):
NS1 = "http://xml.python.org/ns1"
NS2 = "http://xml.python.org/ns2"
doc = parseString("<doc"
" xmlns:ns1='" + NS1 + "'"
" xmlns:ns2='" + NS2 + "'"
" ns1:a1='v' ns2:a2='w'/>")
e = doc.documentElement
a1 = e.getAttributeNodeNS(NS1, "a1")
a2 = e.getAttributeNodeNS(NS2, "a2")
self.confirm(doc.getElementById("v") is None
and not a1.isId
and not a2.isId)
e.setIdAttributeNS(NS1, "a1")
self.confirm(e.isSameNode(doc.getElementById("v"))
and a1.isId
and not a2.isId)
e.setIdAttributeNS(NS2, "a2")
self.confirm(e.isSameNode(doc.getElementById("v"))
and e.isSameNode(doc.getElementById("w"))
and a1.isId
and a2.isId)
# replace the a1 node; the new node should *not* be an ID
a3 = doc.createAttributeNS(NS1, "a1")
a3.value = "v"
e.setAttributeNode(a3)
self.confirm(e.isSameNode(doc.getElementById("w")))
self.confirm(not a1.isId)
self.confirm(a2.isId)
self.confirm(not a3.isId)
self.confirm(doc.getElementById("v") is None)
# renaming an attribute should not affect its ID-ness:
doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
self.confirm(e.isSameNode(doc.getElementById("w"))
and a2.isId)
def testSetIdAttributeNode(self):
NS1 = "http://xml.python.org/ns1"
NS2 = "http://xml.python.org/ns2"
doc = parseString("<doc"
" xmlns:ns1='" + NS1 + "'"
" xmlns:ns2='" + NS2 + "'"
" ns1:a1='v' ns2:a2='w'/>")
e = doc.documentElement
a1 = e.getAttributeNodeNS(NS1, "a1")
a2 = e.getAttributeNodeNS(NS2, "a2")
self.confirm(doc.getElementById("v") is None
and not a1.isId
and not a2.isId)
e.setIdAttributeNode(a1)
self.confirm(e.isSameNode(doc.getElementById("v"))
and a1.isId
and not a2.isId)
e.setIdAttributeNode(a2)
self.confirm(e.isSameNode(doc.getElementById("v"))
and e.isSameNode(doc.getElementById("w"))
and a1.isId
and a2.isId)
# replace the a1 node; the new node should *not* be an ID
a3 = doc.createAttributeNS(NS1, "a1")
a3.value = "v"
e.setAttributeNode(a3)
self.confirm(e.isSameNode(doc.getElementById("w")))
self.confirm(not a1.isId)
self.confirm(a2.isId)
self.confirm(not a3.isId)
self.confirm(doc.getElementById("v") is None)
# renaming an attribute should not affect its ID-ness:
doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
self.confirm(e.isSameNode(doc.getElementById("w"))
and a2.isId)
def testPickledDocument(self):
doc = parseString("<?xml version='1.0' encoding='us-ascii'?>\n"
"<!DOCTYPE doc PUBLIC 'http://xml.python.org/public'"
" 'http://xml.python.org/system' [\n"
" <!ELEMENT e EMPTY>\n"
" <!ENTITY ent SYSTEM 'http://xml.python.org/entity'>\n"
"]><doc attr='value'> text\n"
"<?pi sample?> <!-- comment --> <e/> </doc>")
s = pickle.dumps(doc)
doc2 = pickle.loads(s)
stack = [(doc, doc2)]
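# Walk the original and unpickled trees in parallel, comparing corresponding nodes.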
while stack:
n1, n2 = stack.pop()
self.confirm(n1.nodeType == n2.nodeType
and len(n1.childNodes) == len(n2.childNodes)
and n1.nodeName == n2.nodeName
and not n1.isSameNode(n2)
and not n2.isSameNode(n1))
if n1.nodeType == Node.DOCUMENT_TYPE_NODE:
len(n1.entities)
len(n2.entities)
len(n1.notations)
len(n2.notations)
self.confirm(len(n1.entities) == len(n2.entities)
and len(n1.notations) == len(n2.notations))
for i in range(len(n1.notations)):
# XXX this loop body doesn't seem to be executed?
no1 = n1.notations.item(i)
no2 = n2.notations.item(i)
self.confirm(no1.name == no2.name
and no1.publicId == no2.publicId
and no1.systemId == no2.systemId)
stack.append((no1, no2))
for i in range(len(n1.entities)):
e1 = n1.entities.item(i)
e2 = n2.entities.item(i)
self.confirm(e1.notationName == e2.notationName
and e1.publicId == e2.publicId
and e1.systemId == e2.systemId)
stack.append((e1, e2))
if n1.nodeType != Node.DOCUMENT_NODE:
self.confirm(n1.ownerDocument.isSameNode(doc)
and n2.ownerDocument.isSameNode(doc2))
for i in range(len(n1.childNodes)):
stack.append((n1.childNodes[i], n2.childNodes[i]))
def testSerializeCommentNodeWithDoubleHyphen(self):
doc = create_doc_without_doctype()
doc.appendChild(doc.createComment("foo--bar"))
self.assertRaises(ValueError, doc.toxml)
def testEmptyXMLNSValue(self):
doc = parseString("<element xmlns=''>\n"
"<foo/>\n</element>")
doc2 = parseString(doc.toxml())
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def test_main():
run_unittest(MinidomTest)
if __name__ == "__main__":
test_main()
| gpl-2.0 |
kmonsoor/python-for-android | python3-alpha/python3-src/Lib/test/test_poplib.py | 49 | 11406 | """Test script for poplib module."""
# Modified by Giampaolo Rodola' to give poplib.POP3 and poplib.POP3_SSL
# a real test suite
import poplib
import asyncore
import asynchat
import socket
import os
import time
import errno
from unittest import TestCase
from test import support as test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: postmaster@python.org\
\r\nContent-Type: text/plain\r\n\
MIME-Version: 1.0\r\n\
Subject: Dummy\r\n\
\r\n\
line1\r\n\
line2\r\n\
line3\r\n\
.\r\n"""
class DummyPOP3Handler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = b''.join(self.in_buffer)
line = str(line, 'ISO-8859-1')
self.in_buffer = []
cmd = line.split(' ')[0].lower()
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('-ERR unrecognized POP3 command "%s".' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data.encode("ISO-8859-1") + b'\r\n')
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
if arg != "guido":
self.push("-ERR no such user")
self.push('+OK password required')
def cmd_pass(self, arg):
if arg != "python":
self.push("-ERR wrong password")
self.push('+OK 10 messages')
def cmd_stat(self, arg):
self.push('+OK 10 100')
def cmd_list(self, arg):
if arg:
self.push('+OK %s %s' %(arg, arg))
else:
self.push('+OK')
asynchat.async_chat.push(self, LIST_RESP)
cmd_uidl = cmd_list
def cmd_retr(self, arg):
self.push('+OK %s bytes' %len(RETR_RESP))
asynchat.async_chat.push(self, RETR_RESP)
cmd_top = cmd_retr
def cmd_dele(self, arg):
self.push('+OK message marked for deletion.')
def cmd_noop(self, arg):
self.push('+OK done nothing.')
def cmd_rpop(self, arg):
self.push('+OK done nothing.')
def cmd_apop(self, arg):
self.push('+OK done nothing.')
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
handler = DummyPOP3Handler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
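# Block until run() signals that the asyncore loop is active.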
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accepted(self, conn, addr):
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
class TestPOP3Class(TestCase):
def assertOK(self, resp):
self.assertTrue(resp.startswith(b"+OK"))
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port)
def tearDown(self):
self.client.quit()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(),
b'+OK dummy pop3 server ready. <timestamp>')
def test_exceptions(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd, 'echo -err')
def test_user(self):
self.assertOK(self.client.user('guido'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_pass_(self):
self.assertOK(self.client.pass_('python'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_stat(self):
self.assertEqual(self.client.stat(), (10, 100))
def test_list(self):
self.assertEqual(self.client.list()[1:],
([b'1 1', b'2 2', b'3 3', b'4 4', b'5 5'],
25))
self.assertTrue(self.client.list('1').endswith(b"OK 1 1"))
def test_retr(self):
expected = (b'+OK 116 bytes',
[b'From: postmaster@python.org', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy',
b'', b'line1', b'line2', b'line3'],
113)
foo = self.client.retr('foo')
self.assertEqual(foo, expected)
def test_dele(self):
self.assertOK(self.client.dele('foo'))
def test_noop(self):
self.assertOK(self.client.noop())
def test_rpop(self):
self.assertOK(self.client.rpop('foo'))
def test_apop(self):
self.assertOK(self.client.apop('foo', 'dummypassword'))
def test_top(self):
expected = (b'+OK 116 bytes',
[b'From: postmaster@python.org', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy', b'',
b'line1', b'line2', b'line3'],
113)
self.assertEqual(self.client.top(1, 1), expected)
def test_uidl(self):
self.client.uidl()
self.client.uidl('foo')
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
import ssl
SUPPORTS_SSL = True
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
class DummyPOP3_SSLHandler(DummyPOP3Handler):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
ssl_socket = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False)
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
self._ssl_accepting = True
self._do_ssl_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
else:
DummyPOP3Handler.handle_read(self)
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.handler = DummyPOP3_SSLHandler
self.server.start()
self.client = poplib.POP3_SSL(self.server.host, self.server.port)
def test__all__(self):
self.assertIn('POP3_SSL', poplib.__all__)
def test_context(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, certfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE,
certfile=CERTFILE, context=ctx)
self.client.quit()
self.client = poplib.POP3_SSL(self.server.host, self.server.port,
context=ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(3)
self.port = test_support.bind_port(self.sock)
threading.Thread(target=self.server, args=(self.evt,self.sock)).start()
time.sleep(.1)
def tearDown(self):
self.evt.wait()
def server(self, evt, serv):
serv.listen(5)
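# Accept at most one connection (or time out), then signal the waiting test via evt.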
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
conn.send(b"+ Hola mundo\n")
conn.close()
finally:
serv.close()
evt.set()
def testTimeoutDefault(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3("localhost", self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def testTimeoutNone(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(pop.sock.gettimeout() is None)
pop.sock.close()
def testTimeoutValue(self):
pop = poplib.POP3("localhost", self.port, timeout=30)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def test_main():
tests = [TestPOP3Class, TestTimeouts]
if SUPPORTS_SSL:
tests.append(TestPOP3_SSLClass)
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
| apache-2.0 |
samuknet/servo | components/script/dom/bindings/codegen/parser/tests/test_interface.py | 117 | 12227 | import WebIDL
def WebIDLTest(parser, harness):
parser.parse("interface Foo { };")
results = parser.finish()
harness.ok(True, "Empty interface parsed without error.")
harness.check(len(results), 1, "Should be one production")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
iface = results[0]
harness.check(iface.identifier.QName(), "::Foo", "Interface has the right QName")
harness.check(iface.identifier.name, "Foo", "Interface has the right name")
harness.check(iface.parent, None, "Interface has no parent")
parser.parse("interface Bar : Foo { };")
results = parser.finish()
harness.ok(True, "Empty interface parsed without error.")
harness.check(len(results), 2, "Should be two productions")
harness.ok(isinstance(results[1], WebIDL.IDLInterface),
"Should be an IDLInterface")
iface = results[1]
harness.check(iface.identifier.QName(), "::Bar", "Interface has the right QName")
harness.check(iface.identifier.name, "Bar", "Interface has the right name")
harness.ok(isinstance(iface.parent, WebIDL.IDLInterface),
"Interface has a parent")
parser = parser.reset()
parser.parse("""
interface QNameBase {
attribute long foo;
};
interface QNameDerived : QNameBase {
attribute long long foo;
attribute byte bar;
};
""")
results = parser.finish()
harness.check(len(results), 2, "Should be two productions")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
harness.ok(isinstance(results[1], WebIDL.IDLInterface),
"Should be an IDLInterface")
harness.check(results[1].parent, results[0], "Inheritance chain is right")
harness.check(len(results[0].members), 1, "Expect 1 productions")
harness.check(len(results[1].members), 2, "Expect 2 productions")
base = results[0]
derived = results[1]
harness.check(base.members[0].identifier.QName(), "::QNameBase::foo",
"Member has the right QName")
harness.check(derived.members[0].identifier.QName(), "::QNameDerived::foo",
"Member has the right QName")
harness.check(derived.members[1].identifier.QName(), "::QNameDerived::bar",
"Member has the right QName")
parser = parser.reset()
threw = False
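# The parse below must raise: direct cycles in interface inheritance are invalid.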
try:
parser.parse("""
interface A : B {};
interface B : A {};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow cycles in interface inheritance chains")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A : C {};
interface C : B {};
interface B : A {};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow indirect cycles in interface inheritance chains")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A {};
interface B {};
A implements B;
B implements A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow cycles via implements")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A {};
interface C {};
interface B {};
A implements C;
C implements B;
B implements A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow indirect cycles via implements")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A : B {};
interface B {};
B implements A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow inheriting from an interface that implements us")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A : B {};
interface B {};
interface C {};
B implements C;
C implements A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow inheriting from an interface that indirectly implements us")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A : B {};
interface B : C {};
interface C {};
C implements A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow indirectly inheriting from an interface that implements us")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A : B {};
interface B : C {};
interface C {};
interface D {};
C implements D;
D implements A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow indirectly inheriting from an interface that indirectly implements us")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A;
interface B : A {};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow inheriting from an interface that is only forward declared")
parser = parser.reset()
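# Members and constructors declared in a partial interface should be merged
# into the non-partial interface of the same name.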
parser.parse("""
[Constructor(long arg)]
interface A {
readonly attribute boolean x;
void foo();
};
[Constructor]
partial interface A {
readonly attribute boolean y;
void foo(long arg);
};
""");
results = parser.finish();
harness.check(len(results), 2,
"Should have two results with partial interface")
iface = results[0]
harness.check(len(iface.members), 3,
"Should have three members with partial interface")
harness.check(iface.members[0].identifier.name, "x",
"First member should be x with partial interface")
harness.check(iface.members[1].identifier.name, "foo",
"Second member should be foo with partial interface")
harness.check(len(iface.members[1].signatures()), 2,
"Should have two foo signatures with partial interface")
harness.check(iface.members[2].identifier.name, "y",
"Third member should be y with partial interface")
harness.check(len(iface.ctor().signatures()), 2,
"Should have two constructors with partial interface")
parser = parser.reset()
parser.parse("""
[Constructor]
partial interface A {
readonly attribute boolean y;
void foo(long arg);
};
[Constructor(long arg)]
interface A {
readonly attribute boolean x;
void foo();
};
""");
results = parser.finish();
harness.check(len(results), 2,
"Should have two results with reversed partial interface")
iface = results[1]
harness.check(len(iface.members), 3,
"Should have three members with reversed partial interface")
harness.check(iface.members[0].identifier.name, "x",
"First member should be x with reversed partial interface")
harness.check(iface.members[1].identifier.name, "foo",
"Second member should be foo with reversed partial interface")
harness.check(len(iface.members[1].signatures()), 2,
"Should have two foo signatures with reversed partial interface")
harness.check(iface.members[2].identifier.name, "y",
"Third member should be y with reversed partial interface")
harness.check(len(iface.ctor().signatures()), 2,
"Should have two constructors with reversed partial interface")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A {
readonly attribute boolean x;
};
interface A {
readonly attribute boolean y;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow two non-partial interfaces with the same name")
parser = parser.reset()
threw = False
try:
parser.parse("""
partial interface A {
readonly attribute boolean x;
};
partial interface A {
readonly attribute boolean y;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Must have a non-partial interface for a given name")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
boolean x;
};
partial interface A {
readonly attribute boolean y;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow a name collision between partial interface "
"and other object")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
boolean x;
};
interface A {
readonly attribute boolean y;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow a name collision between interface "
"and other object")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
boolean x;
};
interface A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow a name collision between external interface "
"and other object")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface A {
readonly attribute boolean x;
};
interface A;
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow a name collision between external interface "
"and interface")
parser = parser.reset()
parser.parse("""
interface A;
interface A;
""")
results = parser.finish()
harness.ok(len(results) == 1 and
isinstance(results[0], WebIDL.IDLExternalInterface),
"Should allow name collisions between external interface "
"declarations")
parser = parser.reset()
threw = False
try:
parser.parse("""
[SomeRandomAnnotation]
interface A {
readonly attribute boolean y;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow unknown extended attributes on interfaces")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface B {};
[ArrayClass]
interface A : B {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Should not allow [ArrayClass] on interfaces with parents")
parser = parser.reset()
threw = False
try:
parser.parse("""
[ArrayClass]
interface A {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(not threw,
"Should allow [ArrayClass] on interfaces without parents")
| mpl-2.0 |
preo/pylpsolve | pylpsolve/__init__.py | 1 | 1078 | # PyLPSolve is an object oriented wrapper for the open source LP
# solver lp_solve. Copyright (C) 2010 Hoyt Koepke.
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from pylpsolve import (
LP, LPException, LPSuboptimalException, LPInfeasibleException,
LPUnboundedException, LPModelDegenerateException,
LPNumericalFailureException, LPUserAbort, LPTimeout, LPProcFail,
LPProcBreak
) | lgpl-2.1 |
erichilarysmithsr/givinggraph | givinggraph/twitter/tweets.py | 3 | 1683 | from givinggraph.twitter.common import twitter_get
def get_tweets_by_id(user_id, include_retweets, since_id=1):
"""Takes a Twitter user id (a string) as input and returns all of that user's tweets. If since_id is not set, it will go back as far as the API will let you."""
params = {'user_id': user_id,
'include_rts': include_retweets,
'since_id': since_id}
return __get_tweets(params)
def get_tweets_by_name(screen_name, include_retweets, since_id=1):
"""Takes a Twitter screen name as input and returns all of that user's tweets. If since_id is not set, it will go back as far as the API will let you."""
params = {'screen_name': screen_name,
'include_rts': include_retweets,
'since_id': since_id}
return __get_tweets(params)
def __get_tweets(params):
"""Takes a Twitter user id (a string) or screen name as input (screen_name takes precedent) and returns all of that user's tweets, going back as far as the API will let you."""
base_url = 'https://api.twitter.com/1.1/statuses/user_timeline.json'
params['count'] = 200
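# 200 is the largest page size the user_timeline endpoint returns per request.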
all_tweets = []
while True:
chunk_of_tweets = twitter_get(base_url, params, 5)
if chunk_of_tweets is None:
return None # something went wrong
elif len(chunk_of_tweets) == 0:
return all_tweets
else:
all_tweets.extend(chunk_of_tweets)
min_id_found = min([tweet['id'] for tweet in chunk_of_tweets])
params['max_id'] = str(min_id_found - 1) # we want tweets with IDs lower than min_id_found
print '{0} tweets retrieved so far.'.format(len(all_tweets))
| mit |
quantopian/qdb | qdb/tracer.py | 1 | 23389 | #
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bdb import Bdb, Breakpoint, checkfuncname, BdbQuit
from contextlib import contextmanager
from functools import partial
import json
from pprint import pformat
import signal
import sys
import traceback
from uuid import uuid4
from logbook import Logger, FileHandler
from qdb.comm import TerminalCommandManager, fmt_msg
from qdb.compat import items, ExitStack, StringIO
from qdb.config import QdbConfig
from qdb.errors import (
QdbUnreachableBreakpoint,
QdbQuit,
QdbExecutionTimeout,
QdbPrognEndsInStatement,
)
from qdb.output import RemoteOutput, OutputTee
from qdb.utils import (
Timeout,
default_eval_fn,
default_exception_serializer,
progn,
)
log = Logger('Qdb')
class BoundCmdManager(object):
"""
Binds the tracer to the first argument of all the methods of the
command manager.
"""
def __init__(self, tracer, cmd_manager):
self._tracer = tracer
self._cmd_manager = cmd_manager
def __getattr__(self, name):
return partial(getattr(self._cmd_manager, name), self._tracer)
@contextmanager
def capture_output():
"""
Captures stdout and stderr for the duration of the body.
example
with capture_output() as (out, err):
print 'hello'
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()
try:
yield sys.stdout, sys.stderr
finally:
sys.stdout.close()
sys.stderr.close()
sys.stdout = old_stdout
sys.stderr = old_stderr
class Qdb(Bdb, object):
"""
The Quantopian Remote Debugger.
"""
_instance = None
def __new__(cls, *args, **kwargs):
"""
Qdb objects are singletons that persist until their disable method is
called.
"""
if not cls._instance:
inst = super(Qdb, cls).__new__(cls)
# `_init` might raise, so don't save as `_instance` yet:
inst._init(*args, **kwargs)
cls._instance = inst
return cls._instance
def __init__(self, *args, **kwargs):
pass
def _init(self, config=None, merge=False, **kwargs):
"""
See qdb.config for more information about the configuration of
qdb.
merge denotes how config and kwargs should be merged.
QdbConfig.kwargs_first says config will trample kwargs,
QdbConfig.config_first says kwargs will trample config.
Otherwise, kwargs and config cannot both be passed.
"""
self.super_ = super(Qdb, self)
self.super_.__init__()
self.reset()
if config and kwargs:
if merge == QdbConfig.kwargs_first:
first = kwargs
second = config
elif merge == QdbConfig.config_first:
first = config
second = kwargs
else:
raise TypeError('Cannot pass config and kwargs')
config = first.merge(second)
else:
config = QdbConfig.get_config(config or kwargs)
self.address = config.host, config.port
self.set_default_file(config.default_file)
self.default_namespace = config.default_namespace or {}
self.exception_serializer = config.exception_serializer or \
default_exception_serializer
self.eval_fn = config.eval_fn or default_eval_fn
self._file_cache = {}
self.retry_attempts = config.retry_attempts
self.repr_fn = config.repr_fn
self._skip_fn = config.skip_fn or (lambda _: False)
self.pause_signal = config.pause_signal \
if config.pause_signal else signal.SIGUSR2
self.uuid = str(config.uuid or uuid4())
self.watchlist = {}
self.execution_timeout = config.execution_timeout
self.reset()
self.log_handler = None
if config.log_file:
self.log_handler = FileHandler(config.log_file)
self.log_handler.push_application()
self.bound_cmd_manager = config.cmd_manager or TerminalCommandManager()
self.bound_cmd_manager.start(config.auth_msg)
# We need to be able to send stdout back to the user debugging the
# program. We hold a handle to this in case the program resets stdout.
self._old_stdout = sys.stdout
self._old_stderr = sys.stderr
self.redirect_output = (
config.redirect_output and
not isinstance(self.cmd_manager, TerminalCommandManager)
)
if self.redirect_output:
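# Tee output so it still reaches the local stream while also being
# forwarded to the debugging client.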
sys.stdout = OutputTee(
sys.stdout,
RemoteOutput(self.cmd_manager, '<stdout>'),
)
sys.stderr = OutputTee(
sys.stderr,
RemoteOutput(self.cmd_manager, '<stderr>'),
)
@property
def bound_cmd_manager(self):
return self.__cmd_manager
@bound_cmd_manager.setter
def bound_cmd_manager(self, value):
self.cmd_manager = value
self.__cmd_manager = BoundCmdManager(self, value)
def skip_fn(self, path):
return self._skip_fn(self.canonic(path))
def restore_output_streams(self):
"""
Restores the original output streams.
"""
if self.redirect_output:
sys.stdout = self._old_stdout
sys.stderr = self._old_stderr
def _new_execution_timeout(self, src):
"""
Return a new execution timeout context manager.
If no execution timeout is in place, returns ExitStack().
"""
# We use no_gevent=True because this could be cpu bound. This will
# still throw to the proper greenlet if this is gevented.
return (
Timeout(
self.execution_timeout,
QdbExecutionTimeout(src, self.execution_timeout),
no_gevent=True,
) if self.execution_timeout else ExitStack()
)
def set_default_file(self, filename):
"""
Safely sets the new default file.
"""
self.default_file = self.canonic(filename) if filename else None
def get_line(self, filename, line):
"""
Checks for any user cached files before deferring to the linecache.
"""
# The line - 1 is so that querying line 1 gives us the first line in
# the file.
try:
return self.get_file_lines(filename)[line - 1]
except IndexError:
return 'No source available for this line.'
def get_file(self, filename):
"""
Retrieves a file out of cache or opens and caches it.
"""
return '\n'.join(self.get_file_lines(filename))
def get_file_lines(self, filename):
"""
Retrieves the file from the file cache as a list of lines.
If the file does not exist in the cache, it is cached from
disk.
"""
canonic_name = self.canonic(filename)
try:
return self._file_cache[canonic_name]
except KeyError:
if not self.cache_file(canonic_name):
return []
return self._file_cache.get(canonic_name)
def cache_file(self, filename, contents=None):
"""
Caches filename from disk into memory.
This overrides whatever was cached for filename previously.
If contents is provided, it allows the user to cache a filename to a
string.
Returns True if the file caching succeeded, otherwise returns False.
"""
canonic_name = self.canonic(filename)
if contents:
self._file_cache[canonic_name] = contents.splitlines()
return True
try:
with open(canonic_name, 'r') as f:
self._file_cache[canonic_name] = f.read().splitlines()
return True
except IOError:
# The caching operation failed.
return False
def set_break(self, filename, lineno, temporary=False, cond=None,
funcname=None, **kwargs):
"""
Sets a breakpoint. This is overridden to account for the filecache
and for unreachable lines.
**kwargs are ignored. This is to work with payloads that pass extra
fields to the set_break payload.
"""
filename = self.canonic(filename) if filename else self.default_file
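# Verify the requested line actually exists in the file; otherwise the
# breakpoint is unreachable.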
try:
self.get_line(filename, lineno)
except IndexError:
raise QdbUnreachableBreakpoint({
'file': filename,
'line': lineno,
'temp': temporary,
'cond': cond,
'func': funcname,
})
blist = self.breaks.setdefault(filename, [])
if lineno not in blist:
blist.append(lineno)
Breakpoint(filename, lineno, temporary, cond, funcname)
def clear_break(self, filename, lineno, *args, **kwargs):
"""
Wrapper to make the breakpoint json standardized for setting
and removing of breakpoints.
This means that the same json data that was used to set a break point
may be fed into this function with the extra values ignored.
"""
self.super_.clear_break(filename, lineno)
def canonic(self, filename):
canonic_filename = self.super_.canonic(filename)
if canonic_filename.endswith('pyc'):
return canonic_filename[:-1]
return canonic_filename
def reset(self):
self.botframe = None
self._set_stopinfo(None, None)
self.forget()
def forget(self):
self.lineno = None
self.stack = []
self.curindex = 0
self.curframe = None
def setup_stack(self, stackframe, traceback):
"""
Sets up the state of the debugger object for this frame.
"""
self.forget()
self.stack, self.curindex = self.get_stack(stackframe, traceback)
self.curframe = self.stack[self.curindex][0]
self.curframe_locals = self.curframe.f_locals
self.update_watchlist()
def extend_watchlist(self, *args):
"""
Adds every arg to the watchlist and updates.
"""
for expr in args:
self.watchlist[expr] = (False, '')
self.update_watchlist()
def update_watchlist(self):
"""
Updates the watchlist by evaluating all the watched expressions in
our current frame.
"""
id_ = lambda n: n # Why is this NOT a builtin?
for expr in self.watchlist:
try:
with self._new_execution_timeout(expr), \
self.inject_default_namespace() as stackframe:
self.watchlist[expr] = (
None,
(self.repr_fn or id_)(
self.eval_fn(expr, stackframe)
)
)
except Exception as e:
self.watchlist[expr] = (
type(e).__name__,
self.exception_serializer(e)
)
def effective(self, file, line, stackframe):
"""
Finds the effective breakpoint for this line; called only
when we know that there is a breakpoint here.
        Returns the breakpoint paired with a flag denoting whether we should
remove this breakpoint or not.
"""
for breakpoint in Breakpoint.bplist[file, line]:
if breakpoint.enabled == 0:
continue
if not checkfuncname(breakpoint, stackframe):
continue
# Count every hit when breakpoint is enabled
breakpoint.hits = breakpoint.hits + 1
if not breakpoint.cond:
                # If unconditional and still ignoring, go on to the next breakpoint, else break
if breakpoint.ignore > 0:
breakpoint.ignore = breakpoint.ignore - 1
continue
else:
return breakpoint, True
else:
# Conditional breakpoint
# Ignore count applies only to those bpt hits where the
# condition evaluates to true.
try:
with self._new_execution_timeout(breakpoint.cond), \
self.inject_default_namespace(stackframe) as frame:
val = self.eval_fn(
breakpoint.cond,
frame,
'eval'
)
except Exception as e:
# Send back a message to let the user know there was an
# issue with their breakpoint.
self.cmd_manager.send_error(
'condition', {
'cond': breakpoint.cond,
'line': line,
'exc': type(e).__name__,
'output': self.exception_serializer(e),
}
)
# Return this breakpoint to be safe. The user will be
# stopped here so that they can fix the breakpoint.
return breakpoint, False
if val:
if breakpoint.ignore > 0:
breakpoint.ignore = breakpoint.ignore - 1
else:
return breakpoint, True
return None, False
def break_here(self, stackframe):
"""
Checks if we should break execution in this stackframe.
This function handles the cleanup and ignore counts for breakpoints.
Returns True iff we should stop in the stackframe, False otherwise.
"""
filename = self.canonic(stackframe.f_code.co_filename)
if filename not in self.breaks:
return False
lineno = stackframe.f_lineno
if lineno not in self.breaks[filename]:
# The line itself has no breakpoint, but maybe the line is the
# first line of a function with breakpoint set by function name.
lineno = stackframe.f_code.co_firstlineno
if lineno not in self.breaks[filename]:
return False
# flag says ok to delete temporary breakpoints.
breakpoint, flag = self.effective(filename, lineno, stackframe)
if breakpoint:
self.currentbp = breakpoint.number
if flag and breakpoint.temporary:
self.do_clear(breakpoint.number)
return True
else:
return False
def trace_dispatch(self, stackframe, event, arg):
"""
Trace function that does some preliminary checks and then defers to
the event handler for each type of event.
"""
if self.quitting:
# We were told to quit by the user, bubble this up to their code.
return
if self.skip_fn(stackframe.f_code.co_filename):
# We want to skip this, don't stop but keep tracing.
return self.trace_dispatch
try:
return self.super_.trace_dispatch(stackframe, event, arg)
except BdbQuit:
raise QdbQuit() # Rewrap as a QdbError object.
def user_call(self, stackframe, arg):
if self.break_here(stackframe):
self.user_line(stackframe)
def user_line(self, stackframe):
self.setup_stack(stackframe, None)
bound_cmd_manager = self.bound_cmd_manager
bound_cmd_manager.send_watchlist()
bound_cmd_manager.send_stack()
bound_cmd_manager.next_command()
def user_return(self, stackframe, return_value):
stackframe.f_locals['__return__'] = return_value
self.setup_stack(stackframe, None)
bound_cmd_manager = self.bound_cmd_manager
bound_cmd_manager.send_watchlist()
bound_cmd_manager.send_stack()
bound_cmd_manager.next_command(
fmt_msg('return', str(return_value), serial=json.dumps),
)
def user_exception(self, stackframe, exc_info):
exc_type, exc_value, exc_traceback = exc_info
stackframe.f_locals['__exception__'] = exc_type, exc_value
self.setup_stack(stackframe, exc_traceback)
bound_cmd_manager = self.bound_cmd_manager
bound_cmd_manager.send_watchlist()
bound_cmd_manager.send_stack()
msg = fmt_msg(
'exception', {
'type': exc_type.__name__,
'value': str(exc_value),
'traceback': traceback.format_tb(exc_traceback)
},
serial=json.dumps,
)
return self.bound_cmd_manager.next_command(msg)
def do_clear(self, bpnum):
"""
Handles deletion of temporary breakpoints.
"""
if not (0 <= bpnum < len(Breakpoint.bpbynumber)):
return
self.clear_bpbynumber(bpnum)
def set_quit(self):
"""
Sets the quitting state and restores the program state.
"""
self.quitting = True
def eval_(self, code, pprint=False):
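        """
        Evaluates the given code in the current frame, capturing stdout,
        stderr and any exception, and sends the result to the client.
        """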
repr_fn = self.repr_fn
outexc = None
outmsg = None
with capture_output() as (out, err), \
self._new_execution_timeout(code), \
self.inject_default_namespace() as stackframe:
try:
if not repr_fn and not pprint:
self.eval_fn(
code,
stackframe,
'single',
)
else:
try:
                        # Do some custom single-mode magic that lets us
# call the repr function on the last expr.
value = progn(
code,
self.eval_fn,
stackframe,
)
except QdbPrognEndsInStatement:
# Statements have no value to print.
pass
else:
if pprint:
value = pformat(value)
if repr_fn:
value = repr_fn(value)
print(value)
except Exception as e:
outexc = type(e).__name__
outmsg = self.exception_serializer(e)
else:
outmsg = out.getvalue().rstrip('\n')
if outexc is not None or outmsg is not None:
self.cmd_manager.send_print(code, outexc, outmsg)
self.update_watchlist()
def _stack_jump_to(self, index):
"""
Jumps the stack to a specific index.
Raises an IndexError if the desired index does not exist.
"""
# Try to jump here first. This could raise an IndexError which will
# prevent the tracer's state from being corrupted.
self.curframe = self.stack[index][0]
self.curindex = index
self.curframe_locals = self.curframe.f_locals
self.update_watchlist()
def stack_shift_direction(self, direction):
"""
Shifts the stack up or down depending on direction.
If direction is positive, travel up, if direction is negative, travel
down. If direction is 0, do nothing.
If you cannot shift in the desired direction, an IndexError will be
raised.
"""
if direction == 0:
return # nop
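        # "Up" the stack means toward the caller, which sits at a smaller index
        # in self.stack, so a positive direction becomes a negative stride.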
stride = -1 if direction > 0 else 1
stack = self.stack
stacksize = len(stack)
curindex = self.curindex
skip_fn = self.skip_fn
target = None
def pred_up(idx):
return idx > 0
def pred_down(idx):
return idx < stacksize - 1
pred = pred_up if direction > 0 else pred_down
while pred(curindex):
curindex += stride
if not skip_fn(stack[curindex][0].f_code.co_filename):
target = curindex
break
if target is None:
raise IndexError('Shifted off the stack')
self._stack_jump_to(target)
def disable(self, mode='soft'):
"""
Stops tracing.
"""
try:
if mode == 'soft':
self.clear_all_breaks()
self.set_continue()
# Remove this instance so that new ones may be created.
self.__class__._instance = None
elif mode == 'hard':
sys.exit(1)
else:
raise ValueError("mode must be 'hard' or 'soft'")
finally:
self.restore_output_streams()
if self.log_handler:
self.log_handler.pop_application()
self.cmd_manager.stop()
if sys.gettrace() is self.trace_dispatch:
sys.settrace(None)
def __enter__(self):
self.set_trace(sys._getframe().f_back, stop=False)
return self
def __exit__(self, type, value, traceback):
self.disable('soft')
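    # A minimal usage sketch (assuming this tracer class, here called Qdb, is
    # constructed elsewhere with a command manager); the context-manager
    # protocol above scopes tracing to a block:
    #
    #     with Qdb(...) as debugger:
    #         suspect_function()
    #
    # On exit, disable('soft') clears breakpoints and detaches the tracer.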
def set_trace(self, stackframe=None, stop=True):
"""
Starts debugging in stackframe or in the callers frame.
If stop is True, begin stepping from here, otherwise, wait for
the first breakpoint or exception.
"""
# We need to look back 1 frame to get our caller.
stackframe = stackframe or sys._getframe().f_back
self.reset()
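        # Install the trace function on every frame up the call stack as well
        # as globally, so the debugger can stop in frames that were already
        # executing before set_trace was called.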
while stackframe:
stackframe.f_trace = self.trace_dispatch
self.botframe = stackframe
stackframe = stackframe.f_back
if stop:
self.set_step()
else:
self.set_continue()
sys.settrace(self.trace_dispatch)
@contextmanager
def inject_default_namespace(self, stackframe=None):
"""
Adds the default namespace to the frame, or if no frame is provided,
self.curframe is used.
"""
stackframe = stackframe or self.curframe
to_remove = set()
for k, v in items(self.default_namespace):
if k not in stackframe.f_globals:
# Only add the default things if the name is unbound.
stackframe.f_globals[k] = v
to_remove.add(k)
try:
yield stackframe
finally:
for k in to_remove:
try:
del stackframe.f_globals[k]
                except KeyError:
# The body of this manager might have del'd this.
pass
# Prevent exceptions from generating ref cycles.
del stackframe
| apache-2.0 |
t-abe/chainer | tests/chainer_tests/links_tests/connection_tests/test_bilinear.py | 3 | 8202 | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import gradient_check
from chainer import links
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import array
def _check_forward(e1, e2, f, y_expect):
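    # Wraps the raw arrays in Variables, runs the link forward and compares
    # the output against the expected value.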
e1 = chainer.Variable(e1)
e2 = chainer.Variable(e2)
y = f(e1, e2)
gradient_check.assert_allclose(y_expect, y.data)
def _check_backward(e1, e2, y_grad, link, bias):
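    # Runs a backward pass and compares the analytic gradients with numerical
    # gradients computed by gradient_check.numerical_grad.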
e1 = chainer.Variable(e1)
e2 = chainer.Variable(e2)
y = link(e1, e2)
y.grad = y_grad
y.backward()
f = lambda: (link(e1, e2).data,)
ge1, ge2, gW = gradient_check.numerical_grad(
f, (e1.data, e2.data, link.W.data), (y.grad,), eps=1e-2)
gradient_check.assert_allclose(ge1, e1.grad, rtol=1e-3)
gradient_check.assert_allclose(ge2, e2.grad, rtol=1e-3)
gradient_check.assert_allclose(gW, link.W.grad, rtol=1e-3)
if bias:
gV1, gV2, gb = gradient_check.numerical_grad(
f, (link.V1.data, link.V2.data, link.b.data),
(y.grad,), eps=1e-2)
gradient_check.assert_allclose(gV1, link.V1.grad, rtol=1e-3)
gradient_check.assert_allclose(gV2, link.V2.grad, rtol=1e-3)
gradient_check.assert_allclose(gb, link.b.grad, rtol=1e-3)
def _batch_to_gpu(*xs):
return tuple(cuda.to_gpu(x) for x in xs)
def _uniform(*shape):
return numpy.random.uniform(-1, 1, shape).astype(numpy.float32)
class TestBilinear(unittest.TestCase):
in_shape = (3, 4)
out_size = 4
batch_size = 10
def setUp(self):
self.f = links.Bilinear(
self.in_shape[0], self.in_shape[1], self.out_size)
self.f.W.data[...] = _uniform(*self.f.W.data.shape)
self.f.V1.data[...] = _uniform(*self.f.V1.data.shape)
self.f.V2.data[...] = _uniform(*self.f.V2.data.shape)
self.f.b.data[...] = _uniform(*self.f.b.data.shape)
self.f.zerograds()
self.W = self.f.W.data.copy()
self.V1 = self.f.V1.data.copy()
self.V2 = self.f.V2.data.copy()
self.b = self.f.b.data.copy()
self.e1 = _uniform(self.batch_size, self.in_shape[0])
self.e2 = _uniform(self.batch_size, self.in_shape[1])
self.gy = _uniform(self.batch_size, self.out_size)
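        # Expected bilinear output:
        # y[i, l] = sum_{j,k} e1[i, j] * e2[i, k] * W[j, k, l]
        #           + e1.dot(V1)[i, l] + e2.dot(V2)[i, l] + b[l]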
self.y = (
numpy.einsum('ij,ik,jkl->il', self.e1, self.e2, self.W) +
self.e1.dot(self.V1) + self.e2.dot(self.V2) + self.b)
@condition.retry(3)
def test_forward_cpu(self):
_check_forward(self.e1, self.e2, self.f, self.y)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.f.to_gpu()
_check_forward(cuda.to_gpu(self.e1),
cuda.to_gpu(self.e2),
self.f, self.y)
@condition.retry(3)
def test_backward_cpu(self):
_check_backward(self.e1, self.e2, self.gy, self.f, True)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.f.to_gpu()
_check_backward(cuda.to_gpu(self.e1),
cuda.to_gpu(self.e2),
cuda.to_gpu(self.gy),
self.f, True)
class TestBilinear2(TestBilinear):
def setUp(self):
super(TestBilinear2, self).setUp()
assert self.in_shape[1] % 2 == 0
self.e1 = _uniform(self.batch_size, 1, self.in_shape[0])
self.e2 = _uniform(self.batch_size, self.in_shape[1] // 2, 2)
self.gy = _uniform(self.batch_size, self.out_size)
e1 = array.as_mat(self.e1)
e2 = array.as_mat(self.e2)
self.y = (
numpy.einsum('ij,ik,jkl->il', e1, e2, self.W) +
e1.dot(self.V1) + e2.dot(self.V2) + self.b)
class TestBilinear3(TestBilinear):
out_size = 1
class TestBilinear4(TestBilinear):
in_shape = (1, 2)
class TestBilinear5(TestBilinear):
in_shape = (2, 1)
class TestBilinear6(TestBilinear):
in_shape = (1, 1)
class TestBilinear7(TestBilinear):
in_shape = (1, 2)
out_size = 1
class TestBilinear8(TestBilinear):
in_shape = (2, 1)
out_size = 1
class TestBilinear9(TestBilinear):
in_shape = (1, 1)
out_size = 1
class TestBilinearWOBias(TestBilinear):
def setUp(self):
self.f = links.Bilinear(
self.in_shape[0], self.in_shape[1], self.out_size, True)
W = self.f.W.data
W[...] = numpy.random.uniform(-1, 1, W.shape)
self.f.zerograds()
self.W = W.copy()
self.e1 = _uniform(self.batch_size, self.in_shape[0])
self.e2 = _uniform(self.batch_size, self.in_shape[1])
self.gy = _uniform(self.batch_size, self.out_size)
self.y = numpy.einsum('ij,ik,jkl->il', self.e1, self.e2, self.W)
@condition.retry(3)
def test_backward_cpu(self):
_check_backward(self.e1, self.e2, self.gy, self.f, False)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.f.to_gpu()
_check_backward(cuda.to_gpu(self.e1), cuda.to_gpu(self.e2),
cuda.to_gpu(self.gy), self.f, False)
class TestBilinearWOBias2(TestBilinearWOBias):
def setUp(self):
super(TestBilinearWOBias2, self).setUp()
assert self.in_shape[1] % 2 == 0
self.e1 = _uniform(self.batch_size, 1, self.in_shape[0])
self.e2 = _uniform(self.batch_size, 2, self.in_shape[1] // 2)
self.gy = _uniform(self.batch_size, self.out_size)
e1 = array.as_mat(self.e1)
e2 = array.as_mat(self.e2)
self.y = numpy.einsum('ij,ik,jkl->il', e1, e2, self.W)
class TestBilinearWOBias3(TestBilinearWOBias):
out_size = 1
class TestBilinearWOBias4(TestBilinearWOBias):
in_shape = (1, 2)
class TestBilinearWOBias5(TestBilinearWOBias):
in_shape = (2, 1)
class TestBilinearWOBias6(TestBilinearWOBias):
in_shape = (1, 1)
class TestBilinearWOBias7(TestBilinearWOBias):
in_shape = (1, 2)
out_size = 1
class TestBilinearWOBias8(TestBilinearWOBias):
in_shape = (2, 1)
out_size = 1
class TestBilinearWOBias9(TestBilinearWOBias):
in_shape = (1, 1)
out_size = 1
class InitByInitialParameter(unittest.TestCase):
in_shape = (2, 3)
out_size = 4
batch_size = 10
def setUp(self):
self.W = _uniform(self.in_shape[0], self.in_shape[1], self.out_size)
self.V1 = _uniform(self.in_shape[0], self.out_size)
self.V2 = _uniform(self.in_shape[1], self.out_size)
self.b = _uniform(self.out_size,)
class NormalInitialParameter(InitByInitialParameter):
def check_normal(self, initialW, initial_bias, nobias):
links.Bilinear(
self.in_shape[0], self.in_shape[1], self.out_size, nobias,
initialW, initial_bias)
def test_normal_cpu_bias(self):
self.check_normal(self.W, (self.V1, self.V2, self.b), False)
def test_normal_cpu_nobias(self):
self.check_normal(self.W, None, False)
class InvalidInitialParameter(InitByInitialParameter):
def setUp(self):
super(InvalidInitialParameter, self).setUp()
self.invalidW = _uniform(self.in_shape[0] + 1, self.in_shape[1],
self.out_size)
self.invalidV1 = _uniform(self.in_shape[0] + 1, self.out_size)
self.invalidV2 = _uniform(self.in_shape[1] + 1, self.out_size)
self.invalidb = _uniform(self.out_size + 1,)
def check_invalid(self, initialW, initial_bias, nobias):
with self.assertRaises(AssertionError):
links.Bilinear(
self.in_shape[0], self.in_shape[1], self.out_size, nobias,
initialW, initial_bias)
def test_invalidW_cpu(self):
self.check_invalid(self.invalidW, (self.V1, self.V2, self.b), False)
self.check_invalid(self.invalidW, None, True)
def test_invalidV1_cpu(self):
self.check_invalid(self.W, (self.invalidV1, self.V2, self.b), False)
def test_invalidV2_cpu(self):
self.check_invalid(self.W, (self.V1, self.invalidV2, self.b), False)
def test_invalidb_cpu(self):
self.check_invalid(self.W, (self.V1, self.V2, self.invalidb), False)
testing.run_module(__name__, __file__)
| mit |
eadgarchen/tensorflow | tensorflow/python/layers/convolutional_test.py | 31 | 43050 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.layers.convolutional."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import convolutional as conv_layers
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class ConvTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv2d(images, 32, 3, strides=(1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv2d(images, 32, 3, strides=None)
def testInvalidKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv2d(images, 32, (1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv2d(images, 32, None)
def testCreateConv2D(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2D(32, [3, 3], activation=nn_ops.relu)
output = layer.apply(images)
self.assertEqual(output.op.name, 'conv2d/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testConv2DFloat16(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4), dtype='float16')
output = conv_layers.conv2d(images, 32, [3, 3], activation=nn_ops.relu)
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
def testCreateConv2DIntegerKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2D(32, 3)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testCreateConv2DChannelsFirst(self):
height, width = 7, 9
images = random_ops.random_uniform((5, 4, height, width))
layer = conv_layers.Conv2D(32, [3, 3], data_format='channels_first')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height - 2, width - 2])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testUnknownInputChannels(self):
images = random_ops.random_uniform((5, 7, 9, 4))
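    # Overwrite the inferred static shape so the channel dimension is unknown.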
images._shape = tensor_shape.as_shape((5, 7, 9, None))
layer = conv_layers.Conv2D(32, [3, 3], activation=nn_ops.relu)
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(images)
images = random_ops.random_uniform((5, 4, 7, 9))
images._shape = tensor_shape.as_shape((5, None, 7, 9))
layer = conv_layers.Conv2D(32, [3, 3], data_format='channels_first')
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(images)
def testConv2DPaddingSame(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 32), seed=1)
layer = conv_layers.Conv2D(64, images.get_shape()[1:3], padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 64])
def testCreateConvWithStrides(self):
height, width = 6, 8
# Test strides tuple
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layer = conv_layers.Conv2D(32, [3, 3], strides=(2, 2), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
# Test strides integer
layer = conv_layers.Conv2D(32, [3, 3], strides=2, padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
# Test unequal strides
layer = conv_layers.Conv2D(32, [3, 3], strides=(2, 1), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width, 32])
def testCreateConv1D(self):
width = 7
data = random_ops.random_uniform((5, width, 4))
layer = conv_layers.Conv1D(32, 3, activation=nn_ops.relu)
output = layer.apply(data)
self.assertEqual(output.op.name, 'conv1d/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testConv1DFloat16(self):
width = 7
data = random_ops.random_uniform((5, width, 4), dtype='float16')
output = conv_layers.conv1d(data, 32, 3, activation=nn_ops.relu)
self.assertListEqual(output.get_shape().as_list(), [5, width - 2, 32])
def testCreateConv1DChannelsFirst(self):
width = 7
data = random_ops.random_uniform((5, 4, width))
layer = conv_layers.Conv1D(32, 3, data_format='channels_first')
output = layer.apply(data)
self.assertListEqual(output.get_shape().as_list(), [5, 32, width - 2])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testUnknownInputChannelsConv1D(self):
data = random_ops.random_uniform((5, 4, 7))
data._shape = tensor_shape.as_shape((5, 4, None))
layer = conv_layers.Conv1D(32, 3, activation=nn_ops.relu)
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(data)
data = random_ops.random_uniform((5, 7, 4))
data._shape = tensor_shape.as_shape((5, None, 4))
layer = conv_layers.Conv1D(32, 3, data_format='channels_first')
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(data)
def testCreateConv3D(self):
depth, height, width = 6, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 4))
layer = conv_layers.Conv3D(32, [3, 3, 3], activation=nn_ops.relu)
output = layer.apply(volumes)
self.assertEqual(output.op.name, 'conv3d/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, depth - 2, height - 2, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testUnknownInputChannelsConv3D(self):
volumes = random_ops.random_uniform((5, 6, 7, 9, 9))
volumes._shape = tensor_shape.as_shape((5, 6, 7, 9, None))
layer = conv_layers.Conv3D(32, [3, 3, 3], activation=nn_ops.relu)
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(volumes)
def testConv2DKernelRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.Conv2D(32, [3, 3], kernel_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testConv2DBiasRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.Conv2D(32, [3, 3], bias_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testConv2DNoBias(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2D(
32, [3, 3], activation=nn_ops.relu, use_bias=False)
output = layer.apply(images)
self.assertEqual(output.op.name, 'conv2d/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertEqual(layer.bias, None)
def testDilatedConv2D(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2D(32, [3, 3], dilation_rate=3)
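    # With dilation_rate=3 a 3x3 kernel spans an effective 7x7 window, so the
    # 'valid' output is (7 - 6) x (9 - 6) = 1 x 3.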
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(), [5, 1, 3, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
# Test tuple dilation rate
layer = conv_layers.Conv2D(32, [3, 3], dilation_rate=(1, 3))
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(), [5, height - 2, 3, 32])
def testFunctionalConv2DReuse(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d(images, 32, [3, 3], name='conv1')
self.assertEqual(len(variables.trainable_variables()), 2)
conv_layers.conv2d(images, 32, [3, 3], name='conv1', reuse=True)
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv2DReuseFromScope(self):
with variable_scope.variable_scope('scope'):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d(images, 32, [3, 3], name='conv1')
self.assertEqual(len(variables.trainable_variables()), 2)
with variable_scope.variable_scope('scope', reuse=True):
conv_layers.conv2d(images, 32, [3, 3], name='conv1')
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv2DInitializerFromScope(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d(images, 32, [3, 3], name='conv1')
weights = variables.trainable_variables()
# Check the names of weights in order.
self.assertTrue('kernel' in weights[0].name)
self.assertTrue('bias' in weights[1].name)
sess.run(variables.global_variables_initializer())
weights = sess.run(weights)
# Check that the kernel weights got initialized to ones (from scope)
self.assertAllClose(weights[0], np.ones((3, 3, 3, 32)))
# Check that the bias still got initialized to zeros.
self.assertAllClose(weights[1], np.zeros((32)))
def testFunctionalConv2DNoReuse(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d(images, 32, [3, 3])
self.assertEqual(len(variables.trainable_variables()), 2)
conv_layers.conv2d(images, 32, [3, 3])
self.assertEqual(len(variables.trainable_variables()), 4)
def testConstraints(self):
# Conv1D
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
conv1d = conv_layers.Conv1D(2, 3,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3, 5), seed=1)
conv1d(inputs)
self.assertEqual(conv1d.kernel_constraint, k_constraint)
self.assertEqual(conv1d.bias_constraint, b_constraint)
# Conv2D
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
conv2d = conv_layers.Conv2D(2, 3,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3, 3, 5), seed=1)
conv2d(inputs)
self.assertEqual(conv2d.kernel_constraint, k_constraint)
self.assertEqual(conv2d.bias_constraint, b_constraint)
# Conv3D
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
conv3d = conv_layers.Conv3D(2, 3,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3, 3, 3, 5), seed=1)
conv3d(inputs)
self.assertEqual(conv3d.kernel_constraint, k_constraint)
self.assertEqual(conv3d.bias_constraint, b_constraint)
class SeparableConv2DTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'data_format'):
conv_layers.separable_conv2d(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.separable_conv2d(images, 32, 3, strides=(1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.separable_conv2d(images, 32, 3, strides=None)
def testInvalidKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.separable_conv2d(images, 32, (1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.separable_conv2d(images, 32, None)
def testCreateSeparableConv2D(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.SeparableConv2D(32, [3, 3], activation=nn_ops.relu)
output = layer.apply(images)
self.assertEqual(output.op.name, 'separable_conv2d/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.depthwise_kernel.get_shape().as_list(),
[3, 3, 4, 1])
self.assertListEqual(layer.pointwise_kernel.get_shape().as_list(),
[1, 1, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testCreateSeparableConv2DDepthMultiplier(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.SeparableConv2D(32, [3, 3], depth_multiplier=2)
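    # depth_multiplier=2 doubles the depthwise output channels (4 * 2 = 8),
    # which become the input channels of the 1x1 pointwise convolution.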
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.depthwise_kernel.get_shape().as_list(),
[3, 3, 4, 2])
self.assertListEqual(layer.pointwise_kernel.get_shape().as_list(),
[1, 1, 8, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testCreateSeparableConv2DIntegerKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.SeparableConv2D(32, 3)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.depthwise_kernel.get_shape().as_list(),
[3, 3, 4, 1])
self.assertListEqual(layer.pointwise_kernel.get_shape().as_list(),
[1, 1, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testCreateSeparableConv2DChannelsFirst(self):
height, width = 7, 9
images = random_ops.random_uniform((5, 4, height, width))
layer = conv_layers.SeparableConv2D(
32, [3, 3], data_format='channels_first')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height - 2, width - 2])
self.assertListEqual(layer.depthwise_kernel.get_shape().as_list(),
[3, 3, 4, 1])
self.assertListEqual(layer.pointwise_kernel.get_shape().as_list(),
[1, 1, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testSeparableConv2DPaddingSame(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 32), seed=1)
layer = conv_layers.SeparableConv2D(
64, images.get_shape()[1:3], padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 64])
def testCreateSeparableConvWithStrides(self):
height, width = 6, 8
# Test strides tuple
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layer = conv_layers.SeparableConv2D(
32, [3, 3], strides=(2, 2), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
# Test strides integer
layer = conv_layers.SeparableConv2D(32, [3, 3], strides=2, padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
# Test unequal strides
layer = conv_layers.SeparableConv2D(
32, [3, 3], strides=(2, 1), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width, 32])
def testCreateSeparableConvWithStridesChannelsFirst(self):
data_format = 'channels_first'
height, width = 6, 8
# Test strides tuple
images = random_ops.random_uniform((5, 3, height, width), seed=1)
layer = conv_layers.SeparableConv2D(
32, [3, 3], strides=(2, 2), padding='same', data_format=data_format)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height / 2, width / 2])
# Test strides integer
layer = conv_layers.SeparableConv2D(32, [3, 3], strides=2, padding='same',
data_format=data_format)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height / 2, width / 2])
# Test unequal strides
layer = conv_layers.SeparableConv2D(
32, [3, 3], strides=(2, 1), padding='same', data_format=data_format)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height / 2, width])
def testFunctionalConv2DReuse(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.separable_conv2d(images, 32, [3, 3], name='sepconv1')
self.assertEqual(len(variables.trainable_variables()), 3)
conv_layers.separable_conv2d(
images, 32, [3, 3], name='sepconv1', reuse=True)
self.assertEqual(len(variables.trainable_variables()), 3)
def testFunctionalConv2DReuseFromScope(self):
with variable_scope.variable_scope('scope'):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.separable_conv2d(images, 32, [3, 3], name='sepconv1')
self.assertEqual(len(variables.trainable_variables()), 3)
with variable_scope.variable_scope('scope', reuse=True):
conv_layers.separable_conv2d(images, 32, [3, 3], name='sepconv1')
self.assertEqual(len(variables.trainable_variables()), 3)
def testFunctionalConv2DInitializerFromScope(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.separable_conv2d(images, 32, [3, 3], name='sepconv1')
weights = variables.trainable_variables()
# Check the names of weights in order.
self.assertTrue('depthwise_kernel' in weights[0].name)
self.assertTrue('pointwise_kernel' in weights[1].name)
self.assertTrue('bias' in weights[2].name)
sess.run(variables.global_variables_initializer())
weights = sess.run(weights)
# Check that the kernel weights got initialized to ones (from scope)
self.assertAllClose(weights[0], np.ones((3, 3, 3, 1)))
self.assertAllClose(weights[1], np.ones((1, 1, 3, 32)))
# Check that the bias still got initialized to zeros.
self.assertAllClose(weights[2], np.zeros((32)))
def testFunctionalConv2DNoReuse(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.separable_conv2d(images, 32, [3, 3])
self.assertEqual(len(variables.trainable_variables()), 3)
conv_layers.separable_conv2d(images, 32, [3, 3])
self.assertEqual(len(variables.trainable_variables()), 6)
def testSeparableConv2DDepthwiseRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.SeparableConv2D(32, [3, 3], depthwise_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testSeparableConv2DPointwiseRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.SeparableConv2D(32, [3, 3], pointwise_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testSeparableConv2DBiasRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.SeparableConv2D(32, [3, 3], bias_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testSeparableConv2DNoBias(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.SeparableConv2D(
32, [3, 3], activation=nn_ops.relu, use_bias=False)
output = layer.apply(images)
self.assertEqual(output.op.name, 'separable_conv2d/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.depthwise_kernel.get_shape().as_list(),
[3, 3, 4, 1])
self.assertListEqual(layer.pointwise_kernel.get_shape().as_list(),
[1, 1, 4, 32])
self.assertEqual(layer.bias, None)
def testConstraints(self):
d_constraint = lambda x: x / math_ops.reduce_sum(x)
p_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
layer = conv_layers.SeparableConv2D(2, 3,
depthwise_constraint=d_constraint,
pointwise_constraint=p_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3, 3, 5), seed=1)
layer(inputs)
self.assertEqual(layer.depthwise_constraint, d_constraint)
self.assertEqual(layer.pointwise_constraint, p_constraint)
self.assertEqual(layer.bias_constraint, b_constraint)
class Conv2DTransposeTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'data_format'):
conv_layers.conv2d_transpose(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv2d_transpose(images, 32, 3, strides=(1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv2d_transpose(images, 32, 3, strides=None)
def testInvalidKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv2d_transpose(images, 32, (1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv2d_transpose(images, 32, None)
def testCreateConv2DTranspose(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2DTranspose(32, [3, 3], activation=nn_ops.relu)
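    # A 3x3 transposed convolution with stride 1 and 'valid' padding grows
    # each spatial dimension by kernel_size - 1 = 2.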
output = layer.apply(images)
self.assertEqual(output.op.name, 'conv2d_transpose/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height + 2, width + 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 32, 4])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testConv2DTransposeFloat16(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4), dtype='float16')
output = conv_layers.conv2d_transpose(images, 32, [3, 3],
activation=nn_ops.relu)
self.assertListEqual(output.get_shape().as_list(),
[5, height + 2, width + 2, 32])
def testCreateConv2DTransposeIntegerKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2DTranspose(32, 3)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height + 2, width + 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 32, 4])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testCreateConv2DTransposeChannelsFirst(self):
height, width = 7, 9
images = random_ops.random_uniform((5, 4, height, width))
layer = conv_layers.Conv2DTranspose(
32, [3, 3], data_format='channels_first')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height + 2, width + 2])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 32, 4])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testConv2DTransposePaddingSame(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 32), seed=1)
layer = conv_layers.Conv2DTranspose(
64, images.get_shape()[1:3], padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 64])
def testCreateConv2DTransposeWithStrides(self):
height, width = 6, 8
# Test strides tuple
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layer = conv_layers.Conv2DTranspose(
32, [3, 3], strides=(2, 2), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height * 2, width * 2, 32])
# Test strides integer
layer = conv_layers.Conv2DTranspose(32, [3, 3], strides=2, padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height * 2, width * 2, 32])
# Test unequal strides
layer = conv_layers.Conv2DTranspose(
32, [3, 3], strides=(2, 1), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height * 2, width, 32])
def testConv2DTransposeKernelRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.Conv2DTranspose(32, [3, 3], kernel_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testConv2DTransposeBiasRegularizer(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.Conv2DTranspose(32, [3, 3], bias_regularizer=reg)
layer.apply(images)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testConv2DTransposeNoBias(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2DTranspose(
32, [3, 3], activation=nn_ops.relu, use_bias=False)
output = layer.apply(images)
self.assertEqual(output.op.name, 'conv2d_transpose/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height + 2, width + 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 32, 4])
self.assertEqual(layer.bias, None)
def testFunctionalConv2DTransposeReuse(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d_transpose(images, 32, [3, 3], name='deconv1')
self.assertEqual(len(variables.trainable_variables()), 2)
conv_layers.conv2d_transpose(images, 32, [3, 3], name='deconv1', reuse=True)
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv2DTransposeReuseFromScope(self):
with variable_scope.variable_scope('scope'):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d_transpose(images, 32, [3, 3], name='deconv1')
self.assertEqual(len(variables.trainable_variables()), 2)
with variable_scope.variable_scope('scope', reuse=True):
conv_layers.conv2d_transpose(images, 32, [3, 3], name='deconv1')
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv2DTransposeInitializerFromScope(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d_transpose(images, 32, [3, 3], name='deconv1')
weights = variables.trainable_variables()
# Check the names of weights in order.
self.assertTrue('kernel' in weights[0].name)
self.assertTrue('bias' in weights[1].name)
sess.run(variables.global_variables_initializer())
weights = sess.run(weights)
# Check that the kernel weights got initialized to ones (from scope)
self.assertAllClose(weights[0], np.ones((3, 3, 32, 3)))
# Check that the bias still got initialized to zeros.
self.assertAllClose(weights[1], np.zeros((32)))
def testFunctionalConv2DTransposeNoReuse(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
conv_layers.conv2d_transpose(images, 32, [3, 3])
self.assertEqual(len(variables.trainable_variables()), 2)
conv_layers.conv2d_transpose(images, 32, [3, 3])
self.assertEqual(len(variables.trainable_variables()), 4)
def testConstraints(self):
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
layer = conv_layers.Conv2DTranspose(2, 3,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3, 3, 5), seed=1)
layer(inputs)
self.assertEqual(layer.kernel_constraint, k_constraint)
self.assertEqual(layer.bias_constraint, b_constraint)
class Conv3DTransposeTest(test.TestCase):
def testInvalidDataFormat(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
with self.assertRaisesRegexp(ValueError, 'data_format'):
conv_layers.conv3d_transpose(volumes, 4, 3, data_format='invalid')
def testInvalidStrides(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv3d_transpose(volumes, 4, 3, strides=(1, 2))
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv3d_transpose(volumes, 4, 3, strides=None)
def testInvalidKernelSize(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv3d_transpose(volumes, 4, (1, 2))
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv3d_transpose(volumes, 4, None)
def testCreateConv3DTranspose(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32))
layer = conv_layers.Conv3DTranspose(4, [3, 3, 3], activation=nn_ops.relu)
output = layer.apply(volumes)
self.assertEqual(output.op.name, 'conv3d_transpose/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, depth + 2, height + 2, width + 2, 4])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [4])
def testCreateConv3DTransposeIntegerKernelSize(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32))
layer = conv_layers.Conv3DTranspose(4, 3)
output = layer.apply(volumes)
self.assertListEqual(output.get_shape().as_list(),
[5, depth + 2, height + 2, width + 2, 4])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [4])
def testCreateConv3DTransposeChannelsFirst(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, 32, depth, height, width))
layer = conv_layers.Conv3DTranspose(
4, [3, 3, 3], data_format='channels_first')
output = layer.apply(volumes)
self.assertListEqual(output.get_shape().as_list(),
[5, 4, depth + 2, height + 2, width + 2])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [4])
def testConv3DTransposePaddingSame(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 64), seed=1)
layer = conv_layers.Conv3DTranspose(
32, volumes.get_shape()[1:4], padding='same')
output = layer.apply(volumes)
self.assertListEqual(output.get_shape().as_list(),
[5, depth, height, width, 32])
def testCreateConv3DTransposeWithStrides(self):
depth, height, width = 4, 6, 8
# Test strides tuple.
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
layer = conv_layers.Conv3DTranspose(
4, [3, 3, 3], strides=(2, 2, 2), padding='same')
output = layer.apply(volumes)
self.assertListEqual(output.get_shape().as_list(),
[5, depth * 2, height * 2, width * 2, 4])
# Test strides integer.
layer = conv_layers.Conv3DTranspose(4, [3, 3, 3], strides=2, padding='same')
output = layer.apply(volumes)
self.assertListEqual(output.get_shape().as_list(),
[5, depth * 2, height * 2, width * 2, 4])
# Test unequal strides.
layer = conv_layers.Conv3DTranspose(
4, [3, 3, 3], strides=(2, 1, 1), padding='same')
output = layer.apply(volumes)
self.assertListEqual(output.get_shape().as_list(),
[5, depth * 2, height, width, 4])
def testConv3DTransposeKernelRegularizer(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.Conv3DTranspose(4, [3, 3, 3], kernel_regularizer=reg)
layer.apply(volumes)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testConv3DTransposeBiasRegularizer(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32))
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
layer = conv_layers.Conv3DTranspose(4, [3, 3, 3], bias_regularizer=reg)
layer.apply(volumes)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(layer.losses, loss_keys)
def testConv3DTransposeNoBias(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32))
layer = conv_layers.Conv3DTranspose(
4, [3, 3, 3], activation=nn_ops.relu, use_bias=False)
output = layer.apply(volumes)
self.assertEqual(output.op.name, 'conv3d_transpose/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, depth + 2, height + 2, width + 2, 4])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 3, 4, 32])
self.assertEqual(layer.bias, None)
def testFunctionalConv3DTransposeReuse(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
conv_layers.conv3d_transpose(volumes, 4, [3, 3, 3], name='deconv1')
self.assertEqual(len(variables.trainable_variables()), 2)
conv_layers.conv3d_transpose(
volumes, 4, [3, 3, 3], name='deconv1', reuse=True)
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv3DTransposeReuseFromScope(self):
with variable_scope.variable_scope('scope'):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
conv_layers.conv3d_transpose(volumes, 4, [3, 3, 3], name='deconv1')
self.assertEqual(len(variables.trainable_variables()), 2)
with variable_scope.variable_scope('scope', reuse=True):
conv_layers.conv3d_transpose(volumes, 4, [3, 3, 3], name='deconv1')
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv3DTransposeInitializerFromScope(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform(
(5, depth, height, width, 32), seed=1)
conv_layers.conv3d_transpose(volumes, 4, [3, 3, 3], name='deconv1')
weights = variables.trainable_variables()
# Check the names of weights in order.
self.assertTrue('kernel' in weights[0].name)
self.assertTrue('bias' in weights[1].name)
sess.run(variables.global_variables_initializer())
weights = sess.run(weights)
# Check that the kernel weights got initialized to ones (from scope)
self.assertAllClose(weights[0], np.ones((3, 3, 3, 4, 32)))
# Check that the bias still got initialized to zeros.
self.assertAllClose(weights[1], np.zeros((4)))
def testFunctionalConv3DTransposeNoReuse(self):
depth, height, width = 5, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 32), seed=1)
conv_layers.conv3d_transpose(volumes, 4, [3, 3, 3])
self.assertEqual(len(variables.trainable_variables()), 2)
conv_layers.conv3d_transpose(volumes, 4, [3, 3, 3])
self.assertEqual(len(variables.trainable_variables()), 4)
def testConstraints(self):
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
layer = conv_layers.Conv3DTranspose(2, 3,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3, 3, 3, 5), seed=1)
layer(inputs)
self.assertEqual(layer.kernel_constraint, k_constraint)
self.assertEqual(layer.bias_constraint, b_constraint)
if __name__ == '__main__':
test.main()
| apache-2.0 |
eadgarchen/tensorflow | tensorflow/contrib/receptive_field/python/util/examples/compute_rf.py | 26 | 3507 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Computes Receptive Field (RF) information given a graph protobuf.
For an example of usage, see accompanying file compute_rf.sh
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from google.protobuf import text_format
from tensorflow.contrib import receptive_field
from tensorflow.core.framework import graph_pb2
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
cmd_args = None
def _load_graphdef(path):
"""Helper function to load GraphDef from file.
Args:
path: Path to pbtxt file.
Returns:
graph_def: A GraphDef object.
"""
graph_def = graph_pb2.GraphDef()
pbstr = gfile.Open(path).read()
text_format.Parse(pbstr, graph_def)
return graph_def
def main(unused_argv):
graph_def = _load_graphdef(cmd_args.graph_path)
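  # Derive the receptive field parameters along the path from input_node to
  # output_node in the parsed graph.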
(receptive_field_x, receptive_field_y, effective_stride_x, effective_stride_y,
effective_padding_x, effective_padding_y
) = receptive_field.compute_receptive_field_from_graph_def(
graph_def, cmd_args.input_node, cmd_args.output_node)
logging.info('Receptive field size (horizontal) = %s', receptive_field_x)
logging.info('Receptive field size (vertical) = %s', receptive_field_y)
logging.info('Effective stride (horizontal) = %s', effective_stride_x)
logging.info('Effective stride (vertical) = %s', effective_stride_y)
logging.info('Effective padding (horizontal) = %s', effective_padding_x)
logging.info('Effective padding (vertical) = %s', effective_padding_y)
f = gfile.GFile('%s' % cmd_args.output_path, 'w')
f.write('Receptive field size (horizontal) = %s\n' % receptive_field_x)
f.write('Receptive field size (vertical) = %s\n' % receptive_field_y)
f.write('Effective stride (horizontal) = %s\n' % effective_stride_x)
f.write('Effective stride (vertical) = %s\n' % effective_stride_y)
f.write('Effective padding (horizontal) = %s\n' % effective_padding_x)
f.write('Effective padding (vertical) = %s\n' % effective_padding_y)
f.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument(
'--graph_path', type=str, default='', help='Graph path (pbtxt format).')
parser.add_argument(
'--output_path',
type=str,
default='',
help='Path to output text file where RF information will be written to.')
parser.add_argument(
'--input_node', type=str, default='', help='Name of input node.')
parser.add_argument(
'--output_node', type=str, default='', help='Name of output node.')
cmd_args, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |