repo_name | path | copies | size | content | license
---|---|---|---|---|---
prutseltje/ansible | lib/ansible/modules/network/aci/aci_interface_policy_leaf_policy_group.py | 4 | 18307 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_interface_policy_leaf_policy_group
short_description: Manage fabric interface policy leaf policy groups (infra:AccBndlGrp, infra:AccPortGrp)
description:
- Manage fabric interface policy leaf policy groups on Cisco ACI fabrics.
notes:
- When using the module please select the appropriate link_aggregation_type (lag_type).
  C(link) for Port Channel (PC), C(node) for Virtual Port Channel (VPC) and C(leaf) for Leaf Access Port Policy Group.
- More information about the internal APIC classes B(infra:AccBndlGrp) and B(infra:AccPortGrp) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
options:
policy_group:
description:
- Name of the leaf policy group to be added/deleted.
aliases: [ name, policy_group_name ]
description:
description:
- Description for the leaf policy group to be created.
aliases: [ descr ]
lag_type:
description:
- Selector for the type of leaf policy group we want to create.
- C(leaf) for Leaf Access Port Policy Group
- C(link) for Port Channel (PC)
- C(node) for Virtual Port Channel (VPC)
aliases: [ lag_type_name ]
choices: [ leaf, link, node ]
link_level_policy:
description:
- Choice of link_level_policy to be used as part of the leaf policy group to be created.
aliases: [ link_level_policy_name ]
cdp_policy:
description:
- Choice of cdp_policy to be used as part of the leaf policy group to be created.
aliases: [ cdp_policy_name ]
mcp_policy:
description:
- Choice of mcp_policy to be used as part of the leaf policy group to be created.
aliases: [ mcp_policy_name ]
lldp_policy:
description:
- Choice of lldp_policy to be used as part of the leaf policy group to be created.
aliases: [ lldp_policy_name ]
stp_interface_policy:
description:
- Choice of stp_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ stp_interface_policy_name ]
egress_data_plane_policing_policy:
description:
- Choice of egress_data_plane_policing_policy to be used as part of the leaf policy group to be created.
aliases: [ egress_data_plane_policing_policy_name ]
ingress_data_plane_policing_policy:
description:
- Choice of ingress_data_plane_policing_policy to be used as part of the leaf policy group to be created.
aliases: [ ingress_data_plane_policing_policy_name ]
priority_flow_control_policy:
description:
- Choice of priority_flow_control_policy to be used as part of the leaf policy group to be created.
aliases: [ priority_flow_control_policy_name ]
fibre_channel_interface_policy:
description:
- Choice of fibre_channel_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ fibre_channel_interface_policy_name ]
slow_drain_policy:
description:
- Choice of slow_drain_policy to be used as part of the leaf policy group to be created.
aliases: [ slow_drain_policy_name ]
port_channel_policy:
description:
- Choice of port_channel_policy to be used as part of the leaf policy group to be created.
aliases: [ port_channel_policy_name ]
monitoring_policy:
description:
- Choice of monitoring_policy to be used as part of the leaf policy group to be created.
aliases: [ monitoring_policy_name ]
storm_control_interface_policy:
description:
- Choice of storm_control_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ storm_control_interface_policy_name ]
l2_interface_policy:
description:
- Choice of l2_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ l2_interface_policy_name ]
port_security_policy:
description:
- Choice of port_security_policy to be used as part of the leaf policy group to be created.
aliases: [ port_security_policy_name ]
aep:
description:
- Choice of attached_entity_profile (AEP) to be used as part of the leaf policy group to be created.
aliases: [ aep_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Create a Port Channel (PC) Interface Policy Group
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
description: policygroupname description
lag_type: link
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: Create a Virtual Port Channel (VPC) Interface Policy Group (no description)
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
lag_type: node
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: Create a Leaf Access Port Policy Group (no description)
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
lag_type: leaf
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: Delete an Interface policy Leaf Policy Group
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
    lag_type: leaf  # one of: leaf, link, node
state: absent
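# Illustrative query examples -- a sketch with placeholder values, not captured output
- name: Query a specific Leaf Access Port Policy Group
  aci_interface_policy_leaf_policy_group:
    host: apic
    username: admin
    password: SomeSecretPassword
    policy_group: policygroupname
    lag_type: leaf
    state: query

- name: Query all Port Channel (PC) Interface Policy Groups
  aci_interface_policy_leaf_policy_group:
    host: apic
    username: admin
    password: SomeSecretPassword
    lag_type: link
    state: query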
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
policy_group=dict(type='str', aliases=['name', 'policy_group_name']), # Not required for querying all objects
description=dict(type='str', aliases=['descr']),
# NOTE: Since this module needs to include both infra:AccBndlGrp (for PC and VPC) and infra:AccPortGrp (for leaf access port policy group):
# NOTE: I'll allow the user to make the choice here (link(PC), node(VPC), leaf(leaf-access port policy group))
lag_type=dict(type='str', aliases=['lag_type_name'], choices=['leaf', 'link', 'node']), # Not required for querying all objects
link_level_policy=dict(type='str', aliases=['link_level_policy_name']),
cdp_policy=dict(type='str', aliases=['cdp_policy_name']),
mcp_policy=dict(type='str', aliases=['mcp_policy_name']),
lldp_policy=dict(type='str', aliases=['lldp_policy_name']),
stp_interface_policy=dict(type='str', aliases=['stp_interface_policy_name']),
egress_data_plane_policing_policy=dict(type='str', aliases=['egress_data_plane_policing_policy_name']),
ingress_data_plane_policing_policy=dict(type='str', aliases=['ingress_data_plane_policing_policy_name']),
priority_flow_control_policy=dict(type='str', aliases=['priority_flow_control_policy_name']),
fibre_channel_interface_policy=dict(type='str', aliases=['fibre_channel_interface_policy_name']),
slow_drain_policy=dict(type='str', aliases=['slow_drain_policy_name']),
port_channel_policy=dict(type='str', aliases=['port_channel_policy_name']),
monitoring_policy=dict(type='str', aliases=['monitoring_policy_name']),
storm_control_interface_policy=dict(type='str', aliases=['storm_control_interface_policy_name']),
l2_interface_policy=dict(type='str', aliases=['l2_interface_policy_name']),
port_security_policy=dict(type='str', aliases=['port_security_policy_name']),
aep=dict(type='str', aliases=['aep_name']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['lag_type', 'policy_group']],
['state', 'present', ['lag_type', 'policy_group']],
],
)
policy_group = module.params['policy_group']
description = module.params['description']
lag_type = module.params['lag_type']
link_level_policy = module.params['link_level_policy']
cdp_policy = module.params['cdp_policy']
mcp_policy = module.params['mcp_policy']
lldp_policy = module.params['lldp_policy']
stp_interface_policy = module.params['stp_interface_policy']
egress_data_plane_policing_policy = module.params['egress_data_plane_policing_policy']
ingress_data_plane_policing_policy = module.params['ingress_data_plane_policing_policy']
priority_flow_control_policy = module.params['priority_flow_control_policy']
fibre_channel_interface_policy = module.params['fibre_channel_interface_policy']
slow_drain_policy = module.params['slow_drain_policy']
port_channel_policy = module.params['port_channel_policy']
monitoring_policy = module.params['monitoring_policy']
storm_control_interface_policy = module.params['storm_control_interface_policy']
l2_interface_policy = module.params['l2_interface_policy']
port_security_policy = module.params['port_security_policy']
aep = module.params['aep']
state = module.params['state']
if lag_type == 'leaf':
aci_class_name = 'infraAccPortGrp'
dn_name = 'accportgrp'
class_config_dict = dict(
name=policy_group,
descr=description,
)
    elif lag_type in ('link', 'node'):
aci_class_name = 'infraAccBndlGrp'
dn_name = 'accbundle'
class_config_dict = dict(
name=policy_group,
descr=description,
lagT=lag_type,
)
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class=aci_class_name,
aci_rn='infra/funcprof/{0}-{1}'.format(dn_name, policy_group),
filter_target='eq({0}.name, "{1}")'.format(aci_class_name, policy_group),
module_object=policy_group,
),
child_classes=[
'infraRsAttEntP',
'infraRsCdpIfPol',
'infraRsFcIfPol',
'infraRsHIfPol',
'infraRsL2IfPol',
'infraRsL2PortSecurityPol',
'infraRsLacpPol',
'infraRsLldpIfPol',
'infraRsMcpIfPol',
'infraRsMonIfInfraPol',
'infraRsQosEgressDppIfPol',
'infraRsQosIngressDppIfPol',
'infraRsQosPfcIfPol',
'infraRsQosSdIfPol',
'infraRsStormctrlIfPol',
'infraRsStpIfPol',
],
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class=aci_class_name,
class_config=class_config_dict,
child_configs=[
dict(
infraRsAttEntP=dict(
attributes=dict(
tDn='uni/infra/attentp-{0}'.format(aep),
),
),
),
dict(
infraRsCdpIfPol=dict(
attributes=dict(
tnCdpIfPolName=cdp_policy,
),
),
),
dict(
infraRsFcIfPol=dict(
attributes=dict(
tnFcIfPolName=fibre_channel_interface_policy,
),
),
),
dict(
infraRsHIfPol=dict(
attributes=dict(
tnFabricHIfPolName=link_level_policy,
),
),
),
dict(
infraRsL2IfPol=dict(
attributes=dict(
tnL2IfPolName=l2_interface_policy,
),
),
),
dict(
infraRsL2PortSecurityPol=dict(
attributes=dict(
tnL2PortSecurityPolName=port_security_policy,
),
),
),
dict(
infraRsLacpPol=dict(
attributes=dict(
tnLacpLagPolName=port_channel_policy,
),
),
),
dict(
infraRsLldpIfPol=dict(
attributes=dict(
tnLldpIfPolName=lldp_policy,
),
),
),
dict(
infraRsMcpIfPol=dict(
attributes=dict(
tnMcpIfPolName=mcp_policy,
),
),
),
dict(
infraRsMonIfInfraPol=dict(
attributes=dict(
tnMonInfraPolName=monitoring_policy,
),
),
),
dict(
infraRsQosEgressDppIfPol=dict(
attributes=dict(
tnQosDppPolName=egress_data_plane_policing_policy,
),
),
),
dict(
infraRsQosIngressDppIfPol=dict(
attributes=dict(
tnQosDppPolName=ingress_data_plane_policing_policy,
),
),
),
dict(
infraRsQosPfcIfPol=dict(
attributes=dict(
tnQosPfcIfPolName=priority_flow_control_policy,
),
),
),
dict(
infraRsQosSdIfPol=dict(
attributes=dict(
tnQosSdIfPolName=slow_drain_policy,
),
),
),
dict(
infraRsStormctrlIfPol=dict(
attributes=dict(
tnStormctrlIfPolName=storm_control_interface_policy,
),
),
),
dict(
infraRsStpIfPol=dict(
attributes=dict(
tnStpIfPolName=stp_interface_policy,
),
),
),
],
)
aci.get_diff(aci_class=aci_class_name)
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/vta/tests/python/integration/test_benchmark_gemm.py | 2 | 12873 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import numpy as np
from tvm.contrib import util
import vta.testing
from vta.testing import simulator
def test_gemm():
def run_gemm_packed(env, remote, batch_size, channel, block):
data_shape = (batch_size // env.BATCH,
channel // env.BLOCK_IN,
env.BATCH,
env.BLOCK_IN)
weight_shape = (channel // env.BLOCK_OUT,
channel // env.BLOCK_IN,
env.BLOCK_OUT,
env.BLOCK_IN)
res_shape = (batch_size // env.BATCH,
channel // env.BLOCK_OUT,
env.BATCH,
env.BLOCK_OUT)
# To compute number of ops, use a x2 factor for FMA
num_ops = 2 * channel * channel * batch_size
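        # Worked example: the GEMM-128 run driven by _run below uses
        # batch_size = channel = 128, so num_ops = 2 * 128 * 128 * 128 = 4,194,304.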
ko = tvm.reduce_axis((0, channel // env.BLOCK_IN), name='ko')
ki = tvm.reduce_axis((0, env.BLOCK_IN), name='ki')
data = tvm.placeholder(data_shape,
name="data",
dtype=env.inp_dtype)
weight = tvm.placeholder(weight_shape,
name="weight",
dtype=env.wgt_dtype)
data_buf = tvm.compute(data_shape,
lambda *i: data(*i),
"data_buf")
weight_buf = tvm.compute(weight_shape,
lambda *i: weight(*i),
"weight_buf")
res_gem = tvm.compute(res_shape,
lambda bo, co, bi, ci: tvm.sum(
data_buf[bo, ko, bi, ki].astype(env.acc_dtype) *
weight_buf[co, ko, ci, ki].astype(env.acc_dtype),
axis=[ko, ki]),
name="res_gem")
res_shf = tvm.compute(res_shape,
lambda *i: res_gem(*i)>>8,
name="res_shf")
res_max = tvm.compute(res_shape,
lambda *i: tvm.max(res_shf(*i), 0),
"res_max") #relu
res_min = tvm.compute(res_shape,
lambda *i: tvm.min(res_max(*i), (1<<(env.INP_WIDTH-1))-1),
"res_min") #relu
res = tvm.compute(res_shape,
lambda *i: res_min(*i).astype(env.inp_dtype),
name="res")
def verify(s, check_correctness=True):
mod = vta.build(s, [data, weight, res],
"ext_dev", env.target_host, name="gemm")
temp = util.tempdir()
mod.save(temp.relpath("gemm.o"))
remote.upload(temp.relpath("gemm.o"))
f = remote.load_module("gemm.o")
# verify
ctx = remote.ext_dev(0)
# Data in original format
data_orig = np.random.randint(
-128, 128, size=(batch_size, channel)).astype(data.dtype)
weight_orig = np.random.randint(
-128, 128, size=(channel, channel)).astype(weight.dtype)
data_packed = data_orig.reshape(
batch_size // env.BATCH, env.BATCH,
channel // env.BLOCK_IN, env.BLOCK_IN).transpose((0, 2, 1, 3))
weight_packed = weight_orig.reshape(
channel // env.BLOCK_OUT, env.BLOCK_OUT,
channel // env.BLOCK_IN, env.BLOCK_IN).transpose((0, 2, 1, 3))
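            # e.g. for batch_size = channel = 128 under the default VTA config
            # (BATCH=1, BLOCK_IN=16 -- an assumption, these values are configurable),
            # data_orig of shape (128, 128) is packed to (128, 8, 1, 16),
            # which matches data_shape above.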
res_np = np.zeros(res_shape).astype(res.dtype)
data_arr = tvm.nd.array(data_packed, ctx)
weight_arr = tvm.nd.array(weight_packed, ctx)
res_arr = tvm.nd.array(res_np, ctx)
res_ref = np.zeros(res_shape).astype(env.acc_dtype)
for b in range(batch_size // env.BATCH):
for i in range(channel // env.BLOCK_OUT):
for j in range(channel // env.BLOCK_IN):
res_ref[b,i,:] += np.dot(data_packed[b,j,:].astype(env.acc_dtype),
weight_packed[i,j].T.astype(env.acc_dtype))
res_ref = np.right_shift(res_ref, 8)
res_ref = np.clip(res_ref, 0, (1<<(env.INP_WIDTH-1))-1).astype(res.dtype)
time_f = f.time_evaluator("gemm", ctx, number=20)
if env.TARGET in ["sim", "tsim"]:
simulator.clear_stats()
cost = time_f(data_arr, weight_arr, res_arr)
if env.TARGET in ["sim", "tsim"]:
stats = simulator.stats()
print("Execution statistics:")
for k, v in stats.items():
print("\t{:<16}: {:>16}".format(k, v))
res_unpack = res_arr.asnumpy().reshape(batch_size // env.BATCH,
channel // env.BLOCK_OUT,
env.BATCH,
env.BLOCK_OUT)
if check_correctness:
tvm.testing.assert_allclose(res_unpack, res_ref)
return cost
def run_schedule(load_inp,
load_wgt,
gemm,
alu,
store_out,
print_ir,
check_correctness):
s = tvm.create_schedule(res.op)
s[data_buf].set_scope(env.inp_scope)
s[weight_buf].set_scope(env.wgt_scope)
s[res_gem].set_scope(env.acc_scope)
s[res_shf].set_scope(env.acc_scope)
s[res_min].set_scope(env.acc_scope)
s[res_max].set_scope(env.acc_scope)
if block:
bblock = block // env.BATCH
iblock = block // env.BLOCK_IN
oblock = block // env.BLOCK_OUT
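            # e.g. block=128 under the default VTA config (BATCH=1,
            # BLOCK_IN=BLOCK_OUT=16 -- an assumption, these are configurable)
            # gives bblock=128, iblock=8, oblock=8 packed tiles per dimension.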
xbo, xco, xbi, xci = s[res].op.axis
xb1, xco1, xb2, xco2 = s[res].tile(xbo, xco, bblock, oblock)
store_pt = xb2
s[res_gem].compute_at(s[res], xco1)
s[res_shf].compute_at(s[res], xco1)
s[res_min].compute_at(s[res], xco1)
s[res_max].compute_at(s[res], xco1)
xbo, xco, xbi, xci = s[res_gem].op.axis
# Compute one line at a time
ko1, ko2 = s[res_gem].split(ko, iblock)
s[res_gem].reorder(ko1, ko2, xbo, xco, xbi, xci, ki)
s[data_buf].compute_at(s[res_gem], ko1)
s[weight_buf].compute_at(s[res_gem], ko1)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(store_pt, store_out)
else:
xbo, xco, xbi, xci = s[res_gem].op.axis
s[res_gem].reorder(ko, xbo, xco, xbi, xci, ki)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(s[res].op.axis[0], store_out)
if print_ir:
print(tvm.lower(s, [data, weight, res], simple_mode=True))
return verify(s, check_correctness)
def gemm_normal(print_ir):
mock = env.mock
print("----- GEMM GOPS End-to-End Test-------")
def run_test(header, print_ir, check_correctness):
cost = run_schedule(
env.dma_copy, env.dma_copy, env.gemm, env.alu, env.dma_copy,
print_ir, check_correctness)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir, True)
def gemm_unittest(print_ir):
mock = env.mock
print("----- GEMM Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, env.gemm, mock.alu, mock.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir)
def alu_unittest(print_ir):
mock = env.mock
print("----- ALU Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, mock.gemm, env.alu, mock.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def load_inp_unittest(print_ir):
mock = env.mock
print("----- LoadInp Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
env.dma_copy, mock.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
                bandwidth = (batch_size * channel * env.INP_WIDTH / cost.mean) / float(10 ** 9)
                print(header)
                print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
                    cost.mean, gops, bandwidth))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def load_wgt_unittest(print_ir):
mock = env.mock
print("----- LoadWgt Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, env.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
                bandwidth = (channel * channel * env.WGT_WIDTH / cost.mean) / float(10 ** 9)
                print(header)
                print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
                    cost.mean, gops, bandwidth))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def store_out_unittest(print_ir):
mock = env.mock
print("----- StoreOut Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, mock.gemm, mock.alu, env.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
                bandwidth = (batch_size * channel * env.OUT_WIDTH / cost.mean) / float(10 ** 9)
                print(header)
                print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
                    cost.mean, gops, bandwidth))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
gemm_normal(False)
gemm_unittest(False)
alu_unittest(False)
def _run(env, remote):
print("========GEMM 128=========")
run_gemm_packed(env, remote, 128, 128, 128)
vta.testing.run(_run)
if __name__ == "__main__":
test_gemm()
| apache-2.0 |
adedayo/intellij-community | python/lib/Lib/site-packages/django/template/loader_tags.py | 73 | 10511 |
from django.template.base import TemplateSyntaxError, TemplateDoesNotExist, Variable
from django.template.base import Library, Node, TextNode
from django.template.context import Context
from django.template.defaulttags import token_kwargs
from django.template.loader import get_template
from django.conf import settings
from django.utils.safestring import mark_safe
register = Library()
BLOCK_CONTEXT_KEY = 'block_context'
class ExtendsError(Exception):
pass
class BlockContext(object):
def __init__(self):
# Dictionary of FIFO queues.
self.blocks = {}
def add_blocks(self, blocks):
for name, block in blocks.iteritems():
if name in self.blocks:
self.blocks[name].insert(0, block)
else:
self.blocks[name] = [block]
def pop(self, name):
try:
return self.blocks[name].pop()
except (IndexError, KeyError):
return None
def push(self, name, block):
self.blocks[name].append(block)
def get_block(self, name):
try:
return self.blocks[name][-1]
except (IndexError, KeyError):
return None
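# A minimal sketch (hypothetical block nodes) of the FIFO ordering BlockContext keeps
# per block name: the most-derived template's block is added first and therefore
# popped first, while ancestor blocks are inserted at the front of the queue.
#
#     bc = BlockContext()
#     bc.add_blocks({'title': child_block})   # child template registers first
#     bc.add_blocks({'title': base_block})    # base template is inserted at index 0
#     bc.pop('title')                         # -> child_block: the override wins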
class BlockNode(Node):
def __init__(self, name, nodelist, parent=None):
self.name, self.nodelist, self.parent = name, nodelist, parent
def __repr__(self):
return "<Block Node: %s. Contents: %r>" % (self.name, self.nodelist)
def render(self, context):
block_context = context.render_context.get(BLOCK_CONTEXT_KEY)
context.push()
if block_context is None:
context['block'] = self
result = self.nodelist.render(context)
else:
push = block = block_context.pop(self.name)
if block is None:
block = self
# Create new block so we can store context without thread-safety issues.
block = BlockNode(block.name, block.nodelist)
block.context = context
context['block'] = block
result = block.nodelist.render(context)
if push is not None:
block_context.push(self.name, push)
context.pop()
return result
def super(self):
render_context = self.context.render_context
if (BLOCK_CONTEXT_KEY in render_context and
render_context[BLOCK_CONTEXT_KEY].get_block(self.name) is not None):
return mark_safe(self.render(self.context))
return ''
class ExtendsNode(Node):
must_be_first = True
def __init__(self, nodelist, parent_name, parent_name_expr, template_dirs=None):
self.nodelist = nodelist
self.parent_name, self.parent_name_expr = parent_name, parent_name_expr
self.template_dirs = template_dirs
self.blocks = dict([(n.name, n) for n in nodelist.get_nodes_by_type(BlockNode)])
def __repr__(self):
if self.parent_name_expr:
return "<ExtendsNode: extends %s>" % self.parent_name_expr.token
return '<ExtendsNode: extends "%s">' % self.parent_name
def get_parent(self, context):
if self.parent_name_expr:
self.parent_name = self.parent_name_expr.resolve(context)
parent = self.parent_name
if not parent:
error_msg = "Invalid template name in 'extends' tag: %r." % parent
if self.parent_name_expr:
error_msg += " Got this from the '%s' variable." % self.parent_name_expr.token
raise TemplateSyntaxError(error_msg)
if hasattr(parent, 'render'):
return parent # parent is a Template object
return get_template(parent)
def render(self, context):
compiled_parent = self.get_parent(context)
if BLOCK_CONTEXT_KEY not in context.render_context:
context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()
block_context = context.render_context[BLOCK_CONTEXT_KEY]
# Add the block nodes from this node to the block context
block_context.add_blocks(self.blocks)
# If this block's parent doesn't have an extends node it is the root,
# and its block nodes also need to be added to the block context.
for node in compiled_parent.nodelist:
# The ExtendsNode has to be the first non-text node.
if not isinstance(node, TextNode):
if not isinstance(node, ExtendsNode):
blocks = dict([(n.name, n) for n in
compiled_parent.nodelist.get_nodes_by_type(BlockNode)])
block_context.add_blocks(blocks)
break
# Call Template._render explicitly so the parser context stays
# the same.
return compiled_parent._render(context)
class BaseIncludeNode(Node):
def __init__(self, *args, **kwargs):
self.extra_context = kwargs.pop('extra_context', {})
self.isolated_context = kwargs.pop('isolated_context', False)
super(BaseIncludeNode, self).__init__(*args, **kwargs)
def render_template(self, template, context):
values = dict([(name, var.resolve(context)) for name, var
in self.extra_context.iteritems()])
if self.isolated_context:
return template.render(Context(values))
context.update(values)
output = template.render(context)
context.pop()
return output
class ConstantIncludeNode(BaseIncludeNode):
def __init__(self, template_path, *args, **kwargs):
super(ConstantIncludeNode, self).__init__(*args, **kwargs)
try:
t = get_template(template_path)
self.template = t
except:
if settings.TEMPLATE_DEBUG:
raise
self.template = None
def render(self, context):
if not self.template:
return ''
return self.render_template(self.template, context)
class IncludeNode(BaseIncludeNode):
def __init__(self, template_name, *args, **kwargs):
super(IncludeNode, self).__init__(*args, **kwargs)
self.template_name = template_name
def render(self, context):
try:
template_name = self.template_name.resolve(context)
template = get_template(template_name)
return self.render_template(template, context)
except TemplateSyntaxError:
if settings.TEMPLATE_DEBUG:
raise
return ''
except:
return '' # Fail silently for invalid included templates.
def do_block(parser, token):
"""
Define a block that can be overridden by child templates.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'%s' tag takes only one argument" % bits[0])
block_name = bits[1]
# Keep track of the names of BlockNodes found in this template, so we can
# check for duplication.
try:
if block_name in parser.__loaded_blocks:
raise TemplateSyntaxError("'%s' tag with name '%s' appears more than once" % (bits[0], block_name))
parser.__loaded_blocks.append(block_name)
except AttributeError: # parser.__loaded_blocks isn't a list yet
parser.__loaded_blocks = [block_name]
nodelist = parser.parse(('endblock', 'endblock %s' % block_name))
parser.delete_first_token()
return BlockNode(block_name, nodelist)
def do_extends(parser, token):
"""
Signal that this template extends a parent template.
This tag may be used in two ways: ``{% extends "base" %}`` (with quotes)
uses the literal value "base" as the name of the parent template to extend,
or ``{% extends variable %}`` uses the value of ``variable`` as either the
name of the parent template to extend (if it evaluates to a string) or as
    the parent template itself (if it evaluates to a Template object).
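    Examples (template names are illustrative)::

        {% extends "base.html" %}
        {% extends some_variable_holding_a_name_or_template %}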
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'%s' takes one argument" % bits[0])
parent_name, parent_name_expr = None, None
if bits[1][0] in ('"', "'") and bits[1][-1] == bits[1][0]:
parent_name = bits[1][1:-1]
else:
parent_name_expr = parser.compile_filter(bits[1])
nodelist = parser.parse()
if nodelist.get_nodes_by_type(ExtendsNode):
raise TemplateSyntaxError("'%s' cannot appear more than once in the same template" % bits[0])
return ExtendsNode(nodelist, parent_name, parent_name_expr)
def do_include(parser, token):
"""
Loads a template and renders it with the current context. You can pass
additional context using keyword arguments.
Example::
{% include "foo/some_include" %}
{% include "foo/some_include" with bar="BAZZ!" baz="BING!" %}
Use the ``only`` argument to exclude the current context when rendering
the included template::
{% include "foo/some_include" only %}
{% include "foo/some_include" with bar="1" only %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("%r tag takes at least one argument: the name of the template to be included." % bits[0])
options = {}
remaining_bits = bits[2:]
while remaining_bits:
option = remaining_bits.pop(0)
if option in options:
raise TemplateSyntaxError('The %r option was specified more '
'than once.' % option)
if option == 'with':
value = token_kwargs(remaining_bits, parser, support_legacy=False)
if not value:
raise TemplateSyntaxError('"with" in %r tag needs at least '
'one keyword argument.' % bits[0])
elif option == 'only':
value = True
else:
raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
(bits[0], option))
options[option] = value
isolated_context = options.get('only', False)
namemap = options.get('with', {})
path = bits[1]
if path[0] in ('"', "'") and path[-1] == path[0]:
return ConstantIncludeNode(path[1:-1], extra_context=namemap,
isolated_context=isolated_context)
return IncludeNode(parser.compile_filter(bits[1]), extra_context=namemap,
isolated_context=isolated_context)
register.tag('block', do_block)
register.tag('extends', do_extends)
register.tag('include', do_include)
| apache-2.0 |
Lokke/eden | tests/unit_tests/modules/s3/s3gis/GeoRSSLayer.py | 43 | 2220 |
s3gis_tests = load_module("tests.unit_tests.modules.s3.s3gis")
def test_GeoRSSLayer():
    # Use debug to surface errors: without debug, errors are turned into
    # session warnings and the layer is skipped altogether, leaving no
    # data structure to test against.
url = "test://test_GeoRSS"
current.request.utcnow = datetime.datetime.now()
test_utils.clear_table(db, db.gis_cache)
db.gis_cache.insert(
modified_on = datetime.datetime.now(),
source = url
)
db.commit()
current.session.s3.debug = True
s3gis_tests.layer_test(
db,
db.gis_layer_georss,
dict(
name = "Test GeoRSS",
description = "Test GeoRSS layer",
enabled = True,
created_on = datetime.datetime.now(),
modified_on = datetime.datetime.now(),
url = url,
),
"S3.gis.layers_georss",
[
{
"marker_height": 34,
"marker_image": u"gis_marker.image.marker_red.png",
"marker_width": 20,
"name": u"Test GeoRSS",
"url": u"/eden/gis/cache_feed.geojson?cache.source=test://test_GeoRSS"
}
],
session = session,
request = request,
)
test_utils = local_import("test_utils")
s3gis = local_import("s3.s3gis")
def test_no_cached_copy_available():
test_utils.clear_table(db, db.gis_cache)
current.request.utcnow = datetime.datetime.now()
current.session.s3.debug = True
gis = s3gis.GIS()
with s3gis_tests.InsertedRecord(
db,
db.gis_layer_georss,
dict(
name = "Test GeoRSS",
description = "Test GeoRSS layer",
enabled = True,
created_on = datetime.datetime.now(),
modified_on = datetime.datetime.now(),
url = "test://test_GeoRSS",
)
):
with s3gis_tests.ExpectedException(Exception):
gis.show_map(
window = True,
catalogue_toolbar = True,
toolbar = True,
search = True,
catalogue_layers = True,
projection = 900913,
)
| mit |
cloudbase/nova | nova/tests/unit/api/openstack/compute/test_limits.py | 3 | 9591 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests dealing with HTTP rate-limiting.
"""
import mock
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from six.moves import http_client as httplib
from six.moves import StringIO
from nova.api.openstack.compute import limits as limits_v21
from nova.api.openstack.compute import views
from nova.api.openstack import wsgi
import nova.context
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import matchers
class BaseLimitTestSuite(test.NoDBTestCase):
"""Base test suite which provides relevant stubs and time abstraction."""
def setUp(self):
super(BaseLimitTestSuite, self).setUp()
self.time = 0.0
self.absolute_limits = {}
def stub_get_project_quotas(context, project_id, usages=True):
return {k: dict(limit=v)
for k, v in self.absolute_limits.items()}
self.stubs.Set(nova.quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
def _get_time(self):
"""Return the "time" according to this test suite."""
return self.time
class LimitsControllerTestV21(BaseLimitTestSuite):
"""Tests for `limits.LimitsController` class."""
limits_controller = limits_v21.LimitsController
def setUp(self):
"""Run before each test."""
super(LimitsControllerTestV21, self).setUp()
self.controller = wsgi.Resource(self.limits_controller())
self.ctrler = self.limits_controller()
def _get_index_request(self, accept_header="application/json",
tenant_id=None):
"""Helper to set routing arguments."""
request = fakes.HTTPRequest.blank('', version='2.1')
if tenant_id:
request = fakes.HTTPRequest.blank('/?tenant_id=%s' % tenant_id,
version='2.1')
request.accept = accept_header
request.environ["wsgiorg.routing_args"] = (None, {
"action": "index",
"controller": "",
})
context = nova.context.RequestContext('testuser', 'testproject')
request.environ["nova.context"] = context
return request
def test_empty_index_json(self):
# Test getting empty limit details in JSON.
request = self._get_index_request()
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def test_index_json(self):
self._test_index_json()
def test_index_json_by_tenant(self):
self._test_index_json('faketenant')
def _test_index_json(self, tenant_id=None):
# Test getting limit details in JSON.
request = self._get_index_request(tenant_id=tenant_id)
context = request.environ["nova.context"]
if tenant_id is None:
tenant_id = context.project_id
self.absolute_limits = {
'ram': 512,
'instances': 5,
'cores': 21,
'key_pairs': 10,
'floating_ips': 10,
'security_groups': 10,
'security_group_rules': 20,
}
expected = {
"limits": {
"rate": [],
"absolute": {
"maxTotalRAMSize": 512,
"maxTotalInstances": 5,
"maxTotalCores": 21,
"maxTotalKeypairs": 10,
"maxTotalFloatingIps": 10,
"maxSecurityGroups": 10,
"maxSecurityGroupRules": 20,
},
},
}
def _get_project_quotas(context, project_id, usages=True):
return {k: dict(limit=v) for k, v in self.absolute_limits.items()}
with mock.patch('nova.quota.QUOTAS.get_project_quotas') as \
get_project_quotas:
get_project_quotas.side_effect = _get_project_quotas
response = request.get_response(self.controller)
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
get_project_quotas.assert_called_once_with(context, tenant_id,
usages=False)
class FakeHttplibSocket(object):
"""Fake `httplib.HTTPResponse` replacement."""
def __init__(self, response_string):
"""Initialize new `FakeHttplibSocket`."""
self._buffer = StringIO(response_string)
def makefile(self, _mode, _other):
"""Returns the socket's internal buffer."""
return self._buffer
class FakeHttplibConnection(object):
"""Fake `httplib.HTTPConnection`."""
def __init__(self, app, host):
"""Initialize `FakeHttplibConnection`."""
self.app = app
self.host = host
def request(self, method, path, body="", headers=None):
"""Requests made via this connection actually get translated and routed
        into our WSGI app; we then wait for the response and turn it back into
an `httplib.HTTPResponse`.
"""
if not headers:
headers = {}
req = fakes.HTTPRequest.blank(path)
req.method = method
req.headers = headers
req.host = self.host
req.body = encodeutils.safe_encode(body)
resp = str(req.get_response(self.app))
resp = "HTTP/1.0 %s" % resp
sock = FakeHttplibSocket(resp)
self.http_response = httplib.HTTPResponse(sock)
self.http_response.begin()
def getresponse(self):
"""Return our generated response from the request."""
return self.http_response
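# A minimal usage sketch (names are hypothetical):
#     conn = FakeHttplibConnection(wsgi_app, 'localhost')
#     conn.request('GET', '/limits')
#     resp = conn.getresponse()  # an httplib.HTTPResponse built from the WSGI app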
class LimitsViewBuilderTest(test.NoDBTestCase):
def setUp(self):
super(LimitsViewBuilderTest, self).setUp()
self.view_builder = views.limits.ViewBuilder()
self.rate_limits = []
self.absolute_limits = {"metadata_items": 1,
"injected_files": 5,
"injected_file_content_bytes": 5}
def test_build_limits(self):
expected_limits = {"limits": {
"rate": [],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 5}}}
output = self.view_builder.build(self.absolute_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
def test_build_limits_empty_limits(self):
expected_limits = {"limits": {"rate": [],
"absolute": {}}}
abs_limits = {}
output = self.view_builder.build(abs_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
class LimitsPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(LimitsPolicyEnforcementV21, self).setUp()
self.controller = limits_v21.LimitsController()
def test_limits_index_policy_failed(self):
rule_name = "os_compute_api:limits"
self.policy.set_rules({rule_name: "project:non_fake"})
req = fakes.HTTPRequest.blank('')
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, req=req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
class LimitsControllerTestV236(BaseLimitTestSuite):
def setUp(self):
super(LimitsControllerTestV236, self).setUp()
self.controller = limits_v21.LimitsController()
self.req = fakes.HTTPRequest.blank("/?tenant_id=faketenant",
version='2.36')
def test_index_filtered(self):
absolute_limits = {
'ram': 512,
'instances': 5,
'cores': 21,
'key_pairs': 10,
'floating_ips': 10,
'security_groups': 10,
'security_group_rules': 20,
}
def _get_project_quotas(context, project_id, usages=True):
return {k: dict(limit=v) for k, v in absolute_limits.items()}
with mock.patch('nova.quota.QUOTAS.get_project_quotas') as \
get_project_quotas:
get_project_quotas.side_effect = _get_project_quotas
response = self.controller.index(self.req)
expected_response = {
"limits": {
"rate": [],
"absolute": {
"maxTotalRAMSize": 512,
"maxTotalInstances": 5,
"maxTotalCores": 21,
"maxTotalKeypairs": 10,
},
},
}
self.assertEqual(expected_response, response)
| apache-2.0 |
SnappleCap/oh-mainline | vendor/packages/Django/django/core/management/commands/testserver.py | 307 | 2006 |
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.'),
make_option('--addrport', action='store', dest='addrport',
type='string', default='',
help='port number or ipaddr:port to run the server on'),
make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
help='Tells Django to use a IPv6 address.'),
)
help = 'Runs a development server with data from the given fixture(s).'
args = '[fixture ...]'
requires_model_validation = False
def handle(self, *fixture_labels, **options):
from django.core.management import call_command
from django.db import connection
verbosity = int(options.get('verbosity'))
interactive = options.get('interactive')
addrport = options.get('addrport')
# Create a test database.
db_name = connection.creation.create_test_db(verbosity=verbosity, autoclobber=not interactive)
# Import the fixture data into the test database.
call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})
# Run the development server. Turn off auto-reloading because it causes
# a strange error -- it causes this handle() method to be called
# multiple times.
shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
use_threading = connection.features.test_db_allows_multiple_connections
call_command('runserver',
addrport=addrport,
shutdown_message=shutdown_message,
use_reloader=False,
use_ipv6=options['use_ipv6'],
use_threading=use_threading
)
| agpl-3.0 |
Timmenem/micropython | ports/cc3200/tools/update-wipy.py | 67 | 7022 |
#!/usr/bin/env python
"""
The WiPy firmware update script. Transmits the specified firmware file
over FTP, and then resets the WiPy and optionally verifies that software
was correctly updated.
Usage:
./update-wipy.py --file "path_to_mcuimg.bin" --verify
Or:
python update-wipy.py --file "path_to_mcuimg.bin"
"""
import os
import sys
import argparse
import time
import socket
from ftplib import FTP
from telnetlib import Telnet
def print_exception(e):
print ('Exception: {}, on line {}'.format(e, sys.exc_info()[-1].tb_lineno))
def ftp_directory_exists(ftpobj, directory_name):
filelist = []
ftpobj.retrlines('LIST',filelist.append)
for f in filelist:
if f.split()[-1] == directory_name:
return True
return False
def transfer_file(args):
with FTP(args.ip, timeout=20) as ftp:
print ('FTP connection established')
if '230' in ftp.login(args.user, args.password):
print ('Login successful')
if '250' in ftp.cwd('/flash'):
if not ftp_directory_exists(ftp, 'sys'):
print ('/flash/sys directory does not exist')
if not '550' in ftp.mkd('sys'):
print ('/flash/sys directory created')
else:
print ('Error: cannot create /flash/sys directory')
return False
if '250' in ftp.cwd('sys'):
print ("Entered '/flash/sys' directory")
with open(args.file, "rb") as fwfile:
print ('Firmware image found, initiating transfer...')
if '226' in ftp.storbinary("STOR " + 'mcuimg.bin', fwfile, 512):
print ('File transfer complete')
return True
else:
print ('Error: file transfer failed')
else:
print ('Error: cannot enter /flash/sys directory')
else:
print ('Error: cannot enter /flash directory')
else:
print ('Error: ftp login failed')
return False
def reset_board(args):
success = False
try:
tn = Telnet(args.ip, timeout=5)
print("Connected via Telnet, trying to login now")
if b'Login as:' in tn.read_until(b"Login as:", timeout=5):
tn.write(bytes(args.user, 'ascii') + b"\r\n")
if b'Password:' in tn.read_until(b"Password:", timeout=5):
# needed because of internal implementation details of the WiPy's telnet server
time.sleep(0.2)
tn.write(bytes(args.password, 'ascii') + b"\r\n")
if b'Type "help()" for more information.' in tn.read_until(b'Type "help()" for more information.', timeout=5):
print("Telnet login succeeded")
tn.write(b'\r\x03\x03') # ctrl-C twice: interrupt any running program
time.sleep(1)
tn.write(b'\r\x02') # ctrl-B: enter friendly REPL
if b'Type "help()" for more information.' in tn.read_until(b'Type "help()" for more information.', timeout=5):
tn.write(b"import machine\r\n")
tn.write(b"machine.reset()\r\n")
time.sleep(2)
print("Reset performed")
success = True
else:
print("Error: cannot enter friendly REPL")
else:
print("Error: telnet login failed")
except Exception as e:
print_exception(e)
finally:
try:
tn.close()
except Exception as e:
pass
return success
def verify_update(args):
success = False
firmware_tag = ''
    def find_tag(tag):
if tag in firmware_tag:
print("Verification passed")
return True
else:
print("Error: verification failed, the git tag doesn't match")
return False
retries = 0
while True:
try:
# Specify a longer time out value here because the board has just been
# reset and the wireless connection might not be fully established yet
tn = Telnet(args.ip, timeout=10)
print("Connected via telnet again, lets check the git tag")
break
except socket.timeout:
if retries < 5:
print("Timeout while connecting via telnet, retrying...")
retries += 1
else:
print('Error: Telnet connection timed out!')
return False
try:
        firmware_tag = tn.read_until(b'with CC3200')
        # rstrip() strips a character set, not a suffix, so build the path from the directory
        tag_file_path = os.path.join(os.path.dirname(args.file), 'genhdr', 'mpversion.h')
if args.tag is not None:
success = find_tag(bytes(args.tag, 'ascii'))
else:
with open(tag_file_path) as tag_file:
for line in tag_file:
bline = bytes(line, 'ascii')
if b'MICROPY_GIT_HASH' in bline:
bline = bline.lstrip(b'#define MICROPY_GIT_HASH ').replace(b'"', b'').replace(b'\r', b'').replace(b'\n', b'')
success = find_tag(bline)
break
except Exception as e:
print_exception(e)
finally:
try:
tn.close()
except Exception as e:
pass
return success
def main():
cmd_parser = argparse.ArgumentParser(description='Update the WiPy firmware with the specified image file')
cmd_parser.add_argument('-f', '--file', default=None, help='the path of the firmware file')
cmd_parser.add_argument('-u', '--user', default='micro', help='the username')
cmd_parser.add_argument('-p', '--password', default='python', help='the login password')
cmd_parser.add_argument('--ip', default='192.168.1.1', help='the ip address of the WiPy')
cmd_parser.add_argument('--verify', action='store_true', help='verify that the update succeeded')
cmd_parser.add_argument('-t', '--tag', default=None, help='git tag of the firmware image')
args = cmd_parser.parse_args()
result = 1
try:
if args.file is None:
raise ValueError('the image file path must be specified')
if transfer_file(args):
if reset_board(args):
if args.verify:
print ('Waiting for the WiFi connection to come up again...')
# this time is to allow the system's wireless network card to
# connect to the WiPy again.
time.sleep(5)
if verify_update(args):
result = 0
else:
result = 0
except Exception as e:
print_exception(e)
finally:
sys.exit(result)
if __name__ == "__main__":
main()
| mit |
linsalrob/EdwardsLab | mongodb/load_models.py | 1 | 1140 |
"""
Create a mongo database if it doesn't exist and load a bunch of data into it.
We need a directory with one or more JSON files in it. We look for JSON on the end of the filename.
e.g. python load_models.py -d /data/Genotype-Phenotype-Modeling/models/Citrobacter/Citrobacter/models/ -n fba_models -c citrobacter
"""
import os
import sys
import argparse
import json
from pymongo import MongoClient
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Load some data from a directory of JSON files")
parser.add_argument('-d', help='Directory of files', required=True)
parser.add_argument('-n', help='Database name', required=True)
parser.add_argument('-c', help='Collection name', required=True)
args = parser.parse_args()
client = MongoClient()
db = client[args.n]
coll = db[args.c]
for f in os.listdir(args.d):
if f.lower().endswith('.json'):
sys.stderr.write("Loading file " + f + "\n")
text = json.load(open(os.path.join(args.d, f)))
obj = {'file_name' : os.path.join(args.d, f), 'content' : text}
coll.insert(obj)
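            # Each inserted document then has the shape (illustrative values):
            #   {'file_name': '/data/models/model1.json', 'content': <parsed JSON>}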
| mit |
neuhofmo/RecBlast | RecBlastUtils.py | 1 | 10025 |
#! /usr/bin/env python2
# A set of tools, functions, aliases and more used in RecBlast.
import os
import tarfile
import zipfile
from time import strftime, sleep
import re
import subprocess
from Bio import Entrez
import shutil
Entrez.email = "recblast@gmail.com"
Entrez.tool = "RecBlast"
TEMP_FILES_PATH = os.getcwd()
def prepare_files(items, file_name, user_id, files_path=TEMP_FILES_PATH):
"""Receives a list of items and a file to write them to, then writes them to file and returns the file path."""
full_path = join_folder(files_path, "_".join([user_id, file_name]))
# items = list(set(items)) # make the list unique # unnecessary
with open(full_path, 'w') as f:
for item in items:
f.write("{}\n".format(item)) # improved efficiency
return full_path
def file_to_string(file_name):
"""Reads a file (file_name) and returns the text in it as a string."""
with open(file_name, 'r') as f:
text = f.read()
# delete original file
os.remove(file_name)
return text
def remove_commas(file_name):
"""Replaces commas with newlines in a file."""
with open(file_name, 'r') as f:
text = f.read()
text = replace(text, ',', '\n')
with open(file_name, 'w') as f: # now writing
f.write(text)
return file_name
# def zip_results(fasta_output_path, csv_rbh_output_filename, csv_strict_output_filename, csv_ns_output_filename,
# output_path):
def zip_results(fasta_output_path, zip_list, output_path):
"""
    Receives a folder containing fasta sequences and a list of csv files, and adds them all to a zip archive.
    :param fasta_output_path: folder containing the fasta output files
    :param zip_list: list of csv file paths to add to the archive
    :param output_path: destination folder for the output.zip archive
    :return: the path of the created zip file
"""
zip_file = join_folder(output_path, "output.zip")
fastas = [join_folder(fasta_output_path, x) for x in os.listdir(fasta_output_path)]
bname = os.path.basename # for efficiency
with zipfile.ZipFile(zip_file, mode='w') as zf:
# adding all fasta files
for fasta in fastas:
zf.write(fasta, bname(fasta))
# zf.write(csv_file_path, os.path.basename(csv_file_path)) # add csv file
# add csv files
for f_to_zip in zip_list:
zf.write(f_to_zip, bname(f_to_zip))
# zf.write(csv_rbh_output_filename, os.path.basename(csv_rbh_output_filename)) # add csv file
# zf.write(csv_strict_output_filename, os.path.basename(csv_strict_output_filename)) # add csv file
# zf.write(csv_ns_output_filename, os.path.basename(csv_ns_output_filename)) # add csv file
return zip_file
# debugging function
def debug_s(debug_string, to_debug):
"""
Receives a string and prints it, with a timestamp.
:param debug_string: a string to print
:param to_debug: boolean flag: True means print, False - ignore.
:return:
"""
if to_debug:
print "DEBUG {0}: {1}".format(strftime('%H:%M:%S'), debug_string)
def create_folder_if_needed(path):
"""
Receives a path and creates a folder when needed (if it doesn't already exist).
"""
if os.path.exists(path):
print "{} dir exists".format(path)
else:
print "{} dir does not exist. Creating dir.".format(path)
os.mkdir(path)
def file_len(fname):
"""Return the file length in lines."""
with open(fname) as f:
for i, l in enumerate(f):
pass
return i + 1
def targz_folder(archive_name, folder):
"""
Returns True after
:param archive_name:
:param folder:
:return:
"""
with tarfile.open(archive_name, "w:gz") as tar:
tar.add(folder, arcname=os.path.basename(folder))
return True
def cleanup(path, storage_folder, run_id):
"""
Performs tar and gzip on sets of files produced by the program.
Then deletes the files and folders.
:param path: # the run_folder
:param storage_folder: # the main folder, in which the entire run_folder will be stored
:param run_id: # the folder containing the first blast results
:return:
"""
# compress all files in path:
# fasta_path
path_archive = join_folder(storage_folder, "{}.all.tar.gz".format(run_id))
if targz_folder(path_archive, path): # compress run_folder
shutil.rmtree(path) # delete run folder
return True
def write_blast_run_script(command_line, write_folder):
"""Writing a blast run script, and giving it run permissions."""
# script_path = "/tmp/blastp_run.sh" # default script location
script_path = join_folder(write_folder, "blastp_run.sh") # script location
with open(script_path, 'w') as script:
# script.write("#! /bin/tcsh\n")
script.write("#! /bin/bash\n")
script.write("# The script is designed to run the following blastp command from RecBlast\n")
script.write(command_line)
# run permissions for the script:
os.chmod(script_path, 0751)
return script_path
def write_sort_command_script(filename_to_sort, sorted_filename, write_folder):
"""Writing a sort uniq script to edit the gene csv file."""
# script_path = "/tmp/sort_script.sh" # default script location
script_path = join_folder(write_folder, "sort_script.sh") # script location
with open(script_path, 'w') as script:
# script.write("#! /bin/tcsh\n")
script.write("#! /bin/bash\n")
script.write("# The script is designed to run sort, uniq command from RecBlast\n")
command_line = "cat {0} | sort | uniq > {1}.temp; " \
"echo 'gene_id,gene_name,uniprot_id' > {1}; cat {1}.temp >> {1}; " \
"rm {1}.temp\n".format(filename_to_sort, sorted_filename)
# changed to make sure the title only comes after the genes
script.write(command_line)
# run permissions for the script:
os.chmod(script_path, 0751)
return script_path
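# For illustration (hypothetical paths), write_sort_command_script('genes.csv',
# 'genes.uniq.csv', '/tmp') writes a script whose core command is:
#   cat genes.csv | sort | uniq > genes.uniq.csv.temp;
#   echo 'gene_id,gene_name,uniprot_id' > genes.uniq.csv;
#   cat genes.uniq.csv.temp >> genes.uniq.csv; rm genes.uniq.csv.temp
# The temp-file shuffle ensures the header line ends up before the sorted genes.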
def merge_two_dicts(x, y):
"""Given two dicts, merge them into a new dict as a shallow copy."""
z = x.copy()
z.update(y)
return z
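# e.g. merge_two_dicts({'a': 1, 'b': 2}, {'b': 3}) -> {'a': 1, 'b': 3}; values from y win.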
def is_number(s):
"""The function determines if a string is a number or a text. Returns True if it's a number. """
try:
int(s)
return True
except ValueError:
return False
def blastdb_exit():
"""Exiting if we can't find the $BLASTDB on the local machine"""
print("$BLASTDB was not found! Please set the blast DB path to the right location.")
print("Make sure blast+ is installed correctly.")
exit(1)
def exists_not_empty(path):
"""Receives a file path and checks if it exists and not empty."""
if os.path.exists(path) and os.stat(path).st_size > 0:
return True
else:
return False
def subset_db(tax_id, gi_file_path, db_path, big_db, run_anyway, DEBUG, debug, attempt_no=0):
"""
Subsets a big blast database into a smaller one based on tax_id.
The function connects to entrez and retrieves gi identifiers of sequences with the same tax_id.
:param tax_id: The tax_id (string)
:param gi_file_path: file path of the gi_list file we are creating
:param db_path: the new db path
:param big_db: we are about to subset
:param run_anyway: run on NR if unable to subset
:param attempt_no: counter for the attempts in connecting to Entrez (attempts to connect up to 10 times).
:param DEBUG: A boolean flag: True for debug prints, False for quiet run.
:param debug: A function call to provide debug prints.
:return:
"""
# connecting to ENTREZ protein DB
try:
handle = Entrez.esearch(db="protein", term="txid{}[ORGN]".format(tax_id), retmode="xml", retmax=10000000)
record = Entrez.read(handle)
except Exception, e: # DB connection exception
print "Error connecting to server, trying again..."
print "Error: {}".format(e)
debug("Error connecting to server, trying again...\n")
# sleeping in case it's a temporary database problem
sleep_period = 180
print "restarting attempt in {} seconds...".format(sleep_period)
sleep(sleep_period)
# counting the number of attempts to connect.
attempt_no += 1
if attempt_no >= 10: # If too many:
print "Tried connecting to Entrez DB more than 10 times. Check your connection or try again later."
exit(1)
# try again (recursive until max)
return subset_db(tax_id, gi_file_path, db_path, big_db, run_anyway, DEBUG, debug, attempt_no)
assert int(record["Count"]) == len(record["IdList"]), "Did not fetch all sequences!" # make sure we got it all...
# writing a gi list file
with open(gi_file_path, 'w') as gi_file:
gi_file.write("\n".join(record["IdList"]) + "\n")
# the new target database path
create_folder_if_needed(os.path.join(db_path, tax_id))
target_db = os.path.join(db_path, tax_id, "db")
aliastool_command = ["blastdb_aliastool", "-gilist", gi_file_path, "-db", big_db, "-dbtype", "prot", "-out",
target_db] # TODO: test that blastdb_aliastool works for the user
try:
subprocess.check_call(aliastool_command)
print("Created DB subset from nr protein for {}".format(tax_id))
return target_db
except subprocess.CalledProcessError:
print("Problem with creating DB for tax_id {} from nr.".format(tax_id))
if run_anyway:
print("Running with the heavy nr option. Do some stretches, it might be a long run.")
return big_db
print("Aborting.\n"
"If you want to run the program anyway against the entire nr "
"(which is significantly slower than the default run, please use the --run_even_if_no_db_found flag.")
exit(1)
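# A hedged usage sketch (all paths and the tax_id are hypothetical). Note that
# Entrez.email should be set before calling, since the function queries the
# NCBI servers:
#
#   Entrez.email = "user@example.org"
#   db = subset_db("9606", "/tmp/9606.gi_list", "/data/blastdb", "nr",
#                  run_anyway=False, DEBUG=True, debug=debug)
#   # On success db is "/data/blastdb/9606/db"; with run_anyway=True the
#   # full "nr" path is returned when the subset cannot be built.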
# Local aliases to avoid repeated attribute lookups in hot loops
strip = str.strip
split = str.split
replace = str.replace
re_search = re.search
re_sub = re.sub
re_match = re.match
upper = str.upper
lower = str.lower
join_folder = os.path.join
| mit |
andrei-karalionak/ggrc-core | src/ggrc_workflows/migrations/versions/20160104135243_13e52f6a9deb_add_finished_verified_dates_to_cycle_.py | 7 | 1120 | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Add finished/verified dates to cycle tasks
Revision ID: 13e52f6a9deb
Revises: 18bdb0671010
Create Date: 2016-01-04 13:52:43.017848
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '13e52f6a9deb'
down_revision = '18bdb0671010'
def upgrade():
op.add_column('cycle_task_group_object_tasks', sa.Column('finished_date', sa.DateTime(), nullable=True))
op.add_column('cycle_task_group_object_tasks', sa.Column('verified_date', sa.DateTime(), nullable=True))
op.execute("""
UPDATE cycle_task_group_object_tasks
SET finished_date = updated_at
WHERE status = "Finished"
""")
op.execute("""
UPDATE cycle_task_group_object_tasks
SET verified_date = updated_at, finished_date = updated_at
WHERE status = "Verified"
""")
def downgrade():
op.drop_column('cycle_task_group_object_tasks', 'verified_date')
op.drop_column('cycle_task_group_object_tasks', 'finished_date')
| apache-2.0 |
seanfisk/lsf-ibutils | docs/source/conf.py | 1 | 8703 | # -*- coding: utf-8 -*-
#
# This file is based upon the file generated by sphinx-quickstart. However,
# where sphinx-quickstart hardcodes the values you input directly into this
# file, this file has been changed to pull them from the module's metadata
# module.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# Import project metadata
from lsf_ibutils import metadata
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# show todos
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = metadata.project
copyright = metadata.copyright
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = metadata.version
# The full version, including alpha/beta/rc tags.
release = metadata.version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = metadata.project_no_spaces + 'doc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index', metadata.project_no_spaces + '.tex',
metadata.project + ' Documentation', metadata.authors_string,
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', metadata.package, metadata.project + ' Documentation',
metadata.authors_string, 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', metadata.project_no_spaces,
metadata.project + ' Documentation', metadata.authors_string,
metadata.project_no_spaces, metadata.description, 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
# Extra local configuration. This is useful for placing the class description
# in the class docstring and the __init__ parameter documentation in the
# __init__ docstring. See
# <http://sphinx-doc.org/ext/autodoc.html#confval-autoclass_content> for more
# information.
autoclass_content = 'both'
| mit |
chitr/neutron | neutron/db/migration/alembic_migrations/versions/1955efc66455_weight_scheduler.py | 47 | 1036 | # Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""weight_scheduler
Revision ID: 1955efc66455
Revises: 35a0f3365720
Create Date: 2015-03-12 22:11:37.607390
"""
# revision identifiers, used by Alembic.
revision = '1955efc66455'
down_revision = '35a0f3365720'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('agents',
sa.Column('load', sa.Integer(),
server_default='0', nullable=False))
| apache-2.0 |
lkl/lkl-linux-2.6 | tools/perf/scripts/python/syscall-counts-by-pid.py | 944 | 1744 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
usage = "perf trace -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
pass
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
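# autodict (imported from Core) auto-vivifies nested keys, so the first
# access to a new (comm, pid, id) triple yields an empty autodict; the
# "+= 1" on it raises TypeError, which the handler above turns into the
# initial count of 1.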
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38d %10d\n" % (id, val),
| gpl-2.0 |
vmpstr/trace-viewer | third_party/closure_linter/closure_linter/common/simplefileflags.py | 285 | 5107 | #!/usr/bin/env python
#
# Copyright 2008 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Determines the list of files to be checked from command line arguments."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import glob
import os
import re
import gflags as flags
FLAGS = flags.FLAGS
flags.DEFINE_multistring(
'recurse',
None,
'Recurse in to the subdirectories of the given path',
short_name='r')
flags.DEFINE_list(
'exclude_directories',
    ('_demos',),
'Exclude the specified directories (only applicable along with -r or '
'--presubmit)',
short_name='e')
flags.DEFINE_list(
'exclude_files',
    ('deps.js',),
'Exclude the specified files',
short_name='x')
def MatchesSuffixes(filename, suffixes):
"""Returns whether the given filename matches one of the given suffixes.
Args:
filename: Filename to check.
suffixes: Sequence of suffixes to check.
Returns:
Whether the given filename matches one of the given suffixes.
"""
suffix = filename[filename.rfind('.'):]
return suffix in suffixes
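# Note a quirk of the suffix computation above: for a filename with no dot,
# rfind('.') returns -1, so the "suffix" is just the last character (e.g.
# 'Makefile' -> 'e'), not the whole name.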
def _GetUserSpecifiedFiles(argv, suffixes):
"""Returns files to be linted, specified directly on the command line.
Can handle the '*' wildcard in filenames, but no other wildcards.
Args:
argv: Sequence of command line arguments. The second and following arguments
are assumed to be files that should be linted.
suffixes: Expected suffixes for the file type being checked.
Returns:
A sequence of files to be linted.
"""
files = argv[1:] or []
all_files = []
lint_files = []
# Perform any necessary globs.
for f in files:
if f.find('*') != -1:
for result in glob.glob(f):
all_files.append(result)
else:
all_files.append(f)
for f in all_files:
if MatchesSuffixes(f, suffixes):
lint_files.append(f)
return lint_files
def _GetRecursiveFiles(suffixes):
"""Returns files to be checked specified by the --recurse flag.
Args:
suffixes: Expected suffixes for the file type being checked.
Returns:
A list of files to be checked.
"""
lint_files = []
# Perform any request recursion
if FLAGS.recurse:
for start in FLAGS.recurse:
for root, subdirs, files in os.walk(start):
for f in files:
if MatchesSuffixes(f, suffixes):
lint_files.append(os.path.join(root, f))
return lint_files
def GetAllSpecifiedFiles(argv, suffixes):
"""Returns all files specified by the user on the commandline.
Args:
argv: Sequence of command line arguments. The second and following arguments
are assumed to be files that should be linted.
suffixes: Expected suffixes for the file type
Returns:
A list of all files specified directly or indirectly (via flags) on the
command line by the user.
"""
files = _GetUserSpecifiedFiles(argv, suffixes)
if FLAGS.recurse:
files += _GetRecursiveFiles(suffixes)
return FilterFiles(files)
def FilterFiles(files):
"""Filters the list of files to be linted be removing any excluded files.
Filters out files excluded using --exclude_files and --exclude_directories.
Args:
files: Sequence of files that needs filtering.
Returns:
Filtered list of files to be linted.
"""
num_files = len(files)
ignore_dirs_regexs = []
for ignore in FLAGS.exclude_directories:
ignore_dirs_regexs.append(re.compile(r'(^|[\\/])%s[\\/]' % ignore))
result_files = []
for f in files:
add_file = True
for exclude in FLAGS.exclude_files:
if f.endswith('/' + exclude) or f == exclude:
add_file = False
break
for ignore in ignore_dirs_regexs:
if ignore.search(f):
# Break out of ignore loop so we don't add to
# filtered files.
add_file = False
break
if add_file:
# Convert everything to absolute paths so we can easily remove duplicates
# using a set.
result_files.append(os.path.abspath(f))
skipped = num_files - len(result_files)
if skipped:
print 'Skipping %d file(s).' % skipped
return set(result_files)
def GetFileList(argv, file_type, suffixes):
"""Parse the flags and return the list of files to check.
Args:
argv: Sequence of command line arguments.
suffixes: Sequence of acceptable suffixes for the file type.
Returns:
The list of files to check.
"""
return sorted(GetAllSpecifiedFiles(argv, suffixes))
def IsEmptyArgumentList(argv):
return not (len(argv[1:]) or FLAGS.recurse)
| bsd-3-clause |
bettiolo/phantomjs | src/breakpad/src/tools/gyp/pylib/gyp/__init__.py | 137 | 17502 | #!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
# Default debug modes for GYP
debug = {}
# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
def DebugOutput(mode, message):
if mode in gyp.debug.keys():
print "%s: %s" % (mode.upper(), message)
def FindBuildFiles():
extension = '.gyp'
files = os.listdir(os.getcwd())
build_files = []
for file in files:
if file[-len(extension):] == extension:
build_files.append(file)
return build_files
def Load(build_files, format, default_variables={},
includes=[], depth='.', params={}, check=False):
"""
Loads one or more specified build files.
default_variables and includes will be copied before use.
Returns the generator for the specified format and the
data returned by loading the specified build files.
"""
default_variables = copy.copy(default_variables)
# Default variables provided by this program and its modules should be
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
# avoiding collisions with user and automatic variables.
default_variables['GENERATOR'] = format
generator_name = 'gyp.generator.' + format
# These parameters are passed in order (as opposed to by key)
# because ActivePython cannot handle key parameters to __import__.
generator = __import__(generator_name, globals(), locals(), generator_name)
default_variables.update(generator.generator_default_variables)
# Give the generator the opportunity to set additional variables based on
# the params it will receive in the output phase.
if getattr(generator, 'CalculateVariables', None):
generator.CalculateVariables(default_variables, params)
# Fetch the generator specific info that gets fed to input, we use getattr
# so we can default things and the generators only have to provide what
# they need.
generator_input_info = {
'generator_wants_absolute_build_file_paths':
getattr(generator, 'generator_wants_absolute_build_file_paths', False),
'generator_handles_variants':
getattr(generator, 'generator_handles_variants', False),
'non_configuration_keys':
getattr(generator, 'generator_additional_non_configuration_keys', []),
'path_sections':
getattr(generator, 'generator_additional_path_sections', []),
'extra_sources_for_rules':
getattr(generator, 'generator_extra_sources_for_rules', []),
'generator_supports_multiple_toolsets':
getattr(generator, 'generator_supports_multiple_toolsets', False),
}
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check)
return [generator] + result
def NameValueListToDict(name_value_list):
"""
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
of the pairs. If a string is simply NAME, then the value in the dictionary
is set to True. If VALUE can be converted to an integer, it is.
"""
result = { }
for item in name_value_list:
tokens = item.split('=', 1)
if len(tokens) == 2:
# If we can make it an int, use that, otherwise, use the string.
try:
token_value = int(tokens[1])
except ValueError:
token_value = tokens[1]
# Set the variable to the supplied value.
result[tokens[0]] = token_value
else:
# No value supplied, treat it as a boolean and set it.
result[tokens[0]] = True
return result
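# Illustrative example (inputs are hypothetical):
#
#   NameValueListToDict(['OS=mac', 'jobs=4', 'official'])
#   -> {'OS': 'mac', 'jobs': 4, 'official': True}
#
# Note that 'jobs=4' produces the integer 4, not the string '4'.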
def ShlexEnv(env_name):
flags = os.environ.get(env_name, [])
if flags:
flags = shlex.split(flags)
return flags
def FormatOpt(opt, value):
if opt.startswith('--'):
return '%s=%s' % (opt, value)
return opt + value
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
"""Regenerate a list of command line flags, for an option of action='append'.
The |env_name|, if given, is checked in the environment and used to generate
an initial list of options, then the options that were specified on the
command line (given in |values|) are appended. This matches the handling of
environment variables and command line flags where command line flags override
the environment, while not requiring the environment to be set when the flags
are used again.
"""
flags = []
if options.use_environment and env_name:
for flag_value in ShlexEnv(env_name):
flags.append(FormatOpt(flag, predicate(flag_value)))
if values:
for flag_value in values:
flags.append(FormatOpt(flag, predicate(flag_value)))
return flags
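# Example (hypothetical values): with GYP_DEFINES='OS=mac' in the environment,
# -Dfoo=1 given on the command line, and options.use_environment enabled,
#
#   RegenerateAppendFlag('-D', ['foo=1'], lambda v: v, 'GYP_DEFINES', options)
#
# returns ['-DOS=mac', '-Dfoo=1'] -- environment-derived flags come first so
# that explicit command-line values win when the flags are parsed again.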
def RegenerateFlags(options):
"""Given a parsed options object, and taking the environment variables into
account, returns a list of flags that should regenerate an equivalent options
object (even in the absence of the environment variables.)
Any path options will be normalized relative to depth.
The format flag is not included, as it is assumed the calling generator will
set that as appropriate.
"""
def FixPath(path):
path = gyp.common.FixIfRelativePath(path, options.depth)
if not path:
return os.path.curdir
return path
def Noop(value):
return value
# We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment']
for name, metadata in options._regeneration_metadata.iteritems():
opt = metadata['opt']
value = getattr(options, name)
value_predicate = metadata['type'] == 'path' and FixPath or Noop
action = metadata['action']
env_name = metadata['env_name']
if action == 'append':
flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
env_name, options))
elif action in ('store', None): # None is a synonym for 'store'.
if value:
flags.append(FormatOpt(opt, value_predicate(value)))
elif options.use_environment and env_name and os.environ.get(env_name):
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
elif action in ('store_true', 'store_false'):
if ((action == 'store_true' and value) or
(action == 'store_false' and not value)):
flags.append(opt)
elif options.use_environment and env_name:
print >>sys.stderr, ('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt,
env_name))
else:
print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt))
return flags
class RegeneratableOptionParser(optparse.OptionParser):
def __init__(self):
self.__regeneratable_options = {}
optparse.OptionParser.__init__(self)
def add_option(self, *args, **kw):
"""Add an option to the parser.
This accepts the same arguments as OptionParser.add_option, plus the
following:
regenerate: can be set to False to prevent this option from being included
in regeneration.
env_name: name of environment variable that additional values for this
option come from.
type: adds type='path', to tell the regenerator that the values of
this option need to be made relative to options.depth
"""
env_name = kw.pop('env_name', None)
if 'dest' in kw and kw.pop('regenerate', True):
dest = kw['dest']
# The path type is needed for regenerating, for optparse we can just treat
# it as a string.
type = kw.get('type')
if type == 'path':
kw['type'] = 'string'
self.__regeneratable_options[dest] = {
'action': kw.get('action'),
'type': type,
'env_name': env_name,
'opt': args[0],
}
optparse.OptionParser.add_option(self, *args, **kw)
def parse_args(self, *args):
values, args = optparse.OptionParser.parse_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options
return values, args
def main(args):
my_name = os.path.basename(sys.argv[0])
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
parser.add_option('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
parser.add_option('--msvs-version', dest='msvs_version',
regenerate=False,
help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
parser.add_option('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables" '
'and "general"')
parser.add_option('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
parser.add_option('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR')
parser.add_option('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files')
# We read a few things from ~/.gyp, so set up a var for that.
home_vars = ['HOME']
if sys.platform in ('cygwin', 'win32'):
home_vars.append('USERPROFILE')
home = None
for home_var in home_vars:
home = os.getenv(home_var)
if home != None:
break
home_dot_gyp = None
if home != None:
home_dot_gyp = os.path.join(home, '.gyp')
if not os.path.exists(home_dot_gyp):
home_dot_gyp = None
# TODO(thomasvl): add support for ~/.gyp/defaults
(options, build_files_arg) = parser.parse_args(args)
build_files = build_files_arg
if not options.formats:
# If no format was given on the command line, then check the env variable.
generate_formats = []
if options.use_environment:
generate_formats = os.environ.get('GYP_GENERATORS', [])
if generate_formats:
generate_formats = re.split('[\s,]', generate_formats)
if generate_formats:
options.formats = generate_formats
else:
# Nothing in the variable, default based on platform.
options.formats = [ {'darwin': 'xcode',
'win32': 'msvs',
'cygwin': 'msvs',
'freebsd7': 'make',
'freebsd8': 'make',
'linux2': 'scons',}[sys.platform] ]
if not options.generator_output and options.use_environment:
g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
if g_o:
options.generator_output = g_o
for mode in options.debug:
gyp.debug[mode] = 1
# Do an extra check to avoid work when we're not debugging.
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, 'running with these options:')
for (option, value) in options.__dict__.items():
if option[0] == '_':
continue
if isinstance(value, basestring):
DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
else:
DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
if not build_files:
build_files = FindBuildFiles()
if not build_files:
print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
(my_name, my_name)
return 1
# TODO(mark): Chromium-specific hack!
# For Chromium, the gyp "depth" variable should always be a relative path
# to Chromium's top-level "src" directory. If no depth variable was set
# on the command line, try to find a "src" directory by looking at the
# absolute path to each build file's directory. The first "src" component
# found will be treated as though it were the path used for --depth.
if not options.depth:
for build_file in build_files:
build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components)
for index in xrange(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components)
break
del build_file_dir_components[index]
# If the inner loop found something, break without advancing to another
# build file.
if options.depth:
break
if not options.depth:
    raise Exception(
        'Could not automatically locate src directory. This is a '
        'temporary Chromium feature that will be removed. Use '
        '--depth as a workaround.')
# -D on the command line sets variable defaults - D isn't just for define,
# it's for default. Perhaps there should be a way to force (-F?) a
# variable's value so that it can't be overridden by anything else.
cmdline_default_variables = {}
defines = []
if options.use_environment:
defines += ShlexEnv('GYP_DEFINES')
if options.defines:
defines += options.defines
cmdline_default_variables = NameValueListToDict(defines)
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL,
"cmdline_default_variables: %s" % cmdline_default_variables)
# Set up includes.
includes = []
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
# .gyp file that's loaded, before anything else is included.
if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include):
includes.append(default_include)
# Command-line --include files come after the default include.
if options.includes:
includes.extend(options.includes)
# Generator flags should be prefixed with the target generator since they
# are global across all generator runs.
gen_flags = []
if options.use_environment:
gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
if options.generator_flags:
gen_flags += options.generator_flags
generator_flags = NameValueListToDict(gen_flags)
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
# TODO: Remove this and the option after we've gotten folks to move to the
# generator flag.
if options.msvs_version:
print >>sys.stderr, \
'DEPRECATED: Use generator flag (-G msvs_version=' + \
options.msvs_version + ') instead of --msvs-version=' + \
options.msvs_version
generator_flags['msvs_version'] = options.msvs_version
# Generate all requested formats (use a set in case we got one format request
# twice)
for format in set(options.formats):
params = {'options': options,
'build_files': build_files,
'generator_flags': generator_flags,
'cwd': os.getcwd(),
'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp}
# Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(build_files, format,
cmdline_default_variables,
includes, options.depth,
params, options.check)
# TODO(mark): Pass |data| for now because the generator needs a list of
# build files that came in. In the future, maybe it should just accept
# a list, and not the whole data dict.
# NOTE: flat_list is the flattened dependency graph specifying the order
# that targets may be built. Build systems that operate serially or that
# need to have dependencies defined before dependents reference them should
# generate targets in the order specified in flat_list.
generator.GenerateOutput(flat_list, targets, data, params)
# Done
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
XperiaSTE/android_external_bluetooth_bluez | test/bluezutils.py | 90 | 1567 | import dbus
SERVICE_NAME = "org.bluez"
ADAPTER_INTERFACE = SERVICE_NAME + ".Adapter1"
DEVICE_INTERFACE = SERVICE_NAME + ".Device1"
def get_managed_objects():
bus = dbus.SystemBus()
manager = dbus.Interface(bus.get_object("org.bluez", "/"),
"org.freedesktop.DBus.ObjectManager")
return manager.GetManagedObjects()
def find_adapter(pattern=None):
return find_adapter_in_objects(get_managed_objects(), pattern)
def find_adapter_in_objects(objects, pattern=None):
bus = dbus.SystemBus()
for path, ifaces in objects.iteritems():
adapter = ifaces.get(ADAPTER_INTERFACE)
if adapter is None:
continue
if not pattern or pattern == adapter["Address"] or \
path.endswith(pattern):
obj = bus.get_object(SERVICE_NAME, path)
return dbus.Interface(obj, ADAPTER_INTERFACE)
raise Exception("Bluetooth adapter not found")
def find_device(device_address, adapter_pattern=None):
return find_device_in_objects(get_managed_objects(), device_address,
adapter_pattern)
def find_device_in_objects(objects, device_address, adapter_pattern=None):
bus = dbus.SystemBus()
path_prefix = ""
if adapter_pattern:
adapter = find_adapter_in_objects(objects, adapter_pattern)
path_prefix = adapter.object_path
for path, ifaces in objects.iteritems():
device = ifaces.get(DEVICE_INTERFACE)
if device is None:
continue
if (device["Address"] == device_address and
path.startswith(path_prefix)):
obj = bus.get_object(SERVICE_NAME, path)
return dbus.Interface(obj, DEVICE_INTERFACE)
raise Exception("Bluetooth device not found")
| gpl-2.0 |
TeachAtTUM/edx-platform | lms/djangoapps/courseware/access_response.py | 17 | 4973 | """
This file contains all the classes used by has_access for error handling
"""
from django.utils.translation import ugettext as _
from xmodule.course_metadata_utils import DEFAULT_START_DATE
class AccessResponse(object):
"""Class that represents a response from a has_access permission check."""
def __init__(self, has_access, error_code=None, developer_message=None, user_message=None):
"""
Creates an AccessResponse object.
Arguments:
has_access (bool): if the user is granted access or not
error_code (String): optional - default is None. Unique identifier
for the specific type of error
developer_message (String): optional - default is None. Message
to show the developer
user_message (String): optional - default is None. Message to
show the user
"""
self.has_access = has_access
self.error_code = error_code
self.developer_message = developer_message
self.user_message = user_message
if has_access:
assert error_code is None
def __nonzero__(self):
"""
Overrides bool().
Allows for truth value testing of AccessResponse objects, so callers
who do not need the specific error information can check if access
is granted.
Returns:
bool: whether or not access is granted
"""
return self.has_access
def to_json(self):
"""
Creates a serializable JSON representation of an AccessResponse object.
Returns:
dict: JSON representation
"""
return {
"has_access": self.has_access,
"error_code": self.error_code,
"developer_message": self.developer_message,
"user_message": self.user_message
}
def __repr__(self):
return "AccessResponse({!r}, {!r}, {!r}, {!r})".format(
self.has_access,
self.error_code,
self.developer_message,
self.user_message
)
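# Usage sketch: truth-testing mirrors has_access, so a caller can write
# (the has_access call is illustrative):
#
#   response = has_access(user, 'load', course)
#   if not response:
#       log.warning(response.user_message)
#
# while error-aware callers inspect response.error_code directly.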
class AccessError(AccessResponse):
"""
Class that holds information about the error in the case of an access
denial in has_access. Contains the error code, user and developer
messages. Subclasses represent specific errors.
"""
def __init__(self, error_code, developer_message, user_message):
"""
Creates an AccessError object.
An AccessError object represents an AccessResponse where access is
denied (has_access is False).
Arguments:
            error_code (String): unique identifier for the specific type
                of error
            developer_message (String): message to show the developer
            user_message (String): message to show the user
"""
super(AccessError, self).__init__(False, error_code, developer_message, user_message)
class StartDateError(AccessError):
"""
Access denied because the course has not started yet and the user
is not staff
"""
def __init__(self, start_date):
error_code = "course_not_started"
if start_date == DEFAULT_START_DATE:
developer_message = "Course has not started"
user_message = _("Course has not started")
else:
developer_message = "Course does not start until {}".format(start_date)
user_message = _("Course does not start until {}" # pylint: disable=translation-of-non-string
.format("{:%B %d, %Y}".format(start_date)))
super(StartDateError, self).__init__(error_code, developer_message, user_message)
class MilestoneAccessError(AccessError):
"""
Access denied because the user has unfulfilled milestones
"""
def __init__(self):
error_code = "unfulfilled_milestones"
developer_message = "User has unfulfilled milestones"
user_message = _("You have unfulfilled milestones")
super(MilestoneAccessError, self).__init__(error_code, developer_message, user_message)
class VisibilityError(AccessError):
"""
Access denied because the user does have the correct role to view this
course.
"""
def __init__(self):
error_code = "not_visible_to_user"
developer_message = "Course is not visible to this user"
user_message = _("You do not have access to this course")
super(VisibilityError, self).__init__(error_code, developer_message, user_message)
class MobileAvailabilityError(AccessError):
"""
Access denied because the course is not available on mobile for the user
"""
def __init__(self):
error_code = "mobile_unavailable"
developer_message = "Course is not available on mobile for this user"
user_message = _("You do not have access to this course on a mobile device")
super(MobileAvailabilityError, self).__init__(error_code, developer_message, user_message)
| agpl-3.0 |
brint/cloud-init | cloudinit/distros/gentoo.py | 7 | 5468 | # vi: ts=4 expandtab
#
# Copyright (C) 2014 Rackspace, US Inc.
#
# Author: Nate House <nathan.house@rackspace.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from cloudinit import distros
from cloudinit import helpers
from cloudinit import log as logging
from cloudinit import util
from cloudinit.distros.parsers.hostname import HostnameConf
from cloudinit.settings import PER_INSTANCE
LOG = logging.getLogger(__name__)
class Distro(distros.Distro):
locale_conf_fn = "/etc/locale.gen"
network_conf_fn = "/etc/conf.d/net"
init_cmd = [''] # init scripts
def __init__(self, name, cfg, paths):
distros.Distro.__init__(self, name, cfg, paths)
# This will be used to restrict certain
# calls from repeatly happening (when they
# should only happen say once per instance...)
self._runner = helpers.Runners(paths)
self.osfamily = 'gentoo'
# Fix sshd restarts
cfg['ssh_svcname'] = '/etc/init.d/sshd'
def apply_locale(self, locale, out_fn=None):
if not out_fn:
out_fn = self.locale_conf_fn
util.subp(['locale-gen', '-G', locale], capture=False)
# "" provides trailing newline during join
lines = [
util.make_header(),
'LANG="%s"' % (locale),
"",
]
util.write_file(out_fn, "\n".join(lines))
def install_packages(self, pkglist):
self.update_package_sources()
self.package_command('', pkgs=pkglist)
def _write_network(self, settings):
util.write_file(self.network_conf_fn, settings)
return ['all']
def _bring_up_interface(self, device_name):
cmd = ['/etc/init.d/net.%s' % device_name, 'restart']
LOG.debug("Attempting to run bring up interface %s using command %s",
device_name, cmd)
try:
(_out, err) = util.subp(cmd)
if len(err):
LOG.warn("Running %s resulted in stderr output: %s", cmd, err)
return True
except util.ProcessExecutionError:
util.logexc(LOG, "Running interface command %s failed", cmd)
return False
def _bring_up_interfaces(self, device_names):
use_all = False
for d in device_names:
if d == 'all':
use_all = True
if use_all:
# Grab device names from init scripts
cmd = ['ls', '/etc/init.d/net.*']
try:
(_out, err) = util.subp(cmd)
if len(err):
LOG.warn("Running %s resulted in stderr output: %s", cmd,
err)
except util.ProcessExecutionError:
util.logexc(LOG, "Running interface command %s failed", cmd)
return False
devices = [x.split('.')[2] for x in _out.split(' ')]
return distros.Distro._bring_up_interfaces(self, devices)
else:
return distros.Distro._bring_up_interfaces(self, device_names)
def _write_hostname(self, your_hostname, out_fn):
conf = None
try:
# Try to update the previous one
# so lets see if we can read it first.
conf = self._read_hostname_conf(out_fn)
except IOError:
pass
if not conf:
conf = HostnameConf('')
conf.set_hostname(your_hostname)
util.write_file(out_fn, conf, 0o644)
def _read_system_hostname(self):
sys_hostname = self._read_hostname(self.hostname_conf_fn)
return (self.hostname_conf_fn, sys_hostname)
def _read_hostname_conf(self, filename):
conf = HostnameConf(util.load_file(filename))
conf.parse()
return conf
def _read_hostname(self, filename, default=None):
hostname = None
try:
conf = self._read_hostname_conf(filename)
hostname = conf.hostname
except IOError:
pass
if not hostname:
return default
return hostname
def set_timezone(self, tz):
distros.set_etc_timezone(tz=tz, tz_file=self._find_tz_file(tz))
def package_command(self, command, args=None, pkgs=None):
if pkgs is None:
pkgs = []
cmd = ['emerge']
# Redirect output
cmd.append("--quiet")
if args and isinstance(args, str):
cmd.append(args)
elif args and isinstance(args, list):
cmd.extend(args)
if command:
cmd.append(command)
pkglist = util.expand_package_list('%s-%s', pkgs)
cmd.extend(pkglist)
# Allow the output of this to flow outwards (ie not be captured)
util.subp(cmd, capture=False)
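    # For example (package names are illustrative), calling
    #   self.package_command('', pkgs=['vim', ('sys-devel/gcc', '4.8')])
    # runs roughly: emerge --quiet vim sys-devel/gcc-4.8
    # since util.expand_package_list formats (name, version) tuples with the
    # '%s-%s' template.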
def update_package_sources(self):
self._runner.run("update-sources", self.package_command,
["-u", "world"], freq=PER_INSTANCE)
| gpl-3.0 |
dcroc16/skunk_works | google_appengine/lib/django-1.3/django/contrib/webdesign/lorem_ipsum.py | 439 | 4872 | """
Utility functions for generating "lorem ipsum" Latin text.
"""
import random
COMMON_P = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
WORDS = ('exercitationem', 'perferendis', 'perspiciatis', 'laborum', 'eveniet',
'sunt', 'iure', 'nam', 'nobis', 'eum', 'cum', 'officiis', 'excepturi',
'odio', 'consectetur', 'quasi', 'aut', 'quisquam', 'vel', 'eligendi',
'itaque', 'non', 'odit', 'tempore', 'quaerat', 'dignissimos',
'facilis', 'neque', 'nihil', 'expedita', 'vitae', 'vero', 'ipsum',
'nisi', 'animi', 'cumque', 'pariatur', 'velit', 'modi', 'natus',
'iusto', 'eaque', 'sequi', 'illo', 'sed', 'ex', 'et', 'voluptatibus',
'tempora', 'veritatis', 'ratione', 'assumenda', 'incidunt', 'nostrum',
'placeat', 'aliquid', 'fuga', 'provident', 'praesentium', 'rem',
'necessitatibus', 'suscipit', 'adipisci', 'quidem', 'possimus',
'voluptas', 'debitis', 'sint', 'accusantium', 'unde', 'sapiente',
'voluptate', 'qui', 'aspernatur', 'laudantium', 'soluta', 'amet',
'quo', 'aliquam', 'saepe', 'culpa', 'libero', 'ipsa', 'dicta',
'reiciendis', 'nesciunt', 'doloribus', 'autem', 'impedit', 'minima',
'maiores', 'repudiandae', 'ipsam', 'obcaecati', 'ullam', 'enim',
'totam', 'delectus', 'ducimus', 'quis', 'voluptates', 'dolores',
'molestiae', 'harum', 'dolorem', 'quia', 'voluptatem', 'molestias',
'magni', 'distinctio', 'omnis', 'illum', 'dolorum', 'voluptatum', 'ea',
'quas', 'quam', 'corporis', 'quae', 'blanditiis', 'atque', 'deserunt',
'laboriosam', 'earum', 'consequuntur', 'hic', 'cupiditate',
'quibusdam', 'accusamus', 'ut', 'rerum', 'error', 'minus', 'eius',
'ab', 'ad', 'nemo', 'fugit', 'officia', 'at', 'in', 'id', 'quos',
'reprehenderit', 'numquam', 'iste', 'fugiat', 'sit', 'inventore',
'beatae', 'repellendus', 'magnam', 'recusandae', 'quod', 'explicabo',
'doloremque', 'aperiam', 'consequatur', 'asperiores', 'commodi',
'optio', 'dolor', 'labore', 'temporibus', 'repellat', 'veniam',
'architecto', 'est', 'esse', 'mollitia', 'nulla', 'a', 'similique',
'eos', 'alias', 'dolore', 'tenetur', 'deleniti', 'porro', 'facere',
'maxime', 'corrupti')
COMMON_WORDS = ('lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur',
'adipisicing', 'elit', 'sed', 'do', 'eiusmod', 'tempor', 'incididunt',
'ut', 'labore', 'et', 'dolore', 'magna', 'aliqua')
def sentence():
"""
Returns a randomly generated sentence of lorem ipsum text.
The first word is capitalized, and the sentence ends in either a period or
question mark. Commas are added at random.
"""
# Determine the number of comma-separated sections and number of words in
# each section for this sentence.
sections = [u' '.join(random.sample(WORDS, random.randint(3, 12))) for i in range(random.randint(1, 5))]
s = u', '.join(sections)
# Convert to sentence case and add end punctuation.
return u'%s%s%s' % (s[0].upper(), s[1:], random.choice('?.'))
def paragraph():
"""
Returns a randomly generated paragraph of lorem ipsum text.
The paragraph consists of between 1 and 4 sentences, inclusive.
"""
return u' '.join([sentence() for i in range(random.randint(1, 4))])
def paragraphs(count, common=True):
"""
Returns a list of paragraphs as returned by paragraph().
If `common` is True, then the first paragraph will be the standard
'lorem ipsum' paragraph. Otherwise, the first paragraph will be random
Latin text. Either way, subsequent paragraphs will be random Latin text.
"""
paras = []
for i in range(count):
if common and i == 0:
paras.append(COMMON_P)
else:
paras.append(paragraph())
return paras
def words(count, common=True):
"""
Returns a string of `count` lorem ipsum words separated by a single space.
If `common` is True, then the first 19 words will be the standard
'lorem ipsum' words. Otherwise, all words will be selected randomly.
"""
if common:
word_list = list(COMMON_WORDS)
else:
word_list = []
c = len(word_list)
if count > c:
count -= c
while count > 0:
c = min(count, len(WORDS))
count -= c
word_list += random.sample(WORDS, c)
else:
word_list = word_list[:count]
return u' '.join(word_list)
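# Example: words(7) is deterministic and returns the first seven standard
# words, u'lorem ipsum dolor sit amet consectetur adipisicing', while
# words(7, common=False) samples all seven at random from WORDS.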
| mit |
SA313161/ardupilot | mk/PX4/Tools/genmsg/test/test_genmsg_gentools.py | 215 | 9526 | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
TEST_CTX = 'rosgraph_msgs'
def get_test_dir():
return os.path.abspath(os.path.join(os.path.dirname(__file__), 'md5tests'))
def get_test_msg_dir():
return os.path.abspath(os.path.join(os.path.dirname(__file__), 'files'))
def get_search_path():
test_dir = get_test_msg_dir()
search_path = {}
for pkg in ['std_msgs', 'rosgraph_msgs', 'test_ros', 'geometry_msgs']:
search_path[pkg] = [ os.path.join(test_dir, pkg, 'msg') ]
return search_path
def _load_md5_tests(dir_name):
test_dir = os.path.join(get_test_dir(), dir_name)
tests = {}
for f in os.listdir(test_dir):
path = os.path.join(test_dir, f)
if not f.endswith('.txt'):
continue
name = f[:-4]
while name and name[-1].isdigit():
name = name[:-1]
assert bool(name)
if name in tests:
tests[name].append(path)
else:
tests[name] = [path]
return tests
def _compute_md5(msg_context, f):
from genmsg import load_depends, compute_md5
from genmsg.msg_loader import load_msg_from_string
text = open(f, 'r').read()
short_name = os.path.basename(f)[:-len('.msg')]
full_name = "%s/%s"%(TEST_CTX, short_name)
spec = load_msg_from_string(msg_context, text, full_name)
search_path = get_search_path()
load_depends(msg_context, spec, search_path)
return compute_md5(msg_context, spec)
def _compute_md5_text(msg_context, f):
from genmsg import compute_md5_text, load_depends
from genmsg.msg_loader import load_msg_from_string
text = open(f, 'r').read()
short_name = os.path.basename(f)[:-len('.msg')]
full_name = "%s/%s"%(TEST_CTX, short_name)
spec = load_msg_from_string(msg_context, text, full_name)
search_path = get_search_path()
load_depends(msg_context, spec, search_path)
return compute_md5_text(msg_context, spec)
def test_compute_md5_text():
from genmsg import MsgContext
msg_context = MsgContext.create_default()
# this test is just verifying that the md5sum is what it was for cturtle->electric
Header_md5 = "2176decaecbce78abc3b96ef049fabed"
rg_msg_dir = os.path.join(get_test_msg_dir(), TEST_CTX, 'msg')
clock_msg = os.path.join(rg_msg_dir, 'Clock.msg')
# a bit gory, but go ahead and regression test these important messages
assert "time clock" == _compute_md5_text(msg_context, clock_msg)
log_msg = os.path.join(rg_msg_dir, 'Log.msg')
assert "byte DEBUG=1\nbyte INFO=2\nbyte WARN=4\nbyte ERROR=8\nbyte FATAL=16\n%s header\nbyte level\nstring name\nstring msg\nstring file\nstring function\nuint32 line\nstring[] topics"%Header_md5 == _compute_md5_text(msg_context, log_msg)
tests = _load_md5_tests('md5text')
# text file #1 is the reference
for k, files in tests.items():
print("running tests", k)
ref_file = [f for f in files if f.endswith('%s1.txt'%k)]
if not ref_file:
assert False, "failed to load %s"%k
ref_file = ref_file[0]
ref_text = open(ref_file, 'r').read().strip()
print("KEY", k)
files = [f for f in files if not f.endswith('%s1.txt'%k)]
for f in files[1:]:
f_text = _compute_md5_text(msg_context, f)
assert ref_text == f_text, "failed on %s\n%s\n%s: \n[%s]\nvs.\n[%s]\n"%(k, ref_file, f, ref_text, f_text)
def test_md5_equals():
from genmsg import MsgContext
msg_context = MsgContext.create_default()
search_path = get_search_path()
tests = _load_md5_tests('same')
for k, files in tests.items():
print("running tests", k)
md5sum = _compute_md5(msg_context, files[0])
for f in files[1:]:
assert md5sum == _compute_md5(msg_context, f), "failed on %s: \n[%s]\nvs.\n[%s]\n"%(k, _compute_md5_text(msg_context, files[0]), _compute_md5_text(msg_context, f))
def test_md5_not_equals():
from genmsg import MsgContext
msg_context = MsgContext.create_default()
tests = _load_md5_tests('different')
for k, files in tests.items():
print("running tests", k)
md5s = set()
md6md5sum = _compute_md5(msg_context, files[0])
for f in files:
md5s.add(_compute_md5(msg_context, f))
# each md5 should be unique
assert len(md5s) == len(files)
twist_with_covariance_stamped_full_text = """# This represents an estimate twist with reference coordinate frame and timestamp.
Header header
TwistWithCovariance twist
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: geometry_msgs/TwistWithCovariance
# This expresses velocity in free space with uncertianty.
Twist twist
# Row-major representation of the 6x6 covariance matrix
# The orientation parameters use a fixed-axis representation.
# In order, the parameters are:
# (x, y, z, rotation about X axis, rotation about Y axis, rotation about Z axis)
float64[36] covariance
================================================================================
MSG: geometry_msgs/Twist
# This expresses velocity in free space broken into it's linear and angular parts.
Vector3 linear
Vector3 angular
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z"""
log_full_text = """##
## Severity level constants
##
byte DEBUG=1 #debug level
byte INFO=2 #general level
byte WARN=4 #warning level
byte ERROR=8 #error level
byte FATAL=16 #fatal/critical level
##
## Fields
##
Header header
byte level
string name # name of the node
string msg # message
string file # file the message came from
string function # function the message came from
uint32 line # line the message came from
string[] topics # topic names that the node publishes
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
"""
def test_compute_full_text():
from genmsg import MsgContext, compute_full_text, load_msg_by_type, load_depends
msg_context = MsgContext.create_default()
search_path = get_search_path()
# regression test against values used for cturtle-electric
spec = load_msg_by_type(msg_context, 'rosgraph_msgs/Log', search_path)
load_depends(msg_context, spec, search_path)
val = compute_full_text(msg_context, spec)
assert val == log_full_text, "[%s][%s]"%(val, log_full_text)
spec = load_msg_by_type(msg_context, 'geometry_msgs/TwistWithCovarianceStamped', search_path)
load_depends(msg_context, spec, search_path)
val = compute_full_text(msg_context, spec)
assert val == twist_with_covariance_stamped_full_text, "[%s][%s]"%(val, twist_with_covariance_stamped_full_text)
| gpl-3.0 |
samkariu/voicex | voicex/migrations/0001_initial.py | 2 | 7576 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
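# Usage note (illustrative, assuming South is installed and 'voicex' is in
# INSTALLED_APPS): this initial migration would typically be applied with
#
#   python manage.py migrate voicex 0001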
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Post'
db.create_table('posts', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=20)),
('post', self.gf('django.db.models.fields.TextField')()),
('zip_code', self.gf('django.db.models.fields.CharField')(max_length=20)),
('reply_to', self.gf('django.db.models.fields.related.ForeignKey')(related_name='replies', null=True, to=orm['voicex.Post'])),
('public', self.gf('django.db.models.fields.BooleanField')(default=False)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('voicex', ['Post'])
# Adding model 'Subscriber'
db.create_table('subscribers', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=20)),
('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['voicex.Account'])),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('voicex', ['Subscriber'])
# Adding unique constraint on 'Subscriber', fields ['phone', 'account']
db.create_unique('subscribers', ['phone', 'account_id'])
# Adding model 'Account'
db.create_table('accounts', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('phone', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('password', self.gf('django.db.models.fields.CharField')(max_length=20)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('voicex', ['Account'])
# Adding model 'Delegate'
db.create_table('delegates', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['voicex.Account'])),
))
db.send_create_signal('voicex', ['Delegate'])
# Adding model 'Following'
db.create_table('following', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=20)),
('tag', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['voicex.Tag'])),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('voicex', ['Following'])
# Adding unique constraint on 'Following', fields ['phone', 'tag']
db.create_unique('following', ['phone', 'tag_id'])
# Adding model 'Tag'
db.create_table('tags', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
))
db.send_create_signal('voicex', ['Tag'])
def backwards(self, orm):
# Removing unique constraint on 'Following', fields ['phone', 'tag']
db.delete_unique('following', ['phone', 'tag_id'])
# Removing unique constraint on 'Subscriber', fields ['phone', 'account']
db.delete_unique('subscribers', ['phone', 'account_id'])
# Deleting model 'Post'
db.delete_table('posts')
# Deleting model 'Subscriber'
db.delete_table('subscribers')
# Deleting model 'Account'
db.delete_table('accounts')
# Deleting model 'Delegate'
db.delete_table('delegates')
# Deleting model 'Following'
db.delete_table('following')
# Deleting model 'Tag'
db.delete_table('tags')
models = {
'voicex.account': {
'Meta': {'object_name': 'Account', 'db_table': "'accounts'"},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'phone': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'voicex.delegate': {
'Meta': {'object_name': 'Delegate', 'db_table': "'delegates'"},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['voicex.Account']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'voicex.following': {
'Meta': {'unique_together': "(('phone', 'tag'),)", 'object_name': 'Following', 'db_table': "'following'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['voicex.Tag']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'voicex.post': {
'Meta': {'object_name': 'Post', 'db_table': "'posts'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'post': ('django.db.models.fields.TextField', [], {}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replies'", 'null': 'True', 'to': "orm['voicex.Post']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'voicex.subscriber': {
'Meta': {'unique_together': "(('phone', 'account'),)", 'object_name': 'Subscriber', 'db_table': "'subscribers'"},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['voicex.Account']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'voicex.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "'tags'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
}
}
complete_apps = ['voicex'] | mit |
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/univention/udm_user.py | 29 | 21233 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <tobias.ruetschi@adfinis-sygroup.ch>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_user
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage posix users on a univention corporate server
description:
- "This module allows to manage posix users on a univention corporate
server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the user is present or not.
username:
required: true
description:
- User name
aliases: ['name']
firstname:
required: false
description:
- First name. Required if C(state=present).
lastname:
required: false
description:
- Last name. Required if C(state=present).
password:
required: false
default: None
description:
- Password. Required if C(state=present).
birthday:
required: false
default: None
description:
- Birthday
city:
required: false
default: None
description:
- City of users business address.
country:
required: false
default: None
description:
- Country of users business address.
department_number:
required: false
default: None
description:
- Department number of users business address.
aliases: [ departmentNumber ]
description:
required: false
default: None
description:
- Description (not gecos)
display_name:
required: false
default: None
description:
- Display name (not gecos)
aliases: [ displayName ]
email:
required: false
default: ['']
description:
- A list of e-mail addresses.
employee_number:
required: false
default: None
description:
- Employee number
aliases: [ employeeNumber ]
employee_type:
required: false
default: None
description:
- Employee type
aliases: [ employeeType ]
gecos:
required: false
default: None
description:
- GECOS
groups:
required: false
default: []
description:
- "POSIX groups, the LDAP DNs of the groups will be found with the
LDAP filter for each group as $GROUP:
C((&(objectClass=posixGroup)(cn=$GROUP)))."
home_share:
required: false
default: None
description:
- "Home NFS share. Must be a LDAP DN, e.g.
C(cn=home,cn=shares,ou=school,dc=example,dc=com)."
aliases: [ homeShare ]
home_share_path:
required: false
default: None
description:
- Path to home NFS share, inside the homeShare.
aliases: [ homeSharePath ]
home_telephone_number:
required: false
default: []
description:
- List of private telephone numbers.
aliases: [ homeTelephoneNumber ]
homedrive:
required: false
default: None
description:
- Windows home drive, e.g. C("H:").
mail_alternative_address:
required: false
default: []
description:
- List of alternative e-mail addresses.
aliases: [ mailAlternativeAddress ]
mail_home_server:
required: false
default: None
description:
- FQDN of mail server
aliases: [ mailHomeServer ]
mail_primary_address:
required: false
default: None
description:
- Primary e-mail address
aliases: [ mailPrimaryAddress ]
mobile_telephone_number:
required: false
default: []
description:
- Mobile phone number
aliases: [ mobileTelephoneNumber ]
organisation:
required: false
default: None
description:
- Organisation
override_pw_history:
required: false
default: False
description:
- Override password history
aliases: [ overridePWHistory ]
override_pw_length:
required: false
default: False
description:
- Override password check
aliases: [ overridePWLength ]
pager_telephonenumber:
required: false
default: []
description:
- List of pager telephone numbers.
aliases: [ pagerTelephonenumber ]
phone:
required: false
default: []
description:
- List of telephone numbers.
postcode:
required: false
default: None
description:
- Postal code of users business address.
primary_group:
required: false
default: cn=Domain Users,cn=groups,$LDAP_BASE_DN
description:
- Primary group. This must be the group LDAP DN.
aliases: [ primaryGroup ]
profilepath:
required: false
default: None
description:
- Windows profile directory
pwd_change_next_login:
required: false
default: None
choices: [ '0', '1' ]
description:
- Change password on next login.
aliases: [ pwdChangeNextLogin ]
room_number:
required: false
default: None
description:
- Room number of users business address.
aliases: [ roomNumber ]
samba_privileges:
required: false
default: []
description:
- "Samba privilege, like allow printer administration, do domain
join."
aliases: [ sambaPrivileges ]
samba_user_workstations:
required: false
default: []
description:
- Allow the authentication only on this Microsoft Windows host.
aliases: [ sambaUserWorkstations ]
sambahome:
required: false
default: None
description:
- Windows home path, e.g. C('\\\\$FQDN\\$USERNAME').
scriptpath:
required: false
default: None
description:
- Windows logon script.
secretary:
required: false
default: []
description:
- A list of superiors as LDAP DNs.
serviceprovider:
required: false
default: ['']
description:
- Enable user for the following service providers.
shell:
required: false
default: '/bin/bash'
description:
- Login shell
street:
required: false
default: None
description:
- Street of users business address.
title:
required: false
default: None
description:
- Title, e.g. C(Prof.).
unixhome:
required: false
default: '/home/$USERNAME'
description:
- Unix home directory
userexpiry:
required: false
default: Today + 1 year
description:
- Account expiry date, e.g. C(1999-12-31).
position:
required: false
default: ''
description:
- "Define the whole position of users object inside the LDAP tree,
e.g. C(cn=employee,cn=users,ou=school,dc=example,dc=com)."
update_password:
required: false
default: always
description:
- "C(always) will update passwords if they differ.
C(on_create) will only set the password for newly created users."
version_added: "2.3"
ou:
required: false
default: ''
description:
- "Organizational Unit inside the LDAP Base DN, e.g. C(school) for
LDAP OU C(ou=school,dc=example,dc=com)."
subpath:
required: false
default: 'cn=users'
description:
- "LDAP subpath inside the organizational unit, e.g.
C(cn=teachers,cn=users) for LDAP container
C(cn=teachers,cn=users,dc=example,dc=com)."
'''
EXAMPLES = '''
# Create a user on a UCS
- udm_user:
name: FooBar
password: secure_password
firstname: Foo
lastname: Bar
# Create a user with the DN
# C(uid=foo,cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com)
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
ou: school
subpath: 'cn=teachers,cn=users'
# or define the position
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
position: 'cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com'
'''
RETURN = '''# '''
import crypt
from datetime import date, timedelta
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
)
def main():
expiry = date.strftime(date.today() + timedelta(days=365), "%Y-%m-%d")
module = AnsibleModule(
argument_spec = dict(
birthday = dict(default=None,
type='str'),
city = dict(default=None,
type='str'),
country = dict(default=None,
type='str'),
department_number = dict(default=None,
type='str',
aliases=['departmentNumber']),
description = dict(default=None,
type='str'),
display_name = dict(default=None,
type='str',
aliases=['displayName']),
email = dict(default=[''],
type='list'),
employee_number = dict(default=None,
type='str',
aliases=['employeeNumber']),
employee_type = dict(default=None,
type='str',
aliases=['employeeType']),
firstname = dict(default=None,
type='str'),
gecos = dict(default=None,
type='str'),
groups = dict(default=[],
type='list'),
home_share = dict(default=None,
type='str',
aliases=['homeShare']),
home_share_path = dict(default=None,
type='str',
aliases=['homeSharePath']),
home_telephone_number = dict(default=[],
type='list',
aliases=['homeTelephoneNumber']),
homedrive = dict(default=None,
type='str'),
lastname = dict(default=None,
type='str'),
mail_alternative_address= dict(default=[],
type='list',
aliases=['mailAlternativeAddress']),
mail_home_server = dict(default=None,
type='str',
aliases=['mailHomeServer']),
mail_primary_address = dict(default=None,
type='str',
aliases=['mailPrimaryAddress']),
mobile_telephone_number = dict(default=[],
type='list',
aliases=['mobileTelephoneNumber']),
organisation = dict(default=None,
type='str'),
overridePWHistory = dict(default=False,
type='bool',
aliases=['override_pw_history']),
overridePWLength = dict(default=False,
type='bool',
aliases=['override_pw_length']),
pager_telephonenumber = dict(default=[],
type='list',
aliases=['pagerTelephonenumber']),
password = dict(default=None,
type='str',
no_log=True),
phone = dict(default=[],
type='list'),
postcode = dict(default=None,
type='str'),
primary_group = dict(default=None,
type='str',
aliases=['primaryGroup']),
profilepath = dict(default=None,
type='str'),
pwd_change_next_login = dict(default=None,
type='str',
choices=['0', '1'],
aliases=['pwdChangeNextLogin']),
room_number = dict(default=None,
type='str',
aliases=['roomNumber']),
samba_privileges = dict(default=[],
type='list',
aliases=['sambaPrivileges']),
samba_user_workstations = dict(default=[],
type='list',
aliases=['sambaUserWorkstations']),
sambahome = dict(default=None,
type='str'),
scriptpath = dict(default=None,
type='str'),
secretary = dict(default=[],
type='list'),
serviceprovider = dict(default=[''],
type='list'),
shell = dict(default='/bin/bash',
type='str'),
street = dict(default=None,
type='str'),
title = dict(default=None,
type='str'),
unixhome = dict(default=None,
type='str'),
userexpiry = dict(default=expiry,
type='str'),
username = dict(required=True,
aliases=['name'],
type='str'),
position = dict(default='',
type='str'),
update_password = dict(default='always',
choices=['always', 'on_create'],
type='str'),
ou = dict(default='',
type='str'),
subpath = dict(default='cn=users',
type='str'),
state = dict(default='present',
choices=['present', 'absent'],
type='str')
),
supports_check_mode=True,
required_if = ([
('state', 'present', ['firstname', 'lastname', 'password'])
])
)
username = module.params['username']
position = module.params['position']
ou = module.params['ou']
subpath = module.params['subpath']
state = module.params['state']
changed = False
diff = None
users = list(ldap_search(
'(&(objectClass=posixAccount)(uid={}))'.format(username),
attr=['uid']
))
if position != '':
container = position
else:
if ou != '':
ou = 'ou={},'.format(ou)
if subpath != '':
subpath = '{},'.format(subpath)
container = '{}{}{}'.format(subpath, ou, base_dn())
user_dn = 'uid={},{}'.format(username, container)
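# Illustration (hypothetical values): with ou='school', subpath='cn=users'
# and a base DN of 'dc=example,dc=com', container becomes
# 'cn=users,ou=school,dc=example,dc=com' and user_dn becomes
# 'uid=<username>,cn=users,ou=school,dc=example,dc=com'.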
exists = bool(len(users))
if state == 'present':
try:
if not exists:
obj = umc_module_for_add('users/user', container)
else:
obj = umc_module_for_edit('users/user', user_dn)
if module.params['display_name'] is None:
module.params['display_name'] = '{} {}'.format(
module.params['firstname'],
module.params['lastname']
)
if module.params['unixhome'] is None:
module.params['unixhome'] = '/home/{}'.format(
module.params['username']
)
for k in obj.keys():
if (k != 'password' and
k != 'groups' and
k != 'overridePWHistory' and
k in module.params and
module.params[k] is not None):
obj[k] = module.params[k]
# handle some special values
obj['e-mail'] = module.params['email']
password = module.params['password']
if obj['password'] is None:
obj['password'] = password
if module.params['update_password'] == 'always':
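# The stored LDAP value appears to be of the form '{crypt}<hash>' (an
# assumption based on the split below); re-crypting the supplied password
# with the old hash as salt tells us whether the password actually changed.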
old_password = obj['password'].split('}', 2)[1]
if crypt.crypt(password, old_password) != old_password:
obj['overridePWHistory'] = module.params['overridePWHistory']
obj['overridePWLength'] = module.params['overridePWLength']
obj['password'] = password
diff = obj.diff()
if exists:
for k in obj.keys():
if obj.hasChanged(k):
changed = True
else:
changed = True
if not module.check_mode:
if not exists:
obj.create()
elif changed:
obj.modify()
except:
module.fail_json(
msg="Creating/editing user {} in {} failed".format(
username,
container
)
)
try:
groups = module.params['groups']
if groups:
filter = '(&(objectClass=posixGroup)(|(cn={})))'.format(
')(cn='.join(groups)
)
group_dns = list(ldap_search(filter, attr=['dn']))
for dn in group_dns:
grp = umc_module_for_edit('groups/group', dn[0])
if user_dn not in grp['users']:
grp['users'].append(user_dn)
if not module.check_mode:
grp.modify()
changed = True
except:
module.fail_json(
msg="Adding groups to user {} failed".format(username)
)
if state == 'absent' and exists:
try:
obj = umc_module_for_edit('users/user', user_dn)
if not module.check_mode:
obj.remove()
changed = True
except:
module.fail_json(
msg="Removing user {} failed".format(username)
)
module.exit_json(
changed=changed,
username=username,
diff=diff,
container=container
)
if __name__ == '__main__':
main()
| bsd-3-clause |
tobyclemson/terraform-aws-vpc-auto-peering | lambdas/auto_peering/auto_peering/vpc_peering_routes.py | 1 | 4260 | from botocore.exceptions import ClientError
class VPCPeeringRoutes(object):
def __init__(self, vpc1, vpc2,
vpc_peering_relationship,
ec2_gateways, logger):
self.vpc1 = vpc1
self.vpc2 = vpc2
self.vpc_peering_relationship = vpc_peering_relationship
self.ec2_gateways = ec2_gateways
self.logger = logger
def __private_route_tables_for(self, vpc):
ec2_gateway = self.ec2_gateways.get(vpc.region)
ec2_resource = ec2_gateway.resource
return ec2_resource.route_tables.filter(
Filters=[
{'Name': 'vpc-id', 'Values': [vpc.id]},
{'Name': 'tag:Tier', 'Values': ['private']}])
def __create_routes_in(self, route_tables, destination_vpc,
vpc_peering_connection):
for route_table in route_tables:
try:
route_table.create_route(
DestinationCidrBlock=destination_vpc.cidr_block,
VpcPeeringConnectionId=vpc_peering_connection.id)
self.logger.debug(
"Route creation succeeded for '%s'. Continuing.",
route_table.id)
except ClientError as error:
self.logger.warn(
"Route creation failed for '%s'. Error was: %s",
route_table.id, error)
def __create_routes_for(self, source_vpc, destination_vpc,
vpc_peering_connection):
self.logger.debug(
"Adding routes to private subnets in: '%s' pointing at '%s:%s:%s'.",
source_vpc.id, destination_vpc.id, destination_vpc.cidr_block,
vpc_peering_connection.id)
self.__create_routes_in(
self.__private_route_tables_for(source_vpc),
destination_vpc, vpc_peering_connection)
def __delete_routes_in(self, route_tables, source_vpc, destination_vpc):
for route_table in route_tables:
try:
ec2_gateway = self.ec2_gateways.get(source_vpc.region)
ec2_resource = ec2_gateway.resource
route = ec2_resource.Route(
route_table.id, destination_vpc.cidr_block)
route.delete()
self.logger.debug(
"Route deletion succeeded for '%s'. Continuing.",
route_table.id)
except ClientError as error:
self.logger.warn(
"Route deletion failed for '%s'. Error was: %s",
route_table.id, error)
def __delete_routes_for(self, source_vpc, destination_vpc,
vpc_peering_connection):
self.logger.debug(
"Removing routes from private subnets in: '%s' pointing at "
"'%s:%s:%s'.",
source_vpc.id, destination_vpc.id, destination_vpc.cidr_block,
vpc_peering_connection.id)
self.__delete_routes_in(
self.__private_route_tables_for(source_vpc),
source_vpc,
destination_vpc)
def provision(self):
vpc_peering_connection = self.vpc_peering_relationship.fetch()
self.__create_routes_for(self.vpc1, self.vpc2, vpc_peering_connection)
self.__create_routes_for(self.vpc2, self.vpc1, vpc_peering_connection)
def destroy(self):
vpc_peering_connection = self.vpc_peering_relationship.fetch()
self.__delete_routes_for(self.vpc1, self.vpc2, vpc_peering_connection)
self.__delete_routes_for(self.vpc2, self.vpc1, vpc_peering_connection)
def perform(self, action):
getattr(self, action)()
def _to_dict(self):
return {
'vpcs': frozenset([self.vpc1, self.vpc2]),
'vpc_peering_relationship': self.vpc_peering_relationship,
}
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._to_dict() == other._to_dict()
return NotImplemented
def __ne__(self, other):
if isinstance(other, self.__class__):
return not self.__eq__(other)
return NotImplemented
def __hash__(self):
return hash(tuple(sorted(self._to_dict().items())))
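# A minimal usage sketch (names are hypothetical: 'vpc_a'/'vpc_b' would be
# VPC wrappers exposing .id, .region and .cidr_block, and 'relationship' a
# VPCPeeringRelationship whose fetch() returns the peering connection):
#
#   routes = VPCPeeringRoutes(vpc_a, vpc_b, relationship, ec2_gateways, logger)
#   routes.perform('provision')   # dispatches to provision() via getattr
#   routes.perform('destroy')     # later, removes the routes again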
| mit |
PopCap/GameIdea | Engine/Source/ThirdParty/HTML5/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32com/makegw/makegw.py | 18 | 16995 | """Utility functions for writing out gateway C++ files
This module will generate a C++/Python binding for a specific COM
interface.
At this stage, no command line interface exists. You must start Python,
import this module, change to the directory where the generated code should
be written, and run the public function.
This module is capable of generating both 'Interfaces' (ie, Python
client side support for the interface) and 'Gateways' (ie, Python
server side support for the interface). Many COM interfaces are useful
both as Client and Server. Other interfaces, however, really only make
sense to implement one side or the other. For example, it would be pointless
for Python to implement Server side for 'IRunningObjectTable', unless we were
implementing core COM for an operating system in Python (hey - now there's an idea!)
Most COM interface code is totally boiler-plate - it consists of
converting arguments, dispatching the call to Python, and processing
any result values.
This module automates the generation of such code. It has the ability to
parse a .H file generated by the MIDL tool (ie, almost all COM .h files)
and build almost totally complete C++ code.
The module understands some of the well known data types, and how to
convert them. There are only a couple of places where hand-editing is
necessary, as detailed below:
unsupported types -- If a type is not known, the generator will
pretty much ignore it, but write a comment to the generated code. You
may want to add custom support for this type. In some cases, C++ compile errors
will result. These are intentional - generating code to remove these errors would
imply a false sense of security that the generator has done the right thing.
other return policies -- By default, Python never sees the return SCODE from
a COM function. The interface usually returns None if OK, else raises a COM exception
if "FAILED(scode)" is TRUE. You may need to change this if:
 * EXCEPINFO is passed to the COM function. This is not detected or handled
 * For some reason Python should always see the result SCODE, whether it
 failed or succeeded. For example, some functions return a BOOLEAN result
in the SCODE, meaning Python should always see it.
* FAILED(scode) for the interface still has valid data to return (by default,
 the generated code does not process the return values, and raises an exception
 to Python/COM).
"""
import re
import makegwparse
def make_framework_support(header_file_name, interface_name, bMakeInterface = 1, bMakeGateway = 1):
"""Generate C++ code for a Python Interface and Gateway
header_file_name -- The full path to the .H file which defines the interface.
interface_name -- The name of the interface to search for, and to generate.
bMakeInterface = 1 -- Should interface (ie, client) support be generated.
bMakeGateway = 1 -- Should gateway (ie, server) support be generated.
This method will write a .cpp and .h file into the current directory
(using the name of the interface to build the file names).
"""
fin=open(header_file_name)
try:
interface = makegwparse.parse_interface_info(interface_name, fin)
finally:
fin.close()
if bMakeInterface and bMakeGateway:
desc = "Interface and Gateway"
elif bMakeInterface and not bMakeGateway:
desc = "Interface"
else:
desc = "Gateway"
if interface.name[:5]=="IEnum": # IEnum - use my really simple template-based one
import win32com.makegw.makegwenum
ifc_cpp_writer = win32com.makegw.makegwenum._write_enumifc_cpp
gw_cpp_writer = win32com.makegw.makegwenum._write_enumgw_cpp
else: # Use my harder working ones.
ifc_cpp_writer = _write_ifc_cpp
gw_cpp_writer = _write_gw_cpp
fout=open("Py%s.cpp" % interface.name, "w")
try:
fout.write(\
'''\
// This file implements the %s %s for Python.
// Generated by makegw.py
#include "shell_pch.h"
''' % (interface.name, desc))
# if bMakeGateway:
# fout.write('#include "PythonCOMServer.h"\n')
# if interface.base not in ["IUnknown", "IDispatch"]:
# fout.write('#include "Py%s.h"\n' % interface.base)
fout.write('#include "Py%s.h"\n\n// @doc - This file contains autoduck documentation\n' % interface.name)
if bMakeInterface: ifc_cpp_writer(fout, interface)
if bMakeGateway: gw_cpp_writer(fout, interface)
finally:
fout.close()
fout=open("Py%s.h" % interface.name, "w")
try:
fout.write(\
'''\
// This file declares the %s %s for Python.
// Generated by makegw.py
''' % (interface.name, desc))
if bMakeInterface: _write_ifc_h(fout, interface)
if bMakeGateway: _write_gw_h(fout, interface)
finally:
fout.close()
###########################################################################
#
# INTERNAL FUNCTIONS
#
#
def _write_ifc_h(f, interface):
f.write(\
'''\
// ---------------------------------------------------
//
// Interface Declaration
class Py%s : public Py%s
{
public:
MAKE_PYCOM_CTOR(Py%s);
static %s *GetI(PyObject *self);
static PyComTypeObject type;
// The Python methods
''' % (interface.name, interface.base, interface.name, interface.name))
for method in interface.methods:
f.write('\tstatic PyObject *%s(PyObject *self, PyObject *args);\n' % method.name)
f.write(\
'''\
protected:
Py%s(IUnknown *pdisp);
~Py%s();
};
''' % (interface.name, interface.name))
def _write_ifc_cpp(f, interface):
name = interface.name
f.write(\
'''\
// ---------------------------------------------------
//
// Interface Implementation
Py%(name)s::Py%(name)s(IUnknown *pdisp):
Py%(base)s(pdisp)
{
ob_type = &type;
}
Py%(name)s::~Py%(name)s()
{
}
/* static */ %(name)s *Py%(name)s::GetI(PyObject *self)
{
return (%(name)s *)Py%(base)s::GetI(self);
}
''' % (interface.__dict__))
ptr = re.sub('[a-z]', '', interface.name)
strdict = {'interfacename':interface.name, 'ptr': ptr}
for method in interface.methods:
strdict['method'] = method.name
f.write(\
'''\
// @pymethod |Py%(interfacename)s|%(method)s|Description of %(method)s.
PyObject *Py%(interfacename)s::%(method)s(PyObject *self, PyObject *args)
{
%(interfacename)s *p%(ptr)s = GetI(self);
if ( p%(ptr)s == NULL )
return NULL;
''' % strdict)
argsParseTuple = argsCOM = formatChars = codePost = \
codePobjects = codeCobjects = cleanup = cleanup_gil = ""
needConversion = 0
# if method.name=="Stat": import win32dbg;win32dbg.brk()
for arg in method.args:
try:
argCvt = makegwparse.make_arg_converter(arg)
if arg.HasAttribute("in"):
val = argCvt.GetFormatChar()
if val:
f.write ('\t' + argCvt.GetAutoduckString() + "\n")
formatChars = formatChars + val
argsParseTuple = argsParseTuple + ", " + argCvt.GetParseTupleArg()
codePobjects = codePobjects + argCvt.DeclareParseArgTupleInputConverter()
codePost = codePost + argCvt.GetParsePostCode()
needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
cleanup = cleanup + argCvt.GetInterfaceArgCleanup()
cleanup_gil = cleanup_gil + argCvt.GetInterfaceArgCleanupGIL()
comArgName, comArgDeclString = argCvt.GetInterfaceCppObjectInfo()
if comArgDeclString: # If we should declare a variable
codeCobjects = codeCobjects + "\t%s;\n" % (comArgDeclString)
argsCOM = argsCOM + ", " + comArgName
except makegwparse.error_not_supported, why:
f.write('// *** The input argument %s of type "%s" was not processed ***\n// Please check the conversion function is appropriate and exists!\n' % (arg.name, arg.raw_type))
f.write('\t%s %s;\n\tPyObject *ob%s;\n' % (arg.type, arg.name, arg.name))
f.write('\t// @pyparm <o Py%s>|%s||Description for %s\n' % (arg.type, arg.name, arg.name))
codePost = codePost + '\tif (bPythonIsHappy && !PyObject_As%s( ob%s, &%s )) bPythonIsHappy = FALSE;\n' % (arg.type, arg.name, arg.name)
formatChars = formatChars + "O"
argsParseTuple = argsParseTuple + ", &ob%s" % (arg.name)
argsCOM = argsCOM + ", " + arg.name
cleanup = cleanup + "\tPyObject_Free%s(%s);\n" % (arg.type, arg.name)
if needConversion: f.write("\tUSES_CONVERSION;\n")
f.write(codePobjects);
f.write(codeCobjects);
f.write('\tif ( !PyArg_ParseTuple(args, "%s:%s"%s) )\n\t\treturn NULL;\n' % (formatChars, method.name, argsParseTuple))
if codePost:
f.write('\tBOOL bPythonIsHappy = TRUE;\n')
f.write(codePost);
f.write('\tif (!bPythonIsHappy) return NULL;\n')
strdict['argsCOM'] = argsCOM[1:]
strdict['cleanup'] = cleanup
strdict['cleanup_gil'] = cleanup_gil
f.write(\
''' HRESULT hr;
PY_INTERFACE_PRECALL;
hr = p%(ptr)s->%(method)s(%(argsCOM)s );
%(cleanup)s
PY_INTERFACE_POSTCALL;
%(cleanup_gil)s
if ( FAILED(hr) )
return PyCom_BuildPyException(hr, p%(ptr)s, IID_%(interfacename)s );
''' % strdict)
codePre = codePost = formatChars = codeVarsPass = codeDecl = ""
for arg in method.args:
if not arg.HasAttribute("out"):
continue
try:
argCvt = makegwparse.make_arg_converter(arg)
formatChar = argCvt.GetFormatChar()
if formatChar:
formatChars = formatChars + formatChar
codePre = codePre + argCvt.GetBuildForInterfacePreCode()
codePost = codePost + argCvt.GetBuildForInterfacePostCode()
codeVarsPass = codeVarsPass + ", " + argCvt.GetBuildValueArg()
codeDecl = codeDecl + argCvt.DeclareParseArgTupleInputConverter()
except makegwparse.error_not_supported, why:
f.write('// *** The output argument %s of type "%s" was not processed ***\n// %s\n' % (arg.name, arg.raw_type, why))
continue
if formatChars:
f.write('%s\n%s\tPyObject *pyretval = Py_BuildValue("%s"%s);\n%s\treturn pyretval;' % (codeDecl, codePre, formatChars, codeVarsPass, codePost))
else:
f.write('\tPy_INCREF(Py_None);\n\treturn Py_None;\n')
f.write('\n}\n\n')
f.write ('// @object Py%s|Description of the interface\n' % (name))
f.write('static struct PyMethodDef Py%s_methods[] =\n{\n' % name)
for method in interface.methods:
f.write('\t{ "%s", Py%s::%s, 1 }, // @pymeth %s|Description of %s\n' % (method.name, interface.name, method.name, method.name, method.name))
interfacebase = interface.base
f.write('''\
{ NULL }
};
PyComTypeObject Py%(name)s::type("Py%(name)s",
&Py%(interfacebase)s::type,
sizeof(Py%(name)s),
Py%(name)s_methods,
GET_PYCOM_CTOR(Py%(name)s));
''' % locals())
def _write_gw_h(f, interface):
if interface.name[0] == "I":
gname = 'PyG' + interface.name[1:]
else:
gname = 'PyG' + interface.name
name = interface.name
if interface.base == "IUnknown" or interface.base == "IDispatch":
base_name = "PyGatewayBase"
else:
if interface.base[0] == "I":
base_name = 'PyG' + interface.base[1:]
else:
base_name = 'PyG' + interface.base
f.write(\
'''\
// ---------------------------------------------------
//
// Gateway Declaration
class %s : public %s, public %s
{
protected:
%s(PyObject *instance) : %s(instance) { ; }
PYGATEWAY_MAKE_SUPPORT2(%s, %s, IID_%s, %s)
''' % (gname, base_name, name, gname, base_name, gname, name, name, base_name))
if interface.base != "IUnknown":
f.write("\t// %s\n\t// *** Manually add %s method decls here\n\n" % (interface.base, interface.base))
else:
f.write('\n\n')
f.write("\t// %s\n" % name)
for method in interface.methods:
f.write('\tSTDMETHOD(%s)(\n' % method.name)
if method.args:
for arg in method.args[:-1]:
f.write("\t\t%s,\n" % (arg.GetRawDeclaration()))
arg = method.args[-1]
f.write("\t\t%s);\n\n" % (arg.GetRawDeclaration()))
else:
f.write('\t\tvoid);\n\n')
f.write('};\n')
f.close()
def _write_gw_cpp(f, interface):
if interface.name[0] == "I":
gname = 'PyG' + interface.name[1:]
else:
gname = 'PyG' + interface.name
name = interface.name
if interface.base == "IUnknown" or interface.base == "IDispatch":
base_name = "PyGatewayBase"
else:
if interface.base[0] == "I":
base_name = 'PyG' + interface.base[1:]
else:
base_name = 'PyG' + interface.base
f.write('''\
// ---------------------------------------------------
//
// Gateway Implementation
''' % {'name':name, 'gname':gname, 'base_name':base_name})
for method in interface.methods:
f.write(\
'''\
STDMETHODIMP %s::%s(
''' % (gname, method.name))
if method.args:
for arg in method.args[:-1]:
inoutstr = ']['.join(arg.inout)
f.write("\t\t/* [%s] */ %s,\n" % (inoutstr, arg.GetRawDeclaration()))
arg = method.args[-1]
inoutstr = ']['.join(arg.inout)
f.write("\t\t/* [%s] */ %s)\n" % (inoutstr, arg.GetRawDeclaration()))
else:
f.write('\t\tvoid)\n')
f.write("{\n\tPY_GATEWAY_METHOD;\n")
cout = 0
codePre = codePost = codeVars = ""
argStr = ""
needConversion = 0
formatChars = ""
if method.args:
for arg in method.args:
if arg.HasAttribute("out"):
cout = cout + 1
if arg.indirectionLevel ==2 :
f.write("\tif (%s==NULL) return E_POINTER;\n" % arg.name)
if arg.HasAttribute("in"):
try:
argCvt = makegwparse.make_arg_converter(arg)
argCvt.SetGatewayMode()
formatchar = argCvt.GetFormatChar();
needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
if formatchar:
formatChars = formatChars + formatchar
codeVars = codeVars + argCvt.DeclareParseArgTupleInputConverter()
argStr = argStr + ", " + argCvt.GetBuildValueArg()
codePre = codePre + argCvt.GetBuildForGatewayPreCode()
codePost = codePost + argCvt.GetBuildForGatewayPostCode()
except makegwparse.error_not_supported, why:
f.write('// *** The input argument %s of type "%s" was not processed ***\n// - Please ensure this conversion function exists, and is appropriate\n// - %s\n' % (arg.name, arg.raw_type, why))
f.write('\tPyObject *ob%s = PyObject_From%s(%s);\n' % (arg.name, arg.type, arg.name))
f.write('\tif (ob%s==NULL) return MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % (arg.name, method.name))
codePost = codePost + "\tPy_DECREF(ob%s);\n" % arg.name
formatChars = formatChars + "O"
argStr = argStr + ", ob%s" % (arg.name)
if needConversion: f.write('\tUSES_CONVERSION;\n')
f.write(codeVars)
f.write(codePre)
if cout:
f.write("\tPyObject *result;\n")
resStr = "&result"
else:
resStr = "NULL"
if formatChars:
fullArgStr = '%s, "%s"%s' % (resStr, formatChars, argStr)
else:
fullArgStr = resStr
f.write('\tHRESULT hr=InvokeViaPolicy("%s", %s);\n' % (method.name, fullArgStr))
f.write(codePost)
if cout:
f.write("\tif (FAILED(hr)) return hr;\n")
f.write("\t// Process the Python results, and convert back to the real params\n")
# process the output arguments.
formatChars = codePobjects = codePost = argsParseTuple = ""
needConversion = 0
for arg in method.args:
if not arg.HasAttribute("out"):
continue
try:
argCvt = makegwparse.make_arg_converter(arg)
argCvt.SetGatewayMode()
val = argCvt.GetFormatChar()
if val:
formatChars = formatChars + val
argsParseTuple = argsParseTuple + ", " + argCvt.GetParseTupleArg()
codePobjects = codePobjects + argCvt.DeclareParseArgTupleInputConverter()
codePost = codePost + argCvt.GetParsePostCode()
needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
except makegwparse.error_not_supported, why:
f.write('// *** The output argument %s of type "%s" was not processed ***\n// %s\n' % (arg.name, arg.raw_type, why))
if formatChars: # If I have any to actually process.
if len(formatChars)==1:
parseFn = "PyArg_Parse"
else:
parseFn = "PyArg_ParseTuple"
if codePobjects: f.write(codePobjects)
f.write('\tif (!%s(result, "%s" %s))\n\t\treturn MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % (parseFn, formatChars, argsParseTuple, method.name))
if codePost:
f.write('\tBOOL bPythonIsHappy = TRUE;\n')
f.write(codePost)
f.write('\tif (!bPythonIsHappy) hr = MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % method.name)
f.write('\tPy_DECREF(result);\n');
f.write('\treturn hr;\n}\n\n')
def test():
# make_framework_support("d:\\msdev\\include\\objidl.h", "ILockBytes")
make_framework_support("d:\\msdev\\include\\objidl.h", "IStorage")
# make_framework_support("d:\\msdev\\include\\objidl.h", "IEnumSTATSTG")
| bsd-2-clause |
IAmWave/trinerdi-icpc | lib/googletest/test/gtest_xml_outfiles_test.py | 2526 | 5340 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "keith.ray@gmail.com (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
"""Unit test for Google Test's XML output functionality."""
def setUp(self):
# We want the trailing '/' that the last "" provides in os.path.join, for
# telling Google Test to create an output directory instead of a single file
# for xml output.
self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_OUTPUT_SUBDIR, "")
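# For reference: os.path.join(a, b, "") yields 'a/b/' with a trailing
# separator, and a --gtest_output=xml: value ending in a separator makes
# Google Test treat it as a directory, writing one XML file per test program.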
self.DeleteFilesAndDir()
def tearDown(self):
self.DeleteFilesAndDir()
def DeleteFilesAndDir(self):
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
except os.error:
pass
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
except os.error:
pass
try:
os.rmdir(self.output_dir_)
except os.error:
pass
def testOutfile1(self):
self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)
def testOutfile2(self):
self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)
def _TestOutFile(self, test_name, expected_xml):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
p = gtest_test_utils.Subprocess(command,
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
# TODO(wan@google.com): libtool causes the built test binary to be
# named lt-gtest_xml_outfiles_test_ instead of
# gtest_xml_outfiles_test_. To account for this possibility, we
# allow both names in the following code. We should remove this
# hack when Chandler Carruth's libtool replacement tool is ready.
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
output_file1)
expected = minidom.parseString(expected_xml)
if os.path.isfile(output_file1):
actual = minidom.parse(output_file1)
else:
actual = minidom.parse(output_file2)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == "__main__":
os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
gtest_test_utils.Main()
| mit |
instinct-vfx/rez | src/rez/vendor/packaging/utils.py | 62 | 1520 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import re
from .version import InvalidVersion, Version
_canonicalize_regex = re.compile(r"[-_.]+")
def canonicalize_name(name):
# This is taken from PEP 503.
return _canonicalize_regex.sub("-", name).lower()
def canonicalize_version(version):
"""
This is very similar to Version.__str__, but has one subtle difference
in the way it handles the release segment.
"""
try:
version = Version(version)
except InvalidVersion:
# Legacy versions cannot be normalized
return version
parts = []
# Epoch
if version.epoch != 0:
parts.append("{0}!".format(version.epoch))
# Release segment
# NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
# Pre-release
if version.pre is not None:
parts.append("".join(str(x) for x in version.pre))
# Post-release
if version.post is not None:
parts.append(".post{0}".format(version.post))
# Development release
if version.dev is not None:
parts.append(".dev{0}".format(version.dev))
# Local version segment
if version.local is not None:
parts.append("+{0}".format(version.local))
return "".join(parts)
| lgpl-3.0 |
fzheng/codejam | lib/python2.7/site-packages/pygments/lexers/theorem.py | 22 | 19030 | # -*- coding: utf-8 -*-
"""
pygments.lexers.theorem
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for theorem-proving languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
class CoqLexer(RegexLexer):
"""
For the `Coq <http://coq.inria.fr/>`_ theorem prover.
.. versionadded:: 1.5
"""
name = 'Coq'
aliases = ['coq']
filenames = ['*.v']
mimetypes = ['text/x-coq']
keywords1 = (
# Vernacular commands
'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
'Universe', 'Polymorphic', 'Monomorphic', 'Context'
)
keywords2 = (
# Gallina
'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
'for', 'of', 'nosimpl', 'with', 'as',
)
keywords3 = (
# Sorts
'Type', 'Prop',
)
keywords4 = (
# Tactics
'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
'native_compute', 'subst',
)
keywords5 = (
# Terminators
'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
'assumption', 'solve', 'contradiction', 'discriminate',
'congruence',
)
keywords6 = (
# Control
'do', 'last', 'first', 'try', 'idtac', 'repeat',
)
# 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
# 'downto', 'else', 'end', 'exception', 'external', 'false',
# 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
# 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
# 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
# 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
# 'type', 'val', 'virtual', 'when', 'while', 'with'
keyopts = (
'!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
'->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
'<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
r'/\\', r'\\/', r'\{\|', r'\|\}',
u'Π', u'λ',
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
prefix_syms = r'[!?~]'
infix_syms = r'[=<>@^|&+\*/$%-]'
primitives = ('unit', 'nat', 'bool', 'string', 'ascii', 'list')
tokens = {
'root': [
(r'\s+', Text),
(r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
(r'\(\*', Comment, 'comment'),
(words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
# (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name),
(r'(%s)' % '|'.join(keyopts[::-1]), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r"[^\W\d][\w']*", Name),
(r'\d[\d_]*', Number.Integer),
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
String.Char),
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name),
],
'comment': [
(r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'string': [
(r'[^"]+', String.Double),
(r'""', String.Double),
(r'"', String.Double, '#pop'),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
(r'[A-Z][\w\']*', Name.Class, '#pop'),
(r'[a-z][a-z0-9_\']*', Name, '#pop'),
default('#pop')
],
}
def analyse_text(text):
if text.startswith('(*'):
return True
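# A short usage sketch for the lexers in this module (standard pygments API;
# the Coq snippet below is an arbitrary example):
#
#   from pygments import highlight
#   from pygments.formatters import HtmlFormatter
#   from pygments.lexers.theorem import CoqLexer
#
#   print(highlight('Lemma trivial_lemma : True. Proof. exact I. Qed.',
#                   CoqLexer(), HtmlFormatter()))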
class IsabelleLexer(RegexLexer):
"""
For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
.. versionadded:: 2.0
"""
name = 'Isabelle'
aliases = ['isabelle']
filenames = ['*.thy']
mimetypes = ['text/x-isabelle']
keyword_minor = (
'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
'class_instance', 'class_relation', 'code_module', 'congs',
'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
'type_constructor', 'unchecked', 'unsafe', 'where',
)
keyword_diag = (
'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
'print_abbrevs', 'print_antiquotations', 'print_attributes',
'print_binds', 'print_bnfs', 'print_bundles',
'print_case_translations', 'print_cases', 'print_claset',
'print_classes', 'print_codeproc', 'print_codesetup',
'print_coercions', 'print_commands', 'print_context',
'print_defn_rules', 'print_dependencies', 'print_facts',
'print_induct_rules', 'print_inductives', 'print_interps',
'print_locale', 'print_locales', 'print_methods', 'print_options',
'print_orders', 'print_quot_maps', 'print_quotconsts',
'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
'print_rules', 'print_simpset', 'print_state', 'print_statement',
'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
)
keyword_thy = ('theory', 'begin', 'end')
keyword_section = ('header', 'chapter')
keyword_subsection = (
'section', 'subsection', 'subsubsection', 'sect', 'subsect',
'subsubsect',
)
keyword_theory_decl = (
'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
'code_abort', 'code_class', 'code_const', 'code_datatype',
'code_identifier', 'code_include', 'code_instance', 'code_modulename',
'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
'lifting_forget', 'lifting_update', 'local_setup', 'locale',
'method_setup', 'nitpick_params', 'no_adhoc_overloading',
'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
'overloading', 'parse_ast_translation', 'parse_translation',
'partial_function', 'primcorec', 'primrec', 'primrec_new',
'print_ast_translation', 'print_translation', 'quickcheck_generator',
'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
'text_raw', 'theorems', 'translations', 'type_notation',
'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
'bnf_axiomatization', 'cartouche', 'datatype_compat',
'free_constructors', 'functor', 'nominal_function',
'nominal_termination', 'permanent_interpretation',
'binds', 'defining', 'smt2_status', 'term_cartouche',
'boogie_file', 'text_cartouche',
)
keyword_theory_script = ('inductive_cases', 'inductive_simps')
keyword_theory_goal = (
'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
'crunch', 'crunch_ignore',
'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
'lift_definition', 'nominal_inductive', 'nominal_inductive2',
'nominal_primrec', 'pcpodef', 'primcorecursive',
'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
'theorem', 'typedef', 'wrap_free_constructors',
)
keyword_qed = ('by', 'done', 'qed')
keyword_abandon_proof = ('sorry', 'oops')
keyword_proof_goal = ('have', 'hence', 'interpret')
keyword_proof_block = ('next', 'proof')
keyword_proof_chain = (
'finally', 'from', 'then', 'ultimately', 'with',
)
keyword_proof_decl = (
'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
'txt', 'txt_raw', 'unfolding', 'using', 'write',
)
keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
keyword_proof_script = (
'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
)
operators = (
'::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
'+', '-', '!', '?',
)
proof_operators = ('{', '}', '.', '..')
tokens = {
'root': [
(r'\s+', Text),
(r'\(\*', Comment, 'comment'),
(r'\{\*', Comment, 'text'),
(words(operators), Operator),
(words(proof_operators), Operator.Word),
(words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
(words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
(words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
(words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(r'\\<\w*>', Text.Symbol),
(r"[^\W\d][.\w']*", Name),
(r"\?[^\W\d][.\w']*", Name),
(r"'[^\W\d][.\w']*", Name.Type),
(r'\d[\d_]*', Name), # display numbers as name
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'"', String, 'string'),
(r'`', String.Other, 'fact'),
],
'comment': [
(r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'text': [
(r'[^*}]+', Comment),
(r'\*\}', Comment, '#pop'),
(r'\*', Comment),
(r'\}', Comment),
],
'string': [
(r'[^"\\]+', String),
(r'\\<\w*>', String.Symbol),
(r'\\"', String),
(r'\\', String),
(r'"', String, '#pop'),
],
'fact': [
(r'[^`\\]+', String.Other),
(r'\\<\w*>', String.Symbol),
(r'\\`', String.Other),
(r'\\', String.Other),
(r'`', String.Other, '#pop'),
],
}
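# Usage sketch (assumption: the enclosing class is Pygments' IsabelleLexer, as
# defined in pygments/lexers/theorem.py):
#
#   from pygments import highlight
#   from pygments.lexers.theorem import IsabelleLexer
#   from pygments.formatters import TerminalFormatter
#   print(highlight('lemma foo: "x = x" by simp', IsabelleLexer(), TerminalFormatter()))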
class LeanLexer(RegexLexer):
"""
For the `Lean <https://github.com/leanprover/lean>`_
theorem prover.
.. versionadded:: 2.0
"""
name = 'Lean'
aliases = ['lean']
filenames = ['*.lean']
mimetypes = ['text/x-lean']
flags = re.MULTILINE | re.UNICODE
keywords1 = ('import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends',
                 'open', 'example', 'constant', 'constants', 'opaque', 'reducible', 'irreducible'
)
keywords2 = (
'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc', 'match'
)
keywords3 = (
# Sorts
'Type', 'Prop',
)
operators = (
'!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!', '`',
'-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
'<-', '=', '==', '>', '_', '`', '|', '||', '~', '=>', '<=', '>=',
'/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞', u'⌟', u'≡',
u'⟨', u'⟩'
)
punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')
tokens = {
'root': [
(r'\s+', Text),
(r'/-', Comment, 'comment'),
(r'--.*?$', Comment.Single),
(words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(words(operators), Name.Builtin.Pseudo),
(words(punctuation), Operator),
(u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
(r'\d+', Number.Integer),
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name.Variable)
],
'comment': [
# Multiline Comments
(r'[^/-]', Comment.Multiline),
(r'/-', Comment.Multiline, '#push'),
(r'-/', Comment.Multiline, '#pop'),
(r'[/-]', Comment.Multiline)
],
'string': [
(r'[^\\"]+', String.Double),
(r'\\[n"\\]', String.Escape),
('"', String.Double, '#pop'),
],
}
| mit |
mcus/SickRage | sickbeard/providers/bluetigers.py | 3 | 6075 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Author: echel0n <sickrage.tv@gmail.com>
# URL: http://www.github.com/sickragetv/sickrage/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import traceback
import re
from requests.auth import AuthBase
from sickbeard.providers import generic
import requests
from sickbeard.bs4_parser import BS4Parser
import logging
from sickbeard import tvcache
class BLUETIGERSProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, "BLUETIGERS")
self.supportsBacklog = True
self.username = None
self.password = None
self.ratio = None
self.token = None
self.tokenLastUpdate = None
self.cache = BLUETIGERSCache(self)
self.urls = {
'base_url': 'https://www.bluetigers.ca/',
'search': 'https://www.bluetigers.ca/torrents-search.php',
'login': 'https://www.bluetigers.ca/account-login.php',
'download': 'https://www.bluetigers.ca/torrents-details.php?id=%s&hit=1',
}
self.search_params = {
"c16": 1, "c10": 1, "c130": 1, "c131": 1, "c17": 1, "c18": 1, "c19": 1
}
self.url = self.urls[b'base_url']
def _doLogin(self):
if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
return True
login_params = {
'username': self.username,
'password': self.password,
'take_login': '1'
}
response = self.getURL(self.urls[b'login'], post_data=login_params, timeout=30)
if not response:
logging.warning("Unable to connect to provider")
return False
        # A logout link in the page means the session is authenticated.
        if re.search('/account-logout.php', response):
            return True
        logging.warning("Invalid username or password. Check your settings")
        return False
def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
# check for auth
if not self._doLogin():
return results
for mode in search_strings.keys():
logging.debug("Search Mode: %s" % mode)
for search_string in search_strings[mode]:
                if mode != 'RSS':
logging.debug("Search string: %s " % search_string)
self.search_params[b'search'] = search_string
data = self.getURL(self.urls[b'search'], params=self.search_params)
if not data:
continue
try:
with BS4Parser(data, features=["html5lib", "permissive"]) as html:
result_linkz = html.findAll('a', href=re.compile("torrents-details"))
if not result_linkz:
                            logging.debug("Data returned from provider does not contain any torrents")
continue
if result_linkz:
for link in result_linkz:
title = link.text
download_url = self.urls[b'base_url'] + "/" + link[b'href']
download_url = download_url.replace("torrents-details", "download")
# FIXME
size = -1
seeders = 1
leechers = 0
if not title or not download_url:
continue
# Filter unseeded torrent
# if seeders < self.minseed or leechers < self.minleech:
# if mode is not 'RSS':
# logging.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
# continue
item = title, download_url, size, seeders, leechers
                                if mode != 'RSS':
logging.debug("Found result: %s " % title)
items[mode].append(item)
except Exception as e:
logging.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())
# For each search mode sort all the items by seeders if available
items[mode].sort(key=lambda tup: tup[3], reverse=True)
results += items[mode]
return results
def seedRatio(self):
return self.ratio
class BLUETIGERSAuth(AuthBase):
"""Attaches HTTP Authentication to the given Request object."""
def __init__(self, token):
self.token = token
def __call__(self, r):
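        # requests calls this hook once per request when the instance is
        # passed as auth=...; note that nothing else in this module actually
        # instantiates BLUETIGERSAuth.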
r.headers[b'Authorization'] = self.token
return r
class BLUETIGERSCache(tvcache.TVCache):
def __init__(self, provider_obj):
tvcache.TVCache.__init__(self, provider_obj)
# Only poll BLUETIGERS every 10 minutes max
self.minTime = 10
def _getRSSData(self):
search_strings = {'RSS': ['']}
return {'entries': self.provider._doSearch(search_strings)}
provider = BLUETIGERSProvider()
| gpl-3.0 |
odooindia/odoo | addons/crm_helpdesk/crm_helpdesk.py | 8 | 7478 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp
from openerp.addons.crm import crm
from openerp.osv import fields, osv
from openerp import tools
from openerp.tools.translate import _
from openerp.tools import html2plaintext
class crm_helpdesk(osv.osv):
""" Helpdesk Cases """
_name = "crm.helpdesk"
_description = "Helpdesk"
_order = "id desc"
_inherit = ['mail.thread']
_columns = {
'id': fields.integer('ID', readonly=True),
'name': fields.char('Name', required=True),
'active': fields.boolean('Active', required=False),
'date_action_last': fields.datetime('Last Action', readonly=1),
'date_action_next': fields.datetime('Next Action', readonly=1),
'description': fields.text('Description'),
'create_date': fields.datetime('Creation Date' , readonly=True),
'write_date': fields.datetime('Update Date' , readonly=True),
'date_deadline': fields.date('Deadline'),
'user_id': fields.many2one('res.users', 'Responsible'),
'section_id': fields.many2one('crm.case.section', 'Sales Team', \
select=True, help='Responsible sales team. Define Responsible user and Email account for mail gateway.'),
'company_id': fields.many2one('res.company', 'Company'),
'date_closed': fields.datetime('Closed', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner'),
'email_cc': fields.text('Watchers Emails', size=252 , help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"),
'email_from': fields.char('Email', size=128, help="Destination email for email gateway"),
'date': fields.datetime('Date'),
'ref': fields.reference('Reference', selection=openerp.addons.base.res.res_request.referencable_models),
'ref2': fields.reference('Reference 2', selection=openerp.addons.base.res.res_request.referencable_models),
'channel_id': fields.many2one('crm.case.channel', 'Channel', help="Communication channel."),
'planned_revenue': fields.float('Planned Revenue'),
'planned_cost': fields.float('Planned Costs'),
'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority'),
'probability': fields.float('Probability (%)'),
'categ_id': fields.many2one('crm.case.categ', 'Category', \
domain="['|',('section_id','=',False),('section_id','=',section_id),\
('object_id.model', '=', 'crm.helpdesk')]"),
'duration': fields.float('Duration', states={'done': [('readonly', True)]}),
'state': fields.selection(
[('draft', 'New'),
('open', 'In Progress'),
('pending', 'Pending'),
('done', 'Closed'),
('cancel', 'Cancelled')], 'Status', readonly=True, track_visibility='onchange',
            help='The status is set to \'Draft\' when a case is created.\
\nIf the case is in progress the status is set to \'Open\'.\
\nWhen the case is over, the status is set to \'Done\'.\
\nIf the case needs to be reviewed then the status is set to \'Pending\'.'),
}
_defaults = {
'active': lambda *a: 1,
'user_id': lambda s, cr, uid, c: uid,
'state': lambda *a: 'draft',
'date': fields.datetime.now,
'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'crm.helpdesk', context=c),
'priority': '1',
}
def on_change_partner_id(self, cr, uid, ids, partner_id, context=None):
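        # Odoo on_change convention: return {'value': {...}} so the client
        # applies the field updates to the form.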
values = {}
if partner_id:
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
values = {
'email_from': partner.email,
}
return {'value': values}
def write(self, cr, uid, ids, values, context=None):
""" Override to add case management: open/close dates """
if values.get('state'):
if values.get('state') in ['draft', 'open'] and not values.get('date_open'):
values['date_open'] = fields.datetime.now()
            # 'done' is the closed state in this model's state selection
            elif values.get('state') == 'done' and not values.get('date_closed'):
values['date_closed'] = fields.datetime.now()
return super(crm_helpdesk, self).write(cr, uid, ids, values, context=context)
def case_escalate(self, cr, uid, ids, context=None):
""" Escalates case to parent level """
data = {'active': True}
for case in self.browse(cr, uid, ids, context=context):
if case.section_id and case.section_id.parent_id:
parent_id = case.section_id.parent_id
data['section_id'] = parent_id.id
if parent_id.change_responsible and parent_id.user_id:
data['user_id'] = parent_id.user_id.id
else:
raise osv.except_osv(_('Error!'), _('You can not escalate, you are already at the top level regarding your sales-team category.'))
self.write(cr, uid, [case.id], data, context=context)
return True
# -------------------------------------------------------
# Mail gateway
# -------------------------------------------------------
def message_new(self, cr, uid, msg, custom_values=None, context=None):
""" Overrides mail_thread message_new that is called by the mailgateway
through message_process.
This override updates the document according to the email.
"""
if custom_values is None:
custom_values = {}
desc = html2plaintext(msg.get('body')) if msg.get('body') else ''
defaults = {
'name': msg.get('subject') or _("No Subject"),
'description': desc,
'email_from': msg.get('from'),
'email_cc': msg.get('cc'),
'user_id': False,
'partner_id': msg.get('author_id', False),
}
defaults.update(custom_values)
return super(crm_helpdesk, self).message_new(cr, uid, msg, custom_values=defaults, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Kalyzee/edx-platform | cms/djangoapps/contentstore/views/tests/test_credit_eligibility.py | 121 | 2954 | """
Unit tests for credit eligibility UI in Studio.
"""
import mock
from contentstore.tests.utils import CourseTestCase
from contentstore.utils import reverse_course_url
from xmodule.modulestore.tests.factories import CourseFactory
from openedx.core.djangoapps.credit.api import get_credit_requirements
from openedx.core.djangoapps.credit.models import CreditCourse
from openedx.core.djangoapps.credit.signals import on_course_publish
class CreditEligibilityTest(CourseTestCase):
"""Base class to test the course settings details view in Studio for credit
eligibility requirements.
"""
def setUp(self):
super(CreditEligibilityTest, self).setUp()
self.course = CourseFactory.create(org='edX', number='dummy', display_name='Credit Course')
self.course_details_url = reverse_course_url('settings_handler', unicode(self.course.id))
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_CREDIT_ELIGIBILITY': False})
    def test_course_details_with_disabled_setting(self):
        """Test that users don't see credit eligibility requirements in the
        response if the feature flag 'ENABLE_CREDIT_ELIGIBILITY' is not enabled.
"""
response = self.client.get_html(self.course_details_url)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Course Credit Requirements")
self.assertNotContains(response, "Steps required to earn course credit")
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_CREDIT_ELIGIBILITY': True})
    def test_course_details_with_enabled_setting(self):
        """Test that credit eligibility requirements are present in the
        response if the feature flag 'ENABLE_CREDIT_ELIGIBILITY' is enabled.
"""
        # verify that the credit eligibility requirements block doesn't show
        # if the course is not set as a credit course
response = self.client.get_html(self.course_details_url)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Course Credit Requirements")
self.assertNotContains(response, "Steps required to earn course credit")
        # verify that the credit eligibility requirements block shows if the
        # course is set as a credit course and has eligibility requirements
credit_course = CreditCourse(course_key=unicode(self.course.id), enabled=True)
credit_course.save()
self.assertEqual(len(get_credit_requirements(self.course.id)), 0)
# test that after publishing course, minimum grade requirement is added
on_course_publish(self.course.id)
self.assertEqual(len(get_credit_requirements(self.course.id)), 1)
response = self.client.get_html(self.course_details_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Course Credit Requirements")
self.assertContains(response, "Steps required to earn course credit")
| agpl-3.0 |
indictranstech/buyback-erp | erpnext/controllers/buying_controller.py | 7 | 13466 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, msgprint
from frappe.utils import flt, rounded
from erpnext.setup.utils import get_company_currency
from erpnext.accounts.party import get_party_details
from erpnext.controllers.stock_controller import StockController
class BuyingController(StockController):
def __setup__(self):
if hasattr(self, "fname"):
self.table_print_templates = {
self.fname: "templates/print_formats/includes/item_grid.html",
"other_charges": "templates/print_formats/includes/taxes.html",
}
def validate(self):
super(BuyingController, self).validate()
if getattr(self, "supplier", None) and not self.supplier_name:
self.supplier_name = frappe.db.get_value("Supplier",
self.supplier, "supplier_name")
self.is_item_table_empty()
self.set_qty_as_per_stock_uom()
self.validate_stock_or_nonstock_items()
self.validate_warehouse()
def set_missing_values(self, for_validate=False):
super(BuyingController, self).set_missing_values(for_validate)
self.set_supplier_from_item_default()
self.set_price_list_currency("Buying")
# set contact and address details for supplier, if they are not mentioned
if getattr(self, "supplier", None):
self.update_if_missing(get_party_details(self.supplier, party_type="Supplier"))
self.set_missing_item_details()
if self.get("__islocal"):
self.set_taxes("other_charges", "taxes_and_charges")
def set_supplier_from_item_default(self):
if self.meta.get_field("supplier") and not self.supplier:
for d in self.get(self.fname):
supplier = frappe.db.get_value("Item", d.item_code, "default_supplier")
if supplier:
self.supplier = supplier
break
def validate_warehouse(self):
from erpnext.stock.utils import validate_warehouse_company
warehouses = list(set([d.warehouse for d in
self.get(self.fname) if getattr(d, "warehouse", None)]))
for w in warehouses:
validate_warehouse_company(w, self.company)
def validate_stock_or_nonstock_items(self):
if self.meta.get_field("other_charges") and not self.get_stock_items():
tax_for_valuation = [d.account_head for d in self.get("other_charges")
if d.category in ["Valuation", "Valuation and Total"]]
if tax_for_valuation:
frappe.throw(_("Tax Category can not be 'Valuation' or 'Valuation and Total' as all items are non-stock items"))
def set_total_in_words(self):
from frappe.utils import money_in_words
company_currency = get_company_currency(self.company)
if self.meta.get_field("in_words"):
self.in_words = money_in_words(self.grand_total, company_currency)
if self.meta.get_field("in_words_import"):
self.in_words_import = money_in_words(self.grand_total_import,
self.currency)
def calculate_taxes_and_totals(self):
self.other_fname = "other_charges"
super(BuyingController, self).calculate_taxes_and_totals()
self.calculate_total_advance("Purchase Invoice", "advance_allocation_details")
def calculate_item_values(self):
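        # Per item: apply discount_percentage to price_list_rate, compute
        # amount = rate * qty, then mirror rate/amount/price_list_rate into
        # company currency.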
for item in self.item_doclist:
self.round_floats_in(item)
if item.discount_percentage == 100.0:
item.rate = 0.0
elif not item.rate:
item.rate = flt(item.price_list_rate * (1.0 - (item.discount_percentage / 100.0)),
self.precision("rate", item))
item.amount = flt(item.rate * item.qty,
self.precision("amount", item))
item.item_tax_amount = 0.0;
self._set_in_company_currency(item, "amount", "base_amount")
self._set_in_company_currency(item, "price_list_rate", "base_price_list_rate")
self._set_in_company_currency(item, "rate", "base_rate")
def calculate_net_total(self):
self.net_total = self.net_total_import = 0.0
for item in self.item_doclist:
self.net_total += item.base_amount
self.net_total_import += item.amount
self.round_floats_in(self, ["net_total", "net_total_import"])
def calculate_totals(self):
self.grand_total = flt(self.tax_doclist[-1].total if self.tax_doclist else self.net_total)
self.grand_total_import = flt(self.grand_total / self.conversion_rate) \
if self.tax_doclist else self.net_total_import
self.total_tax = flt(self.grand_total - self.net_total, self.precision("total_tax"))
self.grand_total = flt(self.grand_total, self.precision("grand_total"))
self.grand_total_import = flt(self.grand_total_import, self.precision("grand_total_import"))
if self.meta.get_field("rounded_total"):
self.rounded_total = rounded(self.grand_total)
if self.meta.get_field("rounded_total_import"):
self.rounded_total_import = rounded(self.grand_total_import)
if self.meta.get_field("other_charges_added"):
self.other_charges_added = flt(sum([flt(d.tax_amount) for d in self.tax_doclist
if d.add_deduct_tax=="Add" and d.category in ["Valuation and Total", "Total"]]),
self.precision("other_charges_added"))
if self.meta.get_field("other_charges_deducted"):
self.other_charges_deducted = flt(sum([flt(d.tax_amount) for d in self.tax_doclist
if d.add_deduct_tax=="Deduct" and d.category in ["Valuation and Total", "Total"]]),
self.precision("other_charges_deducted"))
if self.meta.get_field("other_charges_added_import"):
self.other_charges_added_import = flt(self.other_charges_added /
self.conversion_rate, self.precision("other_charges_added_import"))
if self.meta.get_field("other_charges_deducted_import"):
self.other_charges_deducted_import = flt(self.other_charges_deducted /
self.conversion_rate, self.precision("other_charges_deducted_import"))
def calculate_outstanding_amount(self):
if self.doctype == "Purchase Invoice" and self.docstatus == 0:
self.total_advance = flt(self.total_advance,
self.precision("total_advance"))
self.total_amount_to_pay = flt(self.grand_total - flt(self.write_off_amount,
self.precision("write_off_amount")), self.precision("total_amount_to_pay"))
self.outstanding_amount = flt(self.total_amount_to_pay - self.total_advance,
self.precision("outstanding_amount"))
# update valuation rate
def update_valuation_rate(self, parentfield):
"""
        item_tax_amount is the total tax amount applied to that item,
        stored for valuation purposes.
TODO: rename item_tax_amount to valuation_tax_amount
"""
stock_items = self.get_stock_items()
stock_items_qty, stock_items_amount = 0, 0
last_stock_item_idx = 1
for d in self.get(parentfield):
if d.item_code and d.item_code in stock_items:
stock_items_qty += flt(d.qty)
stock_items_amount += flt(d.base_amount)
last_stock_item_idx = d.idx
total_valuation_amount = sum([flt(d.tax_amount) for d in
self.get("other_charges")
if d.category in ["Valuation", "Valuation and Total"]])
valuation_amount_adjustment = total_valuation_amount
for i, item in enumerate(self.get(parentfield)):
if item.item_code and item.qty and item.item_code in stock_items:
item_proportion = flt(item.base_amount) / stock_items_amount if stock_items_amount \
else flt(item.qty) / stock_items_qty
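                # Assign the rounding residual to the last stock item so the
                # per-item item_tax_amount values sum exactly to
                # total_valuation_amount.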
if i == (last_stock_item_idx - 1):
item.item_tax_amount = flt(valuation_amount_adjustment,
self.precision("item_tax_amount", item))
else:
item.item_tax_amount = flt(item_proportion * total_valuation_amount,
self.precision("item_tax_amount", item))
valuation_amount_adjustment -= item.item_tax_amount
self.round_floats_in(item)
item.conversion_factor = item.conversion_factor or flt(frappe.db.get_value(
"UOM Conversion Detail", {"parent": item.item_code, "uom": item.uom},
"conversion_factor")) or 1
qty_in_stock_uom = flt(item.qty * item.conversion_factor)
rm_supp_cost = flt(item.rm_supp_cost) if self.doctype=="Purchase Receipt" else 0.0
landed_cost_voucher_amount = flt(item.landed_cost_voucher_amount) \
if self.doctype == "Purchase Receipt" else 0.0
item.valuation_rate = ((item.base_amount + item.item_tax_amount + rm_supp_cost
+ landed_cost_voucher_amount) / qty_in_stock_uom)
else:
item.valuation_rate = 0.0
def validate_for_subcontracting(self):
if not self.is_subcontracted and self.sub_contracted_items:
frappe.throw(_("Please enter 'Is Subcontracted' as Yes or No"))
if self.doctype == "Purchase Receipt" and self.is_subcontracted=="Yes" \
and not self.supplier_warehouse:
frappe.throw(_("Supplier Warehouse mandatory for sub-contracted Purchase Receipt"))
def create_raw_materials_supplied(self, raw_material_table):
if self.is_subcontracted=="Yes":
parent_items = []
rm_supplied_idx = 0
for item in self.get(self.fname):
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = 0.0
if item.item_code in self.sub_contracted_items:
self.update_raw_materials_supplied(item, raw_material_table, rm_supplied_idx)
if [item.item_code, item.name] not in parent_items:
parent_items.append([item.item_code, item.name])
self.cleanup_raw_materials_supplied(parent_items, raw_material_table)
elif self.doctype == "Purchase Receipt":
for item in self.get(self.fname):
item.rm_supp_cost = 0.0
def update_raw_materials_supplied(self, item, raw_material_table, rm_supplied_idx):
bom_items = self.get_items_from_default_bom(item.item_code)
raw_materials_cost = 0
for bom_item in bom_items:
# check if exists
exists = 0
for d in self.get(raw_material_table):
if d.main_item_code == item.item_code and d.rm_item_code == bom_item.item_code \
and d.reference_name == item.name:
rm, exists = d, 1
break
if not exists:
rm = self.append(raw_material_table, {})
required_qty = flt(bom_item.qty_consumed_per_unit) * flt(item.qty) * flt(item.conversion_factor)
rm.reference_name = item.name
rm.bom_detail_no = bom_item.name
rm.main_item_code = item.item_code
rm.rm_item_code = bom_item.item_code
rm.stock_uom = bom_item.stock_uom
rm.required_qty = required_qty
rm.conversion_factor = item.conversion_factor
rm.idx = rm_supplied_idx
if self.doctype == "Purchase Receipt":
rm.consumed_qty = required_qty
rm.description = bom_item.description
if item.batch_no and not rm.batch_no:
rm.batch_no = item.batch_no
rm_supplied_idx += 1
# get raw materials rate
if self.doctype == "Purchase Receipt":
from erpnext.stock.utils import get_incoming_rate
rm.rate = get_incoming_rate({
"item_code": bom_item.item_code,
"warehouse": self.supplier_warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"qty": -1 * required_qty,
"serial_no": rm.serial_no
})
if not rm.rate:
from erpnext.stock.stock_ledger import get_valuation_rate
rm.rate = get_valuation_rate(bom_item.item_code, self.supplier_warehouse)
else:
rm.rate = bom_item.rate
rm.amount = required_qty * flt(rm.rate)
raw_materials_cost += flt(rm.amount)
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = raw_materials_cost
def cleanup_raw_materials_supplied(self, parent_items, raw_material_table):
"""Remove all those child items which are no longer present in main item table"""
delete_list = []
for d in self.get(raw_material_table):
if [d.main_item_code, d.reference_name] not in parent_items:
# mark for deletion from doclist
delete_list.append(d)
# delete from doclist
if delete_list:
rm_supplied_details = self.get(raw_material_table)
self.set(raw_material_table, [])
for d in rm_supplied_details:
if d not in delete_list:
self.append(raw_material_table, d)
def get_items_from_default_bom(self, item_code):
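        # Fetch raw-material lines from the item's default, active, submitted
        # BOM; qty_consumed_per_unit normalizes each line's qty by the BOM's
        # own production quantity (t1.quantity).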
bom_items = frappe.db.sql("""select t2.item_code,
ifnull(t2.qty, 0) / ifnull(t1.quantity, 1) as qty_consumed_per_unit,
t2.rate, t2.stock_uom, t2.name, t2.description
from `tabBOM` t1, `tabBOM Item` t2
where t2.parent = t1.name and t1.item = %s and t1.is_default = 1
and t1.docstatus = 1 and t1.is_active = 1""", item_code, as_dict=1)
if not bom_items:
msgprint(_("No default BOM exists for Item {0}").format(item_code), raise_exception=1)
return bom_items
@property
def sub_contracted_items(self):
if not hasattr(self, "_sub_contracted_items"):
self._sub_contracted_items = []
item_codes = list(set(item.item_code for item in
self.get(self.fname)))
if item_codes:
self._sub_contracted_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_sub_contracted_item='Yes'""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._sub_contracted_items
@property
def purchase_items(self):
if not hasattr(self, "_purchase_items"):
self._purchase_items = []
item_codes = list(set(item.item_code for item in
self.get(self.fname)))
if item_codes:
self._purchase_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_purchase_item='Yes'""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._purchase_items
def is_item_table_empty(self):
if not len(self.get(self.fname)):
frappe.throw(_("Item table can not be blank"))
def set_qty_as_per_stock_uom(self):
for d in self.get(self.fname):
if d.meta.get_field("stock_qty") and not d.stock_qty:
if not d.conversion_factor:
                    frappe.throw(_("Row {0}: Conversion Factor is mandatory").format(d.idx))
d.stock_qty = flt(d.qty) * flt(d.conversion_factor)
| agpl-3.0 |
Wattpad/dd-agent | tests/core/test_service_discovery.py | 4 | 27446 | # stdlib
import copy
import mock
import unittest
# 3p
from nose.plugins.attrib import attr
# project
from utils.service_discovery.config_stores import get_config_store
from utils.service_discovery.consul_config_store import ConsulStore
from utils.service_discovery.etcd_config_store import EtcdStore
from utils.service_discovery.abstract_config_store import AbstractConfigStore
from utils.service_discovery.sd_backend import get_sd_backend
from utils.service_discovery.sd_docker_backend import SDDockerBackend
def clear_singletons(agentConfig):
get_config_store(agentConfig)._drop()
get_sd_backend(agentConfig)._drop()
class Response(object):
"""Dummy response class for mocking purpose"""
def __init__(self, content):
self.content = content
def json(self):
return self.content
def raise_for_status(self):
pass
def _get_container_inspect(c_id):
"""Return a mocked container inspect dict from self.container_inspects."""
for co, _, _, _ in TestServiceDiscovery.container_inspects:
if co.get('Id') == c_id:
return co
return None
def _get_conf_tpls(image_name, trace_config=False, kube_annotations=None):
"""Return a mocked configuration template from self.mock_templates."""
return copy.deepcopy(TestServiceDiscovery.mock_templates.get(image_name)[0])
def _get_check_tpls(image_name, **kwargs):
if image_name in TestServiceDiscovery.mock_templates:
return [copy.deepcopy(TestServiceDiscovery.mock_templates.get(image_name)[0][0][0:3])]
elif image_name in TestServiceDiscovery.bad_mock_templates:
try:
return [copy.deepcopy(TestServiceDiscovery.bad_mock_templates.get(image_name))]
except Exception:
return None
def client_read(path, **kwargs):
"""Return a mocked string that would normally be read from a config store (etcd, consul...)."""
parts = path.split('/')
config_parts = ['check_names', 'init_configs', 'instances']
image, config_part = parts[-2], parts[-1]
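    # Example: path '/datadog/check_configs/nginx/check_names' yields
    # image='nginx' and config_part='check_names', i.e. the first element of
    # the raw template tuple for that image.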
if 'all' in kwargs:
return {}
else:
return TestServiceDiscovery.mock_tpls.get(image)[0][config_parts.index(config_part)]
def issue_read(identifier):
return TestServiceDiscovery.mock_tpls.get(identifier)
@attr('unix')
class TestServiceDiscovery(unittest.TestCase):
docker_container_inspect = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'nginx',
u'Name': u'/nginx',
u'NetworkSettings': {u'IPAddress': u'172.17.0.21', u'Ports': {u'443/tcp': None, u'80/tcp': None}}
}
docker_container_inspect_with_label = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'nginx',
u'Name': u'/nginx',
u'NetworkSettings': {u'IPAddress': u'172.17.0.21', u'Ports': {u'443/tcp': None, u'80/tcp': None}},
u'Labels': {'com.datadoghq.sd.check.id': 'custom-nginx'}
}
kubernetes_container_inspect = {
u'Id': u'389dc8a4361f3d6c866e9e9a7b6972b26a31c589c4e2f097375d55656a070bc9',
u'Image': u'foo',
u'Name': u'/k8s_sentinel.38057ab9_redis-master_default_27b84e1e-a81c-11e5-8347-42010af00002_f70875a1',
u'Config': {u'ExposedPorts': {u'6379/tcp': {}}},
u'NetworkSettings': {u'IPAddress': u'', u'Ports': None}
}
malformed_container_inspect = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'foo',
u'Name': u'/nginx'
}
container_inspects = [
# (inspect_dict, expected_ip, tpl_var, expected_port, expected_ident)
(docker_container_inspect, '172.17.0.21', 'port', '443', 'nginx'),
(docker_container_inspect_with_label, '172.17.0.21', 'port', '443', 'custom-nginx'),
(kubernetes_container_inspect, None, 'port', '6379', 'foo'), # arbitrarily defined in the mocked pod_list
(malformed_container_inspect, None, 'port', KeyError, 'foo')
]
# templates with variables already extracted
mock_templates = {
# image_name: ([(check_name, init_tpl, instance_tpl, variables)], (expected_config_template))
'image_0': (
[('check_0', {}, {'host': '%%host%%'}, ['host'])],
('check_0', {}, {'host': '127.0.0.1'})),
'image_1': (
[('check_1', {}, {'port': '%%port%%'}, ['port'])],
('check_1', {}, {'port': '1337'})),
'image_2': (
[('check_2', {}, {'host': '%%host%%', 'port': '%%port%%'}, ['host', 'port'])],
('check_2', {}, {'host': '127.0.0.1', 'port': '1337'})),
}
# raw templates coming straight from the config store
mock_tpls = {
# image_name: ('[check_name]', '[init_tpl]', '[instance_tpl]', expected_python_tpl_list)
'image_0': (
('["check_0"]', '[{}]', '[{"host": "%%host%%"}]'),
[('check_0', {}, {"host": "%%host%%"})]),
'image_1': (
('["check_1"]', '[{}]', '[{"port": "%%port%%"}]'),
[('check_1', {}, {"port": "%%port%%"})]),
'image_2': (
('["check_2"]', '[{}]', '[{"host": "%%host%%", "port": "%%port%%"}]'),
[('check_2', {}, {"host": "%%host%%", "port": "%%port%%"})]),
'bad_image_0': ((['invalid template']), []),
'bad_image_1': (('invalid template'), []),
'bad_image_2': (None, []),
'nginx': ('["nginx"]', '[{}]', '[{"host": "localhost"}]'),
'nginx:latest': ('["nginx"]', '[{}]', '[{"host": "localhost", "tags": ["foo"]}]'),
'custom-nginx': ('["nginx"]', '[{}]', '[{"host": "localhost"}]'),
'repo/custom-nginx': ('["nginx"]', '[{}]', '[{"host": "localhost", "tags": ["bar"]}]'),
'repo/dir:5000/custom-nginx:latest': ('["nginx"]', '[{}]', '[{"host": "local", "tags": ["foobar"]}]')
}
bad_mock_templates = {
'bad_image_0': ('invalid template'),
'bad_image_1': [('invalid template')],
'bad_image_2': None
}
def setUp(self):
self.etcd_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'sd_config_backend': 'etcd',
'sd_backend_host': '127.0.0.1',
'sd_backend_port': '2380'
}
self.consul_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'sd_config_backend': 'consul',
'sd_backend_host': '127.0.0.1',
'sd_backend_port': '8500'
}
self.auto_conf_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'additional_checksd': '/etc/dd-agent/checks.d/',
}
self.agentConfigs = [self.etcd_agentConfig, self.consul_agentConfig, self.auto_conf_agentConfig]
# sd_backend tests
@mock.patch('utils.http.requests.get')
@mock.patch('utils.kubernetes.kubeutil.check_yaml')
def test_get_host_address(self, mock_check_yaml, mock_get):
kubernetes_config = {'instances': [{'kubelet_port': 1337}]}
pod_list = {
'items': [{
'status': {
'podIP': '127.0.0.1',
'containerStatuses': [
{'containerID': 'docker://389dc8a4361f3d6c866e9e9a7b6972b26a31c589c4e2f097375d55656a070bc9'}
]
}
}]
}
# (inspect, tpl_var, expected_result)
ip_address_inspects = [
({'NetworkSettings': {}}, 'host', None),
({'NetworkSettings': {'IPAddress': ''}}, 'host', None),
({'NetworkSettings': {'IPAddress': '127.0.0.1'}}, 'host', '127.0.0.1'),
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Networks': {}}}, 'host', '127.0.0.1'),
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '127.0.0.1'}}}},
'host', '127.0.0.1'),
({'NetworkSettings': {
'IPAddress': '',
'Networks': {'bridge': {'IPAddress': '127.0.0.1'}}}},
'host_bridge', '127.0.0.1'),
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}}}},
'host', '172.17.0.2'),
({'NetworkSettings': {'Networks': {}}}, 'host', None),
({'NetworkSettings': {'Networks': {}}}, 'host_bridge', None),
({'NetworkSettings': {'Networks': {'bridge': {}}}}, 'host', None),
({'NetworkSettings': {'Networks': {'bridge': {}}}}, 'host_bridge', None),
({'NetworkSettings': {
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'}
}}},
'host_bridge', '172.17.0.2'),
({'NetworkSettings': {
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}
}}},
'host_foo', '192.168.0.2')
]
mock_check_yaml.return_value = kubernetes_config
mock_get.return_value = Response(pod_list)
for c_ins, tpl_var, expected_ip in ip_address_inspects:
with mock.patch.object(AbstractConfigStore, '__init__', return_value=None):
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch('utils.kubernetes.kubeutil.get_conf_path', return_value=None):
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEquals(sd_backend._get_host_address(c_ins, tpl_var), expected_ip)
clear_singletons(self.auto_conf_agentConfig)
def test_get_port(self):
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
for c_ins, _, var_tpl, expected_ports, _ in self.container_inspects:
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
if isinstance(expected_ports, str):
self.assertEquals(sd_backend._get_port(c_ins, var_tpl), expected_ports)
else:
self.assertRaises(expected_ports, sd_backend._get_port, c_ins, var_tpl)
clear_singletons(self.auto_conf_agentConfig)
@mock.patch('docker.Client.inspect_container', side_effect=_get_container_inspect)
@mock.patch.object(SDDockerBackend, '_get_config_templates', side_effect=_get_conf_tpls)
    # mock.patch decorators apply bottom-up: the bottom-most patch supplies
    # the first mock argument after self.
    def test_get_check_configs(self, mock_get_conf_tpls, mock_inspect_container):
        """Test _get_check_configs with mocked container inspect and config templates"""
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(SDDockerBackend, '_get_host_address', return_value='127.0.0.1'):
with mock.patch.object(SDDockerBackend, '_get_port', return_value='1337'):
c_id = self.docker_container_inspect.get('Id')
for image in self.mock_templates.keys():
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEquals(
sd_backend._get_check_configs(c_id, image)[0],
self.mock_templates[image][1])
clear_singletons(self.auto_conf_agentConfig)
@mock.patch.object(AbstractConfigStore, 'get_check_tpls', side_effect=_get_check_tpls)
def test_get_config_templates(self, mock_get_check_tpls):
"""Test _get_config_templates with mocked get_check_tpls"""
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(EtcdStore, 'get_client', return_value=None):
with mock.patch.object(ConsulStore, 'get_client', return_value=None):
for agentConfig in self.agentConfigs:
sd_backend = get_sd_backend(agentConfig=agentConfig)
# normal cases
for image in self.mock_templates.keys():
template = sd_backend._get_config_templates(image)
expected_template = self.mock_templates.get(image)[0]
self.assertEquals(template, expected_template)
# error cases
for image in self.bad_mock_templates.keys():
self.assertEquals(sd_backend._get_config_templates(image), None)
clear_singletons(agentConfig)
def test_render_template(self):
"""Test _render_template"""
valid_configs = [
(({}, {'host': '%%host%%'}, {'host': 'foo'}),
({}, {'host': 'foo'})),
(({}, {'host': '%%host%%', 'port': '%%port%%'}, {'host': 'foo', 'port': '1337'}),
({}, {'host': 'foo', 'port': '1337'})),
(({'foo': '%%bar%%'}, {}, {'bar': 'w00t'}),
({'foo': 'w00t'}, {})),
(({'foo': '%%bar%%'}, {'host': '%%host%%'}, {'bar': 'w00t', 'host': 'localhost'}),
({'foo': 'w00t'}, {'host': 'localhost'}))
]
invalid_configs = [
({}, {'host': '%%host%%'}, {}), # no value to use
({}, {'host': '%%host%%'}, {'port': 42}), # the variable name doesn't match
({'foo': '%%bar%%'}, {'host': '%%host%%'}, {'host': 'foo'}) # not enough value/no matching var name
]
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(EtcdStore, 'get_client', return_value=None):
with mock.patch.object(ConsulStore, 'get_client', return_value=None):
for agentConfig in self.agentConfigs:
sd_backend = get_sd_backend(agentConfig=agentConfig)
for tpl, res in valid_configs:
init, instance, variables = tpl
config = sd_backend._render_template(init, instance, variables)
self.assertEquals(config, res)
for init, instance, variables in invalid_configs:
config = sd_backend._render_template(init, instance, variables)
self.assertEquals(config, None)
clear_singletons(agentConfig)
def test_fill_tpl(self):
"""Test _fill_tpl with mocked docker client"""
valid_configs = [
# ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))
(({}, {'host': 'localhost'}, [], None), ({'host': 'localhost'}, {})),
(
({'NetworkSettings': {'IPAddress': ''}}, {'host': 'localhost'}, [], None),
({'host': 'localhost'}, {})
),
(
({'NetworkSettings': {'Networks': {}}}, {'host': 'localhost'}, [], None),
({'host': 'localhost'}, {})
),
(
({'NetworkSettings': {'Networks': {'bridge': {}}}}, {'host': 'localhost'}, [], None),
({'host': 'localhost'}, {})
),
(
({'NetworkSettings': {'IPAddress': '127.0.0.1'}},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
),
(
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Networks': {}}},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
),
(
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}
},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '172.17.0.2'}),
),
(
({'NetworkSettings': {
'IPAddress': '',
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}
}}
},
{'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
{'host_bridge': '172.17.0.2'}),
),
(
({'NetworkSettings': {
'IPAddress': '',
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}
}}
},
{'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
({'host': '%%host_foo%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
{'host_foo': '192.168.0.2'}),
),
(
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
{'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test']},
['host', 'port_1'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test', 'foo', 'bar:baz']},
{'host': '127.0.0.1', 'port_1': '42'})
)
]
# should not fail but return something specific
edge_cases = [
# ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))
# specify bridge but there is also a default IPAddress (networks should be preferred)
(
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
{'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
{'host_bridge': '172.17.0.2'})
),
# specify index but there is a default IPAddress (there's a specifier, even if it's wrong, walking networks should be preferred)
(
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
{'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
({'host': '%%host_0%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host_0': '172.17.0.2'}),
),
# missing key for host, bridge network should be preferred
(
({'NetworkSettings': {'Networks': {
'bridge': {'IPAddress': '127.0.0.1'},
'foo': {'IPAddress': '172.17.0.2'}}}},
{'host': '%%host_bar%%', 'port': 1337}, ['host_bar'], []),
({'host': '%%host_bar%%', 'port': 1337}, {'host_bar': '127.0.0.1'}),
),
# missing index for port
(
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
{'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test']},
['host', 'port_2'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test', 'foo', 'bar:baz']},
{'host': '127.0.0.1', 'port_2': '42'})
)
]
# should raise
invalid_config = [
# ((inspect, instance_tpl, variables, tags), expected_exception)
# template variable but no IPAddress available
(
({'NetworkSettings': {'Networks': {}}},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
Exception,
),
# index but no IPAddress available
(
({'NetworkSettings': {'Networks': {}}},
{'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
Exception,
),
# key but no IPAddress available
(
({'NetworkSettings': {'Networks': {}}},
{'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
Exception,
),
# template variable but no port available
(
({'NetworkSettings': {'Networks': {}}},
{'host': 'localhost', 'port': '%%port%%'}, ['port'], []),
Exception,
),
# index but no port available
(
({'NetworkSettings': {'Networks': {}}},
{'host': 'localhost', 'port_0': '%%port%%'}, ['port_0'], []),
Exception,
),
# key but no port available
(
({'NetworkSettings': {'Networks': {}}},
{'host': 'localhost', 'port': '%%port_foo%%'}, ['port_foo'], []),
Exception,
)
]
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(EtcdStore, 'get_client', return_value=None):
with mock.patch.object(ConsulStore, 'get_client', return_value=None):
for ac in self.agentConfigs:
sd_backend = get_sd_backend(agentConfig=ac)
try:
for co in valid_configs + edge_cases:
inspect, tpl, variables, tags = co[0]
instance_tpl, var_values = sd_backend._fill_tpl(inspect, tpl, variables, tags)
for key in instance_tpl.keys():
if isinstance(instance_tpl[key], list):
self.assertEquals(len(instance_tpl[key]), len(co[1][0].get(key)))
for elem in instance_tpl[key]:
self.assertTrue(elem in co[1][0].get(key))
else:
self.assertEquals(instance_tpl[key], co[1][0].get(key))
self.assertEquals(var_values, co[1][1])
for co in invalid_config:
inspect, tpl, variables, tags = co[0]
                                self.assertRaises(co[1], sd_backend._fill_tpl, inspect, tpl, variables, tags)
clear_singletons(ac)
except Exception:
clear_singletons(ac)
raise
# config_stores tests
def test_get_auto_config(self):
"""Test _get_auto_config"""
expected_tpl = {
'redis': ('redisdb', None, {"host": "%%host%%", "port": "%%port%%"}),
'consul': ('consul', None, {
"url": "http://%%host%%:%%port%%", "catalog_checks": True, "new_leader_checks": True
}),
'redis:v1': ('redisdb', None, {"host": "%%host%%", "port": "%%port%%"}),
'foobar': None
}
config_store = get_config_store(self.auto_conf_agentConfig)
for image in expected_tpl.keys():
config = config_store._get_auto_config(image)
self.assertEquals(config, expected_tpl.get(image))
@mock.patch.object(AbstractConfigStore, 'client_read', side_effect=client_read)
def test_get_check_tpls(self, mock_client_read):
"""Test get_check_tpls"""
valid_config = ['image_0', 'image_1', 'image_2']
invalid_config = ['bad_image_0', 'bad_image_1']
config_store = get_config_store(self.auto_conf_agentConfig)
for image in valid_config:
tpl = self.mock_tpls.get(image)[1]
self.assertEquals(tpl, config_store.get_check_tpls(image))
for image in invalid_config:
tpl = self.mock_tpls.get(image)[1]
self.assertEquals(tpl, config_store.get_check_tpls(image))
@mock.patch.object(AbstractConfigStore, 'client_read', side_effect=client_read)
    def test_get_check_tpls_kube(self, mock_client_read):
        """Test get_check_tpls with kube annotations"""
valid_config = ['image_0', 'image_1', 'image_2']
invalid_config = ['bad_image_0']
config_store = get_config_store(self.auto_conf_agentConfig)
for image in valid_config + invalid_config:
tpl = self.mock_tpls.get(image)[1]
if tpl:
self.assertNotEquals(
tpl,
config_store.get_check_tpls('k8s-' + image, auto_conf=True))
self.assertEquals(
tpl,
config_store.get_check_tpls(
'k8s-' + image, auto_conf=True,
kube_annotations=dict(zip(
['com.datadoghq.sd/check_names',
'com.datadoghq.sd/init_configs',
'com.datadoghq.sd/instances'],
self.mock_tpls[image][0]))))
def test_get_config_id(self):
"""Test get_config_id"""
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
for c_ins, _, _, _, expected_ident in self.container_inspects:
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEqual(
sd_backend.get_config_id(c_ins.get('Image'), c_ins.get('Labels', {})),
expected_ident)
clear_singletons(self.auto_conf_agentConfig)
    @mock.patch.object(AbstractConfigStore, '_issue_read', side_effect=issue_read)
    def test_read_config_from_store(self, mock_issue_read):
"""Test read_config_from_store"""
valid_idents = [('nginx', 'nginx'), ('nginx:latest', 'nginx:latest'),
('custom-nginx', 'custom-nginx'), ('custom-nginx:latest', 'custom-nginx'),
('repo/custom-nginx:latest', 'custom-nginx'),
('repo/dir:5000/custom-nginx:latest', 'repo/dir:5000/custom-nginx:latest')]
invalid_idents = ['foo']
config_store = get_config_store(self.auto_conf_agentConfig)
for ident, expected_key in valid_idents:
tpl = config_store.read_config_from_store(ident)
# source is added after reading from the store
self.assertEquals(tpl, ('template',) + self.mock_tpls.get(expected_key))
for ident in invalid_idents:
self.assertEquals(config_store.read_config_from_store(ident), [])
| bsd-3-clause |
billy-inn/scikit-learn | examples/linear_model/lasso_dense_vs_sparse_data.py | 348 | 1862 | """
==============================
Lasso on dense and sparse data
==============================
We show that linear_model.Lasso provides the same results for dense and sparse
data and that in the case of sparse data the speed is improved.
"""
print(__doc__)
from time import time
from scipy import sparse
from scipy import linalg
from sklearn.datasets.samples_generator import make_regression
from sklearn.linear_model import Lasso
###############################################################################
# The two Lasso implementations on Dense data
print("--- Dense matrices")
X, y = make_regression(n_samples=200, n_features=5000, random_state=0)
X_sp = sparse.coo_matrix(X)
alpha = 1
sparse_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=1000)
dense_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=1000)
t0 = time()
sparse_lasso.fit(X_sp, y)
print("Sparse Lasso done in %fs" % (time() - t0))
t0 = time()
dense_lasso.fit(X, y)
print("Dense Lasso done in %fs" % (time() - t0))
print("Distance between coefficients : %s"
% linalg.norm(sparse_lasso.coef_ - dense_lasso.coef_))
###############################################################################
# The two Lasso implementations on Sparse data
print("--- Sparse matrices")
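# Zero out most entries so the same data becomes effectively sparse; the
# density printed below is the percentage of non-zero values.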
Xs = X.copy()
Xs[Xs < 2.5] = 0.0
Xs = sparse.coo_matrix(Xs)
Xs = Xs.tocsc()
print("Matrix density : %s %%" % (Xs.nnz / float(X.size) * 100))
alpha = 0.1
sparse_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=10000)
dense_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=10000)
t0 = time()
sparse_lasso.fit(Xs, y)
print("Sparse Lasso done in %fs" % (time() - t0))
t0 = time()
dense_lasso.fit(Xs.toarray(), y)
print("Dense Lasso done in %fs" % (time() - t0))
print("Distance between coefficients : %s"
% linalg.norm(sparse_lasso.coef_ - dense_lasso.coef_))
| bsd-3-clause |
stephen144/odoo | openerp/addons/base/ir/ir_ui_menu.py | 30 | 14357 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import operator
import re
import threading
import openerp
from openerp.osv import fields, osv
from openerp import api, tools
from openerp.http import request
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
MENU_ITEM_SEPARATOR = "/"
class ir_ui_menu(osv.osv):
_name = 'ir.ui.menu'
def __init__(self, *args, **kwargs):
super(ir_ui_menu, self).__init__(*args, **kwargs)
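        # Tie this model's caches to ir.model.access so that any ACL change
        # invalidates the cached visible-menu computation below.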
self.pool['ir.model.access'].register_cache_clearing_method(self._name, 'clear_caches')
@api.model
@tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
def _visible_menu_ids(self, debug=False):
""" Return the ids of the menu items visible to the user. """
# retrieve all menus, and determine which ones are visible
context = {'ir.ui.menu.full_list': True}
menus = self.with_context(context).search([])
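        # Outside debug mode, drop the technical-features group
        # (base.group_no_one) so menus restricted to it stay hidden.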
groups = self.env.user.groups_id if debug else self.env.user.groups_id - self.env.ref('base.group_no_one')
# first discard all menus with groups the user does not have
menus = menus.filtered(
lambda menu: not menu.groups_id or menu.groups_id & groups)
# take apart menus that have an action
action_menus = menus.filtered(lambda m: m.action and m.action.exists())
folder_menus = menus - action_menus
visible = self.browse()
# process action menus, check whether their action is allowed
access = self.env['ir.model.access']
model_fname = {
'ir.actions.act_window': 'res_model',
'ir.actions.report.xml': 'model',
'ir.actions.server': 'model_id',
}
for menu in action_menus:
fname = model_fname.get(menu.action._name)
if not fname or not menu.action[fname] or \
access.check(menu.action[fname], 'read', False):
# make menu visible, and its folder ancestors, too
visible += menu
menu = menu.parent_id
while menu and menu in folder_menus and menu not in visible:
visible += menu
menu = menu.parent_id
return set(visible.ids)
@api.multi
@api.returns('self')
def _filter_visible_menus(self):
""" Filter `self` to only keep the menu items that should be visible in
the menu hierarchy of the current user.
Uses a cache for speeding up the computation.
"""
visible_ids = self._visible_menu_ids(request.debug if request else False)
return self.filtered(lambda menu: menu.id in visible_ids)
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
if context is None:
context = {}
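        # Fetch without offset/limit first: the visibility filter below
        # changes the result set, so slicing can only happen afterwards.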
ids = super(ir_ui_menu, self).search(cr, uid, args, offset=0,
limit=None, order=order, context=context, count=False)
if not ids:
if count:
return 0
return []
# menu filtering is done only on main menu tree, not other menu lists
if context.get('ir.ui.menu.full_list'):
result = ids
else:
result = self._filter_visible_menus(cr, uid, ids, context=context)
if offset:
result = result[long(offset):]
if limit:
result = result[:long(limit)]
if count:
return len(result)
return result
def name_get(self, cr, uid, ids, context=None):
res = []
for id in ids:
elmt = self.browse(cr, uid, id, context=context)
res.append((id, self._get_one_full_name(elmt)))
return res
def _get_full_name(self, cr, uid, ids, name=None, args=None, context=None):
if context is None:
context = {}
res = {}
for elmt in self.browse(cr, uid, ids, context=context):
res[elmt.id] = self._get_one_full_name(elmt)
return res
def _get_one_full_name(self, elmt, level=6):
if level<=0:
return '...'
if elmt.parent_id:
parent_path = self._get_one_full_name(elmt.parent_id, level-1) + MENU_ITEM_SEPARATOR
else:
parent_path = ''
return parent_path + elmt.name
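    # Example (illustrative menu names, added): for "Users" under
    # "Users & Companies" under "Settings", this returns
    # "Settings/Users & Companies/Users"; ancestors beyond the `level` budget
    # (6 by default) collapse into a leading "...".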
def create(self, cr, uid, values, context=None):
self.clear_caches()
return super(ir_ui_menu, self).create(cr, uid, values, context=context)
def write(self, cr, uid, ids, values, context=None):
self.clear_caches()
return super(ir_ui_menu, self).write(cr, uid, ids, values, context=context)
def unlink(self, cr, uid, ids, context=None):
# Detach children and promote them to top-level, because it would be unwise to
# cascade-delete submenus blindly. We also can't use ondelete=set null because
# that is not supported when _parent_store is used (would silently corrupt it).
# TODO: ideally we should move them under a generic "Orphans" menu somewhere?
if isinstance(ids, (int, long)):
ids = [ids]
local_context = dict(context or {})
local_context['ir.ui.menu.full_list'] = True
direct_children_ids = self.search(cr, uid, [('parent_id', 'in', ids)], context=local_context)
if direct_children_ids:
self.write(cr, uid, direct_children_ids, {'parent_id': False})
result = super(ir_ui_menu, self).unlink(cr, uid, ids, context=context)
self.clear_caches()
return result
def copy(self, cr, uid, id, default=None, context=None):
res = super(ir_ui_menu, self).copy(cr, uid, id, default=default, context=context)
datas=self.read(cr,uid,[res],['name'])[0]
rex=re.compile('\([0-9]+\)')
concat=rex.findall(datas['name'])
if concat:
next_num=int(concat[0])+1
datas['name']=rex.sub(('(%d)'%next_num),datas['name'])
else:
datas['name'] += '(1)'
self.write(cr,uid,[res],{'name':datas['name']})
return res
def read_image(self, path):
if not path:
return False
path_info = path.split(',')
icon_path = openerp.modules.get_module_resource(path_info[0],path_info[1])
icon_image = False
if icon_path:
            # open the file outside the try block so a failed open cannot
            # trigger a NameError on icon_file in the finally clause
            icon_file = tools.file_open(icon_path, 'rb')
            try:
                icon_image = base64.encodestring(icon_file.read())
            finally:
                icon_file.close()
return icon_image
def get_needaction_data(self, cr, uid, ids, context=None):
""" Return for each menu entry of ids :
- if it uses the needaction mechanism (needaction_enabled)
- the needaction counter of the related action, taking into account
the action domain
"""
if context is None:
context = {}
res = {}
menu_ids = set()
for menu in self.browse(cr, uid, ids, context=context):
menu_ids.add(menu.id)
ctx = None
if menu.action and menu.action.type in ('ir.actions.act_window', 'ir.actions.client') and menu.action.context:
try:
# use magical UnquoteEvalContext to ignore undefined client-side variables such as `active_id`
eval_ctx = tools.UnquoteEvalContext(**context)
ctx = eval(menu.action.context, locals_dict=eval_ctx, nocopy=True) or None
except Exception:
# if the eval still fails for some reason, we'll simply skip this menu
pass
menu_ref = ctx and ctx.get('needaction_menu_ref')
if menu_ref:
if not isinstance(menu_ref, list):
menu_ref = [menu_ref]
model_data_obj = self.pool.get('ir.model.data')
for menu_data in menu_ref:
try:
model, id = model_data_obj.get_object_reference(cr, uid, menu_data.split('.')[0], menu_data.split('.')[1])
if (model == 'ir.ui.menu'):
menu_ids.add(id)
except Exception:
pass
menu_ids = list(menu_ids)
for menu in self.browse(cr, uid, menu_ids, context=context):
res[menu.id] = {
'needaction_enabled': False,
'needaction_counter': False,
}
if menu.action and menu.action.type in ('ir.actions.act_window', 'ir.actions.client') and menu.action.res_model:
if menu.action.res_model in self.pool:
obj = self.pool[menu.action.res_model]
if obj._needaction:
if menu.action.type == 'ir.actions.act_window':
eval_context = self.pool['ir.actions.act_window']._get_eval_context(cr, uid, context=context)
dom = menu.action.domain and eval(menu.action.domain, eval_context) or []
else:
dom = eval(menu.action.params_store or '{}', {'uid': uid}).get('domain')
res[menu.id]['needaction_enabled'] = obj._needaction
res[menu.id]['needaction_counter'] = obj._needaction_count(cr, uid, dom, context=context)
return res
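    # Shape of the returned mapping (illustrative ids and counts, added):
    #   {42: {'needaction_enabled': True, 'needaction_counter': 7},
    #    43: {'needaction_enabled': False, 'needaction_counter': False}}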
def get_user_roots(self, cr, uid, context=None):
""" Return all root menu ids visible for the user.
:return: the root menu ids
:rtype: list(int)
"""
menu_domain = [('parent_id', '=', False)]
return self.search(cr, uid, menu_domain, context=context)
@api.cr_uid_context
@tools.ormcache_context('uid', keys=('lang',))
def load_menus_root(self, cr, uid, context=None):
fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
menu_root_ids = self.get_user_roots(cr, uid, context=context)
menu_roots = self.read(cr, uid, menu_root_ids, fields, context=context) if menu_root_ids else []
return {
'id': False,
'name': 'root',
'parent_id': [-1, ''],
'children': menu_roots,
'all_menu_ids': menu_root_ids,
}
@api.cr_uid_context
@tools.ormcache_context('uid', 'debug', keys=('lang',))
def load_menus(self, cr, uid, debug, context=None):
""" Loads all menu items (all applications and their sub-menus).
:return: the menu root
:rtype: dict('children': menu_nodes)
"""
fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
menu_root_ids = self.get_user_roots(cr, uid, context=context)
menu_roots = self.read(cr, uid, menu_root_ids, fields, context=context) if menu_root_ids else []
menu_root = {
'id': False,
'name': 'root',
'parent_id': [-1, ''],
'children': menu_roots,
'all_menu_ids': menu_root_ids,
}
if not menu_roots:
return menu_root
        # menus are loaded fully, unlike a regular tree view, because the
        # number of items is limited (752 when all 6.1 addons are installed)
menu_ids = self.search(cr, uid, [('id', 'child_of', menu_root_ids)], 0, False, False, context=context)
menu_items = self.read(cr, uid, menu_ids, fields, context=context)
# adds roots at the end of the sequence, so that they will overwrite
# equivalent menu items from full menu read when put into id:item
# mapping, resulting in children being correctly set on the roots.
menu_items.extend(menu_roots)
menu_root['all_menu_ids'] = menu_ids # includes menu_root_ids!
# make a tree using parent_id
menu_items_map = dict(
(menu_item["id"], menu_item) for menu_item in menu_items)
for menu_item in menu_items:
if menu_item['parent_id']:
parent = menu_item['parent_id'][0]
else:
parent = False
if parent in menu_items_map:
menu_items_map[parent].setdefault(
'children', []).append(menu_item)
# sort by sequence a tree using parent_id
for menu_item in menu_items:
menu_item.setdefault('children', []).sort(
key=operator.itemgetter('sequence'))
return menu_root
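    # Rough shape of the returned tree (illustrative values, added; no real
    # database behind them):
    #   {'id': False, 'name': 'root', 'parent_id': [-1, ''],
    #    'children': [{'id': 1, 'name': 'Settings', 'sequence': 10,
    #                  'children': [...]}, ...],
    #    'all_menu_ids': [1, 2, 3]}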
_columns = {
'name': fields.char('Menu', required=True, translate=True),
'sequence': fields.integer('Sequence'),
'child_id': fields.one2many('ir.ui.menu', 'parent_id', 'Child IDs'),
'parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', select=True, ondelete="restrict"),
'parent_left': fields.integer('Parent Left', select=True),
'parent_right': fields.integer('Parent Right', select=True),
'groups_id': fields.many2many('res.groups', 'ir_ui_menu_group_rel',
'menu_id', 'gid', 'Groups', help="If you have groups, the visibility of this menu will be based on these groups. "\
"If this field is empty, Odoo will compute visibility based on the related object's read access."),
'complete_name': fields.function(_get_full_name, string='Full Path', type='char'),
'web_icon': fields.char('Web Icon File'),
'action': fields.reference('Action', selection=[
('ir.actions.report.xml', 'ir.actions.report.xml'),
('ir.actions.act_window', 'ir.actions.act_window'),
('ir.actions.act_url', 'ir.actions.act_url'),
('ir.actions.server', 'ir.actions.server'),
('ir.actions.client', 'ir.actions.client'),
]),
}
web_icon_data = openerp.fields.Binary('Web Icon Image',
compute="_compute_web_icon", store=True, attachment=True)
@api.depends('web_icon')
def _compute_web_icon(self):
for menu in self:
menu.web_icon_data = self.read_image(menu.web_icon)
_constraints = [
(osv.osv._check_recursion, 'Error ! You can not create recursive Menu.', ['parent_id'])
]
_defaults = {
'sequence': 10,
}
_order = "sequence,id"
_parent_store = True
| agpl-3.0 |
mweisman/QGIS | python/plugins/db_manager/db_manager.py | 5 | 13145 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from .info_viewer import InfoViewer
from .table_viewer import TableViewer
from .layer_preview import LayerPreview
from .db_tree import DBTree
from .db_plugins.plugin import BaseError
from .dlg_db_error import DlgDbError
class DBManager(QMainWindow):
def __init__(self, iface, parent=None):
QMainWindow.__init__(self, parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self.setupUi()
self.iface = iface
# restore the window state
settings = QSettings()
self.restoreGeometry( settings.value("/DB_Manager/mainWindow/geometry", QByteArray(), type=QByteArray ) )
self.restoreState( settings.value("/DB_Manager/mainWindow/windowState", QByteArray(), type=QByteArray ) )
self.connect(self.tabs, SIGNAL("currentChanged(int)"), self.tabChanged)
self.connect(self.tree, SIGNAL("selectedItemChanged"), self.itemChanged)
self.itemChanged(None)
def closeEvent(self, e):
self.unregisterAllActions()
# save the window state
settings = QSettings()
settings.setValue( "/DB_Manager/mainWindow/windowState", self.saveState() )
settings.setValue( "/DB_Manager/mainWindow/geometry", self.saveGeometry() )
QMainWindow.closeEvent(self, e)
def refreshItem(self, item=None):
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
if item == None:
item = self.tree.currentItem()
self.tree.refreshItem(item) # refresh item children in the db tree
except BaseError, e:
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def itemChanged(self, item):
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
self.reloadButtons()
self.refreshTabs()
except BaseError, e:
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def reloadButtons(self):
db = self.tree.currentDatabase()
if not hasattr(self, '_lastDb'):
self._lastDb = db
elif db == self._lastDb:
return
# remove old actions
if self._lastDb != None:
self.unregisterAllActions()
# add actions of the selected database
self._lastDb = db
if self._lastDb != None:
self._lastDb.registerAllActions(self)
def tabChanged(self, index):
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
self.refreshTabs()
except BaseError, e:
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def refreshTabs(self):
index = self.tabs.currentIndex()
item = self.tree.currentItem()
table = self.tree.currentTable()
# enable/disable tabs
self.tabs.setTabEnabled( self.tabs.indexOf(self.table), table != None )
self.tabs.setTabEnabled( self.tabs.indexOf(self.preview), table != None and table.type in [table.VectorType, table.RasterType] and table.geomColumn != None )
# show the info tab if the current tab is disabled
if not self.tabs.isTabEnabled( index ):
self.tabs.setCurrentWidget( self.info )
current_tab = self.tabs.currentWidget()
if current_tab == self.info:
self.info.showInfo( item )
elif current_tab == self.table:
self.table.loadData( item )
elif current_tab == self.preview:
self.preview.loadPreview( item )
def refreshActionSlot(self):
self.info.setDirty()
self.table.setDirty()
self.preview.setDirty()
self.refreshItem()
def importActionSlot(self):
db = self.tree.currentDatabase()
if db is None:
QMessageBox.information(self, self.tr("Sorry"), self.tr("No database selected or you are not connected to it."))
return
outUri = db.uri()
schema = self.tree.currentSchema()
if schema:
outUri.setDataSource( schema.name, "", "", "" )
from .dlg_import_vector import DlgImportVector
dlg = DlgImportVector(None, db, outUri, self)
dlg.exec_()
def exportActionSlot(self):
table = self.tree.currentTable()
if table is None:
QMessageBox.information(self, self.tr("Sorry"), self.tr("Select the table you want export to file."))
return
inLayer = table.toMapLayer()
from .dlg_export_vector import DlgExportVector
dlg = DlgExportVector(inLayer, table.database(), self)
dlg.exec_()
inLayer.deleteLater()
def runSqlWindow(self):
db = self.tree.currentDatabase()
if db == None:
QMessageBox.information(self, self.tr("Sorry"), self.tr("No database selected or you are not connected to it."))
return
from dlg_sql_window import DlgSqlWindow
dlg = DlgSqlWindow(self.iface, db, self)
#refreshDb = lambda x: self.refreshItem( db.connection() ) # refresh the database tree
#self.connect( dlg, SIGNAL( "queryExecuted(const QString &)" ), refreshDb )
dlg.show()
dlg.exec_()
def showSystemTables(self):
self.tree.showSystemTables( self.actionShowSystemTables.isChecked() )
def registerAction(self, action, menuName, callback=None):
""" register an action to the manager's main menu """
if not hasattr(self, '_registeredDbActions'):
self._registeredDbActions = {}
if callback != None:
invoke_callback = lambda x: self.invokeCallback( callback )
if menuName == None or menuName == "":
self.addAction( action )
if not self._registeredDbActions.has_key(menuName):
self._registeredDbActions[menuName] = list()
self._registeredDbActions[menuName].append(action)
if callback != None:
QObject.connect( action, SIGNAL("triggered(bool)"), invoke_callback )
return True
# search for the menu
actionMenu = None
helpMenuAction = None
for a in self.menuBar.actions():
if not a.menu() or a.menu().title() != menuName:
continue
if a.menu() != self.menuHelp:
helpMenuAction = a
actionMenu = a
break
# not found, add a new menu before the help menu
if actionMenu == None:
menu = QMenu(menuName, self)
if helpMenuAction != None:
actionMenu = self.menuBar.insertMenu(helpMenuAction, menu)
else:
actionMenu = self.menuBar.addMenu(menu)
menu = actionMenu.menu()
menuActions = menu.actions()
# get the placeholder's position to insert before it
pos = 0
for pos in range(len(menuActions)):
if menuActions[pos].isSeparator() and menuActions[pos].objectName().endswith("_placeholder"):
menuActions[pos].setVisible(True)
break
if pos < len(menuActions):
before = menuActions[pos]
menu.insertAction( before, action )
else:
menu.addAction( action )
actionMenu.setVisible(True) # show the menu
if not self._registeredDbActions.has_key(menuName):
self._registeredDbActions[menuName] = list()
self._registeredDbActions[menuName].append(action)
if callback != None:
QObject.connect( action, SIGNAL("triggered(bool)"), invoke_callback )
return True
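    # Illustrative registration from a database plugin (added; the action and
    # callback names are hypothetical):
    #   action = QAction(QIcon(), "&Run Vacuum", mainWindow)
    #   mainWindow.registerAction(action, "&Database", callback=run_vacuum)
    # run_vacuum then receives (current_tree_item, sender_action, mainWindow),
    # as wired through invokeCallback below.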
def invokeCallback(self, callback, params=None):
""" Call a method passing the selected item in the database tree,
the sender (usually a QAction), the plugin mainWindow and
optionally additional parameters.
This method takes care to override and restore the cursor,
but also catches exceptions and displays the error dialog.
"""
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
if params is None:
callback( self.tree.currentItem(), self.sender(), self )
else:
callback( self.tree.currentItem(), self.sender(), self, *params )
except BaseError, e:
# catch database errors and display the error dialog
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def unregisterAction(self, action, menuName):
if not hasattr(self, '_registeredDbActions'):
return
if menuName == None or menuName == "":
self.removeAction( action )
if self._registeredDbActions.has_key(menuName):
if self._registeredDbActions[menuName].count( action ) > 0:
self._registeredDbActions[menuName].remove( action )
action.deleteLater()
return True
for a in self.menuBar.actions():
if not a.menu() or a.menu().title() != menuName:
continue
menu = a.menu()
menuActions = menu.actions()
menu.removeAction( action )
if menu.isEmpty(): # hide the menu
a.setVisible(False)
if self._registeredDbActions.has_key(menuName):
if self._registeredDbActions[menuName].count( action ) > 0:
self._registeredDbActions[menuName].remove( action )
                    # hide the placeholder if there are no other registered actions
if len(self._registeredDbActions[menuName]) <= 0:
for i in range(len(menuActions)):
if menuActions[i].isSeparator() and menuActions[i].objectName().endswith("_placeholder"):
menuActions[i].setVisible(False)
break
action.deleteLater()
return True
return False
def unregisterAllActions(self):
if not hasattr(self, '_registeredDbActions'):
return
for menuName in self._registeredDbActions:
for action in list(self._registeredDbActions[menuName]):
self.unregisterAction( action, menuName )
del self._registeredDbActions
def setupUi(self):
self.setWindowTitle(self.tr("DB Manager"))
self.setWindowIcon(QIcon(":/db_manager/icon"))
self.resize(QSize(700,500).expandedTo(self.minimumSizeHint()))
# create central tab widget
self.tabs = QTabWidget()
self.info = InfoViewer(self)
self.tabs.addTab(self.info, self.tr("Info"))
self.table = TableViewer(self)
self.tabs.addTab(self.table, self.tr("Table"))
self.preview = LayerPreview(self)
self.tabs.addTab(self.preview, self.tr("Preview"))
self.setCentralWidget(self.tabs)
# create database tree
self.dock = QDockWidget("Tree", self)
self.dock.setObjectName("DB_Manager_DBView")
self.dock.setFeatures(QDockWidget.DockWidgetMovable)
self.tree = DBTree(self)
self.dock.setWidget(self.tree)
self.addDockWidget(Qt.LeftDockWidgetArea, self.dock)
# create status bar
self.statusBar = QStatusBar(self)
self.setStatusBar(self.statusBar)
# create menus
self.menuBar = QMenuBar(self)
self.menuDb = QMenu(self.tr("&Database"), self)
actionMenuDb = self.menuBar.addMenu(self.menuDb)
self.menuSchema = QMenu(self.tr("&Schema"), self)
actionMenuSchema = self.menuBar.addMenu(self.menuSchema)
self.menuTable = QMenu(self.tr("&Table"), self)
actionMenuTable = self.menuBar.addMenu(self.menuTable)
self.menuHelp = None # QMenu(self.tr("&Help"), self)
#actionMenuHelp = self.menuBar.addMenu(self.menuHelp)
self.setMenuBar(self.menuBar)
# create toolbar
self.toolBar = QToolBar("Default", self)
self.toolBar.setObjectName("DB_Manager_ToolBar")
self.addToolBar(self.toolBar)
# create menus' actions
# menu DATABASE
sep = self.menuDb.addSeparator(); sep.setObjectName("DB_Manager_DbMenu_placeholder"); sep.setVisible(False)
self.actionRefresh = self.menuDb.addAction( QIcon(":/db_manager/actions/refresh"), self.tr("&Refresh"), self.refreshActionSlot, QKeySequence("F5") )
self.actionSqlWindow = self.menuDb.addAction( QIcon(":/db_manager/actions/sql_window"), self.tr("&SQL window"), self.runSqlWindow, QKeySequence("F2") )
self.menuDb.addSeparator()
self.actionClose = self.menuDb.addAction( QIcon(), self.tr("&Exit"), self.close, QKeySequence("CTRL+Q") )
# menu SCHEMA
sep = self.menuSchema.addSeparator(); sep.setObjectName("DB_Manager_SchemaMenu_placeholder"); sep.setVisible(False)
actionMenuSchema.setVisible(False)
# menu TABLE
sep = self.menuTable.addSeparator(); sep.setObjectName("DB_Manager_TableMenu_placeholder"); sep.setVisible(False)
self.actionImport = self.menuTable.addAction( QIcon(":/db_manager/actions/import"), self.tr("&Import layer/file"), self.importActionSlot )
self.actionExport = self.menuTable.addAction( QIcon(":/db_manager/actions/export"), self.tr("&Export to file"), self.exportActionSlot )
self.menuTable.addSeparator()
#self.actionShowSystemTables = self.menuTable.addAction(self.tr("Show system tables/views"), self.showSystemTables)
#self.actionShowSystemTables.setCheckable(True)
#self.actionShowSystemTables.setChecked(True)
actionMenuTable.setVisible(False)
# add actions to the toolbar
self.toolBar.addAction( self.actionRefresh )
self.toolBar.addAction( self.actionSqlWindow )
self.toolBar.addAction( self.actionImport )
self.toolBar.addAction( self.actionExport )
| gpl-2.0 |
klickagent/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/factory_unittest.py | 118 | 3965 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.port import factory
from webkitpy.port import gtk
from webkitpy.port import mac
from webkitpy.port import qt
from webkitpy.port import test
from webkitpy.port import win
class FactoryTest(unittest.TestCase):
"""Test that the factory creates the proper port object for given combination of port_name, host.platform, and options."""
# FIXME: The ports themselves should expose what options they require,
# instead of passing generic "options".
def setUp(self):
self.webkit_options = MockOptions(pixel_tests=False)
def assert_port(self, port_name=None, os_name=None, os_version=None, options=None, cls=None):
host = MockSystemHost(os_name=os_name, os_version=os_version)
port = factory.PortFactory(host).get(port_name, options=options)
self.assertIsInstance(port, cls)
def test_mac(self):
self.assert_port(port_name='mac-lion', cls=mac.MacPort)
self.assert_port(port_name='mac-lion-wk2', cls=mac.MacPort)
self.assert_port(port_name='mac', os_name='mac', os_version='lion', cls=mac.MacPort)
self.assert_port(port_name=None, os_name='mac', os_version='lion', cls=mac.MacPort)
def test_win(self):
self.assert_port(port_name='win-xp', cls=win.WinPort)
self.assert_port(port_name='win-xp-wk2', cls=win.WinPort)
self.assert_port(port_name='win', os_name='win', os_version='xp', cls=win.WinPort)
self.assert_port(port_name=None, os_name='win', os_version='xp', cls=win.WinPort)
self.assert_port(port_name=None, os_name='win', os_version='xp', options=self.webkit_options, cls=win.WinPort)
def test_gtk(self):
self.assert_port(port_name='gtk', cls=gtk.GtkPort)
def test_qt(self):
self.assert_port(port_name='qt', cls=qt.QtPort)
def test_unknown_specified(self):
self.assertRaises(NotImplementedError, factory.PortFactory(MockSystemHost()).get, port_name='unknown')
def test_unknown_default(self):
self.assertRaises(NotImplementedError, factory.PortFactory(MockSystemHost(os_name='vms')).get)
def test_get_from_builder_name(self):
self.assertEqual(factory.PortFactory(MockSystemHost()).get_from_builder_name('Apple Lion Release WK1 (Tests)').name(),
'mac-lion')
| bsd-3-clause |
firerszd/kbengine | kbe/res/scripts/common/Lib/test/test_multibytecodec.py | 72 | 9977 | #
# test_multibytecodec.py
# Unit test for multibytecodec itself
#
from test import support
from test.support import TESTFN
import unittest, io, codecs, sys, os
import _multibytecodec
ALL_CJKENCODINGS = [
# _codecs_cn
'gb2312', 'gbk', 'gb18030', 'hz',
# _codecs_hk
'big5hkscs',
# _codecs_jp
'cp932', 'shift_jis', 'euc_jp', 'euc_jisx0213', 'shift_jisx0213',
'euc_jis_2004', 'shift_jis_2004',
# _codecs_kr
'cp949', 'euc_kr', 'johab',
# _codecs_tw
'big5', 'cp950',
# _codecs_iso2022
'iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2', 'iso2022_jp_2004',
'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr',
]
class Test_MultibyteCodec(unittest.TestCase):
def test_nullcoding(self):
for enc in ALL_CJKENCODINGS:
self.assertEqual(b''.decode(enc), '')
self.assertEqual(str(b'', enc), '')
self.assertEqual(''.encode(enc), b'')
def test_str_decode(self):
for enc in ALL_CJKENCODINGS:
self.assertEqual('abcd'.encode(enc), b'abcd')
def test_errorcallback_longindex(self):
dec = codecs.getdecoder('euc-kr')
myreplace = lambda exc: ('', sys.maxsize+1)
codecs.register_error('test.cjktest', myreplace)
self.assertRaises(IndexError, dec,
b'apple\x92ham\x93spam', 'test.cjktest')
def test_codingspec(self):
try:
for enc in ALL_CJKENCODINGS:
code = '# coding: {}\n'.format(enc)
exec(code)
finally:
support.unlink(TESTFN)
def test_init_segfault(self):
# bug #3305: this used to segfault
self.assertRaises(AttributeError,
_multibytecodec.MultibyteStreamReader, None)
self.assertRaises(AttributeError,
_multibytecodec.MultibyteStreamWriter, None)
def test_decode_unicode(self):
        # Trying to decode a unicode string should raise a TypeError
for enc in ALL_CJKENCODINGS:
self.assertRaises(TypeError, codecs.getdecoder(enc), "")
class Test_IncrementalEncoder(unittest.TestCase):
def test_stateless(self):
# cp949 encoder isn't stateful at all.
encoder = codecs.getincrementalencoder('cp949')()
self.assertEqual(encoder.encode('\ud30c\uc774\uc36c \ub9c8\uc744'),
b'\xc6\xc4\xc0\xcc\xbd\xe3 \xb8\xb6\xc0\xbb')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('\u2606\u223c\u2606', True),
b'\xa1\xd9\xa1\xad\xa1\xd9')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('', True), b'')
self.assertEqual(encoder.encode('', False), b'')
self.assertEqual(encoder.reset(), None)
def test_stateful(self):
        # jisx0213 encoder is stateful for a few codepoints, e.g.:
# U+00E6 => A9DC
# U+00E6 U+0300 => ABC4
# U+0300 => ABDC
encoder = codecs.getincrementalencoder('jisx0213')()
self.assertEqual(encoder.encode('\u00e6\u0300'), b'\xab\xc4')
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertEqual(encoder.encode('\u0300'), b'\xab\xc4')
self.assertEqual(encoder.encode('\u00e6', True), b'\xa9\xdc')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('\u0300'), b'\xab\xdc')
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertEqual(encoder.encode('', True), b'\xa9\xdc')
self.assertEqual(encoder.encode('', True), b'')
def test_stateful_keep_buffer(self):
encoder = codecs.getincrementalencoder('jisx0213')()
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertRaises(UnicodeEncodeError, encoder.encode, '\u0123')
self.assertEqual(encoder.encode('\u0300\u00e6'), b'\xab\xc4')
self.assertRaises(UnicodeEncodeError, encoder.encode, '\u0123')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('\u0300'), b'\xab\xdc')
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertRaises(UnicodeEncodeError, encoder.encode, '\u0123')
self.assertEqual(encoder.encode('', True), b'\xa9\xdc')
def test_issue5640(self):
encoder = codecs.getincrementalencoder('shift-jis')('backslashreplace')
self.assertEqual(encoder.encode('\xff'), b'\\xff')
self.assertEqual(encoder.encode('\n'), b'\n')
class Test_IncrementalDecoder(unittest.TestCase):
def test_dbcs(self):
# cp949 decoder is simple with only 1 or 2 bytes sequences.
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0\xcc\xbd'),
'\ud30c\uc774')
self.assertEqual(decoder.decode(b'\xe3 \xb8\xb6\xc0\xbb'),
'\uc36c \ub9c8\uc744')
self.assertEqual(decoder.decode(b''), '')
def test_dbcs_keep_buffer(self):
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0'), '\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', True)
self.assertEqual(decoder.decode(b'\xcc'), '\uc774')
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0'), '\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode,
b'\xcc\xbd', True)
self.assertEqual(decoder.decode(b'\xcc'), '\uc774')
def test_iso2022(self):
decoder = codecs.getincrementaldecoder('iso2022-jp')()
ESC = b'\x1b'
self.assertEqual(decoder.decode(ESC + b'('), '')
self.assertEqual(decoder.decode(b'B', True), '')
self.assertEqual(decoder.decode(ESC + b'$'), '')
self.assertEqual(decoder.decode(b'B@$'), '\u4e16')
self.assertEqual(decoder.decode(b'@$@'), '\u4e16')
self.assertEqual(decoder.decode(b'$', True), '\u4e16')
self.assertEqual(decoder.reset(), None)
self.assertEqual(decoder.decode(b'@$'), '@$')
self.assertEqual(decoder.decode(ESC + b'$'), '')
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', True)
self.assertEqual(decoder.decode(b'B@$'), '\u4e16')
def test_decode_unicode(self):
        # Trying to decode a unicode string should raise a TypeError
for enc in ALL_CJKENCODINGS:
decoder = codecs.getincrementaldecoder(enc)()
self.assertRaises(TypeError, decoder.decode, "")
class Test_StreamReader(unittest.TestCase):
def test_bug1728403(self):
try:
f = open(TESTFN, 'wb')
try:
f.write(b'\xa1')
finally:
f.close()
f = codecs.open(TESTFN, encoding='cp949')
try:
self.assertRaises(UnicodeDecodeError, f.read, 2)
finally:
f.close()
finally:
support.unlink(TESTFN)
class Test_StreamWriter(unittest.TestCase):
def test_gb18030(self):
s= io.BytesIO()
c = codecs.getwriter('gb18030')(s)
c.write('123')
self.assertEqual(s.getvalue(), b'123')
c.write('\U00012345')
self.assertEqual(s.getvalue(), b'123\x907\x959')
c.write('\uac00\u00ac')
self.assertEqual(s.getvalue(),
b'123\x907\x959\x827\xcf5\x810\x851')
def test_utf_8(self):
s= io.BytesIO()
c = codecs.getwriter('utf-8')(s)
c.write('123')
self.assertEqual(s.getvalue(), b'123')
c.write('\U00012345')
self.assertEqual(s.getvalue(), b'123\xf0\x92\x8d\x85')
c.write('\uac00\u00ac')
self.assertEqual(s.getvalue(),
b'123\xf0\x92\x8d\x85'
b'\xea\xb0\x80\xc2\xac')
def test_streamwriter_strwrite(self):
s = io.BytesIO()
wr = codecs.getwriter('gb18030')(s)
wr.write('abcd')
self.assertEqual(s.getvalue(), b'abcd')
class Test_ISO2022(unittest.TestCase):
def test_g2(self):
iso2022jp2 = b'\x1b(B:hu4:unit\x1b.A\x1bNi de famille'
uni = ':hu4:unit\xe9 de famille'
self.assertEqual(iso2022jp2.decode('iso2022-jp-2'), uni)
def test_iso2022_jp_g0(self):
self.assertNotIn(b'\x0e', '\N{SOFT HYPHEN}'.encode('iso-2022-jp-2'))
for encoding in ('iso-2022-jp-2004', 'iso-2022-jp-3'):
e = '\u3406'.encode(encoding)
self.assertFalse(any(x > 0x80 for x in e))
def test_bug1572832(self):
for x in range(0x10000, 0x110000):
# Any ISO 2022 codec will cause the segfault
chr(x).encode('iso_2022_jp', 'ignore')
class TestStateful(unittest.TestCase):
text = '\u4E16\u4E16'
encoding = 'iso-2022-jp'
expected = b'\x1b$B@$@$'
reset = b'\x1b(B'
expected_reset = expected + reset
def test_encode(self):
self.assertEqual(self.text.encode(self.encoding), self.expected_reset)
def test_incrementalencoder(self):
encoder = codecs.getincrementalencoder(self.encoding)()
output = b''.join(
encoder.encode(char)
for char in self.text)
self.assertEqual(output, self.expected)
self.assertEqual(encoder.encode('', final=True), self.reset)
self.assertEqual(encoder.encode('', final=True), b'')
def test_incrementalencoder_final(self):
encoder = codecs.getincrementalencoder(self.encoding)()
last_index = len(self.text) - 1
output = b''.join(
encoder.encode(char, index == last_index)
for index, char in enumerate(self.text))
self.assertEqual(output, self.expected_reset)
self.assertEqual(encoder.encode('', final=True), b'')
class TestHZStateful(TestStateful):
text = '\u804a\u804a'
encoding = 'hz'
expected = b'~{ADAD'
reset = b'~}'
expected_reset = expected + reset
def test_main():
support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
| lgpl-3.0 |
openhatch/oh-mainline | mysite/search/migrations/0021_remove_icon_for_profile_since_we_realized_we_do_not_need_it.py | 17 | 3787 | # This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.search.models import *
class Migration:
def forwards(self, orm):
# Deleting field 'Project.icon_for_profile'
db.delete_column('search_project', 'icon_for_profile')
def backwards(self, orm):
# Adding field 'Project.icon_for_profile'
db.add_column('search_project', 'icon_for_profile', orm['search.project:icon_for_profile'])
models = {
'search.bug': {
'bize_size_tag_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'canonical_bug_link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'date_reported': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {}),
'good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'last_polled': ('django.db.models.fields.DateTimeField', [], {}),
'last_touched': ('django.db.models.fields.DateTimeField', [], {}),
'looks_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'people_involved': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'submitter_realname': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'submitter_username': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'search.project': {
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_raw': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['search']
| agpl-3.0 |
broxtronix/distributed | distributed/tests/test_joblib.py | 2 | 1853 | from __future__ import print_function, division, absolute_import
import pytest
from random import random
from time import sleep
from distributed.utils_test import inc, cluster, loop
backend = pytest.importorskip('distributed.joblib')
joblibs = [backend.joblib, backend.sk_joblib]
def slow_raise_value_error(condition, duration=0.05):
sleep(duration)
if condition:
raise ValueError("condition evaluated to True")
@pytest.mark.parametrize('joblib', joblibs)
def test_simple(loop, joblib):
if joblib is None:
pytest.skip()
Parallel = joblib.Parallel
delayed = joblib.delayed
with cluster() as (s, [a, b]):
with joblib.parallel_backend('dask.distributed', loop=loop,
scheduler_host=('127.0.0.1', s['port'])):
seq = Parallel()(delayed(inc)(i) for i in range(10))
assert seq == [inc(i) for i in range(10)]
with pytest.raises(ValueError):
Parallel()(delayed(slow_raise_value_error)(i == 3)
for i in range(10))
seq = Parallel()(delayed(inc)(i) for i in range(10))
assert seq == [inc(i) for i in range(10)]
ba, _ = joblib.parallel.get_active_backend()
ba.client.shutdown()
def random2():
return random()
@pytest.mark.parametrize('joblib', joblibs)
def test_dont_assume_function_purity(loop, joblib):
if joblib is None:
pytest.skip()
Parallel = joblib.Parallel
delayed = joblib.delayed
with cluster() as (s, [a, b]):
with joblib.parallel_backend('dask.distributed', loop=loop,
scheduler_host=('127.0.0.1', s['port'])):
x, y = Parallel()(delayed(random2)() for i in range(2))
assert x != y
ba, _ = joblib.parallel.get_active_backend()
ba.client.shutdown()
| bsd-3-clause |
chrish42/pylearn | pylearn2/sandbox/cuda_convnet/tests/test_common.py | 49 | 2802 | __authors__ = "Ian Goodfellow, David Warde-Farley"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow, David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from pylearn2.testing.skip import skip_if_no_gpu
skip_if_no_gpu()
import numpy as np
from theano import shared
from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from pylearn2.sandbox.cuda_convnet.img_acts import ImageActs
from theano.sandbox.cuda import gpu_from_host
from theano import function
from theano.tensor import as_tensor_variable
def test_reject_rect():
for cls in (FilterActs, ImageActs):
        # Tests that running FilterActs or ImageActs with a non-square
        # kernel is an error
rng = np.random.RandomState([2012, 10, 9])
batch_size = 5
rows = 10
cols = 9
channels = 3
filter_rows = 4
filter_cols = filter_rows + 1
num_filters = 6
images = shared(rng.uniform(-1., 1., (channels, rows, cols,
batch_size)).astype('float32'), name='images')
filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
filter_cols, num_filters)).astype('float32'), name='filters')
gpu_images = gpu_from_host(images)
gpu_filters = gpu_from_host(filters)
if cls is ImageActs:
output = cls()(gpu_images, gpu_filters,
as_tensor_variable((rows, cols)))
else:
output = cls()(gpu_images, gpu_filters)
f = function([], output)
try:
output = f()
except ValueError:
continue
assert False
def test_reject_bad_filt_number():
for cls in (FilterActs, ImageActs):
        # Tests that running FilterActs or ImageActs with a number of
        # filters per group that is not a multiple of 16 is an error
rng = np.random.RandomState([2012, 10, 9])
batch_size = 5
rows = 10
cols = 9
channels = 3
filter_rows = 4
filter_cols = filter_rows
num_filters = 6
images = shared(rng.uniform(-1., 1., (channels, rows, cols,
batch_size)).astype('float32'), name='images')
filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
filter_cols, num_filters)).astype('float32'), name='filters')
gpu_images = gpu_from_host(images)
gpu_filters = gpu_from_host(filters)
if cls is ImageActs:
output = cls()(gpu_images, gpu_filters,
as_tensor_variable((rows, cols)))
else:
output = cls()(gpu_images, gpu_filters)
f = function([], output)
try:
output = f()
except ValueError:
continue
assert False
| bsd-3-clause |
sdh11/gnuradio | grc/converter/block.py | 3 | 8171 | # Copyright 2016 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# GNU Radio Companion is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# GNU Radio Companion is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
Converter for legacy block definitions in XML format
- Cheetah expressions that cannot be converted are passed to Cheetah for now
- Instead of generating a Block subclass directly, a string representation is
  used and evaluated. This is slower, but it lets us show the user what a
  converted definition would look like
"""
from __future__ import absolute_import, division, print_function
from collections import OrderedDict, defaultdict
from itertools import chain
from ..core.io import yaml
from . import cheetah_converter, xml
current_file_format = 1
reserved_block_keys = ('import', ) # todo: add more keys
def from_xml(filename):
"""Load block description from xml file"""
element, version_info = xml.load(filename, 'block.dtd')
try:
data = convert_block_xml(element)
except NameError:
raise ValueError('Conversion failed', filename)
return data
def dump(data, stream):
out = yaml.dump(data)
replace = [
('parameters:', '\nparameters:'),
('inputs:', '\ninputs:'),
('outputs:', '\noutputs:'),
('templates:', '\ntemplates:'),
('documentation:', '\ndocumentation:'),
('file_format:', '\nfile_format:'),
]
for r in replace:
out = out.replace(*r)
prefix = '# auto-generated by grc.converter\n\n'
stream.write(prefix + out)
no_value = object()
dummy = cheetah_converter.DummyConverter()
def convert_block_xml(node):
converter = cheetah_converter.Converter(names={
param_node.findtext('key'): {
opt_node.text.split(':')[0]
for opt_node in next(param_node.iterfind('option'), param_node).iterfind('opt')
} for param_node in node.iterfind('param')
})
block_id = node.findtext('key')
if block_id in reserved_block_keys:
block_id += '_'
data = OrderedDict()
data['id'] = block_id
data['label'] = node.findtext('name') or no_value
data['category'] = node.findtext('category') or no_value
data['flags'] = [n.text for n in node.findall('flags')]
data['flags'] += ['show_id'] if block_id.startswith('variable') else []
if not data['flags']:
data['flags'] = no_value
data['parameters'] = [convert_param_xml(param_node, converter.to_python_dec)
for param_node in node.iterfind('param')] or no_value
# data['params'] = {p.pop('key'): p for p in data['params']}
data['inputs'] = [convert_port_xml(port_node, converter.to_python_dec)
for port_node in node.iterfind('sink')] or no_value
data['outputs'] = [convert_port_xml(port_node, converter.to_python_dec)
for port_node in node.iterfind('source')] or no_value
data['value'] = (
converter.to_python_dec(node.findtext('var_value')) or
('${ value }' if block_id.startswith('variable') else no_value)
)
data['asserts'] = [converter.to_python_dec(check_node.text)
for check_node in node.iterfind('check')] or no_value
data['templates'] = convert_templates(node, converter.to_mako, block_id) or no_value
docs = node.findtext('doc')
if docs:
docs = docs.strip().replace('\\\n', '')
data['documentation'] = yaml.MultiLineString(docs)
data['file_format'] = current_file_format
data = OrderedDict((key, value) for key, value in data.items() if value is not no_value)
auto_hide_params_for_item_sizes(data)
return data
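# Rough shape of the converted mapping (illustrative values, added; the field
# contents are assumptions about a typical legacy XML block):
#   OrderedDict([('id', 'my_block'), ('label', 'My Block'),
#                ('category', '[Core]'), ('parameters', [...]),
#                ('inputs', [...]), ('outputs', [...]),
#                ('templates', OrderedDict([...])), ('file_format', 1)])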
def auto_hide_params_for_item_sizes(data):
item_size_templates = []
vlen_templates = []
for port in chain(*[data.get(direction, []) for direction in ['inputs', 'outputs']]):
for key in ['dtype', 'multiplicity']:
item_size_templates.append(str(port.get(key, '')))
vlen_templates.append(str(port.get('vlen', '')))
item_size_templates = ' '.join(value for value in item_size_templates if '${' in value)
vlen_templates = ' '.join(value for value in vlen_templates if '${' in value)
for param in data.get('parameters', []):
if param['id'] in item_size_templates:
param.setdefault('hide', 'part')
if param['id'] in vlen_templates:
param.setdefault('hide', "${ 'part' if vlen == 1 else 'none' }")
def convert_templates(node, convert, block_id=''):
templates = OrderedDict()
imports = '\n'.join(convert(import_node.text)
for import_node in node.iterfind('import'))
if '\n' in imports:
imports = yaml.MultiLineString(imports)
templates['imports'] = imports or no_value
templates['var_make'] = convert(node.findtext('var_make') or '') or no_value
make = convert(node.findtext('make') or '')
if make:
check_mako_template(block_id, make)
if '\n' in make:
make = yaml.MultiLineString(make)
templates['make'] = make or no_value
templates['callbacks'] = [
convert(cb_node.text) for cb_node in node.iterfind('callback')
] or no_value
return OrderedDict((key, value) for key, value in templates.items() if value is not no_value)
def convert_param_xml(node, convert):
param = OrderedDict()
param['id'] = node.findtext('key').strip()
param['label'] = node.findtext('name').strip()
param['category'] = node.findtext('tab') or no_value
param['dtype'] = convert(node.findtext('type') or '')
param['default'] = node.findtext('value') or no_value
options = yaml.ListFlowing(on.findtext('key') for on in node.iterfind('option'))
option_labels = yaml.ListFlowing(on.findtext('name') for on in node.iterfind('option'))
param['options'] = options or no_value
if not all(str(o).title() == l for o, l in zip(options, option_labels)):
param['option_labels'] = option_labels
attributes = defaultdict(yaml.ListFlowing)
for option_n in node.iterfind('option'):
for opt_n in option_n.iterfind('opt'):
key, value = opt_n.text.split(':', 2)
attributes[key].append(value)
param['option_attributes'] = dict(attributes) or no_value
param['hide'] = convert(node.findtext('hide')) or no_value
return OrderedDict((key, value) for key, value in param.items() if value is not no_value)
def convert_port_xml(node, convert):
port = OrderedDict()
label = node.findtext('name')
# default values:
port['label'] = label if label not in ('in', 'out') else no_value
dtype = convert(node.findtext('type'))
# TODO: detect dyn message ports
port['domain'] = domain = 'message' if dtype == 'message' else 'stream'
if domain == 'message':
port['id'], port['label'] = label, no_value
else:
port['dtype'] = dtype
vlen = node.findtext('vlen')
port['vlen'] = int(vlen) if vlen and vlen.isdigit() else convert(vlen) or no_value
port['multiplicity'] = convert(node.findtext('nports')) or no_value
port['optional'] = bool(node.findtext('optional')) or no_value
port['hide'] = convert(node.findtext('hide')) or no_value
return OrderedDict((key, value) for key, value in port.items() if value is not no_value)
def check_mako_template(block_id, expr):
import sys
from mako.template import Template
try:
Template(expr)
except Exception as error:
print(block_id, expr, type(error), error, '', sep='\n', file=sys.stderr)
| gpl-3.0 |
trevorlinton/skia | bench/bench_util.py | 29 | 11022 | '''
Created on May 19, 2011
@author: bungeman
'''
import re
import math
# bench representation algorithm constant names
ALGORITHM_AVERAGE = 'avg'
ALGORITHM_MEDIAN = 'med'
ALGORITHM_MINIMUM = 'min'
ALGORITHM_25TH_PERCENTILE = '25th'
# Regular expressions used throughout.
PER_SETTING_RE = '([^\s=]+)(?:=(\S+))?'
SETTINGS_RE = 'skia bench:((?:\s+' + PER_SETTING_RE + ')*)'
BENCH_RE = 'running bench (?:\[\d+ \d+\] )?\s*(\S+)'
TIME_RE = '(?:(\w*)msecs = )?\s*((?:\d+\.\d+)(?:,\s*\d+\.\d+)*)'
# non-per-tile benches have configs that don't end with ']' or '>'
CONFIG_RE = '(\S+[^\]>]):\s+((?:' + TIME_RE + '\s+)+)'
# per-tile bench lines are in the following format. Note that there are
# non-averaged bench numbers on separate lines, which we ignore for now due to
# their inaccuracy.
TILE_RE = (' tile_(\S+): tile \[\d+,\d+\] out of \[\d+,\d+\] <averaged>:'
' ((?:' + TIME_RE + '\s+)+)')
# for extracting tile layout
TILE_LAYOUT_RE = ' out of \[(\d+),(\d+)\] <averaged>: '
PER_SETTING_RE_COMPILED = re.compile(PER_SETTING_RE)
SETTINGS_RE_COMPILED = re.compile(SETTINGS_RE)
BENCH_RE_COMPILED = re.compile(BENCH_RE)
TIME_RE_COMPILED = re.compile(TIME_RE)
CONFIG_RE_COMPILED = re.compile(CONFIG_RE)
TILE_RE_COMPILED = re.compile(TILE_RE)
TILE_LAYOUT_RE_COMPILED = re.compile(TILE_LAYOUT_RE)
class BenchDataPoint:
"""A single data point produced by bench.
(str, str, str, float, {str:str}, str, [floats])"""
def __init__(self, bench, config, time_type, time, settings,
tile_layout='', per_tile_values=[]):
self.bench = bench
self.config = config
self.time_type = time_type
self.time = time
self.settings = settings
# how tiles cover the whole picture. '5x3' means 5 columns and 3 rows.
self.tile_layout = tile_layout
# list of per_tile bench values, if applicable
self.per_tile_values = per_tile_values
def __repr__(self):
return "BenchDataPoint(%s, %s, %s, %s, %s)" % (
str(self.bench),
str(self.config),
str(self.time_type),
str(self.time),
str(self.settings),
)
class _ExtremeType(object):
"""Instances of this class compare greater or less than other objects."""
def __init__(self, cmpr, rep):
object.__init__(self)
self._cmpr = cmpr
self._rep = rep
def __cmp__(self, other):
if isinstance(other, self.__class__) and other._cmpr == self._cmpr:
return 0
return self._cmpr
def __repr__(self):
return self._rep
Max = _ExtremeType(1, "Max")
Min = _ExtremeType(-1, "Min")
class _ListAlgorithm(object):
"""Algorithm for selecting the representation value from a given list.
representation is one of the ALGORITHM_XXX representation types."""
def __init__(self, data, representation=None):
if not representation:
representation = ALGORITHM_AVERAGE # default algorithm
self._data = data
self._len = len(data)
if representation == ALGORITHM_AVERAGE:
self._rep = sum(self._data) / self._len
else:
self._data.sort()
if representation == ALGORITHM_MINIMUM:
self._rep = self._data[0]
else:
# for percentiles, we use the value below which x% of values are
# found, which allows for better detection of quantum behaviors.
if representation == ALGORITHM_MEDIAN:
x = int(round(0.5 * self._len + 0.5))
elif representation == ALGORITHM_25TH_PERCENTILE:
x = int(round(0.25 * self._len + 0.5))
else:
raise Exception("invalid representation algorithm %s!" %
representation)
self._rep = self._data[x - 1]
def compute(self):
return self._rep
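# Spot check of the selection algorithms (illustrative, added):
#   data = [5.0, 1.0, 3.0, 2.0, 4.0]
#   _ListAlgorithm(list(data), ALGORITHM_AVERAGE).compute()          # -> 3.0
#   _ListAlgorithm(list(data), ALGORITHM_MINIMUM).compute()          # -> 1.0
#   _ListAlgorithm(list(data), ALGORITHM_MEDIAN).compute()           # -> 3.0
#   _ListAlgorithm(list(data), ALGORITHM_25TH_PERCENTILE).compute()  # -> 2.0
# (list(data) is passed because the percentile paths sort the list in place.)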
def _ParseAndStoreTimes(config_re_compiled, is_per_tile, line, bench,
value_dic, layout_dic, representation=None):
"""Parses given bench time line with regex and adds data to value_dic.
config_re_compiled: precompiled regular expression for parsing the config
line.
is_per_tile: boolean indicating whether this is a per-tile bench.
If so, we add tile layout into layout_dic as well.
line: input string line to parse.
bench: name of bench for the time values.
value_dic: dictionary to store bench values. See bench_dic in parse() below.
layout_dic: dictionary to store tile layouts. See parse() for descriptions.
representation: should match one of the ALGORITHM_XXX types."""
for config in config_re_compiled.finditer(line):
current_config = config.group(1)
tile_layout = ''
if is_per_tile: # per-tile bench, add name prefix
current_config = 'tile_' + current_config
layouts = TILE_LAYOUT_RE_COMPILED.search(line)
if layouts and len(layouts.groups()) == 2:
tile_layout = '%sx%s' % layouts.groups()
times = config.group(2)
for new_time in TIME_RE_COMPILED.finditer(times):
current_time_type = new_time.group(1)
iters = [float(i) for i in
new_time.group(2).strip().split(',')]
value_dic.setdefault(bench, {}).setdefault(
current_config, {}).setdefault(current_time_type, []).append(
_ListAlgorithm(iters, representation).compute())
layout_dic.setdefault(bench, {}).setdefault(
current_config, {}).setdefault(current_time_type, tile_layout)
# TODO(bensong): switch to reading JSON output when available. This way we don't
# need the RE complexities.
def parse(settings, lines, representation=None):
"""Parses bench output into a useful data structure.
({str:str}, __iter__ -> str) -> [BenchDataPoint]
representation is one of the ALGORITHM_XXX types."""
benches = []
current_bench = None
bench_dic = {} # [bench][config][time_type] -> [list of bench values]
# [bench][config][time_type] -> tile_layout
layout_dic = {}
for line in lines:
# see if this line is a settings line
settingsMatch = SETTINGS_RE_COMPILED.search(line)
if (settingsMatch):
settings = dict(settings)
for settingMatch in PER_SETTING_RE_COMPILED.finditer(settingsMatch.group(1)):
if (settingMatch.group(2)):
settings[settingMatch.group(1)] = settingMatch.group(2)
else:
settings[settingMatch.group(1)] = True
# see if this line starts a new bench
new_bench = BENCH_RE_COMPILED.search(line)
if new_bench:
current_bench = new_bench.group(1)
# add configs on this line to the bench_dic
if current_bench:
if line.startswith(' tile_') :
_ParseAndStoreTimes(TILE_RE_COMPILED, True, line, current_bench,
bench_dic, layout_dic, representation)
else:
_ParseAndStoreTimes(CONFIG_RE_COMPILED, False, line,
current_bench,
bench_dic, layout_dic, representation)
# append benches to list, use the total time as final bench value.
for bench in bench_dic:
for config in bench_dic[bench]:
for time_type in bench_dic[bench][config]:
tile_layout = ''
per_tile_values = []
if len(bench_dic[bench][config][time_type]) > 1:
# per-tile values, extract tile_layout
per_tile_values = bench_dic[bench][config][time_type]
tile_layout = layout_dic[bench][config][time_type]
benches.append(BenchDataPoint(
bench,
config,
time_type,
sum(bench_dic[bench][config][time_type]),
settings,
tile_layout,
per_tile_values))
return benches
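# Example of the bench output lines this parser targets (illustrative only;
# real output varies by bench, config and time type):
#   skia bench: alpha=0x80 scalar=fixed
#   running bench [640 480]   rects
#   8888:  cmsecs = 10.2,10.4  gmsecs = 2.1,2.2
# Each comma-separated run is reduced to one value by _ListAlgorithm, and the
# per-config, per-time-type sums become BenchDataPoint.time.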
class LinearRegression:
"""Linear regression data based on a set of data points.
([(Number,Number)])
There must be at least two points for this to make sense."""
def __init__(self, points):
n = len(points)
max_x = Min
min_x = Max
Sx = 0.0
Sy = 0.0
Sxx = 0.0
Sxy = 0.0
Syy = 0.0
for point in points:
x = point[0]
y = point[1]
max_x = max(max_x, x)
min_x = min(min_x, x)
Sx += x
Sy += y
Sxx += x*x
Sxy += x*y
Syy += y*y
denom = n*Sxx - Sx*Sx
if (denom != 0.0):
B = (n*Sxy - Sx*Sy) / denom
else:
B = 0.0
a = (1.0/n)*(Sy - B*Sx)
se2 = 0
sB2 = 0
sa2 = 0
if (n >= 3 and denom != 0.0):
se2 = (1.0/(n*(n-2)) * (n*Syy - Sy*Sy - B*B*denom))
sB2 = (n*se2) / denom
sa2 = sB2 * (1.0/n) * Sxx
self.slope = B
self.intercept = a
self.serror = math.sqrt(max(0, se2))
self.serror_slope = math.sqrt(max(0, sB2))
self.serror_intercept = math.sqrt(max(0, sa2))
self.max_x = max_x
self.min_x = min_x
def __repr__(self):
return "LinearRegression(%s, %s, %s, %s, %s)" % (
str(self.slope),
str(self.intercept),
str(self.serror),
str(self.serror_slope),
str(self.serror_intercept),
)
def find_min_slope(self):
"""Finds the minimal slope given one standard deviation."""
slope = self.slope
intercept = self.intercept
error = self.serror
regr_start = self.min_x
regr_end = self.max_x
regr_width = regr_end - regr_start
if slope < 0:
lower_left_y = slope*regr_start + intercept - error
upper_right_y = slope*regr_end + intercept + error
return min(0, (upper_right_y - lower_left_y) / regr_width)
elif slope > 0:
upper_left_y = slope*regr_start + intercept + error
lower_right_y = slope*regr_end + intercept - error
return max(0, (lower_right_y - upper_left_y) / regr_width)
return 0
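# Sanity example (added): an exact fit, so all standard errors are zero.
#   lr = LinearRegression([(0.0, 1.0), (1.0, 3.0), (2.0, 5.0)])
#   (lr.slope, lr.intercept)   # -> (2.0, 1.0)
#   lr.find_min_slope()        # -> 2.0, since serror == 0 here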
def CreateRevisionLink(revision_number):
"""Returns HTML displaying the given revision number and linking to
that revision's change page at code.google.com, e.g.
http://code.google.com/p/skia/source/detail?r=2056
"""
return '<a href="http://code.google.com/p/skia/source/detail?r=%s">%s</a>'%(
revision_number, revision_number)
def main():
foo = [[0.0, 0.0], [0.0, 1.0], [0.0, 2.0], [0.0, 3.0]]
LinearRegression(foo)
if __name__ == "__main__":
main()
| bsd-3-clause |
dumengnanbuaa/awesome-python-webapp | www/config.py | 17 | 1202 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Configuration
'''
__author__ = 'Michael Liao'
import config_default
class Dict(dict):
'''
Simple dict but support access as x.y style.
'''
def __init__(self, names=(), values=(), **kw):
super(Dict, self).__init__(**kw)
for k, v in zip(names, values):
self[k] = v
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(r"'Dict' object has no attribute '%s'" % key)
def __setattr__(self, key, value):
self[key] = value
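    # Example (illustrative, added): attribute access mirrors item access.
    #   d = Dict(('host', 'port'), ('localhost', 3306))
    #   d.host       # -> 'localhost'
    #   d['port']    # -> 3306
    #   d.missing    # raises AttributeError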
def merge(defaults, override):
r = {}
for k, v in defaults.iteritems():
if k in override:
if isinstance(v, dict):
r[k] = merge(v, override[k])
else:
r[k] = override[k]
else:
r[k] = v
return r
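# Illustrative merge (hypothetical configs): keys from ``defaults`` define
# the schema; matching keys in ``override`` win, recursing into nested dicts.
#   >>> merge({'db': {'host': 'localhost', 'port': 3306}, 'debug': True},
#   ...       {'db': {'host': '192.168.0.1'}})
#   {'db': {'host': '192.168.0.1', 'port': 3306}, 'debug': True}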
def toDict(d):
D = Dict()
for k, v in d.iteritems():
D[k] = toDict(v) if isinstance(v, dict) else v
return D
configs = config_default.configs
try:
import config_override
configs = merge(configs, config_override.configs)
except ImportError:
pass
configs = toDict(configs)
| gpl-2.0 |
LuoZijun/uOffice | temp/pydocxx/tests/oxml/unitdata/text.py | 10 | 3187 | # encoding: utf-8
"""
Test data builders for text XML elements
"""
from ...unitdata import BaseBuilder
from .shared import CT_OnOffBuilder, CT_StringBuilder
class CT_BrBuilder(BaseBuilder):
__tag__ = 'w:br'
__nspfxs__ = ('w',)
__attrs__ = ('w:type', 'w:clear')
class CT_EmptyBuilder(BaseBuilder):
__nspfxs__ = ('w',)
__attrs__ = ()
def __init__(self, tag):
self.__tag__ = tag
super(CT_EmptyBuilder, self).__init__()
class CT_JcBuilder(BaseBuilder):
__tag__ = 'w:jc'
__nspfxs__ = ('w',)
__attrs__ = ('w:val',)
class CT_PBuilder(BaseBuilder):
__tag__ = 'w:p'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_PPrBuilder(BaseBuilder):
__tag__ = 'w:pPr'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_RBuilder(BaseBuilder):
__tag__ = 'w:r'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_RPrBuilder(BaseBuilder):
__tag__ = 'w:rPr'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_SectPrBuilder(BaseBuilder):
__tag__ = 'w:sectPr'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_TextBuilder(BaseBuilder):
__tag__ = 'w:t'
__nspfxs__ = ('w',)
__attrs__ = ()
def with_space(self, value):
self._set_xmlattr('xml:space', str(value))
return self
class CT_UnderlineBuilder(BaseBuilder):
__tag__ = 'w:u'
__nspfxs__ = ('w',)
__attrs__ = (
'w:val', 'w:color', 'w:themeColor', 'w:themeTint', 'w:themeShade'
)
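# Illustrative usage (a sketch; with_nsdecls() and xml() are assumed here to
# come from the shared BaseBuilder in the ``unitdata`` module):
#   a_p().with_nsdecls().xml() would yield '<w:p xmlns:w="..."/>'
#   a_t().with_space('preserve') builds toward '<w:t xml:space="preserve"/>'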
def a_b():
return CT_OnOffBuilder('w:b')
def a_bCs():
return CT_OnOffBuilder('w:bCs')
def a_br():
return CT_BrBuilder()
def a_caps():
return CT_OnOffBuilder('w:caps')
def a_cr():
return CT_EmptyBuilder('w:cr')
def a_cs():
return CT_OnOffBuilder('w:cs')
def a_dstrike():
return CT_OnOffBuilder('w:dstrike')
def a_jc():
return CT_JcBuilder()
def a_noProof():
return CT_OnOffBuilder('w:noProof')
def a_shadow():
return CT_OnOffBuilder('w:shadow')
def a_smallCaps():
return CT_OnOffBuilder('w:smallCaps')
def a_snapToGrid():
return CT_OnOffBuilder('w:snapToGrid')
def a_specVanish():
return CT_OnOffBuilder('w:specVanish')
def a_strike():
return CT_OnOffBuilder('w:strike')
def a_tab():
return CT_EmptyBuilder('w:tab')
def a_vanish():
return CT_OnOffBuilder('w:vanish')
def a_webHidden():
return CT_OnOffBuilder('w:webHidden')
def a_p():
return CT_PBuilder()
def a_pPr():
return CT_PPrBuilder()
def a_pStyle():
return CT_StringBuilder('w:pStyle')
def a_sectPr():
return CT_SectPrBuilder()
def a_t():
return CT_TextBuilder()
def a_u():
return CT_UnderlineBuilder()
def an_emboss():
return CT_OnOffBuilder('w:emboss')
def an_i():
return CT_OnOffBuilder('w:i')
def an_iCs():
return CT_OnOffBuilder('w:iCs')
def an_imprint():
return CT_OnOffBuilder('w:imprint')
def an_oMath():
return CT_OnOffBuilder('w:oMath')
def an_outline():
return CT_OnOffBuilder('w:outline')
def an_r():
return CT_RBuilder()
def an_rPr():
return CT_RPrBuilder()
def an_rStyle():
return CT_StringBuilder('w:rStyle')
def an_rtl():
return CT_OnOffBuilder('w:rtl')
| gpl-3.0 |
temasek/android_external_chromium_org | chrome/common/extensions/docs/server2/features_utility_test.py | 26 | 2661 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from features_utility import Parse, Filtered, MergedWith
class FeaturesUtilityTest(unittest.TestCase):
def testFromJson(self):
raw_features_json = {
'doc1': {
'extension_types': ['extension', 'platform_app']
},
'doc2': {
'extension_types': ['hosted_app', 'packaged_app']
},
'doc3': {
'whitelist': 'hashhashashhashashhashashhash'
},
'doc4': [
{ 'extension_types': 'all' },
{ 'whitelist': 'hashhashashhashashhashashhash' }
],
'doc5': {
'extension_types': ['extension']
},
'doc1.sub1': {
'extension_types': ['platform_app', 'hosted_app', 'packaged_app']
}
}
expected = {
'doc1': {
'platforms': ['apps', 'extensions'],
'name': 'doc1'
},
'doc2': {
'platforms': [],
'name': 'doc2'
},
'doc4': {
'platforms': ['apps', 'extensions'],
'name': 'doc4'
},
'doc5': {
'platforms': ['extensions'],
'name': 'doc5'
},
'doc1.sub1': {
'platforms': ['apps'],
'name': 'doc1.sub1'
}
}
self.assertEqual(expected, Parse(raw_features_json))
def testFilter(self):
unfiltered = {
'doc1': { 'platforms': ['apps'] },
'doc2': { 'platforms': ['extensions'] },
'doc3': { 'platforms': ['apps', 'extensions'] },
'doc4': { 'platforms': [] }
}
apps_names = set(('doc1', 'doc3'))
extension_names = set(('doc2', 'doc3'))
self.assertEqual(
apps_names, set(Filtered(unfiltered, 'apps').keys()))
self.assertEqual(
extension_names, set(Filtered(unfiltered, 'extensions').keys()))
def testMergeFeatures(self):
features = {
'doc1': {
'platforms': ['apps']
},
'doc3': {
'name': 'doc3'
}
}
other = {
'doc1': {
'name': 'doc1',
'platforms': ['extensions']
},
'doc2': {
'name': 'doc2'
},
'doc3': {
'platforms': ['extensions', 'apps']
}
}
expected = {
'doc1': {
'name': 'doc1',
'platforms': ['extensions']
},
'doc2': {
'name': 'doc2',
'platforms': []
},
'doc3': {
'name': 'doc3',
'platforms': ['extensions', 'apps']
}
}
self.assertEqual(expected, MergedWith(features, other))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
diascreative/opencore | opencore/views/files.py | 4 | 2160 | # Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: tmoroz.org
# 2010-2011 Large Blue
# Fergus Doyle: fergus.doyle@largeblue.com
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from webob import Response
from opencore.models.interfaces import IImage
def download_file_view(context, request):
# To view image-ish files in-line, use thumbnail_view.
f = context.blobfile.open()
headers = [
('Content-Type', context.mimetype),
('Content-Length', str(context.size)),
]
if 'save' in request.params:
fname = context.filename
if isinstance(fname, unicode):
fname = fname.encode('utf-8')
fname = fname.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
headers.append(
('Content-Disposition', 'attachment; filename=%s' % fname)
)
response = Response(headerlist=headers, app_iter=f)
return response
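# Illustrative flow (paths are hypothetical): GET /files/report.pdf streams
# the blob inline with its stored mimetype, while GET /files/report.pdf?save=1
# adds the Content-Disposition header above so browsers save it as a download.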
def thumbnail_view(context, request):
assert IImage.providedBy(context), "Context must be an image."
filename = request.subpath[0] # <width>x<length>.jpg
size = map(int, filename[:-4].split('x'))
thumb = context.thumbnail(tuple(size))
    # XXX Allow browser caching by setting Last-Modified and Expires
    # and respecting If-Modified-Since requests with 304 responses.
data = thumb.blobfile.open().read()
return Response(body=data, content_type=thumb.mimetype)
| gpl-2.0 |
yg257/Pangea | templates/root/ec2/lib/boto-2.34.0/boto/ses/__init__.py | 131 | 2013 | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011 Harry Marr http://hmarr.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.ses.connection import SESConnection
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the SES service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo` instances
"""
return get_regions('ses', connection_cls=SESConnection)
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.ses.connection.SESConnection`.
:type: str
:param region_name: The name of the region to connect to.
:rtype: :class:`boto.ses.connection.SESConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
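# Illustrative usage (a sketch; send_email is a method of SESConnection):
#   conn = connect_to_region('us-east-1')
#   if conn is not None:
#       conn.send_email('sender@example.com', 'Subject line',
#                       'Body text', ['recipient@example.com'])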
| apache-2.0 |
dhoffman34/django | django/template/defaulttags.py | 1 | 52686 | """Default tags used by the template system, available to all templates."""
from __future__ import unicode_literals
import os
import sys
import re
from datetime import datetime
from itertools import groupby, cycle as itertools_cycle
import warnings
from django.conf import settings
from django.template.base import (Node, NodeList, Template, Context, Library,
TemplateSyntaxError, VariableDoesNotExist, InvalidTemplateLibrary,
BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END,
SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END,
VARIABLE_ATTRIBUTE_SEPARATOR, get_library, token_kwargs, kwarg_re,
render_value_in_context)
from django.template.smartif import IfParser, Literal
from django.template.defaultfilters import date
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text, smart_text
from django.utils.lorem_ipsum import words, paragraphs
from django.utils.safestring import mark_safe
from django.utils.html import format_html
from django.utils import six
from django.utils import timezone
register = Library()
class AutoEscapeControlNode(Node):
"""Implements the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
def render(self, context):
return ''
class CsrfTokenNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token', None)
if csrf_token:
if csrf_token == 'NOTPROVIDED':
return format_html("")
else:
return format_html("<input type='hidden' name='csrfmiddlewaretoken' value='{0}' />", csrf_token)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value. This is usually caused by not using RequestContext.")
return ''
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context[self.variable_name] = value
if self.silent:
return ''
return render_value_in_context(value, context)
class DebugNode(Node):
def render(self, context):
from pprint import pformat
output = [pformat(val) for val in context]
output.append('\n\n')
output.append(pformat(sys.modules))
return ''.join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
with context.push(var=output):
return self.filter_expr.resolve(context)
class FirstOfNode(Node):
def __init__(self, variables):
self.vars = variables
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
return render_value_in_context(value, context)
return ''
class ForNode(Node):
child_nodelists = ('nodelist_loop', 'nodelist_empty')
def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = ' reversed' if self.is_reversed else ''
return "<For Node: for %s in %s, tail_len: %d%s>" % \
(', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
reversed_text)
def __iter__(self):
for node in self.nodelist_loop:
yield node
for node in self.nodelist_empty:
yield node
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
with context.push():
try:
values = self.sequence.resolve(context, True)
except VariableDoesNotExist:
values = []
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
return self.nodelist_empty.render(context)
nodelist = []
if self.is_reversed:
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i + 1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
# To complete this deprecation, remove from here to the
# try/except block as well as the try/except itself,
# leaving `unpacked_vars = ...` and the "else" statements.
if not isinstance(item, (list, tuple)):
len_item = 1
else:
len_item = len(item)
# Check loop variable count before unpacking
if num_loopvars != len_item:
warnings.warn(
"Need {0} values to unpack in for loop; got {1}. "
"This will raise an exception in Django 2.0."
.format(num_loopvars, len_item),
RemovedInDjango20Warning)
try:
unpacked_vars = dict(zip(self.loopvars, item))
except TypeError:
pass
else:
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
# In TEMPLATE_DEBUG mode provide source of the node which
# actually raised the exception
if settings.TEMPLATE_DEBUG:
for node in self.nodelist_loop:
try:
nodelist.append(node.render(context))
except Exception as e:
if not hasattr(e, 'django_template_source'):
e.django_template_source = node.source
raise
else:
for node in self.nodelist_loop:
nodelist.append(node.render(context))
if pop_context:
# The loop variables were pushed on to the context so pop them
# off again. This is necessary because the tag lets the length
# of loopvars differ to the length of each set of items and we
# don't want to leave any vars from the previous loop on the
# context.
context.pop()
return mark_safe(''.join(force_text(n) for n in nodelist))
class IfChangedNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
if self not in state_frame:
state_frame[self] = None
nodelist_true_output = None
try:
if self._varlist:
# Consider multiple parameters. This automatically behaves
# like an OR evaluation of the multiple variables.
compare_to = [var.resolve(context, True) for var in self._varlist]
else:
# The "{% ifchanged %}" syntax (without any variables) compares the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
except VariableDoesNotExist:
compare_to = None
if compare_to != state_frame[self]:
state_frame[self] = compare_to
return nodelist_true_output or self.nodelist_true.render(context) # render true block if not already rendered
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can create a new scope.
# Find the place where to store the state to detect changes.
if 'forloop' in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context['forloop']
else:
# Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
return context.render_context
class IfEqualNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
self.var1, self.var2 = var1, var2
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self.negate = negate
def __repr__(self):
return "<IfEqualNode>"
def render(self, context):
val1 = self.var1.resolve(context, True)
val2 = self.var2.resolve(context, True)
if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
return self.nodelist_true.render(context)
return self.nodelist_false.render(context)
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return "<IfNode>"
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
for node in nodelist:
yield node
@property
def nodelist(self):
return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ''
class LoremNode(Node):
def __init__(self, count, method, common):
self.count, self.method, self.common = count, method, common
def render(self, context):
try:
count = int(self.count.resolve(context))
except (ValueError, TypeError):
count = 1
if self.method == 'w':
return words(count, common=self.common)
else:
paras = paragraphs(count, common=self.common)
if self.method == 'p':
paras = ['<p>%s</p>' % p for p in paras]
return '\n\n'.join(paras)
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, True)
def render(self, context):
obj_list = self.target.resolve(context, True)
if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ''
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
{'grouper': key, 'list': list(val)}
for key, val in
groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
]
return ''
def include_is_allowed(filepath):
filepath = os.path.abspath(filepath)
for root in settings.ALLOWED_INCLUDE_ROOTS:
if filepath.startswith(root):
return True
return False
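# Illustrative check (paths are hypothetical): with
# ALLOWED_INCLUDE_ROOTS = ('/var/www/includes',), a crafted path such as
# '/var/www/includes/../secrets.txt' normalizes via abspath() to
# '/var/www/secrets.txt' and fails the startswith() test above.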
class SsiNode(Node):
def __init__(self, filepath, parsed):
self.filepath = filepath
self.parsed = parsed
def render(self, context):
filepath = self.filepath.resolve(context)
if not include_is_allowed(filepath):
if settings.DEBUG:
return "[Didn't have permission to include file]"
else:
return '' # Fail silently for invalid includes.
try:
with open(filepath, 'r') as fp:
output = fp.read()
except IOError:
output = ''
if self.parsed:
try:
t = Template(output, name=filepath)
return t.render(context)
except TemplateSyntaxError as e:
if settings.DEBUG:
return "[Included template had syntax error: %s]" % e
else:
return '' # Fail silently for invalid included templates.
return output
class LoadNode(Node):
def render(self, context):
return ''
class NowNode(Node):
def __init__(self, format_string):
self.format_string = format_string
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
return date(datetime.now(tz=tzinfo), self.format_string)
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
mapping = {'openblock': BLOCK_TAG_START,
'closeblock': BLOCK_TAG_END,
'openvariable': VARIABLE_TAG_START,
'closevariable': VARIABLE_TAG_END,
'openbrace': SINGLE_BRACE_START,
'closebrace': SINGLE_BRACE_END,
'opencomment': COMMENT_TAG_START,
'closecomment': COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, '')
class URLNode(Node):
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
from django.core.urlresolvers import reverse, NoReverseMatch
args = [arg.resolve(context) for arg in self.args]
kwargs = dict((smart_text(k, 'ascii'), v.resolve(context))
for k, v in self.kwargs.items())
view_name = self.view_name.resolve(context)
# Try to look up the URL twice: once given the view name, and again
# relative to what we guess is the "main" app. If they both fail,
# re-raise the NoReverseMatch unless we're using the
# {% url ... as var %} construct in which case return nothing.
url = ''
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=context.current_app)
except NoReverseMatch:
exc_info = sys.exc_info()
if settings.SETTINGS_MODULE:
project_name = settings.SETTINGS_MODULE.split('.')[0]
try:
url = reverse(project_name + '.' + view_name,
args=args, kwargs=kwargs,
current_app=context.current_app)
except NoReverseMatch:
if self.asvar is None:
# Re-raise the original exception, not the one with
# the path relative to the project. This makes a
# better error message.
six.reraise(*exc_info)
else:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ''
else:
return url
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width, asvar=None):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
self.asvar = asvar
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
result = str(int(round(ratio)))
except ZeroDivisionError:
return '0'
except (ValueError, TypeError, OverflowError):
return ''
if self.asvar:
context[self.asvar] = result
return ''
else:
return result
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return "<WithNode>"
def render(self, context):
values = dict((key, val.resolve(context)) for key, val in
six.iteritems(self.extra_context))
with context.push(**values):
return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ('on', 'off'):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(('endautoescape',))
parser.delete_first_token()
return AutoEscapeControlNode((arg == 'on'), nodelist)
@register.tag
def comment(parser, token):
"""
Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past('endcomment')
return CommentNode()
@register.tag
def cycle(parser, token):
"""
Cycles among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
    # *all* templates).
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if ',' in args[1]:
# Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
# case.
args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, '_namedCycleNodes'):
raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
if name not in parser._namedCycleNodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._namedCycleNodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent)
if not hasattr(parser, '_namedCycleNodes'):
parser._namedCycleNodes = {}
parser._namedCycleNodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values)
return node
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Outputs a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
"""
Filters the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
for func, unused in filter_expr.filters:
filter_name = getattr(func, '_filter_name', None)
if filter_name in ('escape', 'safe'):
raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name)
nodelist = parser.parse(('endfilter',))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token):
"""
Outputs the first variable passed that is not False, without escaping.
Outputs nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 %}
This is equivalent to::
{% if var1 %}
{{ var1|safe }}
{% elif var2 %}
{{ var2|safe }}
{% elif var3 %}
{{ var3|safe }}
{% endif %}
but obviously much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to escape the output, use a filter tag::
{% filter force_escape %}
{% firstof var1 var2 var3 "fallback value" %}
{% endfilter %}
"""
bits = token.split_contents()[1:]
if len(bits) < 1:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
return FirstOfNode([parser.compile_filter(bit) for bit in bits])
@register.tag('for')
def do_for(parser, token):
"""
Loops over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
<ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
          {% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
if bits[in_index] != 'in':
raise TemplateSyntaxError("'for' statements should use the format"
" 'for x in y': %s" % token.contents)
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or ' ' in var:
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',))
token = parser.next_token()
if token.contents == 'empty':
nodelist_empty = parser.parse(('endfor',))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
bits = list(token.split_contents())
if len(bits) != 3:
raise TemplateSyntaxError("%r takes two arguments" % bits[0])
end_tag = 'end' + bits[0]
nodelist_true = parser.parse(('else', end_tag))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse((end_tag,))
parser.delete_first_token()
else:
nodelist_false = NodeList()
val1 = parser.compile_filter(bits[1])
val2 = parser.compile_filter(bits[2])
return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
"""
Outputs the contents of the block if the two arguments equal each other.
Examples::
{% ifequal user.id comment.user_id %}
...
{% endifequal %}
{% ifnotequal user.id comment.user_id %}
...
{% else %}
...
{% endifnotequal %}
"""
return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
"""
Outputs the contents of the block if the two arguments are not equal.
See ifequal.
"""
return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super(TemplateIfParser, self).__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
"""
The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
(i.e., exists, is not empty, and is not a false boolean value), the
contents of the block are output:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
    As you can see, the ``if`` tag may take one or several ``{% elif %}``
clauses, as well as an ``{% else %}`` clause that will be displayed if all
previous conditions fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
All supported operators are: ``or``, ``and``, ``in``, ``not in``
``==`` (or ``=``), ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith('elif'):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == 'else':
nodelist = parser.parse(('endif',))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
assert token.contents == 'endif'
return IfNode(conditions_nodelists)
@register.tag
def ifchanged(parser, token):
"""
Checks if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
1. Checks its own rendered contents against its previous state and only
displays the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(('else', 'endifchanged'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifchanged',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
@register.tag
def ssi(parser, token):
"""
Outputs the contents of a given file into the page.
Like a simple "include" tag, the ``ssi`` tag includes the contents
of another file -- which must be specified using an absolute path --
in the current page::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" %}
If the optional "parsed" parameter is given, the contents of the included
file are evaluated as template code, with the current context::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" parsed %}
"""
bits = token.split_contents()
parsed = False
if len(bits) not in (2, 3):
raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
" the file to be included")
if len(bits) == 3:
if bits[2] == 'parsed':
parsed = True
else:
raise TemplateSyntaxError("Second (optional) argument to %s tag"
" must be 'parsed'" % bits[0])
filepath = parser.compile_filter(bits[1])
return SsiNode(filepath, parsed)
@register.tag
def load(parser, token):
"""
Loads a custom template tag set.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
try:
taglib = bits[-1]
lib = get_library(taglib)
except InvalidTemplateLibrary as e:
raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
(taglib, e))
else:
temp_lib = Library()
for name in bits[1:-2]:
if name in lib.tags:
temp_lib.tags[name] = lib.tags[name]
# a name could be a tag *and* a filter, so check for both
if name in lib.filters:
temp_lib.filters[name] = lib.filters[name]
elif name in lib.filters:
temp_lib.filters[name] = lib.filters[name]
else:
raise TemplateSyntaxError("'%s' is not a valid tag or filter in tag library '%s'" %
(name, taglib))
parser.add_library(temp_lib)
else:
for taglib in bits[1:]:
# add the library to the parser
try:
lib = get_library(taglib)
parser.add_library(lib)
except InvalidTemplateLibrary as e:
raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
(taglib, e))
return LoadNode()
@register.tag
def lorem(parser, token):
"""
Creates random Latin text useful for providing test data in templates.
Usage format::
{% lorem [count] [method] [random] %}
``count`` is a number (or variable) containing the number of paragraphs or
words to generate (default is 1).
``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
plain-text paragraph blocks (default is ``b``).
``random`` is the word ``random``, which if given, does not use the common
paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
Examples:
* ``{% lorem %}`` will output the common "lorem ipsum" paragraph
* ``{% lorem 3 p %}`` will output the common "lorem ipsum" paragraph
and two random paragraphs each wrapped in HTML ``<p>`` tags
* ``{% lorem 2 w random %}`` will output two random latin words
"""
bits = list(token.split_contents())
tagname = bits[0]
# Random bit
common = bits[-1] != 'random'
if not common:
bits.pop()
# Method bit
if bits[-1] in ('w', 'p', 'b'):
method = bits.pop()
else:
method = 'b'
# Count bit
if len(bits) > 1:
count = bits.pop()
else:
count = '1'
count = parser.compile_filter(count)
if len(bits) != 1:
raise TemplateSyntaxError("Incorrect format for %r tag" % tagname)
return LoremNode(count, method, common)
@register.tag
def now(parser, token):
"""
Displays the date, formatted according to the given string.
Uses the same format as PHP's ``date()`` function; see http://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string)
@register.tag
def regroup(parser, token):
"""
Regroups a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``people`` is a list of ``Person`` objects that have ``first_name``,
``last_name``, and ``gender`` attributes, and you'd like to display a list
that looks like:
* Male:
* George Bush
* Bill Clinton
* Female:
* Margaret Thatcher
            * Condoleezza Rice
* Unknown:
* Pat Smith
The following snippet of template code would accomplish this dubious task::
{% regroup people by gender as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for item in group.list %}
<li>{{ item }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
and ``Unknown``, and ``list`` is the list of people with those genders.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
people was not sorted by gender, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup people|dictsort:"gender" by gender as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(var_name +
VARIABLE_ATTRIBUTE_SEPARATOR +
bits[3])
return RegroupNode(target, expression, var_name)
@register.tag
def spaceless(parser, token):
"""
Removes whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example would return this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` won't be stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(('endspaceless',))
parser.delete_first_token()
return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
"""
Outputs one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
" Must be one of: %s" %
(tag, list(TemplateTagNode.mapping)))
return TemplateTagNode(tag)
@register.tag
def url(parser, token):
"""
Returns an absolute URL matching given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "path.to.some_view" arg1 arg2 %}
or
{% url "path.to.some_view" name1=value1 name2=value2 %}
The first argument is a path to a view. It can be an absolute Python path
or just ``app_name.view_name`` without the project name if the view is
located inside the project.
Other arguments are space-separated values that will be filled in place of
positional and keyword arguments in the URL. Don't mix positional and
keyword arguments.
All arguments for the URL should be present.
For example if you have a view ``app_name.client`` taking client's id and
the corresponding line in a URLconf looks like this::
('^client/(\d+)/$', 'app_name.client')
and this app's URLconf is included into the project's URLconf under some
path::
('^clients/', include('project_name.app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "app_name.client" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument can also be a named URL instead of the Python path to
the view callable. For example if the URLconf entry looks like this::
url('^client/(\d+)/$', name='client-detail-view')
then in the template you can use::
{% url "client-detail-view" client.id %}
There is even another possible value type for the first argument. It can be
the name of a template variable that will be evaluated to obtain the view
name or the URL name, e.g.::
{% with view_path="app_name.client" %}
{% url view_path client.id %}
{% endwith %}
or,
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one argument"
" (path to a view)" % bits[0])
viewname = parser.compile_filter(bits[1])
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits):
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
"""
Stops the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(('endverbatim',))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such, this tag calculates the ratio of a given
value to a maximum value, and then applies that ratio to a constant.
For example::
<img src="bar.png" alt="Bar"
height="10" width="{% widthratio this_value max_value max_width %}" />
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
(because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).
In some cases you might want to capture the result of widthratio in a
variable. It can be useful for instance in a blocktrans like this::
{% widthratio this_value max_value max_width as width %}
{% blocktrans %}The width is: {{ width }}{% endblocktrans %}
"""
bits = token.split_contents()
if len(bits) == 4:
tag, this_value_expr, max_value_expr, max_width = bits
asvar = None
elif len(bits) == 6:
tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
if as_ != 'as':
raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
else:
raise TemplateSyntaxError("widthratio takes at least three arguments")
return WidthRatioNode(parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width),
asvar=asvar)
@register.tag('with')
def do_with(parser, token):
"""
Adds one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
nodelist = parser.parse(('endwith',))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
| bsd-3-clause |
mgit-at/ansible | lib/ansible/modules/network/onyx/onyx_magp.py | 66 | 7830 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_magp
version_added: "2.5"
author: "Samer Deeb (@samerd)"
short_description: Manage MAGP protocol on Mellanox ONYX network devices
description:
- This module provides declarative management of MAGP protocol on vlan
interface of Mellanox ONYX network devices.
notes:
- Tested on ONYX 3.6.4000
options:
magp_id:
description:
- "MAGP instance number 1-255"
required: true
interface:
description:
- VLAN Interface name.
required: true
state:
description:
- MAGP state.
default: present
choices: ['present', 'absent', 'enabled', 'disabled']
router_ip:
description:
- MAGP router IP address.
router_mac:
description:
- MAGP router MAC address.
"""
EXAMPLES = """
- name: run add vlan interface with magp
onyx_magp:
magp_id: 103
router_ip: 192.168.8.2
router_mac: AA:1B:2C:3D:4E:5F
interface: Vlan 1002
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- interface vlan 234 magp 103
- exit
- interface vlan 234 magp 103 ip virtual-router address 1.2.3.4
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
from ansible.module_utils.network.onyx.onyx import show_cmd
class OnyxMagpModule(BaseOnyxModule):
IF_VLAN_REGEX = re.compile(r"^Vlan (\d+)$")
@classmethod
def _get_element_spec(cls):
return dict(
magp_id=dict(type='int', required=True),
state=dict(default='present',
choices=['present', 'absent', 'enabled', 'disabled']),
interface=dict(required=True),
router_ip=dict(),
router_mac=dict(),
)
def init_module(self):
""" Ansible module initialization
"""
element_spec = self._get_element_spec()
argument_spec = dict()
argument_spec.update(element_spec)
self._module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
def validate_magp_id(self, value):
if value and not 1 <= int(value) <= 255:
self._module.fail_json(msg='magp id must be between 1 and 255')
def get_required_config(self):
module_params = self._module.params
interface = module_params['interface']
match = self.IF_VLAN_REGEX.match(interface)
vlan_id = 0
if match:
vlan_id = int(match.group(1))
else:
self._module.fail_json(
msg='Invalid interface name: should be "Vlan <vlan_id>"')
self._required_config = dict(
magp_id=module_params['magp_id'],
state=module_params['state'],
vlan_id=vlan_id,
router_ip=module_params['router_ip'],
router_mac=module_params['router_mac'])
self.validate_param_values(self._required_config)
@classmethod
def get_magp_id(cls, item):
header = cls.get_config_attr(item, "header")
return int(header.split()[1])
def _create_magp_instance_data(self, magp_id, item):
vlan_id = int(self.get_config_attr(item, "Interface vlan"))
state = self.get_config_attr(item, "Admin state").lower()
return dict(
magp_id=magp_id,
state=state,
vlan_id=vlan_id,
router_ip=self.get_config_attr(item, "Virtual IP"),
router_mac=self.get_config_attr(item, "Virtual MAC"))
def _update_magp_data(self, magp_data):
for magp_item in magp_data:
magp_id = self.get_magp_id(magp_item)
inst_data = self._create_magp_instance_data(magp_id, magp_item)
self._current_config[magp_id] = inst_data
def _get_magp_config(self):
cmd = "show magp"
return show_cmd(self._module, cmd, json_fmt=True, fail_on_error=False)
def load_current_config(self):
# called in base class in run function
self._current_config = dict()
magp_data = self._get_magp_config()
if magp_data:
self._update_magp_data(magp_data)
def _generate_no_magp_commands(self):
req_vlan_id = self._required_config['vlan_id']
req_magp_id = self._required_config['magp_id']
curr_magp_data = self._current_config.get(req_magp_id)
if not curr_magp_data:
return
        curr_vlan_id = curr_magp_data.get('vlan_id')  # compare the stored vlan id, not a lookup keyed by it
if curr_vlan_id == req_vlan_id:
cmd = 'interface vlan %s no magp %s' % (req_vlan_id, req_magp_id)
self._commands.append(cmd)
def _generate_magp_commands(self, req_state):
req_vlan_id = self._required_config['vlan_id']
req_magp_id = self._required_config['magp_id']
curr_magp_data = self._current_config.get(req_magp_id, dict())
curr_vlan_id = curr_magp_data.get('vlan_id')
magp_prefix = 'interface vlan %s magp %s' % (req_vlan_id, req_magp_id)
create_new_magp = False
if curr_vlan_id != req_vlan_id:
if curr_vlan_id:
cmd = 'interface vlan %s no magp %s' % (
curr_vlan_id, req_magp_id)
self._commands.append(cmd)
create_new_magp = True
self._commands.append(magp_prefix)
self._commands.append('exit')
req_router_ip = self._required_config['router_ip']
curr_router_ip = curr_magp_data.get('router_ip')
if req_router_ip:
if curr_router_ip != req_router_ip or create_new_magp:
cmd = '%s ip virtual-router address %s' % (
magp_prefix, req_router_ip)
self._commands.append(cmd)
else:
if curr_router_ip and curr_router_ip != '0.0.0.0':
cmd = '%s no ip virtual-router address' % magp_prefix
self._commands.append(cmd)
req_router_mac = self._required_config['router_mac']
curr_router_mac = curr_magp_data.get('router_mac')
if curr_router_mac:
curr_router_mac = curr_router_mac.lower()
if req_router_mac:
req_router_mac = req_router_mac.lower()
if curr_router_mac != req_router_mac or create_new_magp:
cmd = '%s ip virtual-router mac-address %s' % (
magp_prefix, req_router_mac)
self._commands.append(cmd)
else:
if curr_router_mac and curr_router_mac != '00:00:00:00:00:00':
cmd = '%s no ip virtual-router mac-address' % magp_prefix
self._commands.append(cmd)
if req_state in ('enabled', 'disabled'):
curr_state = curr_magp_data.get('state', 'enabled')
if curr_state != req_state:
if req_state == 'enabled':
suffix = 'no shutdown'
else:
suffix = 'shutdown'
cmd = '%s %s' % (magp_prefix, suffix)
self._commands.append(cmd)
def generate_commands(self):
req_state = self._required_config['state']
if req_state == 'absent':
return self._generate_no_magp_commands()
return self._generate_magp_commands(req_state)
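# Illustrative playbook usage for this module (a sketch only; the addresses
# and ids below are made up):
#
#   - onyx_magp:
#       magp_id: 103
#       interface: Vlan 1024
#       router_ip: 192.168.8.2
#       router_mac: AA:1B:2C:3D:25:E8
#       state: enabled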
def main():
""" main entry point for module execution
"""
OnyxMagpModule.main()
if __name__ == '__main__':
main()
| gpl-3.0 |
pshen/ansible | test/runner/lib/cloud/aws.py | 58 | 3311 | """AWS plugin for integration tests."""
from __future__ import absolute_import, print_function
import os
from lib.util import (
ApplicationError,
display,
is_shippable,
)
from lib.cloud import (
CloudProvider,
CloudEnvironment,
)
from lib.core_ci import (
AnsibleCoreCI,
)
class AwsCloudProvider(CloudProvider):
"""AWS cloud provider plugin. Sets up cloud resources before delegation."""
def filter(self, targets, exclude):
"""Filter out the cloud tests when the necessary config and resources are not available.
:type targets: tuple[TestTarget]
:type exclude: list[str]
"""
if os.path.isfile(self.config_static_path):
return
aci = self._create_ansible_core_ci()
if os.path.isfile(aci.ci_key):
return
if is_shippable():
return
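        # No static config, no cached Core CI key and not on Shippable:
        # fall through to the base implementation, which excludes the
        # cloud test targets.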
super(AwsCloudProvider, self).filter(targets, exclude)
def setup(self):
"""Setup the cloud resource before delegation and register a cleanup callback."""
super(AwsCloudProvider, self).setup()
aws_config_path = os.path.expanduser('~/.aws')
if os.path.exists(aws_config_path) and not self.args.docker and not self.args.remote:
raise ApplicationError('Rename "%s" or use the --docker or --remote option to isolate tests.' % aws_config_path)
if not self._use_static_config():
self._setup_dynamic()
def _setup_dynamic(self):
"""Request AWS credentials through the Ansible Core CI service."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
config = self._read_config_template()
aci = self._create_ansible_core_ci()
response = aci.start()
if not self.args.explain:
credentials = response['aws']['credentials']
values = dict(
ACCESS_KEY=credentials['access_key'],
SECRET_KEY=credentials['secret_key'],
SECURITY_TOKEN=credentials['session_token'],
)
config = self._populate_config_template(config, values)
self._write_config(config)
def _create_ansible_core_ci(self):
"""
:rtype: AnsibleCoreCI
"""
return AnsibleCoreCI(self.args, 'aws', 'sts', persist=False, stage=self.args.remote_stage)
class AwsCloudEnvironment(CloudEnvironment):
"""AWS cloud environment plugin. Updates integration test environment after delegation."""
def configure_environment(self, env, cmd):
"""
:type env: dict[str, str]
:type cmd: list[str]
"""
cmd.append('-e')
cmd.append('@%s' % self.config_path)
cmd.append('-e')
cmd.append('resource_prefix=%s' % self.resource_prefix)
def on_failure(self, target, tries):
"""
:type target: TestTarget
:type tries: int
"""
if not tries and self.managed:
display.notice('If %s failed due to permissions, the IAM test policy may need to be updated. '
'For help, consult @mattclay or @gundalow on GitHub or #ansible-devel on IRC.' % target.name)
@property
def inventory_hosts(self):
"""
:rtype: str | None
"""
return 'amazon'
| gpl-3.0 |
kamalx/edx-platform | common/djangoapps/student/roles.py | 30 | 11451 | """
Classes used to model the roles used in the courseware. Each role is responsible for checking membership,
adding users, removing users, and listing members
"""
from abc import ABCMeta, abstractmethod
from django.contrib.auth.models import User
import logging
from student.models import CourseAccessRole
from xmodule_django.models import CourseKeyField
log = logging.getLogger(__name__)
# A list of registered access roles.
REGISTERED_ACCESS_ROLES = {}
def register_access_role(cls):
"""
Decorator that allows access roles to be registered within the roles module and referenced by their
string values.
Assumes that the decorated class has a "ROLE" attribute, defining its type.
"""
try:
role_name = getattr(cls, 'ROLE')
REGISTERED_ACCESS_ROLES[role_name] = cls
except AttributeError:
log.exception(u"Unable to register Access Role with attribute 'ROLE'.")
return cls
class RoleCache(object):
"""
A cache of the CourseAccessRoles held by a particular user
"""
def __init__(self, user):
self._roles = set(
CourseAccessRole.objects.filter(user=user).all()
)
def has_role(self, role, course_id, org):
"""
Return whether this RoleCache contains a role with the specified role, course_id, and org
"""
return any(
access_role.role == role and
access_role.course_id == course_id and
access_role.org == org
for access_role in self._roles
)
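# Illustrative use of the cache (assumes a django User and a CourseKey):
#
#   cache = RoleCache(user)
#   if cache.has_role('staff', course_key, course_key.org):
#       ...  # user holds the staff role for that course/org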
class AccessRole(object):
"""
Object representing a role with particular access to a resource
"""
__metaclass__ = ABCMeta
@abstractmethod
def has_user(self, user): # pylint: disable=unused-argument
"""
Return whether the supplied django user has access to this role.
"""
return False
@abstractmethod
def add_users(self, *users):
"""
Add the role to the supplied django users.
"""
pass
@abstractmethod
def remove_users(self, *users):
"""
Remove the role from the supplied django users.
"""
pass
@abstractmethod
def users_with_role(self):
"""
Return a django QuerySet for all of the users with this role
"""
return User.objects.none()
class GlobalStaff(AccessRole):
"""
The global staff role
"""
def has_user(self, user):
return user.is_staff
def add_users(self, *users):
for user in users:
            if user.is_authenticated() and user.is_active:
user.is_staff = True
user.save()
def remove_users(self, *users):
for user in users:
# don't check is_authenticated nor is_active on purpose
user.is_staff = False
user.save()
def users_with_role(self):
raise Exception("This operation is un-indexed, and shouldn't be used")
class RoleBase(AccessRole):
"""
Roles by type (e.g., instructor, beta_user) and optionally org, course_key
"""
def __init__(self, role_name, org='', course_key=None):
"""
Create role from required role_name w/ optional org and course_key. You may just provide a role
name if it's a global role (not constrained to an org or course). Provide org if constrained to
an org. Provide org and course if constrained to a course. Although, you should use the subclasses
for all of these.
"""
super(RoleBase, self).__init__()
self.org = org
self.course_key = course_key
self._role_name = role_name
def has_user(self, user):
"""
Return whether the supplied django user has access to this role.
"""
if not (user.is_authenticated() and user.is_active):
return False
# pylint: disable=protected-access
if not hasattr(user, '_roles'):
            # Cache the user's roles in a RoleCache so repeated permission
            # checks can reuse the rows already fetched for this user
user._roles = RoleCache(user)
return user._roles.has_role(self._role_name, self.course_key, self.org)
def add_users(self, *users):
"""
Add the supplied django users to this role.
"""
# silently ignores anonymous and inactive users so that any that are
# legit get updated.
from student.models import CourseAccessRole
for user in users:
            if user.is_authenticated() and user.is_active and not self.has_user(user):
entry = CourseAccessRole(user=user, role=self._role_name, course_id=self.course_key, org=self.org)
entry.save()
if hasattr(user, '_roles'):
del user._roles
def remove_users(self, *users):
"""
Remove the supplied django users from this role.
"""
entries = CourseAccessRole.objects.filter(
user__in=users, role=self._role_name, org=self.org, course_id=self.course_key
)
entries.delete()
for user in users:
if hasattr(user, '_roles'):
del user._roles
def users_with_role(self):
"""
Return a django QuerySet for all of the users with this role
"""
# Org roles don't query by CourseKey, so use CourseKeyField.Empty for that query
if self.course_key is None:
self.course_key = CourseKeyField.Empty
entries = User.objects.filter(
courseaccessrole__role=self._role_name,
courseaccessrole__org=self.org,
courseaccessrole__course_id=self.course_key
)
return entries
class CourseRole(RoleBase):
"""
A named role in a particular course
"""
def __init__(self, role, course_key):
"""
Args:
course_key (CourseKey)
"""
super(CourseRole, self).__init__(role, course_key.org, course_key)
    @classmethod
    def course_group_already_exists(cls, course_key):
return CourseAccessRole.objects.filter(org=course_key.org, course_id=course_key).exists()
class OrgRole(RoleBase):
"""
A named role in a particular org independent of course
"""
def __init__(self, role, org):
super(OrgRole, self).__init__(role, org)
@register_access_role
class CourseStaffRole(CourseRole):
"""A Staff member of a course"""
ROLE = 'staff'
def __init__(self, *args, **kwargs):
super(CourseStaffRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseInstructorRole(CourseRole):
"""A course Instructor"""
ROLE = 'instructor'
def __init__(self, *args, **kwargs):
super(CourseInstructorRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseFinanceAdminRole(CourseRole):
"""A course staff member with privileges to review financial data."""
ROLE = 'finance_admin'
def __init__(self, *args, **kwargs):
super(CourseFinanceAdminRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseSalesAdminRole(CourseRole):
"""A course staff member with privileges to perform sales operations. """
ROLE = 'sales_admin'
def __init__(self, *args, **kwargs):
super(CourseSalesAdminRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseBetaTesterRole(CourseRole):
"""A course Beta Tester"""
ROLE = 'beta_testers'
def __init__(self, *args, **kwargs):
super(CourseBetaTesterRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class LibraryUserRole(CourseRole):
"""
A user who can view a library and import content from it, but not edit it.
Used in Studio only.
"""
ROLE = 'library_user'
def __init__(self, *args, **kwargs):
super(LibraryUserRole, self).__init__(self.ROLE, *args, **kwargs)
class CourseCcxCoachRole(CourseRole):
"""A CCX Coach"""
ROLE = 'ccx_coach'
def __init__(self, *args, **kwargs):
super(CourseCcxCoachRole, self).__init__(self.ROLE, *args, **kwargs)
class OrgStaffRole(OrgRole):
"""An organization staff member"""
def __init__(self, *args, **kwargs):
super(OrgStaffRole, self).__init__('staff', *args, **kwargs)
class OrgInstructorRole(OrgRole):
"""An organization instructor"""
def __init__(self, *args, **kwargs):
super(OrgInstructorRole, self).__init__('instructor', *args, **kwargs)
class OrgLibraryUserRole(OrgRole):
"""
A user who can view any libraries in an org and import content from them, but not edit them.
Used in Studio only.
"""
ROLE = LibraryUserRole.ROLE
def __init__(self, *args, **kwargs):
super(OrgLibraryUserRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseCreatorRole(RoleBase):
"""
This is the group of people who have permission to create new courses (we may want to eventually
make this an org based role).
"""
ROLE = "course_creator_group"
def __init__(self, *args, **kwargs):
super(CourseCreatorRole, self).__init__(self.ROLE, *args, **kwargs)
class UserBasedRole(object):
"""
Backward mapping: given a user, manipulate the courses and roles
"""
def __init__(self, user, role):
"""
Create a UserBasedRole accessor: for a given user and role (e.g., "instructor")
"""
self.user = user
self.role = role
def has_course(self, course_key):
"""
Return whether the role's user has the configured role access to the passed course
"""
if not (self.user.is_authenticated() and self.user.is_active):
return False
# pylint: disable=protected-access
if not hasattr(self.user, '_roles'):
self.user._roles = RoleCache(self.user)
return self.user._roles.has_role(self.role, course_key, course_key.org)
def add_course(self, *course_keys):
"""
Grant this object's user the object's role for the supplied courses
"""
        if self.user.is_authenticated() and self.user.is_active:
for course_key in course_keys:
entry = CourseAccessRole(user=self.user, role=self.role, course_id=course_key, org=course_key.org)
entry.save()
if hasattr(self.user, '_roles'):
del self.user._roles
else:
raise ValueError("user is not active. Cannot grant access to courses")
def remove_courses(self, *course_keys):
"""
Remove the supplied courses from this user's configured role.
"""
entries = CourseAccessRole.objects.filter(user=self.user, role=self.role, course_id__in=course_keys)
entries.delete()
if hasattr(self.user, '_roles'):
del self.user._roles
def courses_with_role(self):
"""
Return a django QuerySet for all of the courses with this user x role. You can access
any of these properties on each result record:
* user (will be self.user--thus uninteresting)
* org
* course_id
* role (will be self.role--thus uninteresting)
"""
return CourseAccessRole.objects.filter(role=self.role, user=self.user)
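# Illustrative use of the backward mapping (assumes an existing django User):
#
#   staff_courses = UserBasedRole(user, CourseStaffRole.ROLE).courses_with_role()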
| agpl-3.0 |
mattcongy/itshop | docker-images/taigav2/taiga-back/taiga/events/events.py | 1 | 3483 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
from django.db import connection
from taiga.base.utils import json
from taiga.base.utils.db import get_typename_for_model_instance
from . import middleware as mw
from . import backends
# The complete list of content types
# of allowed models for change events
watched_types = set([
"userstories.userstory",
"issues.issue",
"tasks.task",
"wiki.wiki_page",
"milestones.milestone",
])
def emit_event(data:dict, routing_key:str, *,
sessionid:str=None, channel:str="events",
on_commit:bool=True):
if not sessionid:
sessionid = mw.get_current_session_id()
data = {"session_id": sessionid,
"data": data}
backend = backends.get_events_backend()
def backend_emit_event():
backend.emit_event(message=json.dumps(data), routing_key=routing_key, channel=channel)
if on_commit:
connection.on_commit(backend_emit_event)
else:
backend_emit_event()
def emit_event_for_model(obj, *, type:str="change", channel:str="events",
content_type:str=None, sessionid:str=None):
"""
Sends a model change event.
"""
if obj._importing:
return None
assert type in set(["create", "change", "delete"])
assert hasattr(obj, "project_id")
if not content_type:
content_type = get_typename_for_model_instance(obj)
projectid = getattr(obj, "project_id")
pk = getattr(obj, "pk", None)
app_name, model_name = content_type.split(".", 1)
routing_key = "changes.project.{0}.{1}".format(projectid, app_name)
data = {"type": type,
"matches": content_type,
"pk": pk}
return emit_event(routing_key=routing_key,
channel=channel,
sessionid=sessionid,
data=data)
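# For a change to a user story of project 1, the code above produces
# (shape inferred from emit_event/emit_event_for_model):
#
#   routing_key = "changes.project.1.userstories"
#   message = '{"session_id": "<sid>", "data": {"type": "change",
#              "matches": "userstories.userstory", "pk": 42}}'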
def emit_event_for_ids(ids, content_type:str, projectid:int, *,
type:str="change", channel:str="events", sessionid:str=None):
assert type in set(["create", "change", "delete"])
assert isinstance(ids, collections.Iterable)
assert content_type, "'content_type' parameter is mandatory"
app_name, model_name = content_type.split(".", 1)
routing_key = "changes.project.{0}.{1}".format(projectid, app_name)
data = {"type": type,
"matches": content_type,
"pk": ids}
return emit_event(routing_key=routing_key,
channel=channel,
sessionid=sessionid,
data=data)
| mit |
kivymd/KivyMD | kivymd/uix/banner.py | 1 | 11889 | """
Components/Banner
=================
.. seealso::
`Material Design spec, Banner <https://material.io/components/banners>`_
.. rubric:: A banner displays a prominent message and related optional actions.
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner.png
:align: center
Usage
=====
.. code-block:: python
from kivy.lang import Builder
from kivy.factory import Factory
from kivymd.app import MDApp
Builder.load_string('''
<ExampleBanner@Screen>
MDBanner:
id: banner
text: ["One line string text example without actions."]
# The widget that is under the banner.
# It will be shifted down to the height of the banner.
over_widget: screen
vertical_pad: toolbar.height
MDToolbar:
id: toolbar
title: "Example Banners"
elevation: 10
pos_hint: {'top': 1}
BoxLayout:
id: screen
orientation: "vertical"
size_hint_y: None
height: Window.height - toolbar.height
OneLineListItem:
text: "Banner without actions"
on_release: banner.show()
Widget:
''')
class Test(MDApp):
def build(self):
return Factory.ExampleBanner()
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-example-1.gif
:align: center
.. rubric:: Banner type.
By default, the banner is of the type ``'one-line'``:
.. code-block:: kv
MDBanner:
text: ["One line string text example without actions."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-one-line.png
:align: center
To use a two-line banner, specify the ``'two-line'`` :attr:`MDBanner.type` for the banner
and pass the list of two lines to the :attr:`MDBanner.text` parameter:
.. code-block:: kv
MDBanner:
type: "two-line"
text:
["One line string text example without actions.", "This is the second line of the banner message."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-two-line.png
:align: center
Similarly, create a three-line banner:
.. code-block:: kv
MDBanner:
type: "three-line"
text:
["One line string text example without actions.", "This is the second line of the banner message.", "and this is the third line of the banner message."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-three-line.png
:align: center
To add buttons to any type of banner,
use the :attr:`MDBanner.left_action` and :attr:`MDBanner.right_action` parameters,
which should take a list ['Button name', function]:
.. code-block:: kv
MDBanner:
text: ["One line string text example without actions."]
left_action: ["CANCEL", lambda x: None]
Or two buttons:
.. code-block:: kv
MDBanner:
text: ["One line string text example without actions."]
left_action: ["CANCEL", lambda x: None]
right_action: ["CLOSE", lambda x: None]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-actions.png
:align: center
If you want to use the icon on the left in the banner,
add the prefix `'-icon'` to the banner type:
.. code-block:: kv
MDBanner:
type: "one-line-icon"
icon: f"{images_path}/kivymd.png"
text: ["One line string text example without actions."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-icon.png
:align: center
.. Note:: `See full example <https://github.com/kivymd/KivyMD/wiki/Components-Banner>`_
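To drive the banner from code, call its ``show()`` and ``hide()`` methods
(a minimal sketch, assuming the ``banner`` id from the first example above):

.. code-block:: python

    app.root.ids.banner.show()  # slide the banner in
    app.root.ids.banner.hide()  # slide it back out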
"""
__all__ = ("MDBanner",)
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.metrics import dp
from kivy.properties import (
ListProperty,
NumericProperty,
ObjectProperty,
OptionProperty,
StringProperty,
)
from kivy.uix.widget import Widget
from kivymd.uix.button import MDFlatButton
from kivymd.uix.card import MDCard
from kivymd.uix.list import (
OneLineAvatarListItem,
OneLineListItem,
ThreeLineAvatarListItem,
ThreeLineListItem,
TwoLineAvatarListItem,
TwoLineListItem,
)
Builder.load_string(
"""
#:import Window kivy.core.window.Window
#:import Clock kivy.clock.Clock
<ThreeLineIconBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
tertiary_text: root.text_message[2]
divider: None
_no_ripple_effect: True
ImageLeftWidget:
source: root.icon
<TwoLineIconBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
divider: None
_no_ripple_effect: True
ImageLeftWidget:
source: root.icon
<OneLineIconBanner>
text: root.text_message[0]
divider: None
_no_ripple_effect: True
ImageLeftWidget:
source: root.icon
<ThreeLineBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
tertiary_text: root.text_message[2]
divider: None
_no_ripple_effect: True
<TwoLineBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
divider: None
_no_ripple_effect: True
<OneLineBanner>
text: root.text_message[0]
divider: None
_no_ripple_effect: True
<MDBanner>
size_hint_y: None
height: self.minimum_height
banner_y: 0
orientation: "vertical"
y: Window.height - self.banner_y
canvas:
Color:
rgba: 0, 0, 0, 0
Rectangle:
pos: self.pos
size: self.size
BoxLayout:
id: container_message
size_hint_y: None
height: self.minimum_height
BoxLayout:
size_hint: None, None
size: self.minimum_size
pos_hint: {"right": 1}
padding: 0, 0, "8dp", "8dp"
spacing: "8dp"
BoxLayout:
id: left_action_box
size_hint: None, None
size: self.minimum_size
BoxLayout:
id: right_action_box
size_hint: None, None
size: self.minimum_size
"""
)
class MDBanner(MDCard):
vertical_pad = NumericProperty(dp(68))
"""
Indent the banner at the top of the screen.
:attr:`vertical_pad` is an :class:`~kivy.properties.NumericProperty`
and defaults to `dp(68)`.
"""
opening_transition = StringProperty("in_quad")
"""
The name of the animation transition.
:attr:`opening_transition` is an :class:`~kivy.properties.StringProperty`
and defaults to `'in_quad'`.
"""
icon = StringProperty("data/logo/kivy-icon-128.png")
"""Icon banner.
:attr:`icon` is an :class:`~kivy.properties.StringProperty`
and defaults to `'data/logo/kivy-icon-128.png'`.
"""
over_widget = ObjectProperty()
"""
The widget that is under the banner.
It will be shifted down to the height of the banner.
:attr:`over_widget` is an :class:`~kivy.properties.ObjectProperty`
and defaults to `None`.
"""
text = ListProperty()
"""List of lines for banner text.
Must contain no more than three lines for a
`'one-line'`, `'two-line'` and `'three-line'` banner, respectively.
:attr:`text` is an :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
left_action = ListProperty()
"""The action of banner.
To add one action, make a list [`'name_action'`, callback]
where `'name_action'` is a string that corresponds to an action name and
``callback`` is the function called on a touch release event.
:attr:`left_action` is an :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
right_action = ListProperty()
"""Works the same way as :attr:`left_action`.
:attr:`right_action` is an :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
type = OptionProperty(
"one-line",
options=[
"one-line",
"two-line",
"three-line",
"one-line-icon",
"two-line-icon",
"three-line-icon",
],
allownone=True,
)
"""Banner type. . Available options are: (`"one-line"`, `"two-line"`,
`"three-line"`, `"one-line-icon"`, `"two-line-icon"`, `"three-line-icon"`).
:attr:`type` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'one-line'`.
"""
_type_message = None
_progress = False
def add_actions_buttons(self, box, data):
if data:
name_action_button, function_action_button = data
action_button = MDFlatButton(
text=f"[b]{name_action_button}[/b]",
theme_text_color="Custom",
text_color=self.theme_cls.primary_color,
on_release=function_action_button,
)
action_button.markup = True
box.add_widget(action_button)
def set_left_action(self):
self.add_actions_buttons(self.ids.left_action_box, self.left_action)
def set_right_action(self):
self.add_actions_buttons(self.ids.right_action_box, self.right_action)
def set_type_banner(self):
self._type_message = {
"three-line-icon": ThreeLineIconBanner,
"two-line-icon": TwoLineIconBanner,
"one-line-icon": OneLineIconBanner,
"three-line": ThreeLineBanner,
"two-line": TwoLineBanner,
"one-line": OneLineBanner,
}[self.type]
def add_banner_to_container(self):
self.ids.container_message.add_widget(
self._type_message(text_message=self.text, icon=self.icon)
)
def show(self):
def show(interval):
self.set_type_banner()
self.set_left_action()
self.set_right_action()
self.add_banner_to_container()
Clock.schedule_once(self.animation_display_banner, 0.1)
if self._progress:
return
self._progress = True
if self.ids.container_message.children:
self.hide()
Clock.schedule_once(show, 0.7)
def animation_display_banner(self, i):
Animation(
banner_y=self.height + self.vertical_pad,
d=0.15,
t=self.opening_transition,
).start(self)
anim = Animation(
y=self.over_widget.y - self.height,
d=0.15,
t=self.opening_transition,
)
anim.bind(on_complete=self._reset_progress)
anim.start(self.over_widget)
def hide(self):
def hide(interval):
anim = Animation(banner_y=0, d=0.15)
anim.bind(on_complete=self._remove_banner)
anim.start(self)
Animation(y=self.over_widget.y + self.height, d=0.15).start(
self.over_widget
)
Clock.schedule_once(hide, 0.5)
def _remove_banner(self, *args):
self.ids.container_message.clear_widgets()
self.ids.left_action_box.clear_widgets()
self.ids.right_action_box.clear_widgets()
def _reset_progress(self, *args):
self._progress = False
class BaseBanner(Widget):
text_message = ListProperty(["", "", ""])
icon = StringProperty()
def on_touch_down(self, touch):
self.parent.parent.hide()
class ThreeLineIconBanner(ThreeLineAvatarListItem, BaseBanner):
pass
class TwoLineIconBanner(TwoLineAvatarListItem, BaseBanner):
pass
class OneLineIconBanner(OneLineAvatarListItem, BaseBanner):
pass
class ThreeLineBanner(ThreeLineListItem, BaseBanner):
pass
class TwoLineBanner(TwoLineListItem, BaseBanner):
pass
class OneLineBanner(OneLineListItem, BaseBanner):
pass
| mit |
HadiOfBBG/pegasusrises | gdata/tlslite/integration/HTTPTLSConnection.py | 271 | 6668 | """TLS Lite + httplib."""
import socket
import httplib
from gdata.tlslite.TLSConnection import TLSConnection
from gdata.tlslite.integration.ClientHelper import ClientHelper
class HTTPBaseTLSConnection(httplib.HTTPConnection):
"""This abstract class provides a framework for adding TLS support
to httplib."""
default_port = 443
def __init__(self, host, port=None, strict=None):
        if strict is None:
            # Python 2.2 doesn't support strict
httplib.HTTPConnection.__init__(self, host, port)
else:
httplib.HTTPConnection.__init__(self, host, port, strict)
def connect(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if hasattr(sock, 'settimeout'):
sock.settimeout(10)
sock.connect((self.host, self.port))
#Use a TLSConnection to emulate a socket
self.sock = TLSConnection(sock)
#When httplib closes this, close the socket
self.sock.closeSocket = True
self._handshake(self.sock)
def _handshake(self, tlsConnection):
"""Called to perform some sort of handshake.
This method must be overridden in a subclass to do some type of
handshake. This method will be called after the socket has
been connected but before any data has been sent. If this
method does not raise an exception, the TLS connection will be
considered valid.
This method may (or may not) be called every time an HTTP
request is performed, depending on whether the underlying HTTP
connection is persistent.
@type tlsConnection: L{tlslite.TLSConnection.TLSConnection}
@param tlsConnection: The connection to perform the handshake
on.
"""
raise NotImplementedError()
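# A minimal subclass sketch (hypothetical names) showing the intended
# override point; handshakeClientSRP is one of tlslite's client handshakes:
#
#   class SRPHTTPConnection(HTTPBaseTLSConnection):
#       def _handshake(self, tlsConnection):
#           tlsConnection.handshakeClientSRP("username", "password")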
class HTTPTLSConnection(HTTPBaseTLSConnection, ClientHelper):
"""This class extends L{HTTPBaseTLSConnection} to support the
common types of handshaking."""
def __init__(self, host, port=None,
username=None, password=None, sharedKey=None,
certChain=None, privateKey=None,
cryptoID=None, protocol=None,
x509Fingerprint=None,
x509TrustList=None, x509CommonName=None,
settings = None):
"""Create a new HTTPTLSConnection.
For client authentication, use one of these argument
combinations:
- username, password (SRP)
- username, sharedKey (shared-key)
- certChain, privateKey (certificate)
For server authentication, you can either rely on the
implicit mutual authentication performed by SRP or
shared-keys, or you can do certificate-based server
authentication with one of these argument combinations:
- cryptoID[, protocol] (requires cryptoIDlib)
- x509Fingerprint
- x509TrustList[, x509CommonName] (requires cryptlib_py)
Certificate-based server authentication is compatible with
SRP or certificate-based client authentication. It is
not compatible with shared-keys.
The constructor does not perform the TLS handshake itself, but
simply stores these arguments for later. The handshake is
performed only when this class needs to connect with the
server. Thus you should be prepared to handle TLS-specific
exceptions when calling methods inherited from
L{httplib.HTTPConnection} such as request(), connect(), and
send(). See the client handshake functions in
L{tlslite.TLSConnection.TLSConnection} for details on which
exceptions might be raised.
@type host: str
@param host: Server to connect to.
@type port: int
@param port: Port to connect to.
@type username: str
@param username: SRP or shared-key username. Requires the
'password' or 'sharedKey' argument.
@type password: str
@param password: SRP password for mutual authentication.
Requires the 'username' argument.
@type sharedKey: str
@param sharedKey: Shared key for mutual authentication.
Requires the 'username' argument.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: Certificate chain for client authentication.
Requires the 'privateKey' argument. Excludes the SRP or
shared-key related arguments.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: Private key for client authentication.
Requires the 'certChain' argument. Excludes the SRP or
shared-key related arguments.
@type cryptoID: str
@param cryptoID: cryptoID for server authentication. Mutually
exclusive with the 'x509...' arguments.
@type protocol: str
@param protocol: cryptoID protocol URI for server
authentication. Requires the 'cryptoID' argument.
@type x509Fingerprint: str
@param x509Fingerprint: Hex-encoded X.509 fingerprint for
server authentication. Mutually exclusive with the 'cryptoID'
and 'x509TrustList' arguments.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
other party must present a certificate chain which extends to
one of these root certificates. The cryptlib_py module must be
installed to use this parameter. Mutually exclusive with the
'cryptoID' and 'x509Fingerprint' arguments.
@type x509CommonName: str
@param x509CommonName: The end-entity certificate's 'CN' field
must match this value. For a web server, this is typically a
server name such as 'www.amazon.com'. Mutually exclusive with
the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
'x509TrustList' argument.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
"""
HTTPBaseTLSConnection.__init__(self, host, port)
ClientHelper.__init__(self,
username, password, sharedKey,
certChain, privateKey,
cryptoID, protocol,
x509Fingerprint,
x509TrustList, x509CommonName,
settings)
def _handshake(self, tlsConnection):
ClientHelper._handshake(self, tlsConnection)
| apache-2.0 |
sfagmenos/ker | tools/perf/util/setup.py | 766 | 1540 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPI')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
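# Each non-empty, non-comment line of util/python-ext-sources names one
# source file compiled into the 'perf' extension below.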
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
mpattyn/fumiste | prototypePython/steamapi/requests/adapters.py | 10 | 14863 | # -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import socket
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .compat import urlparse, basestring, urldefrag, unquote
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
except_on_missing_scheme, get_auth_from_url)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .cookies import extract_cookies_to_jar
from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
class BaseAdapter(object):
"""The Base Transport Adapter"""
def __init__(self):
super(BaseAdapter, self).__init__()
def send(self):
raise NotImplementedError
def close(self):
raise NotImplementedError
class HTTPAdapter(BaseAdapter):
"""The built-in HTTP Adapter for urllib3.
Provides a general-case interface for Requests sessions to contact HTTP and
HTTPS urls by implementing the Transport Adapter interface. This class will
usually be created by the :class:`Session <Session>` class under the
covers.
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param int max_retries: The maximum number of retries each connection
should attempt. Note, this applies only to failed connections and
timeouts, never to requests where the server returns a response.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
"""
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
'_pool_block']
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
self.max_retries = max_retries
self.config = {}
self.proxy_manager = {}
super(HTTPAdapter, self).__init__()
self._pool_connections = pool_connections
self._pool_maxsize = pool_maxsize
self._pool_block = pool_block
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
return dict((attr, getattr(self, attr, None)) for attr in
self.__attrs__)
def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
self.proxy_manager = {}
self.config = {}
for attr, value in state.items():
setattr(self, attr, value)
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
block=self._pool_block)
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
"""Initializes a urllib3 PoolManager. This method should not be called
from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
block=block)
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param conn: The urllib3 connection object associated with the cert.
:param url: The requested URL.
:param verify: Whether we should actually verify the certificate.
:param cert: The SSL certificate to verify.
"""
if url.lower().startswith('https') and verify:
cert_loc = None
# Allow self-specified cert location.
if verify is not True:
cert_loc = verify
if not cert_loc:
cert_loc = DEFAULT_CA_BUNDLE_PATH
if not cert_loc:
raise Exception("Could not find a suitable SSL CA certificate bundle.")
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
if cert:
if not isinstance(cert, basestring):
conn.cert_file = cert[0]
conn.key_file = cert[1]
else:
conn.cert_file = cert
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8')
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response
def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
"""
proxies = proxies or {}
proxy = proxies.get(urlparse(url.lower()).scheme)
if proxy:
except_on_missing_scheme(proxy)
proxy_headers = self.proxy_headers(proxy)
            if proxy not in self.proxy_manager:
self.proxy_manager[proxy] = proxy_from_url(
proxy,
proxy_headers=proxy_headers,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block)
conn = self.proxy_manager[proxy].connection_from_url(url)
else:
# Only scheme should be lower case
parsed = urlparse(url)
url = parsed.geturl()
conn = self.poolmanager.connection_from_url(url)
return conn
def close(self):
"""Disposes of any internal state.
Currently, this just closes the PoolManager, which closes pooled
connections.
"""
self.poolmanager.clear()
def request_url(self, request, proxies):
"""Obtain the url to use when making the final request.
If the message is being sent through a HTTP proxy, the full URL has to
be used. Otherwise, we should only use the path portion of the URL.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param proxies: A dictionary of schemes to proxy URLs.
"""
proxies = proxies or {}
scheme = urlparse(request.url).scheme
proxy = proxies.get(scheme)
if proxy and scheme != 'https':
url, _ = urldefrag(request.url)
else:
url = request.path_url
return url
def add_headers(self, request, **kwargs):
"""Add any headers needed by the connection. As of v2.0 this does
nothing by default, but is left for overriding by users that subclass
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
:param kwargs: The keyword arguments from the call to send().
"""
pass
def proxy_headers(self, proxy):
"""Returns a dictionary of the headers to add to any request sent
through a proxy. This works with urllib3 magic to ensure that they are
correctly sent to the proxy, rather than in a tunnelled request if
CONNECT is being used.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
        :param proxy: The url of the proxy being used for this request.
"""
headers = {}
username, password = get_auth_from_url(proxy)
if username and password:
# Proxy auth usernames and passwords will be urlencoded, we need
# to decode them.
username = unquote(username)
password = unquote(password)
headers['Proxy-Authorization'] = _basic_auth_str(username,
password)
return headers
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) The timeout on the request.
:param verify: (optional) Whether to verify SSL certificates.
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
conn = self.get_connection(request.url, proxies)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if stream:
timeout = TimeoutSauce(connect=timeout)
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=timeout)
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
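                    # Chunked transfer encoding: each chunk is framed as
                    # "<hex length>\r\n<data>\r\n" and the body ends with a
                    # zero-length chunk ("0\r\n\r\n"); see RFC 7230 section 4.1.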
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
else:
# All is well, return the connection to the pool.
conn._put_conn(low_conn)
except socket.error as sockerr:
raise ConnectionError(sockerr)
except MaxRetryError as e:
raise ConnectionError(e)
except _ProxyError as e:
raise ProxyError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
raise SSLError(e)
elif isinstance(e, TimeoutError):
raise Timeout(e)
else:
raise
r = self.build_response(request, resp)
if not stream:
r.content
return r
| mit |
shesselba/linux-berlin | tools/perf/scripts/python/futex-contention.py | 1997 | 1508 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
nr, ret):
	if tid in thread_blocktime:
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/testfixtures/tests/test_roundcomparison.py | 2 | 5182 | # Copyright (c) 2014 Simplistix Ltd
# See license.txt for license details.
from decimal import Decimal
from testfixtures import RoundComparison as R, compare, ShouldRaise
from unittest import TestCase
from ..compat import PY2, PY3
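# R(value, precision) compares equal to any number that rounds to the same
# result at that precision; a negative precision rounds to tens, hundreds,
# and so on, as the tests below exercise.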
class Tests(TestCase):
def test_equal_yes_rhs(self):
self.assertTrue(0.123457 == R(0.123456, 5))
def test_equal_yes_lhs(self):
self.assertTrue(R(0.123456, 5) == 0.123457)
def test_equal_no_rhs(self):
self.assertFalse(0.123453 == R(0.123456, 5))
def test_equal_no_lhs(self):
self.assertFalse(R(0.123456, 5) == 0.123453)
def test_not_equal_yes_rhs(self):
self.assertFalse(0.123457 != R(0.123456, 5))
def test_not_equal_yes_lhs(self):
self.assertFalse(R(0.123456, 5) != 0.123457)
def test_not_equal_no_rhs(self):
self.assertTrue(0.123453 != R(0.123456, 5))
def test_not_equal_no_lhs(self):
self.assertTrue(R(0.123456, 5) != 0.123453)
def test_equal_in_sequence_rhs(self):
self.assertEqual((1, 2, 0.123457),
(1, 2, R(0.123456, 5)))
def test_equal_in_sequence_lhs(self):
self.assertEqual((1, 2, R(0.123456, 5)),
(1, 2, 0.123457))
def test_not_equal_in_sequence_rhs(self):
self.assertNotEqual((1, 2, 0.1236),
(1, 2, R(0.123456, 5)))
def test_not_equal_in_sequence_lhs(self):
self.assertNotEqual((1, 2, R(0.123456, 5)),
(1, 2, 0.1236))
def test_not_numeric_rhs(self):
with ShouldRaise(TypeError):
'abc' == R(0.123456, 5)
def test_not_numeric_lhs(self):
with ShouldRaise(TypeError):
R(0.123456, 5) == 'abc'
def test_repr(self):
compare('<R:0.12346 to 5 digits>',
repr(R(0.123456, 5)))
    def test_str(self):
        compare('<R:0.12346 to 5 digits>',
                str(R(0.123456, 5)))
    def test_str_negative(self):
        if PY3:
            expected = '<R:123500 to -2 digits>'
        else:
            expected = '<R:123500.0 to -2 digits>'
        compare(expected, str(R(123456, -2)))
TYPE_ERROR_DECIMAL = TypeError(
"Cannot compare <R:0.12346 to 5 digits> with <class 'decimal.Decimal'>"
)
def test_equal_yes_decimal_to_float_rhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertTrue(Decimal("0.123457") == R(0.123456, 5))
def test_equal_yes_decimal_to_float_lhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertTrue(R(0.123456, 5) == Decimal("0.123457"))
def test_equal_no_decimal_to_float_rhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertFalse(Decimal("0.123453") == R(0.123456, 5))
def test_equal_no_decimal_to_float_lhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertFalse(R(0.123456, 5) == Decimal("0.123453"))
TYPE_ERROR_FLOAT = TypeError(
"Cannot compare <R:0.12346 to 5 digits> with <class 'float'>"
)
def test_equal_yes_float_to_decimal_rhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertTrue(0.123457 == R(Decimal("0.123456"), 5))
def test_equal_yes_float_to_decimal_lhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertTrue(R(Decimal("0.123456"), 5) == 0.123457)
def test_equal_no_float_to_decimal_rhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertFalse(0.123453 == R(Decimal("0.123456"), 5))
def test_equal_no_float_to_decimal_lhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertFalse(R(Decimal("0.123456"), 5) == 0.123453)
def test_integer_float(self):
with ShouldRaise(TypeError, unless=PY2):
1 == R(1.000001, 5)
def test_float_integer(self):
with ShouldRaise(TypeError, unless=PY2):
R(1.000001, 5) == 1
def test_equal_yes_integer_other_rhs(self):
self.assertTrue(10 == R(11, -1))
def test_equal_yes_integer_lhs(self):
self.assertTrue(R(11, -1) == 10)
def test_equal_no_integer_rhs(self):
self.assertFalse(10 == R(16, -1))
def test_equal_no_integer_lhs(self):
self.assertFalse(R(16, -1) == 10)
def test_equal_integer_zero_precision(self):
self.assertTrue(1 == R(1, 0))
def test_equal_yes_negative_precision(self):
self.assertTrue(149.123 == R(101.123, -2))
def test_equal_no_negative_precision(self):
self.assertFalse(149.123 == R(150.001, -2))
def test_decimal_yes_rhs(self):
self.assertTrue(Decimal('0.123457') == R(Decimal('0.123456'), 5))
def test_decimal_yes_lhs(self):
self.assertTrue(R(Decimal('0.123456'), 5) == Decimal('0.123457'))
def test_decimal_no_rhs(self):
self.assertFalse(Decimal('0.123453') == R(Decimal('0.123456'), 5))
def test_decimal_no_lhs(self):
self.assertFalse(R(Decimal('0.123456'), 5) == Decimal('0.123453'))
| agpl-3.0 |
40223210/w16b_test | static/Brython3.1.1-20150328-091302/Lib/collections/__init__.py | 625 | 25849 | #__all__ = ['deque', 'defaultdict', 'Counter']
from _collections import deque, defaultdict
#from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
'UserString', 'Counter', 'OrderedDict']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
# fixme brython.. there is an issue with _abcoll
#from _abcoll import *
#from _abcoll import Set
from _abcoll import MutableMapping
#import _abcoll
#__all__ += _abcoll.__all__
from collections.abc import *
import collections.abc
__all__ += collections.abc.__all__
from _collections import deque, defaultdict, namedtuple
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
import heapq as _heapq
#fixme brython
#from weakref import proxy as _proxy
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
from reprlib import recursive_repr as _recursive_repr
class Set(set):
pass
class Sequence(list):
pass
def _proxy(obj):
return obj
################################################################################
### OrderedDict
################################################################################
class _Link(object):
__slots__ = 'prev', 'next', 'key', '__weakref__'
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as regular dictionaries.
# The internal self.__map dict maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# The sentinel is in self.__hardroot with a weakref proxy in self.__root.
# The prev links are weakref proxies (to prevent circular references).
# Individual links are kept alive by the hard reference in self.__map.
# Those hard references disappear when a key is deleted from an OrderedDict.
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries, but keyword arguments are not recommended because
their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__hardroot = _Link()
self.__root = root = _proxy(self.__hardroot)
root.prev = root.next = root
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value,
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link at the end of the linked list,
# and the inherited dictionary is updated with the new key/value pair.
if key not in self:
self.__map[key] = link = Link()
root = self.__root
last = root.prev
link.prev, link.next, link.key = last, root, key
last.next = link
root.prev = proxy(link)
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which gets
# removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link = self.__map.pop(key)
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
# Traverse the linked list in order.
root = self.__root
curr = root.next
while curr is not root:
yield curr.key
curr = curr.next
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
# Traverse the linked list in reverse order.
root = self.__root
curr = root.prev
while curr is not root:
yield curr.key
curr = curr.prev
def clear(self):
'od.clear() -> None. Remove all items from od.'
root = self.__root
root.prev = root.next = root
self.__map.clear()
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root.prev
link_prev = link.prev
link_prev.next = root
root.prev = link_prev
else:
link = root.next
link_next = link.next
root.next = link_next
link_next.prev = root
key = link.key
del self.__map[key]
value = dict.pop(self, key)
return key, value
def move_to_end(self, key, last=True):
'''Move an existing element to the end (or beginning if last==False).
Raises KeyError if the element does not exist.
When last=True, acts like a fast version of self[key]=self.pop(key).
'''
link = self.__map[key]
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
root = self.__root
if last:
last = root.prev
link.prev = last
link.next = root
last.next = root.prev = link
else:
first = root.next
link.prev = root
link.next = first
root.next = first.prev = link
def __sizeof__(self):
sizeof = _sys.getsizeof
n = len(self) + 1 # number of links including root
size = sizeof(self.__dict__) # instance dictionary
size += sizeof(self.__map) * 2 # internal dict and inherited dict
size += sizeof(self.__hardroot) * n # link objects
size += sizeof(self.__root) * n # proxy objects
return size
#fixme brython.. Issue with _abcoll, which contains MutableMapping
update = __update = MutableMapping.update
keys = MutableMapping.keys
values = MutableMapping.values
items = MutableMapping.items
__ne__ = MutableMapping.__ne__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
value. If key is not found, d is returned if given, otherwise KeyError
is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
#fixme, brython issue
#@_recursive_repr()
def __repr__(self):
'od.__repr__() <==> repr(od)'
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self.items()))
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and \
all(p==q for p, q in zip(self.items(), other.items()))
return dict.__eq__(self, other)
########################################################################
### Counter
########################################################################
def _count_elements(mapping, iterable):
'Tally elements from the iterable.'
mapping_get = mapping.get
for elem in iterable:
mapping[elem] = mapping_get(elem, 0) + 1
#try: # Load C helper function if available
# from _collections import _count_elements
#except ImportError:
# pass
class Counter(dict):
'''Dict subclass for counting hashable items. Sometimes called a bag
or multiset. Elements are stored as dictionary keys and their counts
are stored as dictionary values.
>>> c = Counter('abcdeabcdabcaba') # count elements from a string
>>> c.most_common(3) # three most common elements
[('a', 5), ('b', 4), ('c', 3)]
>>> sorted(c) # list all unique elements
['a', 'b', 'c', 'd', 'e']
>>> ''.join(sorted(c.elements())) # list elements with repetitions
'aaaaabbbbcccdde'
>>> sum(c.values()) # total of all counts
15
>>> c['a'] # count of letter 'a'
5
>>> for elem in 'shazam': # update counts from an iterable
... c[elem] += 1 # by adding 1 to each element's count
>>> c['a'] # now there are seven 'a'
7
>>> del c['b'] # remove all 'b'
>>> c['b'] # now there are zero 'b'
0
>>> d = Counter('simsalabim') # make another counter
>>> c.update(d) # add in the second counter
>>> c['a'] # now there are nine 'a'
9
>>> c.clear() # empty the counter
>>> c
Counter()
Note: If a count is set to zero or reduced to zero, it will remain
in the counter until the entry is deleted or the counter is cleared:
>>> c = Counter('aaabbc')
>>> c['b'] -= 2 # reduce the count of 'b' by two
>>> c.most_common() # 'b' is still in, but its count is zero
[('a', 3), ('c', 1), ('b', 0)]
'''
# References:
# http://en.wikipedia.org/wiki/Multiset
# http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
# http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
# http://code.activestate.com/recipes/259174/
# Knuth, TAOCP Vol. II section 4.6.3
def __init__(self, iterable=None, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
>>> c = Counter() # a new, empty counter
>>> c = Counter('gallahad') # a new counter from an iterable
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
#super().__init__() #BE modified since super not supported
dict.__init__(self)
self.update(iterable, **kwds)
def __missing__(self, key):
'The count of elements not in the Counter is zero.'
# Needed so that self[missing_item] does not raise KeyError
return 0
def most_common(self, n=None):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
>>> Counter('abcdeabcdabcaba').most_common(3)
[('a', 5), ('b', 4), ('c', 3)]
'''
# Emulate Bag.sortedByCount from Smalltalk
if n is None:
return sorted(self.items(), key=_itemgetter(1), reverse=True)
return _heapq.nlargest(n, self.items(), key=_itemgetter(1))
def elements(self):
'''Iterator over elements repeating each as many times as its count.
>>> c = Counter('ABCABC')
>>> sorted(c.elements())
['A', 'A', 'B', 'B', 'C', 'C']
# Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
>>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
>>> product = 1
>>> for factor in prime_factors.elements(): # loop over factors
... product *= factor # and multiply them
>>> product
1836
Note, if an element's count has been set to zero or is a negative
number, elements() will ignore it.
'''
# Emulate Bag.do from Smalltalk and Multiset.begin from C++.
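        # Each (elem, count) pair expands to repeat(elem, count); chaining the
        # repeats yields every element as many times as its count.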
return _chain.from_iterable(_starmap(_repeat, self.items()))
# Override dict methods where necessary
@classmethod
def fromkeys(cls, iterable, v=None):
# There is no equivalent method for counters because setting v=1
# means that no element can have a count greater than one.
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
def update(self, iterable=None, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.update('witch') # add elements from another iterable
>>> d = Counter('watch')
>>> c.update(d) # add elements from another counter
>>> c['h'] # four 'h' in which, witch, and watch
4
'''
# The regular dict.update() operation makes no sense here because the
        # replace behavior results in some of the original untouched counts
        # being mixed in with all of the other counts for a mishmash that
        # doesn't have a straightforward interpretation in most counting
# contexts. Instead, we implement straight-addition. Both the inputs
# and outputs are allowed to contain zero and negative counts.
if iterable is not None:
if isinstance(iterable, Mapping):
if self:
self_get = self.get
for elem, count in iterable.items():
self[elem] = count + self_get(elem, 0)
else:
super().update(iterable) # fast path when counter is empty
else:
_count_elements(self, iterable)
if kwds:
self.update(kwds)
def subtract(self, iterable=None, **kwds):
'''Like dict.update() but subtracts counts instead of replacing them.
Counts can be reduced below zero. Both the inputs and outputs are
allowed to contain zero and negative counts.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.subtract('witch') # subtract elements from another iterable
>>> c.subtract(Counter('watch')) # subtract elements from another counter
>>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch
0
>>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch
-1
'''
if iterable is not None:
self_get = self.get
if isinstance(iterable, Mapping):
for elem, count in iterable.items():
self[elem] = self_get(elem, 0) - count
else:
for elem in iterable:
self[elem] = self_get(elem, 0) - 1
if kwds:
self.subtract(kwds)
def copy(self):
'Return a shallow copy.'
return self.__class__(self)
def __reduce__(self):
return self.__class__, (dict(self),)
def __delitem__(self, elem):
'Like dict.__delitem__() but does not raise KeyError for missing values.'
if elem in self:
super().__delitem__(elem)
def __repr__(self):
if not self:
return '%s()' % self.__class__.__name__
try:
items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
return '%s({%s})' % (self.__class__.__name__, items)
except TypeError:
# handle case where values are not orderable
return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
# and at http://en.wikipedia.org/wiki/Multiset
#
# Outputs guaranteed to only include positive counts.
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
def __add__(self, other):
'''Add counts from two counters.
>>> Counter('abbb') + Counter('bcc')
Counter({'b': 4, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count + other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __sub__(self, other):
''' Subtract count, but keep only results with positive counts.
>>> Counter('abbbc') - Counter('bccd')
Counter({'b': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count - other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count < 0:
result[elem] = 0 - count
return result
def __or__(self, other):
'''Union is the maximum of value in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = other_count if count < other_count else count
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __and__(self, other):
''' Intersection is the minimum of corresponding counts.
>>> Counter('abbb') & Counter('bcc')
Counter({'b': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = count if count < other_count else other_count
if newcount > 0:
result[elem] = newcount
return result
########################################################################
### ChainMap (helper for configparser)
########################################################################
class ChainMap(MutableMapping):
''' A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
The underlying mappings are stored in a list. That list is public and can
    be accessed or updated using the *maps* attribute.  There is no other state.
Lookups search the underlying mappings successively until a key is found.
In contrast, writes, updates, and deletions only operate on the first
mapping.
'''
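    # Semantics sketch (illustrative, not part of the original source):
    #   base = {'a': 1}
    #   cm = ChainMap({}, base)
    #   cm['a']        # -> 1, found by falling through to base
    #   cm['a'] = 2    # writes go to the first map only
    #   base['a']      # -> 1, unchanged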
def __init__(self, *maps):
'''Initialize a ChainMap by setting *maps* to the given mappings.
If no mappings are provided, a single empty dictionary is used.
'''
self.maps = list(maps) or [{}] # always at least one map
def __missing__(self, key):
raise KeyError(key)
def __getitem__(self, key):
for mapping in self.maps:
try:
return mapping[key] # can't use 'key in mapping' with defaultdict
except KeyError:
pass
return self.__missing__(key) # support subclasses that define __missing__
def get(self, key, default=None):
return self[key] if key in self else default
def __len__(self):
return len(set().union(*self.maps)) # reuses stored hash values if possible
def __iter__(self):
return iter(set().union(*self.maps))
def __contains__(self, key):
return any(key in m for m in self.maps)
def __bool__(self):
return any(self.maps)
#fixme, brython
#@_recursive_repr()
def __repr__(self):
return '{0.__class__.__name__}({1})'.format(
self, ', '.join(map(repr, self.maps)))
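    # NOTE: the definition below overrides the one above (brython workaround).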
def __repr__(self):
return ','.join(str(_map) for _map in self.maps)
@classmethod
def fromkeys(cls, iterable, *args):
'Create a ChainMap with a single dict created from the iterable.'
return cls(dict.fromkeys(iterable, *args))
def copy(self):
'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
return self.__class__(self.maps[0].copy(), *self.maps[1:])
__copy__ = copy
def new_child(self): # like Django's Context.push()
'New ChainMap with a new dict followed by all previous maps.'
return self.__class__({}, *self.maps)
@property
def parents(self): # like Django's Context.pop()
'New ChainMap from maps[1:].'
return self.__class__(*self.maps[1:])
def __setitem__(self, key, value):
self.maps[0][key] = value
def __delitem__(self, key):
try:
del self.maps[0][key]
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
try:
return self.maps[0].popitem()
except KeyError:
raise KeyError('No keys found in the first mapping.')
def pop(self, key, *args):
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
try:
return self.maps[0].pop(key, *args)
except KeyError:
#raise KeyError('Key not found in the first mapping: {!r}'.format(key))
raise KeyError('Key not found in the first mapping: %s' % key)
def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
self.maps[0].clear()
################################################################################
### UserDict
################################################################################
class UserDict(MutableMapping):
# Start by filling-out the abstract methods
def __init__(self, dict=None, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
if len(kwargs):
self.update(kwargs)
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
return self.data[key]
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, key)
raise KeyError(key)
def __setitem__(self, key, item): self.data[key] = item
def __delitem__(self, key): del self.data[key]
def __iter__(self):
return iter(self.data)
# Modify __contains__ to work correctly when __missing__ is present
def __contains__(self, key):
return key in self.data
# Now, add the methods in dicts but not in MutableMapping
def __repr__(self): return repr(self.data)
def copy(self):
if self.__class__ is UserDict:
return UserDict(self.data.copy())
import copy
data = self.data
try:
self.data = {}
c = copy.copy(self)
finally:
self.data = data
c.update(self)
return c
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
################################################################################
### UserList
################################################################################
################################################################################
### UserString
################################################################################
| gpl-3.0 |
woodpecker1/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/server_process_unittest.py | 121 | 5514 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
import unittest2 as unittest
from webkitpy.port.factory import PortFactory
from webkitpy.port import server_process
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.common.system.outputcapture import OutputCapture
class TrivialMockPort(object):
def __init__(self):
self.host = MockSystemHost()
        self.host.executive.kill_process = lambda x: None
def results_directory(self):
return "/mock-results"
def check_for_leaks(self, process_name, process_pid):
pass
def process_kill_time(self):
return 1
class MockFile(object):
def __init__(self, server_process):
self._server_process = server_process
self.closed = False
def fileno(self):
return 1
def write(self, line):
self._server_process.broken_pipes.append(self)
raise IOError
def close(self):
self.closed = True
class MockProc(object):
def __init__(self, server_process):
self.stdin = MockFile(server_process)
self.stdout = MockFile(server_process)
self.stderr = MockFile(server_process)
self.pid = 1
def poll(self):
return 1
def wait(self):
return 0
class FakeServerProcess(server_process.ServerProcess):
def _start(self):
self._proc = MockProc(self)
self.stdin = self._proc.stdin
self.stdout = self._proc.stdout
self.stderr = self._proc.stderr
self._pid = self._proc.pid
self.broken_pipes = []
class TestServerProcess(unittest.TestCase):
def test_basic(self):
cmd = [sys.executable, '-c', 'import sys; import time; time.sleep(0.02); print "stdout"; sys.stdout.flush(); print >>sys.stderr, "stderr"']
host = SystemHost()
factory = PortFactory(host)
port = factory.get()
now = time.time()
proc = server_process.ServerProcess(port, 'python', cmd)
proc.write('')
self.assertEqual(proc.poll(), None)
self.assertFalse(proc.has_crashed())
# check that doing a read after an expired deadline returns
# nothing immediately.
line = proc.read_stdout_line(now - 1)
self.assertEqual(line, None)
# FIXME: This part appears to be flaky. line should always be non-None.
# FIXME: https://bugs.webkit.org/show_bug.cgi?id=88280
line = proc.read_stdout_line(now + 1.0)
if line:
self.assertEqual(line.strip(), "stdout")
line = proc.read_stderr_line(now + 1.0)
if line:
self.assertEqual(line.strip(), "stderr")
proc.stop(0)
def test_cleanup(self):
port_obj = TrivialMockPort()
server_process = FakeServerProcess(port_obj=port_obj, name="test", cmd=["test"])
server_process._start()
server_process.stop()
self.assertTrue(server_process.stdin.closed)
self.assertTrue(server_process.stdout.closed)
self.assertTrue(server_process.stderr.closed)
def test_broken_pipe(self):
port_obj = TrivialMockPort()
port_obj.host.platform.os_name = 'win'
server_process = FakeServerProcess(port_obj=port_obj, name="test", cmd=["test"])
server_process.write("should break")
self.assertTrue(server_process.has_crashed())
self.assertIsNotNone(server_process.pid())
self.assertIsNone(server_process._proc)
self.assertEqual(server_process.broken_pipes, [server_process.stdin])
port_obj.host.platform.os_name = 'mac'
server_process = FakeServerProcess(port_obj=port_obj, name="test", cmd=["test"])
server_process.write("should break")
self.assertTrue(server_process.has_crashed())
self.assertIsNone(server_process._proc)
self.assertEqual(server_process.broken_pipes, [server_process.stdin])
| bsd-3-clause |
itkvideo/ITK | Modules/Nonunit/IntegratedTest/test/itkCurvatureFlowTestPython2.py | 41 | 3475 | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
from InsightToolkit import *
import itktesting
import sys
import os
import shutil
basename = os.path.basename( sys.argv[0] )
name = os.path.splitext( basename )[0]
dir = "Algorithms"
testInput = itktesting.ITK_TEST_INPUT
testOutput = itktesting.ITK_TEST_OUTPUT
baseLine = itktesting.ITK_TEST_BASELINE
reader = itkImageFileReaderF2_New()
reader.SetFileName( testInput+"/cthead1.png")
cf = itkCurvatureFlowImageFilterF2F2_New()
cf.SetInput( reader.GetOutput() )
cf.SetTimeStep( 0.25 )
cf.SetNumberOfIterations( 10 )
cfss = itkShiftScaleImageFilterF2US2_New()
cfss.SetInput( cf.GetOutput() )
cfss.SetShift( 0.7 )
cfss.SetScale( 0.9 )
valid = itkImageFileReaderUS2_New()
valid.SetFileName( baseLine+"/"+dir+"/"+name+".png")
diff = itkDifferenceImageFilterUS2_New()
diff.SetValidInput( valid.GetOutput() )
diff.SetTestInput( cfss.GetOutput() )
diff.SetToleranceRadius( 1 )
diff.SetDifferenceThreshold( 0 )
diff.Update()
meanDiff = diff.GetMeanDifference()
totalDiff = diff.GetTotalDifference()
print "MeanDifference = ", meanDiff
print "TotalDifference = ", totalDiff
print "<DartMeasurement name=\"MeanDifference\" type=\"numeric/double\">",meanDiff,"</DartMeasurement>"
print "<DartMeasurement name=\"TotalDifference\" type=\"numeric/double\">",totalDiff,"</DartMeasurement>"
if ( meanDiff > 0.1 ) :
convert = itkCastImageFilterUS2UC2_New()
rescale = itkRescaleIntensityImageFilterUS2UC2_New()
rescale.SetInput( diff.GetOutput() )
rescale.SetOutputMinimum( 0 )
rescale.SetOutputMaximum( 255 )
io = itkPNGImageIO_New()
io.SetUseCompression( 1 )
io.SetCompressionLevel( 9 )
writer = itkImageFileWriterUC2_New()
writer.SetImageIO( io.GetPointer() )
writer.SetInput( convert.GetOutput() )
writer.SetFileName( testOutput+"/"+name+".test.png" )
convert.SetInput( cfss.GetOutput() )
writer.Write()
writer.SetFileName( testOutput+"/"+name+".diff.png" )
writer.SetInput( rescale.GetOutput() )
writer.Write()
shutil.copyfile( baseLine+"/"+dir+"/"+name+".png", testOutput+"/"+name+".valid.png" )
print "<DartMeasurementFile name=\"TestImage\" type=\"image/png\">"+testOutput+"/"+name+".test.png</DartMeasurementFile>"
print "<DartMeasurementFile name=\"DifferenceImage\" type=\"image/png\">"+testOutput+"/"+name+".diff.png</DartMeasurementFile>"
print "<DartMeasurementFile name=\"ValidImage\" type=\"image/png\">"+testOutput+"/"+name+".valid.png</DartMeasurementFile>"
print "<DartMeasurement name=\"DifferenceShift\" type=\"numeric/double\">",rescale.GetShift(),"</DartMeasurement>"
print "<DartMeasurement name=\"DifferenceScale\" type=\"numeric/double\">",rescale.GetScale(),"</DartMeasurement>"
# return 1
#return 0
| apache-2.0 |
csieg/ardupilot | libraries/AP_Math/tools/geodesic_grid/geodesic_grid.py | 89 | 8697 | #!/usr/bin/python
# Copyright (C) 2016 Intel Corporation. All rights reserved.
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import argparse
import numpy as np
import sys
import icosahedron as ico
import grid
def print_code_gen_notice():
print("/* This was generated with")
print(" * libraries/AP_Math/tools/geodesic_grid/geodesic_grid.py */")
def header_neighbor_umbrella(index):
t = ico.triangles[0]
a, b, c = t
triangle, edge = (
( t, ( a, b)),
( t, ( b, c)),
( t, ( c, a)),
(-t, (-a, -b)),
(-t, (-b, -c)),
(-t, (-c, -a)),
)[index]
return ico.neighbor_umbrella(triangle, edge), edge
parser = argparse.ArgumentParser(
description="""
Utility script for helping to understand concepts used by AP_GeodesicGrid as
well as for aiding its development.
When passing a vertex as argument to one of the options, the valid values for
the coordinates are 0, -1, 1, g and -g, where g is the golden ratio.
""",
)
parser.add_argument(
'-p', '--plot',
action='store_true',
help="""
Plot results when applicable.
""",
)
parser.add_argument(
'-b', '--plot-subtriangles',
action='store_true',
help="""
Plot subtriangles as well. This implies -p.
""",
)
parser.add_argument(
'--icosahedron',
action='store_true',
help='Get the icosahedron triangles.',
)
parser.add_argument(
'-t', '--triangle',
action='append',
type=int,
nargs='+',
metavar='INDEX',
help="""
Get the icosahedron triangle at INDEX.
""",
)
parser.add_argument(
'-s', '--section',
action='append',
type=int,
nargs='+',
help="""
Get the grid section SECTION. If --plot is passed, then --plot-subtriangles is
implied.
""",
)
parser.add_argument(
'-u', '--umbrella',
action='append',
nargs=3,
metavar=('X', 'Y', 'Z'),
help="""
Get the umbrella with pivot denoted by (X, Y, Z). The pivot must be one of the
icosahedron's vertices.
""",
)
parser.add_argument(
'-n', '--neighbor-umbrella',
action='append',
nargs='+',
metavar='INDEX',
help="""
Get the neighbor umbrella at INDEX as described by _neighbor_umbrellas in
AP_GeodesicGrid.h. The special value "all" for INDEX is also accepted, which
will make it ignore other indexes passed and get all neighbor umbrellas for
that member.
""",
)
parser.add_argument(
'--neighbor-umbrella-gen',
action='store_true',
help="""
Generate C++ code for the initialization of the member _neighbor_umbrellas
described in AP_GeodesicGrid.h.
""",
)
parser.add_argument(
'--inverses-gen',
action='store_true',
help="""
Generate C++ code for the initialization of members _inverses and _mid_inverses
declared in AP_GeodesicGrid.h.
""")
args = parser.parse_args()
if args.plot_subtriangles:
args.plot = True
if args.plot:
import plot
polygons_to_plot = []
if args.triangle:
indexes = []
for l in args.triangle:
indexes += l
for i in indexes:
if 0 > i or i >= len(ico.triangles):
print(
'Triangle index must be in the range [0,%d)' % len(ico.triangles),
file=sys.stderr,
)
sys.exit(1)
print(ico.triangles[i])
if args.plot:
plot.polygon(ico.triangles[i])
if args.section:
sections = []
for l in args.section:
sections += l
for s in sections:
if 0 > s or s >= 4 * len(ico.triangles):
print(
                'Section must be in the range [0,%d)' % (4 * len(ico.triangles)),
file=sys.stderr,
)
sys.exit(1)
print(grid.section_triangle(s))
if args.plot:
args.plot_subtriangles = True
plot.sections(sections)
if args.umbrella:
for pivot in args.umbrella:
for i, x in enumerate(pivot):
if x == 'g':
x = ico.g
elif x == '-g':
x = -ico.g
else:
try:
x = int(x)
if x not in (0, -1, 1):
raise ValueError()
except ValueError:
print(
'umbrella: invalid pivot coordinate: %s' % str(x),
file=sys.stderr,
)
sys.exit(1)
pivot[i] = x
pivot = ico.Vertex(*pivot)
if pivot not in ico.vertices:
print(
'umbrella: invalid pivot:', pivot,
file=sys.stderr,
)
sys.exit(1)
u = ico.umbrella(pivot)
print("Components of the umbrella of %s:" % str(pivot))
for c in u.components:
print(" %s" % str(c))
if args.plot:
plot.polygons(u.components)
if args.neighbor_umbrella:
indexes = []
for l in args.neighbor_umbrella:
indexes += l
if 'all' in indexes:
indexes = range(6)
else:
for i, arg in enumerate(indexes):
try:
arg = int(arg)
if arg not in range(6):
raise ValueError()
except ValueError:
print(
'neighbor_umbrella: invalid index %s' % str(arg),
file=sys.stderr,
)
sys.exit(1)
indexes[i] = arg
for i in indexes:
u, order_edge = header_neighbor_umbrella(i)
print("Header umbrella %d:" % i)
print(" Pivot:", u.pivot)
for i in range(5):
print(" Vertex %d:" % i, u.vertex(i, order_edge))
for i in range(5):
print(" Component %d:" % i, u.component(i, order_edge))
if args.plot:
plot.polygons(u.components)
if args.neighbor_umbrella_gen:
print("Header neighbor umbrellas code generation:")
print_code_gen_notice()
print("const struct AP_GeodesicGrid::neighbor_umbrella")
print("AP_GeodesicGrid::_neighbor_umbrellas[3]{")
for i in range(6):
u, order_edge = header_neighbor_umbrella(i)
components = tuple(
ico.triangles.index(u.component(i, order_edge)) for i in range(5)
)
def vi_cj(i, j):
v = u.vertex(i, order_edge)
t = u.component(j, order_edge)
return t.index(v)
vi_cj_values = tuple(
vi_cj(a, b) for a, b in ((0, 0), (1, 1), (2, 1), (4, 4), (0, 4))
)
print(" {{%s}, %s}," % (
", ".join("%2d" % i for i in components),
", ".join(str(i) for i in vi_cj_values),
))
print("};")
if args.inverses_gen:
print("Header inverses code generation:")
print_code_gen_notice()
print("const Matrix3f AP_GeodesicGrid::_inverses[10]{")
for i in range(10):
a, b, c = ico.triangles[i]
m = np.matrix((
(a.x, b.x, c.x),
(a.y, b.y, c.y),
(a.z, b.z, c.z),
)).getI()
print(" {{%9.6ff, %9.6ff, %9.6ff}," % (m[0,0], m[0,1], m[0,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}," % (m[1,0], m[1,1], m[1,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}}," % (m[2,0], m[2,1], m[2,2]))
print("};")
print()
print_code_gen_notice()
print("const Matrix3f AP_GeodesicGrid::_mid_inverses[10]{")
for i in range(10):
a, b, c = ico.triangles[i]
ma, mb, mc = .5 * (a + b), .5 * (b + c), .5 * (c + a)
m = np.matrix((
(ma.x, mb.x, mc.x),
(ma.y, mb.y, mc.y),
(ma.z, mb.z, mc.z),
)).getI()
print(" {{%9.6ff, %9.6ff, %9.6ff}," % (m[0,0], m[0,1], m[0,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}," % (m[1,0], m[1,1], m[1,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}}," % (m[2,0], m[2,1], m[2,2]))
print("};")
if args.icosahedron:
print('Icosahedron:')
for i, t in enumerate(ico.triangles):
print(' %s' % str(t))
if args.plot:
plot.polygons(ico.triangles)
if args.plot:
plot.show(subtriangles=args.plot_subtriangles)
| gpl-3.0 |
chfoo/wpull | wpull/application/plugin_test.py | 1 | 1250 | import unittest
from wpull.application.plugin import WpullPlugin, hook, event, InterfaceRegistry, \
event_interface, PluginClientFunctionInfo, PluginFunctionCategory
class MockPlugin(WpullPlugin):
@hook('hook_thing')
def my_hook_callback(self):
pass
@event('event_thing')
def my_event_callback(self, data):
pass
def unrelated_function(self):
pass
class TestPlugin(unittest.TestCase):
def test_plugin_function_discovery(self):
plugin = MockPlugin()
funcs = list(plugin.get_plugin_functions())
self.assertEqual(2, len(funcs))
self.assertIn(
PluginClientFunctionInfo(
plugin.my_event_callback, 'event_thing',
PluginFunctionCategory.event),
funcs)
self.assertIn(
PluginClientFunctionInfo(
plugin.my_hook_callback, 'hook_thing',
PluginFunctionCategory.hook),
funcs)
def test_plugin_interface_registry(self):
registry = InterfaceRegistry()
@event_interface('test_event', registry)
def event_callback(data):
pass
self.assertEqual(1, len(registry))
self.assertIn('test_event', registry)
| gpl-3.0 |
Rademade/taiga-back | taiga/base/api/templatetags/api.py | 3 | 9932 | # Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The code is partially taken (and modified) from django rest framework
# that is licensed under the following terms:
#
# Copyright (c) 2011-2014, Tom Christie
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.contrib.staticfiles.templatetags.staticfiles import StaticFilesNode
from django.http import QueryDict
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.safestring import SafeData, mark_safe
from django.utils import six
from django.utils.encoding import force_text
from django.utils.html import smart_urlquote
from urllib import parse as urlparse
import re
register = template.Library()
@register.tag("static")
def do_static(parser, token):
return StaticFilesNode.handle_token(parser, token)
def replace_query_param(url, key, val):
"""
Given a URL and a key/val pair, set or replace an item in the query
parameters of the URL, and return the new URL.
"""
(scheme, netloc, path, query, fragment) = urlparse.urlsplit(url)
query_dict = QueryDict(query).copy()
query_dict[key] = val
query = query_dict.urlencode()
return urlparse.urlunsplit((scheme, netloc, path, query, fragment))
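# Illustrative behavior (parameter order in the result may vary):
#   replace_query_param('/api/items?page=2&size=10', 'page', 3)
#   -> '/api/items?page=3&size=10'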
# Regex for adding classes to html snippets
class_re = re.compile(r'(?<=class=["\'])(.*)(?=["\'])')
# And the template tags themselves...
@register.simple_tag
def optional_login(request):
"""
Include a login snippet if REST framework's login view is in the URLconf.
"""
try:
login_url = reverse("api:login")
except NoReverseMatch:
return ""
snippet = "<a href='%s?next=%s'>Log in</a>" % (login_url, request.path)
return snippet
@register.simple_tag
def optional_logout(request):
"""
Include a logout snippet if REST framework's logout view is in the URLconf.
"""
try:
logout_url = reverse("api:logout")
except NoReverseMatch:
return ""
snippet = "<a href='%s?next=%s'>Log out</a>" % (logout_url, request.path)
return snippet
@register.simple_tag
def add_query_param(request, key, val):
"""
Add a query parameter to the current request url, and return the new url.
"""
iri = request.get_full_path()
uri = iri_to_uri(iri)
return replace_query_param(uri, key, val)
@register.filter
def add_class(value, css_class):
"""
http://stackoverflow.com/questions/4124220/django-adding-css-classes-when-rendering-form-fields-in-a-template
Inserts classes into template variables that contain HTML tags,
useful for modifying forms without needing to change the Form objects.
Usage:
{{ field.label_tag|add_class:"control-label" }}
In the case of REST Framework, the filter is used to add Bootstrap-specific
classes to the forms.
"""
html = six.text_type(value)
match = class_re.search(html)
if match:
m = re.search(r"^%s$|^%s\s|\s%s\s|\s%s$" % (css_class, css_class,
css_class, css_class),
match.group(1))
if not m:
return mark_safe(class_re.sub(match.group(1) + " " + css_class,
html))
else:
return mark_safe(html.replace(">", ' class="%s">' % css_class, 1))
return value
# Bunch of stuff cloned from urlize
TRAILING_PUNCTUATION = [".", ",", ":", ";", ".)", "\"", "'"]
WRAPPING_PUNCTUATION = [("(", ")"), ("<", ">"), ("[", "]"), ("<", ">"),
("\"", "\""), ("'", "'")]
word_split_re = re.compile(r"(\s+)")
simple_url_re = re.compile(r"^https?://\[?\w", re.IGNORECASE)
simple_url_2_re = re.compile(r"^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$", re.IGNORECASE)
simple_email_re = re.compile(r"^\S+@\S+\.\S+$")
def smart_urlquote_wrapper(matched_url):
"""
Simple wrapper for smart_urlquote. ValueError("Invalid IPv6 URL") can
be raised here, see issue #1386
"""
try:
return smart_urlquote(matched_url)
except ValueError:
return None
@register.filter
def urlize_quoted_links(text, trim_url_limit=None, nofollow=True, autoescape=True):
"""
Converts any URLs in text into clickable links.
Works on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
    leading punctuation (opening parens) and it'll still do the right thing.
    If trim_url_limit is not None, the URLs in link text longer than this limit
    will be truncated to trim_url_limit-3 characters and appended with an ellipsis.
If nofollow is True, the URLs in link text will get a rel="nofollow"
attribute.
If autoescape is True, the link text and URLs will get autoescaped.
"""
trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ("%s..." % x[:max(0, limit - 3)])) or x
safe_input = isinstance(text, SafeData)
words = word_split_re.split(force_text(text))
for i, word in enumerate(words):
if "." in word or "@" in word or ":" in word:
# Deal with punctuation.
lead, middle, trail = "", word, ""
for punctuation in TRAILING_PUNCTUATION:
if middle.endswith(punctuation):
middle = middle[:-len(punctuation)]
trail = punctuation + trail
for opening, closing in WRAPPING_PUNCTUATION:
if middle.startswith(opening):
middle = middle[len(opening):]
lead = lead + opening
                # Keep parentheses at the end only if they're balanced.
if (middle.endswith(closing)
and middle.count(closing) == middle.count(opening) + 1):
middle = middle[:-len(closing)]
trail = closing + trail
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ""
if simple_url_re.match(middle):
url = smart_urlquote_wrapper(middle)
elif simple_url_2_re.match(middle):
url = smart_urlquote_wrapper("http://%s" % middle)
elif not ":" in middle and simple_email_re.match(middle):
local, domain = middle.rsplit("@", 1)
try:
domain = domain.encode("idna").decode("ascii")
except UnicodeError:
continue
url = "mailto:%s@%s" % (local, domain)
nofollow_attr = ""
# Make link.
if url:
trimmed = trim_url(middle)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
url, trimmed = escape(url), escape(trimmed)
middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
words[i] = mark_safe("%s%s%s" % (lead, middle, trail))
else:
if safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
elif safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
return "".join(words)
@register.filter
def break_long_headers(header):
"""
Breaks headers longer than 160 characters (~page length)
when possible (are comma separated)
"""
if len(header) > 160 and "," in header:
header = mark_safe("<br> " + ", <br>".join(header.split(",")))
return header
| agpl-3.0 |
facebookresearch/fastText | python/doc/examples/bin_to_vec.py | 1 | 1120 | #!/usr/bin/env python
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from fasttext import load_model
import argparse
import errno
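# Typical invocation (illustrative): redirect stdout to produce a .vec file,
#   python bin_to_vec.py model.bin > model.vec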
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=("Print fasttext .vec file to stdout from .bin file")
)
parser.add_argument(
"model",
help="Model to use",
)
args = parser.parse_args()
f = load_model(args.model)
words = f.get_words()
print(str(len(words)) + " " + str(f.get_dimension()))
for w in words:
v = f.get_word_vector(w)
vstr = ""
for vi in v:
vstr += " " + str(vi)
try:
print(w + vstr)
except IOError as e:
if e.errno == errno.EPIPE:
pass
| mit |
haeusser/tensorflow | tensorflow/python/kernel_tests/dynamic_stitch_op_test.py | 77 | 6501 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.data_flow_ops.dynamic_stitch."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gradients_impl
import tensorflow.python.ops.data_flow_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class DynamicStitchTest(test.TestCase):
def testScalar(self):
with self.test_session():
indices = [constant_op.constant(0), constant_op.constant(1)]
data = [constant_op.constant(40), constant_op.constant(60)]
for step in -1, 1:
stitched_t = data_flow_ops.dynamic_stitch(indices[::step], data)
stitched_val = stitched_t.eval()
self.assertAllEqual([40, 60][::step], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a vector of some unknown
# length.
self.assertEqual([None], stitched_t.get_shape().as_list())
def testSimpleOneDimensional(self):
with self.test_session():
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6, 2, 3, 5])
]
data = [
constant_op.constant([0, 40, 70]),
constant_op.constant([10, 60, 20, 30, 50])
]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
self.assertAllEqual([0, 10, 20, 30, 40, 50, 60, 70], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a vector of some unknown
# length.
self.assertEqual([None], stitched_t.get_shape().as_list())
def testOneListOneDimensional(self):
with self.test_session():
indices = [constant_op.constant([1, 6, 2, 3, 5, 0, 4, 7])]
data = [constant_op.constant([10, 60, 20, 30, 50, 0, 40, 70])]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
self.assertAllEqual([0, 10, 20, 30, 40, 50, 60, 70], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a vector of some unknown
# length.
self.assertEqual([None], stitched_t.get_shape().as_list())
def testSimpleTwoDimensional(self):
with self.test_session():
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6]),
constant_op.constant([2, 3, 5])
]
data = [
constant_op.constant([[0, 1], [40, 41], [70, 71]]),
constant_op.constant([[10, 11], [60, 61]]),
constant_op.constant([[20, 21], [30, 31], [50, 51]])
]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
self.assertAllEqual([[0, 1], [10, 11], [20, 21], [30, 31], [40, 41],
[50, 51], [60, 61], [70, 71]], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a matrix with 2 columns and
# some unknown number of rows.
self.assertEqual([None, 2], stitched_t.get_shape().as_list())
def testHigherRank(self):
with self.test_session() as sess:
indices = [
constant_op.constant(6), constant_op.constant([4, 1]),
constant_op.constant([[5, 2], [0, 3]])
]
data = [
constant_op.constant([61, 62]),
constant_op.constant([[41, 42], [11, 12]]),
constant_op.constant([[[51, 52], [21, 22]], [[1, 2], [31, 32]]])
]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
correct = 10 * np.arange(7)[:, None] + [1, 2]
self.assertAllEqual(correct, stitched_val)
self.assertEqual([None, 2], stitched_t.get_shape().as_list())
# Test gradients
stitched_grad = 7 * stitched_val
grads = gradients_impl.gradients(stitched_t, indices + data,
stitched_grad)
self.assertEqual(grads[:3], [None] * 3) # Indices have no gradients
for datum, grad in zip(data, sess.run(grads[3:])):
self.assertAllEqual(7 * datum.eval(), grad)
def testErrorIndicesMultiDimensional(self):
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([[1, 6, 2, 3, 5]])
]
data = [
constant_op.constant([[0, 40, 70]]),
constant_op.constant([10, 60, 20, 30, 50])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
def testErrorDataNumDimsMismatch(self):
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6, 2, 3, 5])
]
data = [
constant_op.constant([0, 40, 70]),
constant_op.constant([[10, 60, 20, 30, 50]])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
def testErrorDataDimSizeMismatch(self):
indices = [
constant_op.constant([0, 4, 5]), constant_op.constant([1, 6, 2, 3])
]
data = [
constant_op.constant([[0], [40], [70]]),
constant_op.constant([[10, 11], [60, 61], [20, 21], [30, 31]])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
def testErrorDataAndIndicesSizeMismatch(self):
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6, 2, 3, 5])
]
data = [
constant_op.constant([0, 40, 70]),
constant_op.constant([10, 60, 20, 30])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
if __name__ == "__main__":
test.main()
| apache-2.0 |
tersmitten/ansible | test/units/modules/storage/netapp/test_na_ontap_qos_policy_group.py | 45 | 12789 | # (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit test template for ONTAP Ansible module '''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch, Mock
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_qos_policy_group \
import NetAppOntapQosPolicyGroup as qos_policy_group_module # module under test
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None, data=None):
''' save arguments '''
self.kind = kind
self.params = data
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.kind == 'policy':
xml = self.build_policy_group_info(self.params)
if self.kind == 'error':
error = netapp_utils.zapi.NaApiError('test', 'error')
raise error
self.xml_out = xml
return xml
@staticmethod
def build_policy_group_info(vol_details):
''' build xml data for volume-attributes '''
xml = netapp_utils.zapi.NaElement('xml')
attributes = {
'num-records': 1,
'attributes-list': {
'qos-policy-group-info': {
'is-shared': 'true',
'max-throughput': '800KB/s,800IOPS',
'min-throughput': '100IOPS',
'num-workloads': 0,
'pgid': 8690,
'policy-group': vol_details['name'],
'vserver': vol_details['vserver']
}
}
}
xml.translate_struct(attributes)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.mock_policy_group = {
'name': 'policy_1',
'vserver': 'policy_vserver',
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS'
}
def mock_args(self):
return {
'name': self.mock_policy_group['name'],
'vserver': self.mock_policy_group['vserver'],
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'hostname': 'test',
'username': 'test_user',
'password': 'test_pass!',
'https': 'False'
}
def get_policy_group_mock_object(self, kind=None):
"""
Helper method to return an na_ontap_volume object
:param kind: passes this param to MockONTAPConnection()
:return: na_ontap_volume object
"""
policy_obj = qos_policy_group_module()
policy_obj.asup_log_for_cserver = Mock(return_value=None)
policy_obj.cluster = Mock()
policy_obj.cluster.invoke_successfully = Mock()
if kind is None:
policy_obj.server = MockONTAPConnection()
else:
policy_obj.server = MockONTAPConnection(kind=kind, data=self.mock_policy_group)
return policy_obj
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
qos_policy_group_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_get_nonexistent_policy(self):
''' Test if get_policy_group returns None for non-existent policy_group '''
set_module_args(self.mock_args())
result = self.get_policy_group_mock_object().get_policy_group()
assert result is None
def test_get_existing_policy_group(self):
''' Test if get_policy_group returns details for existing policy_group '''
set_module_args(self.mock_args())
result = self.get_policy_group_mock_object('policy').get_policy_group()
assert result['name'] == self.mock_policy_group['name']
assert result['vserver'] == self.mock_policy_group['vserver']
def test_create_error_missing_param(self):
''' Test if create throws an error if name is not specified'''
data = self.mock_args()
del data['name']
set_module_args(data)
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('policy').create_policy_group()
msg = 'missing required arguments: name'
assert exc.value.args[0]['msg'] == msg
def test_successful_create(self):
''' Test successful create '''
data = self.mock_args()
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object().apply()
assert exc.value.args[0]['changed']
def test_create_idempotency(self):
''' Test create idempotency '''
set_module_args(self.mock_args())
obj = self.get_policy_group_mock_object('policy')
with pytest.raises(AnsibleExitJson) as exc:
obj.apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_create_error(self, get_policy_group):
''' Test create error '''
set_module_args(self.mock_args())
get_policy_group.side_effect = [
None
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error creating qos policy group policy_1: NetApp API failed. Reason - test:error'
def test_successful_delete(self):
''' Test delete existing volume '''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert exc.value.args[0]['changed']
def test_delete_idempotency(self):
''' Test delete idempotency '''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object().apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_delete_error(self, get_policy_group):
''' Test create idempotency '''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
current
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error deleting qos policy group policy_1: NetApp API failed. Reason - test:error'
def test_successful_modify_max_throughput(self):
''' Test successful modify max throughput '''
data = self.mock_args()
data['max_throughput'] = '900KB/s,800iops'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert exc.value.args[0]['changed']
def test_modify_max_throughput_idempotency(self):
''' Test modify idempotency '''
data = self.mock_args()
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_modify_error(self, get_policy_group):
''' Test create idempotency '''
data = self.mock_args()
data['max_throughput'] = '900KB/s,900IOPS'
set_module_args(data)
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
current
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error modifying qos policy group policy_1: NetApp API failed. Reason - test:error'
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_rename(self, get_policy_group):
''' Test rename idempotency '''
data = self.mock_args()
data['name'] = 'policy_2'
data['from_name'] = 'policy_1'
set_module_args(data)
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
None,
current
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_rename_idempotency(self, get_policy_group):
''' Test rename idempotency '''
data = self.mock_args()
data['name'] = 'policy_1'
data['from_name'] = 'policy_1'
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
current,
current
]
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_rename_error(self, get_policy_group):
        ''' Test rename error '''
data = self.mock_args()
data['from_name'] = 'policy_1'
data['name'] = 'policy_2'
set_module_args(data)
current = {
'is_shared': 'true',
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
None,
current
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error renaming qos policy group policy_1: NetApp API failed. Reason - test:error'
| gpl-3.0 |
whausen/part | src/adhocracy/lib/text/mdx_showmore.py | 1 | 2762 | """Showmore extension for Markdown.
To hide something with [more...], surround the corresponding text with triple
parentheses, e.g. (((text_to_be_hidden))).
To make the hidden text expandable, include the following JavaScript in your
page; it depends on jQuery.
$('.showmore').each(function () {
var self = $(this);
self.find('.showmore_morelink').bind('click', function (event) {
self.find('.showmore_collapsed').css('display', 'none');
self.find('.showmore_uncollapsed').css('display', 'inline');
});
self.find('.showmore_lesslink').bind('click', function (event) {
self.find('.showmore_collapsed').css('display', 'inline');
self.find('.showmore_uncollapsed').css('display', 'none');
});
});
Additionally, you have to add the following to your css code:
.showmore, .showmore_content {
display: inline;
}
.showmore_uncollapsed {
display: none;
}
"""
import re
import markdown
from pylons.i18n import _
SHOWMORE_RE = re.compile(r'\({3,}(?P<text>.*?)\){3,}',
re.MULTILINE | re.DOTALL)
MORE_STRING = u'show more'
LESS_STRING = u'show less'
PRE_HTML = u'''
<div class="showmore">
<span class="showmore_collapsed">
<span> </span>
<a class="showmore_morelink" href="#">[%s]</a>
<span> </span>
</span>
<div class="showmore_uncollapsed">
<div class="showmore_content">
'''
POST_HTML = u'''
</div>
<span> </span>
<a class="showmore_lesslink" href="#">[%s]</a>
<span> </span>
</div>
</div>
'''
class ShowmoreExtension(markdown.Extension):
""" Showmore Extension for Python-Markdown. """
def extendMarkdown(self, md, md_globals):
md.registerExtension(self)
md.preprocessors.add('showmore', ShowmorePreprocessor(md),
'>normalize_whitespace')
class ShowmorePreprocessor(markdown.preprocessors.Preprocessor):
def run(self, lines):
text = "\n".join(lines)
while 1:
m = SHOWMORE_RE.search(text)
if m:
text = '%s%s%s%s%s' % (
text[:m.start()],
self.markdown.htmlStash.store(PRE_HTML % _(MORE_STRING),
safe=True),
m.group('text'),
self.markdown.htmlStash.store(POST_HTML % _(LESS_STRING),
safe=True),
text[m.end():])
else:
break
return text.split("\n")
def makeExtension(configs=None):
return ShowmoreExtension(configs=configs)
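# Hedged usage sketch: wiring the extension into python-markdown. This assumes
# a Markdown 2.x environment (for the 'normalize_whitespace' preprocessor used
# above) and a Pylons i18n context so the `_` translator works; outside such a
# setup treat it as illustrative only.
if __name__ == '__main__':
    md = markdown.Markdown(extensions=[ShowmoreExtension()])
    print(md.convert('Intro text (((hidden details))) outro.'))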
| agpl-3.0 |
bobthekingofegypt/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/treewalkers/etree.py | 658 | 4613 | from __future__ import absolute_import, division, unicode_literals
try:
from collections import OrderedDict
except ImportError:
try:
from ordereddict import OrderedDict
except ImportError:
OrderedDict = dict
import gettext
_ = gettext.gettext
import re
from six import text_type
from . import _base
from ..utils import moduleFactoryFactory
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation):
ElementTree = ElementTreeImplementation
ElementTreeCommentType = ElementTree.Comment("asd").tag
class TreeWalker(_base.NonRecursiveTreeWalker):
"""Given the particular ElementTree representation, this implementation,
to avoid using recursion, returns "nodes" as tuples with the following
content:
1. The current element
2. The index of the element relative to its parent
3. A stack of ancestor elements
4. A flag "text", "tail" or None to indicate if the current node is a
text node; either the text or tail of the current element (1)
"""
def getNodeDetails(self, node):
if isinstance(node, tuple): # It might be the root Element
elt, key, parents, flag = node
if flag in ("text", "tail"):
return _base.TEXT, getattr(elt, flag)
else:
node = elt
if not(hasattr(node, "tag")):
node = node.getroot()
if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
return (_base.DOCUMENT,)
elif node.tag == "<!DOCTYPE>":
return (_base.DOCTYPE, node.text,
node.get("publicId"), node.get("systemId"))
elif node.tag == ElementTreeCommentType:
return _base.COMMENT, node.text
else:
assert type(node.tag) == text_type, type(node.tag)
# This is assumed to be an ordinary element
match = tag_regexp.match(node.tag)
if match:
namespace, tag = match.groups()
else:
namespace = None
tag = node.tag
attrs = OrderedDict()
for name, value in list(node.attrib.items()):
match = tag_regexp.match(name)
if match:
attrs[(match.group(1), match.group(2))] = value
else:
attrs[(None, name)] = value
return (_base.ELEMENT, namespace, tag,
attrs, len(node) or node.text)
def getFirstChild(self, node):
if isinstance(node, tuple):
element, key, parents, flag = node
else:
element, key, parents, flag = node, None, [], None
if flag in ("text", "tail"):
return None
else:
if element.text:
return element, key, parents, "text"
elif len(element):
parents.append(element)
return element[0], 0, parents, None
else:
return None
def getNextSibling(self, node):
if isinstance(node, tuple):
element, key, parents, flag = node
else:
return None
if flag == "text":
if len(element):
parents.append(element)
return element[0], 0, parents, None
else:
return None
else:
if element.tail and flag != "tail":
return element, key, parents, "tail"
elif key < len(parents[-1]) - 1:
return parents[-1][key + 1], key + 1, parents, None
else:
return None
def getParentNode(self, node):
if isinstance(node, tuple):
element, key, parents, flag = node
else:
return None
if flag == "text":
if not parents:
return element
else:
return element, key, parents, None
else:
parent = parents.pop()
if not parents:
return parent
else:
return parent, list(parents[-1]).index(parent), parents, None
return locals()
getETreeModule = moduleFactoryFactory(getETreeBuilder)
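# Hedged usage sketch: walking a parsed document with this tree walker through
# html5lib's public factory functions (assumed importable as a package); each
# token yielded by the walker is a dict describing one tree event.
if __name__ == "__main__":
    import html5lib
    doc = html5lib.parse("<p>hello <b>world</b></p>", treebuilder="etree")
    for token in html5lib.getTreeWalker("etree")(doc):
        print(token)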
| mpl-2.0 |
dcsquared13/Diamond | src/collectors/interrupt/test/testsoft.py | 31 | 3042 | #!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from soft import SoftInterruptCollector
##########################################################################
class TestSoftInterruptCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SoftInterruptCollector', {
'interval': 1
})
self.collector = SoftInterruptCollector(config, None)
def test_import(self):
self.assertTrue(SoftInterruptCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/stat', 'r')
@patch.object(Collector, 'publish')
def test_should_work_with_synthetic_data(self, publish_mock):
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'softirq 0 0 0 0 0 0 0 0 0 0 0'
)))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {})
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'softirq 55 1 2 3 4 5 6 7 8 9 10'
)))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {
'total': 55.0,
'0': 1,
'1': 2,
'2': 3,
'3': 4,
'4': 5,
'5': 6,
'6': 7,
'7': 8,
'8': 9,
'9': 10,
})
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
SoftInterruptCollector.PROC = self.getFixturePath('proc_stat_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
SoftInterruptCollector.PROC = self.getFixturePath('proc_stat_2')
self.collector.collect()
metrics = {
'total': 4971,
'0': 0,
'1': 1729,
'2': 2,
'3': 240,
'4': 31,
'5': 0,
'6': 0,
'7': 1480,
'8': 0,
'9': 1489,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
| mit |
chdecultot/erpnext | erpnext/patches/v11_0/change_healthcare_desktop_icons.py | 4 | 2450 | import frappe
from frappe import _
change_icons_map = [
{
"module_name": "Patient",
"color": "#6BE273",
"icon": "fa fa-user",
"doctype": "Patient",
"type": "link",
"link": "List/Patient",
"label": _("Patient")
},
{
"module_name": "Patient Encounter",
"color": "#2ecc71",
"icon": "fa fa-stethoscope",
"doctype": "Patient Encounter",
"type": "link",
"link": "List/Patient Encounter",
"label": _("Patient Encounter"),
},
{
"module_name": "Healthcare Practitioner",
"color": "#2ecc71",
"icon": "fa fa-user-md",
"doctype": "Healthcare Practitioner",
"type": "link",
"link": "List/Healthcare Practitioner",
"label": _("Healthcare Practitioner")
},
{
"module_name": "Patient Appointment",
"color": "#934F92",
"icon": "fa fa-calendar-plus-o",
"doctype": "Patient Appointment",
"type": "link",
"link": "List/Patient Appointment",
"label": _("Patient Appointment")
},
{
"module_name": "Lab Test",
"color": "#7578f6",
"icon": "octicon octicon-beaker",
"doctype": "Lab Test",
"type": "link",
"link": "List/Lab Test",
"label": _("Lab Test")
}
]
def execute():
change_healthcare_desktop_icons()
def change_healthcare_desktop_icons():
doctypes = ["patient", "patient_encounter", "healthcare_practitioner",
"patient_appointment", "lab_test"]
for doctype in doctypes:
frappe.reload_doc("healthcare", "doctype", doctype)
for spec in change_icons_map:
frappe.db.sql("""
delete from `tabDesktop Icon`
where _doctype = '{0}'
""".format(spec['doctype']))
desktop_icon = frappe.new_doc("Desktop Icon")
desktop_icon.hidden = 1
desktop_icon.standard = 1
desktop_icon.icon = spec['icon']
desktop_icon.color = spec['color']
desktop_icon.module_name = spec['module_name']
desktop_icon.label = spec['label']
desktop_icon.app = "erpnext"
desktop_icon.type = spec['type']
desktop_icon._doctype = spec['doctype']
desktop_icon.link = spec['link']
desktop_icon.save(ignore_permissions=True)
frappe.db.sql("""
delete from `tabDesktop Icon`
where module_name = 'Healthcare' and type = 'module'
""")
desktop_icon = frappe.new_doc("Desktop Icon")
desktop_icon.hidden = 1
desktop_icon.standard = 1
desktop_icon.icon = "fa fa-heartbeat"
desktop_icon.color = "#FF888B"
desktop_icon.module_name = "Healthcare"
desktop_icon.label = _("Healthcare")
desktop_icon.app = "erpnext"
desktop_icon.type = 'module'
desktop_icon.save(ignore_permissions=True)
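# Hedged note: patches like this are normally run by frappe's migration
# machinery. A manual invocation from a bench console (site already connected)
# might look like the sketch below; names beyond `execute` and `frappe.db` are
# illustrative.
def run_patch_manually():
	execute()
	frappe.db.commit()  # persist the recreated desktop icons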
| gpl-3.0 |
jszymon/pacal | pacal/__init__.py | 1 | 3420 | """PaCal, the probabilistic calculator."""
from __future__ import print_function
from . import params
import numpy as _np
from pylab import show
_np.seterr(all="ignore")
from .utils import Inf
from .distr import DiscreteDistr
from .distr import exp, log, atan, min, max, sqrt, sign, sin, cos, tan, tanh, sigmoid
from .standard_distr import FunDistr
from .standard_distr import NormalDistr
from .standard_distr import UniformDistr
from .standard_distr import TrapezoidalDistr
from .standard_distr import CauchyDistr
from .standard_distr import ChiSquareDistr
from .standard_distr import ExponentialDistr
from .standard_distr import GammaDistr
from .standard_distr import BetaDistr
from .standard_distr import ParetoDistr
from .standard_distr import LevyDistr
from .standard_distr import LaplaceDistr
from .standard_distr import StudentTDistr
from .standard_distr import SemicircleDistr
from .standard_distr import FDistr
from .standard_distr import WeibullDistr
from .standard_distr import GumbelDistr
from .standard_distr import FrechetDistr
from .standard_distr import LogLogisticDistr
from .standard_distr import MollifierDistr
from .standard_distr import OneDistr
from .standard_distr import ZeroDistr
from .standard_distr import BinomialDistr
from .standard_distr import BernoulliDistr
from .standard_distr import PoissonDistr
from .standard_distr import MixDistr
from .distr import CondGtDistr
from .distr import CondLtDistr
from .distr import ConstDistr
from .distr import Gt
from .distr import Lt
from .distr import Between
from .stats.noncentral_distr import NoncentralTDistr
from .stats.noncentral_distr import NoncentralChiSquareDistr
from .stats.noncentral_distr import NoncentralBetaDistr
from .stats.noncentral_distr import NoncentralFDistr
from .stats.iid_ops import iid_sum, iid_prod, iid_max, iid_min, iid_average, iid_average_geom
from .stats.iid_ops import iid_order_stat, iid_median
from .stats.distr_est import LoglikelihoodEstimator
# dependent variables
from .depvars.copulas import PiCopula, FrankCopula, ClaytonCopula, GumbelCopula
from .depvars.nddistr import NDNormalDistr, IJthOrderStatsNDDistr
from .depvars.models import TwoVarsModel, Model
def _pickle_method(method):
obj = method.__self__
cls = method.__self__.__class__
func_name = method.__func__.__name__
#print "pickle>>>", func_name, obj, cls
if func_name.startswith('__') and not func_name.endswith('__'):
#deal with mangled names
cls_name = cls.__name__.lstrip('_')
func_name = '_%s%s' % (cls_name, func_name)
return _unpickle_method, (func_name, obj, cls)
def _unpickle_method(func_name, obj, cls):
#print "upickle>>>", func_name, obj, cls
if obj and func_name in obj.__dict__:
cls, obj = obj, None # if func_name is classmethod
for cls in cls.__mro__:
try:
func = cls.__dict__[func_name]
except KeyError:
pass
else:
break
return func.__get__(obj, cls)
if params.general.parallel:
# make ufuncs picklable
import types
try:
import copyreg
#copy_reg.pickle(_np.ufunc, _pickle_ufunc, _unpickle_ufunc)
copyreg.pickle(types.MethodType, _pickle_method, _unpickle_method)
except:
import copy_reg
#copy_reg.pickle(_np.ufunc, _pickle_ufunc, _unpickle_ufunc)
copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
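# Hedged usage sketch: PaCal's core idea is arithmetic on random variables.
# Method names (mean, std, plot) are assumed from PaCal's documented API and
# plotting relies on matplotlib via the pylab import above.
if __name__ == "__main__":
    X = NormalDistr(0, 1)
    Y = UniformDistr(0, 1)
    Z = X + Y  # distribution of the sum, computed numerically
    print(Z.mean(), Z.std())
    Z.plot()
    show()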
| gpl-3.0 |
arhik/nupic | src/nupic/frameworks/opf/opfhelpers.py | 39 | 3728 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This file contains utility functions that are may be imported
# by clients of the framework. Functions that are used only by
# the prediction framework should be in opfutils.py
#
# TODO: Rename as helpers.py once we're ready to replace the legacy
# helpers.py
import imp
import os
import expdescriptionapi
def loadExperiment(path):
"""Loads the experiment description file from the path.
Args:
path: The path to a directory containing a description.py file or the file
itself.
Returns:
(config, control)
"""
if not os.path.isdir(path):
path = os.path.dirname(path)
descriptionPyModule = loadExperimentDescriptionScriptFromDir(path)
expIface = getExperimentDescriptionInterfaceFromModule(descriptionPyModule)
return expIface.getModelDescription(), expIface.getModelControl()
def loadExperimentDescriptionScriptFromDir(experimentDir):
""" Loads the experiment description python script from the given experiment
directory.
experimentDir: experiment directory path
Returns: module of the loaded experiment description scripts
"""
descriptionScriptPath = os.path.join(experimentDir, "description.py")
module = _loadDescriptionFile(descriptionScriptPath)
return module
def getExperimentDescriptionInterfaceFromModule(module):
"""
module: imported description.py module
Returns: An expdescriptionapi.DescriptionIface-based instance that
represents the experiment description
"""
result = module.descriptionInterface
assert isinstance(result, expdescriptionapi.DescriptionIface), \
"expected DescriptionIface-based instance, but got %s" % type(result)
return result
g_descriptionImportCount = 0
def _loadDescriptionFile(descriptionPyPath):
"""Loads a description file and returns it as a module.
descriptionPyPath: path of description.py file to load
"""
global g_descriptionImportCount
if not os.path.isfile(descriptionPyPath):
raise RuntimeError(("Experiment description file %s does not exist or " + \
"is not a file") % (descriptionPyPath,))
mod = imp.load_source("pf_description%d" % g_descriptionImportCount,
descriptionPyPath)
g_descriptionImportCount += 1
if not hasattr(mod, "descriptionInterface"):
raise RuntimeError("Experiment description file %s does not define %s" % \
(descriptionPyPath, "descriptionInterface"))
if not isinstance(mod.descriptionInterface, expdescriptionapi.DescriptionIface):
raise RuntimeError(("Experiment description file %s defines %s but it " + \
"is not DescriptionIface-based") % \
                       (descriptionPyPath, "descriptionInterface"))
return mod
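# Hedged usage sketch: loading an experiment with the helpers above. The
# directory path is hypothetical; any directory containing a description.py
# that defines `descriptionInterface` would work.
if __name__ == "__main__":
  modelConfig, modelControl = loadExperiment("/path/to/experiment")
  print(type(modelConfig), type(modelControl))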
| agpl-3.0 |
yvaucher/vertical-ngo | logistic_requisition/wizard/assign_line.py | 3 | 1508 | # -*- coding: utf-8 -*-
#
#
# Copyright 2013-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import models, fields, api
class LogisticsRequisitionLineAssign(models.TransientModel):
_name = 'logistic.requisition.line.assign'
_description = 'Assign a logistic requisition line'
logistic_user_id = fields.Many2one(
'res.users',
'Logistics Specialist',
required=True,
help="Logistics Specialist in charge of the "
"Logistics Requisition Line")
@api.multi
def assign(self):
line_ids = self.env.context.get('active_ids')
if not line_ids:
return
lines = self.env['logistic.requisition.line'].browse(line_ids)
lines.write({'logistic_user_id': self.logistic_user_id.id})
return {'type': 'ir.actions.act_window_close'}
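# Hedged usage sketch: driving the wizard from server-side code in Odoo 8's
# new API. `env`, `line_ids` and `user` are assumed to come from the caller;
# `active_ids` mirrors what the web client puts in the context.
def assign_lines_example(env, line_ids, user):
    wizard = env['logistic.requisition.line.assign'].with_context(
        active_ids=line_ids).create({'logistic_user_id': user.id})
    return wizard.assign()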
| agpl-3.0 |
omnirom/android_external_chromium-org | tools/code_coverage/croc_scan.py | 178 | 4383 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Crocodile source scanners."""
import re
class Scanner(object):
"""Generic source scanner."""
def __init__(self):
"""Constructor."""
self.re_token = re.compile('#')
self.comment_to_eol = ['#']
self.comment_start = None
self.comment_end = None
def ScanLines(self, lines):
"""Scans the lines for executable statements.
Args:
lines: Iterator returning source lines.
Returns:
An array of line numbers which are executable.
"""
exe_lines = []
lineno = 0
in_string = None
in_comment = None
comment_index = None
for line in lines:
lineno += 1
in_string_at_start = in_string
for t in self.re_token.finditer(line):
tokenstr = t.groups()[0]
if in_comment:
# Inside a multi-line comment, so look for end token
if tokenstr == in_comment:
in_comment = None
# Replace comment with spaces
line = (line[:comment_index]
+ ' ' * (t.end(0) - comment_index)
+ line[t.end(0):])
elif in_string:
# Inside a string, so look for end token
if tokenstr == in_string:
in_string = None
elif tokenstr in self.comment_to_eol:
# Single-line comment, so truncate line at start of token
line = line[:t.start(0)]
break
elif tokenstr == self.comment_start:
# Multi-line comment start - end token is comment_end
in_comment = self.comment_end
comment_index = t.start(0)
else:
# Starting a string - end token is same as start
in_string = tokenstr
# If still in comment at end of line, remove comment
if in_comment:
line = line[:comment_index]
        # Next line, delete from the beginning
comment_index = 0
# If line-sans-comments is not empty, claim it may be executable
if line.strip() or in_string_at_start:
exe_lines.append(lineno)
# Return executable lines
return exe_lines
def Scan(self, filename):
"""Reads the file and scans its lines.
Args:
filename: Path to file to scan.
Returns:
An array of line numbers which are executable.
"""
# TODO: All manner of error checking
f = None
try:
f = open(filename, 'rt')
return self.ScanLines(f)
finally:
if f:
f.close()
class PythonScanner(Scanner):
"""Python source scanner."""
def __init__(self):
"""Constructor."""
Scanner.__init__(self)
# TODO: This breaks for strings ending in more than 2 backslashes. Need
# a pattern which counts only an odd number of backslashes, so the last
# one thus escapes the quote.
self.re_token = re.compile(r'(#|\'\'\'|"""|(?<!(?<!\\)\\)["\'])')
self.comment_to_eol = ['#']
self.comment_start = None
self.comment_end = None
class CppScanner(Scanner):
"""C / C++ / ObjC / ObjC++ source scanner."""
def __init__(self):
"""Constructor."""
Scanner.__init__(self)
# TODO: This breaks for strings ending in more than 2 backslashes. Need
# a pattern which counts only an odd number of backslashes, so the last
# one thus escapes the quote.
self.re_token = re.compile(r'(^\s*#|//|/\*|\*/|(?<!(?<!\\)\\)["\'])')
# TODO: Treat '\' at EOL as a token, and handle it as continuing the
# previous line. That is, if in a comment-to-eol, this line is a comment
# too.
# Note that we treat # at beginning of line as a comment, so that we ignore
# preprocessor definitions
self.comment_to_eol = ['//', '#']
self.comment_start = '/*'
self.comment_end = '*/'
def ScanFile(filename, language):
"""Scans a file for executable lines.
Args:
filename: Path to file to scan.
language: Language for file ('C', 'C++', 'python', 'ObjC', 'ObjC++')
Returns:
A list of executable lines, or an empty list if the file was not a handled
language.
"""
if language == 'python':
return PythonScanner().Scan(filename)
elif language in ['C', 'C++', 'ObjC', 'ObjC++']:
return CppScanner().Scan(filename)
# Something we don't handle
return []
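# Hedged self-test sketch: scanning this very file for executable lines.
# The result is a list of 1-based line numbers deemed executable.
if __name__ == '__main__':
  print(ScanFile(__file__, 'python'))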
| bsd-3-clause |
arborh/tensorflow | tensorflow/python/training/session_manager.py | 4 | 22050 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training helper that checkpoints models and creates session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.util.tf_export import tf_export
def _maybe_name(obj):
"""Returns object name if it has one, or a message otherwise.
  This is useful for names that appear in error messages.
Args:
obj: Object to get the name of.
Returns:
name, "None", or a "no name" message.
"""
if obj is None:
return "None"
elif hasattr(obj, "name"):
return obj.name
else:
return "<no name for %s>" % type(obj)
@tf_export(v1=["train.SessionManager"])
class SessionManager(object):
"""Training helper that restores from checkpoint and creates session.
This class is a small wrapper that takes care of session creation and
  checkpoint recovery. It also provides functions to facilitate
coordination among multiple training threads or processes.
* Checkpointing trained variables as the training progresses.
* Initializing variables on startup, restoring them from the most recent
checkpoint after a crash, or wait for checkpoints to become available.
### Usage:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
sm = SessionManager()
sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`prepare_session()` initializes or restores a model. It requires `init_op`
  and `saver` as arguments.
A second process could wait for the model to be ready by doing the following:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will wait for the model to become ready.
sm = SessionManager()
sess = sm.wait_for_session(master)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`wait_for_session()` waits for a model to be initialized by other processes.
"""
def __init__(self,
local_init_op=None,
ready_op=None,
ready_for_local_init_op=None,
graph=None,
recovery_wait_secs=30,
local_init_run_options=None,
local_init_feed_dict=None):
"""Creates a SessionManager.
The `local_init_op` is an `Operation` that is run always after a new session
was created. If `None`, this step is skipped.
The `ready_op` is an `Operation` used to check if the model is ready. The
model is considered ready if that operation returns an empty 1D string
tensor. If the operation returns a non empty 1D string tensor, the elements
are concatenated and used to indicate to the user why the model is not
ready.
The `ready_for_local_init_op` is an `Operation` used to check if the model
is ready to run local_init_op. The model is considered ready if that
operation returns an empty 1D string tensor. If the operation returns a non
empty 1D string tensor, the elements are concatenated and used to indicate
to the user why the model is not ready.
If `ready_op` is `None`, the model is not checked for readiness.
`recovery_wait_secs` is the number of seconds between checks that
the model is ready. It is used by processes to wait for a model to
be initialized or restored. Defaults to 30 seconds.
Args:
local_init_op: An `Operation` run immediately after session creation.
Usually used to initialize tables and local variables.
ready_op: An `Operation` to check if the model is initialized.
ready_for_local_init_op: An `Operation` to check if the model is ready
to run local_init_op.
graph: The `Graph` that the model will use.
recovery_wait_secs: Seconds between checks for the model to be ready.
local_init_run_options: RunOptions to be passed to session.run when
executing the local_init_op.
local_init_feed_dict: Optional session feed dictionary to use when running
the local_init_op.
Raises:
ValueError: If ready_for_local_init_op is not None but local_init_op is
None
"""
# Sets default values of arguments.
if graph is None:
graph = ops.get_default_graph()
self._local_init_op = local_init_op
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._graph = graph
self._recovery_wait_secs = recovery_wait_secs
self._target = None
self._local_init_run_options = local_init_run_options
self._local_init_feed_dict = local_init_feed_dict
if ready_for_local_init_op is not None and local_init_op is None:
raise ValueError("If you pass a ready_for_local_init_op "
"you must also pass a local_init_op "
", ready_for_local_init_op [%s]" %
ready_for_local_init_op)
def _restore_checkpoint(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, and tries to restore a checkpoint.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, is_restored) where 'is_restored' is `True` if
the session could be restored, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
self._target = master
    # This is required so that we initialize the TPU device before
# restoring from checkpoint since we'll be placing variables on the device
# and TPUInitialize wipes out the memory of the device.
strategy = distribution_strategy_context.get_strategy()
if strategy and hasattr(strategy.extended,
"_experimental_initialize_system"):
strategy.extended._experimental_initialize_system() # pylint: disable=protected-access
sess = session.Session(self._target, graph=self._graph, config=config)
if checkpoint_dir and checkpoint_filename_with_path:
raise ValueError("Can not provide both checkpoint_dir and "
"checkpoint_filename_with_path.")
# If either saver or checkpoint_* is not specified, cannot restore. Just
# return.
if not saver or not (checkpoint_dir or checkpoint_filename_with_path):
return sess, False
if checkpoint_filename_with_path:
saver.restore(sess, checkpoint_filename_with_path)
return sess, True
# Waits up until max_wait_secs for checkpoint to become available.
wait_time = 0
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
while not ckpt or not ckpt.model_checkpoint_path:
if wait_for_checkpoint and wait_time < max_wait_secs:
logging.info("Waiting for checkpoint to be available.")
time.sleep(self._recovery_wait_secs)
wait_time += self._recovery_wait_secs
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
else:
return sess, False
# Loads the checkpoint.
saver.restore(sess, ckpt.model_checkpoint_path)
saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
return sess, True
def prepare_session(self,
master,
init_op=None,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None,
init_feed_dict=None,
init_fn=None):
"""Creates a `Session`. Makes sure the model is ready to be used.
Creates a `Session` on 'master'. If a `saver` object is passed in, and
`checkpoint_dir` points to a directory containing valid checkpoint
files, then it will try to recover the model from checkpoint. If
no checkpoint files are available, and `wait_for_checkpoint` is
`True`, then the process would check every `recovery_wait_secs`,
up to `max_wait_secs`, for recovery to succeed.
If the model cannot be recovered successfully then it is initialized by
running the `init_op` and calling `init_fn` if they are provided.
The `local_init_op` is also run after init_op and init_fn, regardless of
whether the model was recovered successfully, but only if
`ready_for_local_init_op` passes.
If the model is recovered from a checkpoint it is assumed that all
global variables have been initialized, in particular neither `init_op`
nor `init_fn` will be executed.
It is an error if the model cannot be recovered and no `init_op`
or `init_fn` or `local_init_op` are passed.
Args:
master: `String` representation of the TensorFlow master to use.
init_op: Optional `Operation` used to initialize the model.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
init_feed_dict: Optional dictionary that maps `Tensor` objects to feed
values. This feed dictionary is passed to the session `run()` call when
running the init op.
init_fn: Optional callable used to initialize the model. Called after the
optional `init_op` is called. The callable must accept one argument,
the session being initialized.
Returns:
A `Session` object that can be used to drive the model.
Raises:
RuntimeError: If the model cannot be initialized or recovered.
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
if not is_loaded_from_checkpoint:
if init_op is None and not init_fn and self._local_init_op is None:
raise RuntimeError("Model is not initialized and no init_op or "
"init_fn or local_init_op was given")
if init_op is not None:
sess.run(init_op, feed_dict=init_feed_dict)
if init_fn:
init_fn(sess)
local_init_success, msg = self._try_run_local_init_op(sess)
if not local_init_success:
raise RuntimeError(
"Init operations did not make model ready for local_init. "
"Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op),
init_fn,
msg))
is_ready, msg = self._model_ready(sess)
if not is_ready:
raise RuntimeError(
"Init operations did not make model ready. "
"Init op: %s, init fn: %s, local_init_op: %s, error: %s" %
(_maybe_name(init_op), init_fn, self._local_init_op, msg))
return sess
def recover_session(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, recovering if possible.
Creates a new session on 'master'. If the session is not initialized
and can be recovered from a checkpoint, recover it.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, initialized) where 'initialized' is `True` if
the session could be recovered and initialized, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
# Always try to run local_init_op
local_init_success, msg = self._try_run_local_init_op(sess)
if not is_loaded_from_checkpoint:
# Do not need to run checks for readiness
return sess, False
restoring_file = checkpoint_dir or checkpoint_filename_with_path
if not local_init_success:
logging.info(
"Restoring model from %s did not make model ready for local init:"
" %s", restoring_file, msg)
return sess, False
is_ready, msg = self._model_ready(sess)
if not is_ready:
logging.info("Restoring model from %s did not make model ready: %s",
restoring_file, msg)
return sess, False
logging.info("Restored model from %s", restoring_file)
return sess, is_loaded_from_checkpoint
def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
"""Creates a new `Session` and waits for model to be ready.
Creates a new `Session` on 'master'. Waits for the model to be
initialized or recovered from a checkpoint. It's expected that
    another thread or process will make the model ready; this method is
    intended for threads/processes that participate in a
distributed training configuration where a different thread/process
is responsible for initializing or recovering the model being trained.
NB: The amount of time this method waits for the session is bounded
by max_wait_secs. By default, this function will wait indefinitely.
Args:
master: `String` representation of the TensorFlow master to use.
config: Optional ConfigProto proto used to configure the session.
max_wait_secs: Maximum time to wait for the session to become available.
Returns:
A `Session`. May be None if the operation exceeds the timeout
specified by config.operation_timeout_in_ms.
Raises:
tf.DeadlineExceededError: if the session is not available after
max_wait_secs.
"""
self._target = master
if max_wait_secs is None:
max_wait_secs = float("Inf")
timer = _CountDownTimer(max_wait_secs)
while True:
sess = session.Session(self._target, graph=self._graph, config=config)
not_ready_msg = None
not_ready_local_msg = None
local_init_success, not_ready_local_msg = self._try_run_local_init_op(
sess)
if local_init_success:
# Successful if local_init_op is None, or ready_for_local_init_op passes
is_ready, not_ready_msg = self._model_ready(sess)
if is_ready:
return sess
self._safe_close(sess)
# Do we have enough time left to try again?
      remaining_secs_after_wait = (
          timer.secs_remaining() - self._recovery_wait_secs)
      if remaining_secs_after_wait < 0:
raise errors.DeadlineExceededError(
None, None,
"Session was not ready after waiting %d secs." % (max_wait_secs,))
logging.info("Waiting for model to be ready. "
"Ready_for_local_init_op: %s, ready: %s",
not_ready_local_msg, not_ready_msg)
time.sleep(self._recovery_wait_secs)
def _safe_close(self, sess):
"""Closes a session without raising an exception.
Just like sess.close() but ignores exceptions.
Args:
sess: A `Session`.
"""
# pylint: disable=broad-except
try:
sess.close()
except Exception:
# Intentionally not logging to avoid user complaints that
# they get cryptic errors. We really do not care that Close
# fails.
pass
# pylint: enable=broad-except
def _model_ready(self, sess):
"""Checks if the model is ready or not.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
return _ready(self._ready_op, sess, "Model not ready")
def _model_ready_for_local_init(self, sess):
"""Checks if the model is ready to run local_init_op.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready to run
local_init_op and False otherwise, and msg is `None` if the model is
ready to run local_init_op, a `String` with the reason why it is not ready
otherwise.
"""
return _ready(self._ready_for_local_init_op, sess,
"Model not ready for local init")
def _try_run_local_init_op(self, sess):
"""Tries to run _local_init_op, if not None, and is ready for local init.
Args:
sess: A `Session`.
Returns:
A tuple (is_successful, msg), where is_successful is True if
_local_init_op is None, or we ran _local_init_op, and False otherwise;
and msg is a `String` with the reason why the model was not ready to run
local init.
"""
if self._local_init_op is not None:
is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
if is_ready_for_local_init:
logging.info("Running local_init_op.")
sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
options=self._local_init_run_options)
logging.info("Done running local_init_op.")
return True, None
else:
return False, msg
return True, None
def _ready(op, sess, msg):
"""Checks if the model is ready or not, as determined by op.
Args:
op: An op, either _ready_op or _ready_for_local_init_op, which defines the
readiness of the model.
sess: A `Session`.
msg: A message to log to warning if not ready
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
if op is None:
return True, None
else:
try:
ready_value = sess.run(op)
# The model is considered ready if ready_op returns an empty 1-D tensor.
# Also compare to `None` and dtype being int32 for backward
# compatibility.
if (ready_value is None or ready_value.dtype == np.int32 or
ready_value.size == 0):
return True, None
else:
# TODO(sherrym): If a custom ready_op returns other types of tensor,
# or strings other than variable names, this message could be
# confusing.
non_initialized_varnames = ", ".join(
[i.decode("utf-8") for i in ready_value])
return False, "Variables not initialized: " + non_initialized_varnames
except errors.FailedPreconditionError as e:
if "uninitialized" not in str(e):
logging.warning("%s : error [%s]", msg, str(e))
raise e
return False, str(e)
class _CountDownTimer(object):
def __init__(self, duration_secs):
self._start_time_secs = time.time()
self._duration_secs = duration_secs
def secs_remaining(self):
diff = self._duration_secs - (time.time() - self._start_time_secs)
return max(0, diff)
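# Hedged usage sketch, mirroring the docstrings above: recover from a
# checkpoint directory if possible, otherwise initialize. The saver, init_op
# and checkpoint path are assumed to be supplied by the caller.
def _example_recover(saver, init_op, checkpoint_dir="/tmp/ckpt"):
  sm = SessionManager()
  sess, initialized = sm.recover_session(
      "", saver=saver, checkpoint_dir=checkpoint_dir)
  if not initialized:
    sess.run(init_op)
  return sess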
| apache-2.0 |
andreparames/odoo | addons/hr_timesheet_invoice/report/report_analytic.py | 299 | 5164 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp import tools
from openerp.addons.decimal_precision import decimal_precision as dp
class report_analytic_account_close(osv.osv):
_name = "report.analytic.account.close"
_description = "Analytic account to close"
_auto = False
_columns = {
'name': fields.many2one('account.analytic.account', 'Analytic account', readonly=True),
'state': fields.char('Status', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'quantity': fields.float('Quantity', readonly=True),
'quantity_max': fields.float('Max. Quantity', readonly=True),
'balance': fields.float('Balance', readonly=True),
'date_deadline': fields.date('Deadline', readonly=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_analytic_account_close')
cr.execute("""
create or replace view report_analytic_account_close as (
select
a.id as id,
a.id as name,
a.state as state,
sum(l.unit_amount) as quantity,
sum(l.amount) as balance,
a.partner_id as partner_id,
a.quantity_max as quantity_max,
a.date as date_deadline
from
account_analytic_line l
right join
account_analytic_account a on (l.account_id=a.id)
group by
a.id,a.state, a.quantity_max,a.date,a.partner_id
having
(a.quantity_max>0 and (sum(l.unit_amount)>=a.quantity_max)) or
a.date <= current_date
)""")
class report_account_analytic_line_to_invoice(osv.osv):
_name = "report.account.analytic.line.to.invoice"
_description = "Analytic lines to invoice report"
_auto = False
_columns = {
'name': fields.char('Year', required=False, readonly=True),
'product_id':fields.many2one('product.product', 'Product', readonly=True),
'account_id':fields.many2one('account.analytic.account', 'Analytic account', readonly=True),
'product_uom_id':fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'unit_amount': fields.float('Units', readonly=True),
'sale_price': fields.float('Sale price', readonly=True, digits_compute=dp.get_precision('Product Price')),
'amount': fields.float('Amount', readonly=True, digits_compute=dp.get_precision('Account')),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True),
}
_order = 'name desc, product_id asc, account_id asc'
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_account_analytic_line_to_invoice')
cr.execute("""
CREATE OR REPLACE VIEW report_account_analytic_line_to_invoice AS (
SELECT
DISTINCT(to_char(l.date,'MM')) as month,
to_char(l.date, 'YYYY') as name,
MIN(l.id) AS id,
l.product_id,
l.account_id,
SUM(l.amount) AS amount,
SUM(l.unit_amount*t.list_price) AS sale_price,
SUM(l.unit_amount) AS unit_amount,
l.product_uom_id
FROM
account_analytic_line l
left join
product_product p on (l.product_id=p.id)
left join
product_template t on (p.product_tmpl_id=t.id)
WHERE
(invoice_id IS NULL) and (to_invoice IS NOT NULL)
GROUP BY
to_char(l.date, 'YYYY'), to_char(l.date,'MM'), product_id, product_uom_id, account_id
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
xtiankisutsa/MARA_Framework | tools/androwarn/androwarn/core/core.py | 1 | 16646 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of Androwarn.
#
# Copyright (C) 2012, Thomas Debize <tdebize at mail.com>
# All rights reserved.
#
# Androwarn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androwarn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androwarn. If not, see <http://www.gnu.org/licenses/>.
# Global imports
import re, logging
# Androguard imports
from androguard.core.analysis.analysis import *
# Androwarn modules import
from androwarn.constants.api_constants import *
# Constants
ERROR_VALUE_NOT_FOUND = 'N/A'
ERROR_CONSTANT_NAME_NOT_FOUND = 'N/A'
CONST_STRING = 'const-string'
CONST = 'const'
MOVE = 'move'
MOVE_RESULT = 'move-result'
APUT = 'aput'
INVOKE = 'invoke'
INVOKE_NO_REGISTER = 'invoke-no-register'
INVOKE_2_REGISTERS = 'invoke-2-registers'
NEW_INSTANCE = 'new-instance'
# Logguer
log = logging.getLogger('log')
# Instruction matcher
def match_current_instruction(current_instruction, registers_found) :
"""
@param current_instruction : the current instruction to be analyzed
@param registers_found : a dictionary of registers recovered so far
@rtype : the instruction name from the constants above, the local register number and its value, an updated version of the registers_found
"""
p_const = re.compile('^const(?:\/4|\/16|\/high16|-wide(?:\/16|\/32)|-wide\/high16|)? v([0-9]+), \#\+?(-?[0-9]+(?:\.[0-9]+)?)$')
p_const_string = re.compile("^const-string(?:||-jumbo) v([0-9]+), '(.*)'$")
p_move = re.compile('^move(?:|\/from16|-wide(?:\/from16|\/16)|-object(?:|\/from16|\/16))? v([0-9]+), (v[0-9]+)$')
p_move_result = re.compile('^move(?:-result(?:|-wide|-object)|-exception)? v([0-9]+)$')
p_aput = re.compile('^aput(?:-wide|-object|-boolean|-byte|-char|-short|) v([0-9]+), v([0-9]+), v([0-9]+)$')
p_invoke = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick) v([0-9]+), (L(?:.*);->.*)$')
p_invoke_2_registers = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick) v([0-9]+), v([0-9]+), (L(?:.*);->.*)$')
p_invoke_no_register = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick) (L(?:.*);->.*)$')
p_new_instance = re.compile('^new-instance v([0-9]+), (L(?:.*);)$')
# String concat
current_instruction = "%s %s" % (current_instruction.get_name(), current_instruction.get_output())
# Returned values init
instruction_name = ''
local_register_number = -1
local_register_value = -1
if p_const_string.match(current_instruction) :
#print p_const_string.match(current_instruction).groups()
instruction_name = CONST_STRING
register_number = p_const_string.match(current_instruction).groups()[0]
register_value = p_const_string.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
else :
old_string = registers_found[register_number]
new_string = "%s %s" % (str(register_value), str(old_string))
registers_found[register_number] = new_string
local_register_number = register_number
local_register_value = register_value
if p_const.match(current_instruction) :
#print p_const.match(current_instruction).groups()
instruction_name = CONST
register_number = p_const.match(current_instruction).groups()[0]
register_value = p_const.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_move.match(current_instruction) :
#print p_move.match(current_instruction).groups()
instruction_name = MOVE
register_number = p_move.match(current_instruction).groups()[0]
register_value = p_move.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_move_result.match(current_instruction) :
#print p_move_result.match(current_instruction).groups()
instruction_name = MOVE_RESULT
register_number = p_move_result.match(current_instruction).groups()[0]
register_value = ''
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
#print "number returned %s" % local_register_number
#print "value returned %s" % local_register_value
if p_invoke.match(current_instruction) :
#print p_invoke.match(current_instruction).groups()
instruction_name = INVOKE
register_number = p_invoke.match(current_instruction).groups()[0]
register_value = p_invoke.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_invoke_no_register.match(current_instruction) :
#print p_invoke.match(current_instruction).groups()
instruction_name = INVOKE_NO_REGISTER
register_number = ''
register_value = p_invoke_no_register.match(current_instruction).groups()[0]
local_register_number = register_number
local_register_value = register_value
if p_invoke_2_registers.match(current_instruction) :
#print p_invoke.match(current_instruction).groups()
instruction_name = INVOKE_NO_REGISTER
register_number = p_invoke_2_registers.match(current_instruction).groups()[0]
register_value = p_invoke_2_registers.match(current_instruction).groups()[1]
local_register_number = register_number
local_register_value = register_value
if p_new_instance.match(current_instruction) :
#print p_new_instance.match(current_instruction).groups()
instruction_name = NEW_INSTANCE
register_number = p_new_instance.match(current_instruction).groups()[0]
register_value = p_new_instance.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_aput.match(current_instruction) :
#print p_aput.match(current_instruction).groups()
instruction_name = APUT
register_object_reference = p_aput.match(current_instruction).groups()[0]
register_array_reference = p_aput.match(current_instruction).groups()[1]
register_element_index = p_aput.match(current_instruction).groups()[2]
local_register_number = register_object_reference
local_register_value = register_array_reference
return instruction_name, local_register_number, local_register_value, registers_found
# Backtrace registers #
def find_call_index_in_code_list(index_to_find, instruction_list):
"""
@param index_to_find : index of the method call
@param code_list : instruction list of the parent method called
@rtype : the index of the method call in the instruction listing
"""
idx = 0
for i in instruction_list :
if index_to_find <= idx :
#print "[+] code offset found at the index " + str(instruction_list.index(i))
return instruction_list.index(i)
else :
idx += i.get_length()
# in case of failure, return an inconsistent value
return ERROR_INDEX_NOT_FOUND
def backtrace_registers_before_call(x, method, index_to_find) :
"""
@param x : a VMAnalysis instance
@param method : a regexp for the method (the package)
@param index_to_find : index of the matching method
@rtype : an ordered list of dictionaries of each register content [{ 'register #': 'value' }, { 'register #': 'value' } ...]
"""
registers = {}
code = method.get_code()
#code.show()
bc = code.get_bc()
instruction_list = [ i for i in bc.get_instructions() ]
found_index = find_call_index_in_code_list(index_to_find, instruction_list)
if (found_index < 0) :
log.error("The call index in the code list can not be found")
return 0
else :
# Initialize the returned list of dictionaries
registers_final = []
# Initialize the harvesting dictionary
registers_found = {}
# List the register indexes related to the method call
relevant_registers = relevant_registers_for_the_method(instruction_list[found_index])
#print relevant_registers
i = int(found_index) - 1 # start index
while ((all_relevant_registers_filled(registers_found,relevant_registers) != True) and (i >= 0)) :
#current_instruction = instruction_list[i].show_buff(0)
#print current_instruction
current_instruction = instruction_list[i]
instruction_name, local_register_number, local_register_value, registers_found = match_current_instruction(current_instruction, registers_found)
if cmp(instruction_name, APUT) == 0:
try :
list_index_to_be_changed = relevant_registers.index(str(local_register_value))
#print "index_to_be_changed %s" % list_index_to_be_changed
del(relevant_registers[int(local_register_value)])
relevant_registers.insert(list_index_to_be_changed, local_register_number)
log.debug("New relevant_registers %s" % relevant_registers)
except :
log.debug("'%s' does not exist anymore in the relevant_registers list" % local_register_value)
if (cmp(instruction_name, MOVE_RESULT) == 0) and (local_register_number in relevant_registers):
try:
#past_instruction = instruction_list[i-1].show_buff(0)
#print past_instruction
past_instruction = instruction_list[i-1]
p_instruction_name, p_local_register_number, p_local_register_value, registers_found = match_current_instruction(past_instruction, registers_found)
if cmp(p_instruction_name, INVOKE_NO_REGISTER) == 0 :
registers_found[local_register_number] = p_local_register_value
else:
list_index_to_be_changed = relevant_registers.index(str(local_register_number))
del(relevant_registers[int(list_index_to_be_changed)])
relevant_registers.insert(list_index_to_be_changed, p_local_register_number)
log.debug("New relevant_registers %s" % relevant_registers)
except:
log.debug("'%s' does not exist anymore in the relevant_registers list" % local_register_value)
i = i - 1
#log.info('Registers found during the analysis %s' % registers_found)
final_answer = all_relevant_registers_filled(registers_found,relevant_registers)
log.debug("Are all relevant registers filled ? %s" % str(final_answer))
for i in relevant_registers :
try:
register_number = i
#print register_number
register_value = registers_found[i]
#print register_value
temp_dict = { register_number : register_value }
registers_final.append(temp_dict)
except KeyError:
registers_final = []
log.debug("KeyError exception : The value of the register # %s could not be found for the relevant registers %s" % (register_number, relevant_registers))
break
return registers_final
def extract_register_index_out_splitted_values(registers_raw_list_splitted) :
"""
@param registers_raw_list_splitted : a list of registers still containing the 'v' prefix [' v1 ', ' v2 ' ...]
@rtype : an ordered list of register indexes ['1', '2' ...]
"""
relevant_registers = []
# Trim the values
registers_raw_list_splitted[:] = (value.strip() for value in registers_raw_list_splitted if len(value) > 0)
for value in registers_raw_list_splitted :
# Remove that 'v'
p_register_index_out_of_split = re.compile('^v([0-9]+)$')
if p_register_index_out_of_split.match(value) :
#print p_register_index_out_of_split.match(value).groups()
register_index = p_register_index_out_of_split.match(value).groups()[0]
relevant_registers.append(register_index)
else :
relevant_registers.append('N/A')
return relevant_registers
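# Illustrative doctest-style sketch (not part of the original module): how the
# helper above maps raw 'v'-prefixed tokens to bare indexes; the input list is
# a made-up example.
# >>> extract_register_index_out_splitted_values([' v1 ', ' v2 ', ' p0 '])
# ['1', '2', 'N/A']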
def relevant_registers_for_the_method(instruction) :
"""
@param instruction : an invoke-kind instruction instance
@rtype : an ordered list of register indexes related to that method call
"""
relevant_registers = []
current_instruction_name = instruction.get_name()
current_instruction = instruction.show_buff(0)
p_invoke_name = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick)$')
p_invoke_range_name = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick)(?:\/range)$')
if p_invoke_name.match(current_instruction_name) :
p_invoke_registers = re.compile('(v[0-9]+),')
if p_invoke_registers.findall(current_instruction) :
registers_raw_list_splitted = p_invoke_registers.findall(current_instruction)
relevant_registers = extract_register_index_out_splitted_values(registers_raw_list_splitted)
if p_invoke_range_name.match(current_instruction_name) :
# We're facing an implicit range declaration, for instance "invoke v19..v20"
p_invoke_registers_range = re.compile('^v([0-9]+) ... v([0-9]+), L.*$')
if p_invoke_registers_range.match(current_instruction) :
register_start_number = p_invoke_registers_range.match(current_instruction).groups()[0]
register_end_number = p_invoke_registers_range.match(current_instruction).groups()[1]
if int(register_start_number) > int(register_end_number) :
log.error("invoke-kind/range incoherent: # of the start register is lower than the end one")
else :
# +1 because range does not provide the higher boundary value
relevant_registers = [ str(i) for i in xrange(int(register_start_number), int(register_end_number) + 1)]
return relevant_registers
def all_relevant_registers_filled(registers, relevant_registers) :
"""
@param registers : a dictionary of each register content { 'register #': 'value' }
@param relevant_registers : an ordered list of register indexes related to that method call
@rtype : True if all the relevant_registers are filled, False if not
"""
answer = True
for i in relevant_registers :
# assert a False answer for null registers from the "move-result" instruction
if not(i in registers) or (i in registers and len(registers[i]) < 1) :
answer = False
return answer
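# Illustrative doctest-style sketch (not part of the original module): the
# predicate above only succeeds once every relevant register carries a
# non-empty value; the dictionaries are made-up examples.
# >>> all_relevant_registers_filled({'1': 'foo', '2': ''}, ['1', '2'])
# False
# >>> all_relevant_registers_filled({'1': 'foo', '2': 'bar'}, ['1', '2'])
# True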
def get_register_value(index, registers) :
"""
@param index : integer value of the index
@param registers : an ordered list of register dictionaries [{ 'register #': 'value' } ...]
@rtype : a value casted in string
"""
# The list starts at index 0, so the last valid index is len(registers) - 1
if index < len(registers) :
register_dict = registers[index]
return register_dict.values()[0]
else :
return ERROR_VALUE_NOT_FOUND
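# Illustrative doctest-style sketch (not part of the original module): the
# registers argument is the list-of-dictionaries shape produced by
# backtrace_registers_before_call; the values here are made up.
# >>> get_register_value(0, [{'1': 'foo'}, {'2': 'bar'}])
# 'foo'
# Out-of-range indexes fall through to the ERROR_VALUE_NOT_FOUND sentinel.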
def get_constants_name_from_value(constant_dict, value) :
"""
@param constant_dict : constant dictionary to consider
@param value : value's constant name to retrieve
@rtype : a string
"""
try:
return constant_dict[value]
except KeyError:
log.error("The constant name corresponding to the value '%s' can not be found in the dictionary '%s'" % (value, constant_dict))
return ERROR_CONSTANT_NAME_NOT_FOUND
def data_flow_analysis(tab, result, x) :
"""
@param tab : structural analysis results tab
@param result : current iteration
@param x : a VMAnalysis instance
@rtype : an ordered list of dictionaries of each register content [{ 'register #': 'value' }, { 'register #': 'value' } ...]
"""
method = tab[result].get_method()
method_call_index_to_find = tab[result].get_idx()
registers = backtrace_registers_before_call(x, method, method_call_index_to_find)
#log.info("Class '%s' - Method '%s' - register state before call %s" % (tab[result].get_class_name(),tab[result].get_name(), registers))
class_str = "Class '%s'" % tab[result].get_class_name()
method_str = "Method '%s'" % tab[result].get_name()
regs_str = "Register state before call %s" % registers
formatted_str = "{0:50}- {1:35}- {2:30}".format(class_str,method_str, regs_str)
log.info(formatted_str)
return registers
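# Usage sketch (illustrative, not part of the original module): `x` and `tab`
# are assumed to come from an androguard structural-analysis pass, and the
# method pattern below is a made-up example.
# tab = x.tainted_packages.search_methods(".", "sendTextMessage", ".")
# for result in range(len(tab)):
#     registers = data_flow_analysis(tab, result, x)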
#########################
| lgpl-3.0 |
Mhynlo/SickRage | lib/github/Label.py | 72 | 4251 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2013 martinqt <m.ki2@laposte.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import urllib
import github.GithubObject
class Label(github.GithubObject.CompletableGithubObject):
"""
This class represents Labels. The reference can be found here http://developer.github.com/v3/issues/labels/
"""
@property
def color(self):
"""
:type: string
"""
self._completeIfNotSet(self._color)
return self._color.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def delete(self):
"""
:calls: `DELETE /repos/:owner/:repo/labels/:name <http://developer.github.com/v3/issues/labels>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck(
"DELETE",
self.url
)
def edit(self, name, color):
"""
:calls: `PATCH /repos/:owner/:repo/labels/:name <http://developer.github.com/v3/issues/labels>`_
:param name: string
:param color: string
:rtype: None
"""
assert isinstance(name, (str, unicode)), name
assert isinstance(color, (str, unicode)), color
post_parameters = {
"name": name,
"color": color,
}
headers, data = self._requester.requestJsonAndCheck(
"PATCH",
self.url,
input=post_parameters
)
self._useAttributes(data)
@property
def _identity(self):
return urllib.quote(self.name)
def _initAttributes(self):
self._color = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "color" in attributes: # pragma no branch
self._color = self._makeStringAttribute(attributes["color"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
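# Usage sketch (illustrative, not part of PyGithub): Label objects are usually
# obtained through a Repository; the token, owner and repo name are made up.
# from github import Github
# label = Github("token").get_repo("owner/name").get_label("bug")
# label.edit("bug", "fc2929") # PATCH: keep the name, change the color
# label.delete() # DELETE the label from the repository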
| gpl-3.0 |
florentchandelier/zipline | tests/data/bundles/test_csvdir.py | 1 | 5092 | from __future__ import division
import numpy as np
import pandas as pd
from zipline.utils.calendars import get_calendar
from zipline.data.bundles import ingest, load, bundles
from zipline.testing import test_resource_path
from zipline.testing.fixtures import ZiplineTestCase
from zipline.testing.predicates import assert_equal
from zipline.utils.functional import apply
class CSVDIRBundleTestCase(ZiplineTestCase):
symbols = 'AAPL', 'IBM', 'KO', 'MSFT'
asset_start = pd.Timestamp('2012-01-03', tz='utc')
asset_end = pd.Timestamp('2014-12-31', tz='utc')
bundle = bundles['csvdir']
calendar = get_calendar(bundle.calendar_name)
start_date = calendar.first_session
end_date = calendar.last_session
api_key = 'ayylmao'
columns = 'open', 'high', 'low', 'close', 'volume'
def _expected_data(self, asset_finder):
sids = {
symbol: asset_finder.lookup_symbol(
symbol,
self.asset_start,
).sid
for symbol in self.symbols
}
def per_symbol(symbol):
df = pd.read_csv(
test_resource_path('csvdir_samples', 'csvdir',
'daily', symbol + '.csv.gz'),
parse_dates=['date'],
index_col='date',
usecols=[
'open',
'high',
'low',
'close',
'volume',
'date',
'dividend',
'split',
],
na_values=['NA'],
)
df['sid'] = sids[symbol]
return df
all_ = pd.concat(map(per_symbol, self.symbols)).set_index(
'sid',
append=True,
).unstack()
# fancy list comprehension with statements
@list
@apply
def pricing():
for column in self.columns:
vs = all_[column].values
if column == 'volume':
vs = np.nan_to_num(vs)
yield vs
adjustments = [[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5701, 6157]]
return pricing, adjustments
def test_bundle(self):
environ = {
'CSVDIR': test_resource_path('csvdir_samples', 'csvdir')
}
ingest('csvdir', environ=environ)
bundle = load('csvdir', environ=environ)
sids = 0, 1, 2, 3
assert_equal(set(bundle.asset_finder.sids), set(sids))
for equity in bundle.asset_finder.retrieve_all(sids):
assert_equal(equity.start_date, self.asset_start, msg=equity)
assert_equal(equity.end_date, self.asset_end, msg=equity)
sessions = self.calendar.all_sessions
actual = bundle.equity_daily_bar_reader.load_raw_arrays(
self.columns,
sessions[sessions.get_loc(self.asset_start, 'bfill')],
sessions[sessions.get_loc(self.asset_end, 'ffill')],
sids,
)
expected_pricing, expected_adjustments = self._expected_data(
bundle.asset_finder,
)
assert_equal(actual, expected_pricing, array_decimal=2)
adjustments_for_cols = bundle.adjustment_reader.load_adjustments(
self.columns,
sessions,
pd.Index(sids),
)
assert_equal([sorted(adj.keys()) for adj in adjustments_for_cols],
expected_adjustments)
| apache-2.0 |
rafaelolg/django-guardian | guardian/migrations/0004_auto__del_field_groupobjectpermission_object_id__del_unique_groupobjec.py | 31 | 8186 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'GroupObjectPermission.object_pk'
db.alter_column('guardian_groupobjectpermission', 'object_pk', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'UserObjectPermission.object_pk'
db.alter_column('guardian_userobjectpermission', 'object_pk', self.gf('django.db.models.fields.CharField')(max_length=255))
# Removing unique constraint on 'UserObjectPermission', fields ['object_id', 'user', 'content_type', 'permission']
db.delete_unique('guardian_userobjectpermission', ['object_id', 'user_id', 'content_type_id', 'permission_id'])
# Removing unique constraint on 'GroupObjectPermission', fields ['group', 'object_id', 'content_type', 'permission']
db.delete_unique('guardian_groupobjectpermission', ['group_id', 'object_id', 'content_type_id', 'permission_id'])
# Deleting field 'GroupObjectPermission.object_id'
db.delete_column('guardian_groupobjectpermission', 'object_id')
# Adding unique constraint on 'GroupObjectPermission', fields ['object_pk', 'group', 'content_type', 'permission']
db.create_unique('guardian_groupobjectpermission', ['object_pk', 'group_id', 'content_type_id', 'permission_id'])
# Deleting field 'UserObjectPermission.object_id'
db.delete_column('guardian_userobjectpermission', 'object_id')
# Adding unique constraint on 'UserObjectPermission', fields ['object_pk', 'user', 'content_type', 'permission']
db.create_unique('guardian_userobjectpermission', ['object_pk', 'user_id', 'content_type_id', 'permission_id'])
def backwards(self, orm):
# Changing field 'GroupObjectPermission.object_pk'
db.alter_column('guardian_groupobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')())
# Changing field 'UserObjectPermission.object_pk'
db.alter_column('guardian_userobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')())
# Removing unique constraint on 'UserObjectPermission', fields ['object_pk', 'user', 'content_type', 'permission']
db.delete_unique('guardian_userobjectpermission', ['object_pk', 'user_id', 'content_type_id', 'permission_id'])
# Removing unique constraint on 'GroupObjectPermission', fields ['object_pk', 'group', 'content_type', 'permission']
db.delete_unique('guardian_groupobjectpermission', ['object_pk', 'group_id', 'content_type_id', 'permission_id'])
# We cannot add back in field 'GroupObjectPermission.object_id'
raise RuntimeError(
"Cannot reverse this migration. 'GroupObjectPermission.object_id' and its values cannot be restored.")
# Adding unique constraint on 'GroupObjectPermission', fields ['group', 'object_id', 'content_type', 'permission']
db.create_unique('guardian_groupobjectpermission', ['group_id', 'object_id', 'content_type_id', 'permission_id'])
# We cannot add back in field 'UserObjectPermission.object_id'
raise RuntimeError(
"Cannot reverse this migration. 'UserObjectPermission.object_id' and its values cannot be restored.")
# Adding unique constraint on 'UserObjectPermission', fields ['object_id', 'user', 'content_type', 'permission']
db.create_unique('guardian_userobjectpermission', ['object_id', 'user_id', 'content_type_id', 'permission_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'guardian.groupobjectpermission': {
'Meta': {'unique_together': "(['group', 'permission', 'content_type', 'object_pk'],)", 'object_name': 'GroupObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"})
},
'guardian.userobjectpermission': {
'Meta': {'unique_together': "(['user', 'permission', 'content_type', 'object_pk'],)", 'object_name': 'UserObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['guardian']
| bsd-2-clause |
lunafeng/django | django/db/backends/postgresql/operations.py | 207 | 10109 | from __future__ import unicode_literals
from psycopg2.extras import Inet
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
class DatabaseOperations(BaseDatabaseOperations):
def unification_cast_sql(self, output_field):
internal_type = output_field.get_internal_type()
if internal_type in ("GenericIPAddressField", "IPAddressField", "TimeField", "UUIDField"):
# PostgreSQL will resolve a union as type 'text' if input types are
# 'unknown'.
# http://www.postgresql.org/docs/9.4/static/typeconv-union-case.html
# These fields cannot be implicitly cast back in the default
# PostgreSQL configuration so we need to explicitly cast them.
# We must also remove components of the type within brackets:
# varchar(255) -> varchar.
return 'CAST(%%s AS %s)' % output_field.db_type(self.connection).split('(')[0]
return '%s'
def date_extract_sql(self, lookup_type, field_name):
# http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
if lookup_type == 'week_day':
# For consistency across backends, we return Sunday=1, Saturday=7.
return "EXTRACT('dow' FROM %s) + 1" % field_name
else:
return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
def date_trunc_sql(self, lookup_type, field_name):
# http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def _convert_field_to_tz(self, field_name, tzname):
if settings.USE_TZ:
field_name = "%s AT TIME ZONE %%s" % field_name
params = [tzname]
else:
params = []
return field_name, params
def datetime_cast_date_sql(self, field_name, tzname):
field_name, params = self._convert_field_to_tz(field_name, tzname)
sql = '(%s)::date' % field_name
return sql, params
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name, params = self._convert_field_to_tz(field_name, tzname)
sql = self.date_extract_sql(lookup_type, field_name)
return sql, params
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name, params = self._convert_field_to_tz(field_name, tzname)
# http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
sql = "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
return sql, params
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def lookup_cast(self, lookup_type, internal_type=None):
lookup = '%s'
# Cast text lookups to text to allow things like filter(x__contains=4)
if lookup_type in ('iexact', 'contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
if internal_type in ('IPAddressField', 'GenericIPAddressField'):
lookup = "HOST(%s)"
else:
lookup = "%s::text"
# Use UPPER(x) for case-insensitive lookups; it's faster.
if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
lookup = 'UPPER(%s)' % lookup
return lookup
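# Illustrative sketch (not part of Django): an 'icontains' lookup on a plain
# text column therefore compiles to the fragment
# UPPER(%s::text)
# which the query compiler interpolates with the column reference.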
def last_insert_id(self, cursor, table_name, pk_name):
# Use pg_get_serial_sequence to get the underlying sequence name
# from the table name and column name (available since PostgreSQL 8)
cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % (
self.quote_name(table_name), pk_name))
return cursor.fetchone()[0]
def no_limit_value(self):
return None
def prepare_sql_script(self, sql):
return [sql]
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def set_time_zone_sql(self):
return "SET TIME ZONE %s"
def sql_flush(self, style, tables, sequences, allow_cascade=False):
if tables:
# Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows
# us to truncate tables referenced by a foreign key in any other
# table.
tables_sql = ', '.join(
style.SQL_FIELD(self.quote_name(table)) for table in tables)
if allow_cascade:
sql = ['%s %s %s;' % (
style.SQL_KEYWORD('TRUNCATE'),
tables_sql,
style.SQL_KEYWORD('CASCADE'),
)]
else:
sql = ['%s %s;' % (
style.SQL_KEYWORD('TRUNCATE'),
tables_sql,
)]
sql.extend(self.sequence_reset_by_name_sql(style, sequences))
return sql
else:
return []
def sequence_reset_by_name_sql(self, style, sequences):
# 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
# to reset sequence indices
sql = []
for sequence_info in sequences:
table_name = sequence_info['table']
column_name = sequence_info['column']
if not (column_name and len(column_name) > 0):
# This will be the case if it's an m2m using an autogenerated
# intermediate table (see BaseDatabaseIntrospection.sequence_list)
column_name = 'id'
sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" %
(style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(self.quote_name(table_name)),
style.SQL_FIELD(column_name))
)
return sql
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def sequence_reset_sql(self, style, model_list):
from django.db import models
output = []
qn = self.quote_name
for model in model_list:
# Use `coalesce` to set the sequence for each model to the max pk value if there are records,
# or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
# if there are records (as the max pk value is already in use), otherwise set it to false.
# Use pg_get_serial_sequence to get the underlying sequence name from the table name
# and column name (available since PostgreSQL 8)
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(qn(model._meta.db_table)),
style.SQL_FIELD(f.column),
style.SQL_FIELD(qn(f.column)),
style.SQL_FIELD(qn(f.column)),
style.SQL_KEYWORD('IS NOT'),
style.SQL_KEYWORD('FROM'),
style.SQL_TABLE(qn(model._meta.db_table)),
)
)
break # Only one AutoField is allowed per model, so don't bother continuing.
for f in model._meta.many_to_many:
if not f.remote_field.through:
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(qn(f.m2m_db_table())),
style.SQL_FIELD('id'),
style.SQL_FIELD(qn('id')),
style.SQL_FIELD(qn('id')),
style.SQL_KEYWORD('IS NOT'),
style.SQL_KEYWORD('FROM'),
style.SQL_TABLE(qn(f.m2m_db_table()))
)
)
return output
def prep_for_iexact_query(self, x):
return x
def max_name_length(self):
"""
Returns the maximum length of an identifier.
Note that the maximum length of an identifier is 63 by default, but can
be changed by recompiling PostgreSQL after editing the NAMEDATALEN
macro in src/include/pg_config_manual.h .
This implementation simply returns 63, but can easily be overridden by a
custom database backend that inherits most of its behavior from this one.
"""
return 63
def distinct_sql(self, fields):
if fields:
return 'DISTINCT ON (%s)' % ', '.join(fields)
else:
return 'DISTINCT'
def last_executed_query(self, cursor, sql, params):
# http://initd.org/psycopg/docs/cursor.html#cursor.query
# The query attribute is a Psycopg extension to the DB API 2.0.
if cursor.query is not None:
return cursor.query.decode('utf-8')
return None
def return_insert_id(self):
return "RETURNING %s", ()
def bulk_insert_sql(self, fields, num_values):
items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
return "VALUES " + ", ".join([items_sql] * num_values)
def adapt_datefield_value(self, value):
return value
def adapt_datetimefield_value(self, value):
return value
def adapt_timefield_value(self, value):
return value
def adapt_ipaddressfield_value(self, value):
if value:
return Inet(value)
return None
| bsd-3-clause |
fxia22/ASM_xf | PythonD/site_python/twisted/protocols/oscar.py | 2 | 43247 | # Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""An implementation of the OSCAR protocol, which AIM and ICQ use to communcate.
This module is unstable.
Maintainer: U{Paul Swartz<mailto:z3p@twistedmatrix.com>}
"""
from __future__ import nested_scopes
from twisted.internet import reactor, main, defer, protocol
from twisted.python import log
import struct
import md5
import string
import socket
import random
import time
import types
import re
def logPacketData(data):
lines = len(data)/16
if lines*16 != len(data): lines=lines+1
for i in range(lines):
d = tuple(data[16*i:16*i+16])
hex = map(lambda x: "%02X"%ord(x),d)
text = map(lambda x: (len(repr(x))>3 and '.') or x, d)
log.msg(' '.join(hex)+ ' '*3*(16-len(d)) +''.join(text))
log.msg('')
def SNAC(fam,sub,id,data,flags=[0,0]):
header="!HHBBL"
head=struct.pack(header,fam,sub,
flags[0],flags[1],
id)
return head+str(data)
def readSNAC(data):
header="!HHBBL"
head=list(struct.unpack(header,data[:10]))
return head+[data[10:]]
def TLV(type,value):
header="!HH"
head=struct.pack(header,type,len(value))
return head+str(value)
def readTLVs(data,count=None):
header="!HH"
dict={}
while data and len(dict)!=count:
head=struct.unpack(header,data[:4])
dict[head[0]]=data[4:4+head[1]]
data=data[4+head[1]:]
if not count:
return dict
return dict,data
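# Illustrative doctest-style sketch (not part of the original module): TLV and
# readTLVs round-trip a mapping of type -> value pairs.
# >>> readTLVs(TLV(0x01, 'user') + TLV(0x25, 'pass'))
# {1: 'user', 37: 'pass'}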
def encryptPasswordMD5(password,key):
m=md5.new()
m.update(key)
m.update(md5.new(password).digest())
m.update("AOL Instant Messenger (SM)")
return m.digest()
def encryptPasswordICQ(password):
key=[0xF3,0x26,0x81,0xC4,0x39,0x86,0xDB,0x92,0x71,0xA3,0xB9,0xE6,0x53,0x7A,0x95,0x7C]
bytes=map(ord,password)
r=""
for i in range(len(bytes)):
r=r+chr(bytes[i]^key[i%len(key)])
return r
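# Illustrative doctest-style sketch (not part of the original module): the ICQ
# scheme is a repeating-key XOR, so applying it twice restores the input.
# >>> encryptPasswordICQ(encryptPasswordICQ('secret'))
# 'secret'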
def dehtml(text):
text=string.replace(text,"<br>","\n")
text=string.replace(text,"<BR>","\n")
text=string.replace(text,"<Br>","\n") # XXX make this a regexp
text=string.replace(text,"<bR>","\n")
text=re.sub('<.*?>','',text)
text=string.replace(text,'>','>')
text=string.replace(text,'<','<')
text=string.replace(text,'&','&')
text=string.replace(text,' ',' ')
text=string.replace(text,'"','"')
return text
def html(text):
text=string.replace(text,'"','"')
text=string.replace(text,'&','&')
text=string.replace(text,'<','<')
text=string.replace(text,'>','>')
text=string.replace(text,"\n","<br>")
return '<html><body bgcolor="white"><font color="black">%s</font></body></html>'%text
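# Illustrative doctest-style sketch (not part of the original module): dehtml
# undoes the entity and <br> encoding that html applies.
# >>> dehtml(html('a < b\nc & d')) == 'a < b\nc & d'
# True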
class OSCARUser:
def __init__(self, name, warn, tlvs):
self.name = name
self.warning = warn
self.flags = []
self.caps = []
for k,v in tlvs.items():
if k == 1: # user flags
v=struct.unpack('!H',v)[0]
for o, f in [(1,'trial'),
(2,'unknown bit 2'),
(4,'aol'),
(8,'unknown bit 4'),
(16,'aim'),
(32,'away'),
(1024,'activebuddy')]:
if v&o: self.flags.append(f)
elif k == 2: # member since date
self.memberSince = struct.unpack('!L',v)[0]
elif k == 3: # on-since
self.onSince = struct.unpack('!L',v)[0]
elif k == 4: # idle time
self.idleTime = struct.unpack('!H',v)[0]
elif k == 5: # unknown
pass
elif k == 6: # icq online status
if v[2] == '\x00':
self.icqStatus = 'online'
elif v[2] == '\x01':
self.icqStatus = 'away'
else:
self.icqStatus = 'unknown'
elif k == 10: # icq ip address
self.icqIPaddy = socket.inet_ntoa(v)
elif k == 12: # icq random stuff
self.icqRandom = v
elif k == 13: # capabilities
caps=[]
while v:
c=v[:16]
if c==CAP_ICON: caps.append("icon")
elif c==CAP_IMAGE: caps.append("image")
elif c==CAP_VOICE: caps.append("voice")
elif c==CAP_CHAT: caps.append("chat")
elif c==CAP_GET_FILE: caps.append("getfile")
elif c==CAP_SEND_FILE: caps.append("sendfile")
elif c==CAP_SEND_LIST: caps.append("sendlist")
elif c==CAP_GAMES: caps.append("games")
else: caps.append(("unknown",c))
v=v[16:]
caps.sort()
self.caps=caps
elif k == 14: pass
elif k == 15: # session length (aim)
self.sessionLength = struct.unpack('!L',v)[0]
elif k == 16: # session length (aol)
self.sessionLength = struct.unpack('!L',v)[0]
elif k == 30: # no idea
pass
else:
log.msg("unknown tlv for user %s\nt: %s\nv: %s"%(self.name,k,repr(v)))
def __str__(self):
s = '<OSCARUser %s' % self.name
o = []
if self.warning!=0: o.append('warning level %s'%self.warning)
if hasattr(self, 'flags'): o.append('flags %s'%self.flags)
if hasattr(self, 'sessionLength'): o.append('online for %i minutes' % (self.sessionLength/60,))
if hasattr(self, 'idleTime'): o.append('idle for %i minutes' % self.idleTime)
if self.caps: o.append('caps %s'%self.caps)
if o:
s=s+', '+', '.join(o)
s=s+'>'
return s
class SSIGroup:
def __init__(self, name, tlvs = {}):
self.name = name
#self.tlvs = []
#self.userIDs = []
self.usersToID = {}
self.users = []
#if not tlvs.has_key(0xC8): return
#buddyIDs = tlvs[0xC8]
#while buddyIDs:
# bid = struct.unpack('!H',buddyIDs[:2])[0]
# buddyIDs = buddyIDs[2:]
# self.users.append(bid)
def findIDFor(self, user):
return self.usersToID[user]
def addUser(self, buddyID, user):
self.usersToID[user] = buddyID
self.users.append(user)
user.group = self
def oscarRep(self, groupID, buddyID):
tlvData = TLV(0xc8, reduce(lambda x,y:x+y, [struct.pack('!H',self.usersToID[x]) for x in self.users], ''))
return struct.pack('!H', len(self.name)) + self.name + \
struct.pack('!HH', groupID, buddyID) + '\000\001' + tlvData
class SSIBuddy:
def __init__(self, name, tlvs = {}):
self.name = name
self.tlvs = tlvs
for k,v in tlvs.items():
if k == 0x013c: # buddy comment
self.buddyComment = v
elif k == 0x013d: # buddy alerts
actionFlag = ord(v[0])
whenFlag = ord(v[1])
self.alertActions = []
self.alertWhen = []
if actionFlag&1:
self.alertActions.append('popup')
if actionFlag&2:
self.alertActions.append('sound')
if whenFlag&1:
self.alertWhen.append('online')
if whenFlag&2:
self.alertWhen.append('unidle')
if whenFlag&4:
self.alertWhen.append('unaway')
elif k == 0x013e:
self.alertSound = v
def oscarRep(self, groupID, buddyID):
tlvData = reduce(lambda x,y: x+y, map(lambda (k,v):TLV(k,v), self.tlvs.items()), '') or '\000\000'
return struct.pack('!H', len(self.name)) + self.name + \
struct.pack('!HH', groupID, buddyID) + '\000\000' + tlvData
class OscarConnection(protocol.Protocol):
def connectionMade(self):
self.state=""
self.seqnum=0
self.buf=''
self.stopKeepAliveID = None
self.setKeepAlive(4*60) # 4 minutes
def connectionLost(self, reason):
log.msg("Connection Lost! %s" % self)
self.stopKeepAlive()
# def connectionFailed(self):
# log.msg("Connection Failed! %s" % self)
# self.stopKeepAlive()
def sendFLAP(self,data,channel = 0x02):
header="!cBHH"
self.seqnum=(self.seqnum+1)%0xFFFF
seqnum=self.seqnum
head=struct.pack(header,'*', channel,
seqnum, len(data))
self.transport.write(head+str(data))
# if isinstance(self, ChatService):
# logPacketData(head+str(data))
def readFlap(self):
header="!cBHH"
if len(self.buf)<6: return
flap=struct.unpack(header,self.buf[:6])
if len(self.buf)<6+flap[3]: return
data,self.buf=self.buf[6:6+flap[3]],self.buf[6+flap[3]:]
return [flap[1],data]
def dataReceived(self,data):
# if isinstance(self, ChatService):
# logPacketData(data)
self.buf=self.buf+data
flap=self.readFlap()
while flap:
func=getattr(self,"oscar_%s"%self.state,None)
if not func:
log.msg("no func for state: %s" % self.state)
state=func(flap)
if state:
self.state=state
flap=self.readFlap()
def setKeepAlive(self,t):
self.keepAliveDelay=t
self.stopKeepAlive()
self.stopKeepAliveID = reactor.callLater(t, self.sendKeepAlive)
def sendKeepAlive(self):
self.sendFLAP("",0x05)
self.stopKeepAliveID = reactor.callLater(self.keepAliveDelay, self.sendKeepAlive)
def stopKeepAlive(self):
if self.stopKeepAliveID:
self.stopKeepAliveID.cancel()
self.stopKeepAliveID = None
def disconnect(self):
"""
send the disconnect flap, and sever the connection
"""
self.sendFLAP('', 0x04)
def f(reason): pass
self.connectionLost = f
self.transport.loseConnection()
class SNACBased(OscarConnection):
snacFamilies = {
# family : (version, toolID, toolVersion)
}
def __init__(self,cookie):
self.cookie=cookie
self.lastID=0
self.supportedFamilies = ()
self.requestCallbacks={} # request id:Deferred
def sendSNAC(self,fam,sub,data,flags=[0,0]):
"""
send a snac and wait for the response by returning a Deferred.
"""
reqid=self.lastID
self.lastID=reqid+1
d = defer.Deferred()
d.reqid = reqid
#d.addErrback(self._ebDeferredError,fam,sub,data) # XXX for testing
self.requestCallbacks[reqid] = d
self.sendFLAP(SNAC(fam,sub,reqid,data))
return d
def _ebDeferredError(self, error, fam, sub, data):
log.msg('ERROR IN DEFERRED %s' % error)
log.msg('on sending of message, family 0x%02x, subtype 0x%02x' % (fam, sub))
log.msg('data: %s' % repr(data))
def sendSNACnr(self,fam,sub,data,flags=[0,0]):
"""
send a snac, but don't bother adding a deferred, we don't care.
"""
self.sendFLAP(SNAC(fam,sub,0x10000*fam+sub,data))
def oscar_(self,data):
self.sendFLAP("\000\000\000\001"+TLV(6,self.cookie), 0x01)
return "Data"
def oscar_Data(self,data):
snac=readSNAC(data[1])
if self.requestCallbacks.has_key(snac[4]):
d = self.requestCallbacks[snac[4]]
del self.requestCallbacks[snac[4]]
if snac[1]!=1:
d.callback(snac)
else:
d.errback(snac)
return
func=getattr(self,'oscar_%02X_%02X'%(snac[0],snac[1]),None)
if not func:
self.oscar_unknown(snac)
else:
func(snac[2:])
return "Data"
def oscar_unknown(self,snac):
log.msg("unknown for %s" % self)
log.msg(snac)
def oscar_01_03(self, snac):
numFamilies = len(snac[3])/2
self.supportedFamilies = struct.unpack("!"+str(numFamilies)+'H', snac[3])
d = ''
for fam in self.supportedFamilies:
if self.snacFamilies.has_key(fam):
d=d+struct.pack('!2H',fam,self.snacFamilies[fam][0])
self.sendSNACnr(0x01,0x17, d)
def oscar_01_0A(self,snac):
"""
change of rate information.
"""
# this can be parsed, maybe we can even work it in
pass
def oscar_01_18(self,snac):
"""
host versions, in the same format as we sent
"""
self.sendSNACnr(0x01,0x06,"") #pass
def clientReady(self):
"""
called when the client is ready to be online
"""
d = ''
for fam in self.supportedFamilies:
if self.snacFamilies.has_key(fam):
version, toolID, toolVersion = self.snacFamilies[fam]
d = d + struct.pack('!4H',fam,version,toolID,toolVersion)
self.sendSNACnr(0x01,0x02,d)
class BOSConnection(SNACBased):
snacFamilies = {
0x01:(3, 0x0110, 0x059b),
0x13:(3, 0x0110, 0x059b),
0x02:(1, 0x0110, 0x059b),
0x03:(1, 0x0110, 0x059b),
0x04:(1, 0x0110, 0x059b),
0x06:(1, 0x0110, 0x059b),
0x08:(1, 0x0104, 0x0001),
0x09:(1, 0x0110, 0x059b),
0x0a:(1, 0x0110, 0x059b),
0x0b:(1, 0x0104, 0x0001),
0x0c:(1, 0x0104, 0x0001)
}
capabilities = None
def __init__(self,username,cookie):
SNACBased.__init__(self,cookie)
self.username=username
self.profile = None
self.awayMessage = None
self.services = {}
if not self.capabilities:
self.capabilities = [CAP_CHAT]
def parseUser(self,data,count=None):
l=ord(data[0])
name=data[1:1+l]
warn,foo=struct.unpack("!HH",data[1+l:5+l])
warn=int(warn/10)
tlvs=data[5+l:]
if count:
tlvs,rest = readTLVs(tlvs,foo)
else:
tlvs,rest = readTLVs(tlvs), None
u = OSCARUser(name, warn, tlvs)
if rest == None:
return u
else:
return u, rest
def oscar_01_05(self, snac, d = None):
"""
data for a new service connection
d might be a deferred to be called back when the service is ready
"""
tlvs = readTLVs(snac[3][2:])
service = struct.unpack('!H',tlvs[0x0d])[0]
ip = tlvs[5]
cookie = tlvs[6]
#c = serviceClasses[service](self, cookie, d)
c = protocol.ClientCreator(reactor, serviceClasses[service], self, cookie, d)
def addService(x):
self.services[service] = x
c.connectTCP(ip, 5190).addCallback(addService)
#self.services[service] = c
def oscar_01_07(self,snac):
"""
rate parameters
"""
self.sendSNACnr(0x01,0x08,"\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05") # ack
self.initDone()
self.sendSNACnr(0x13,0x02,'') # SSI rights info
self.sendSNACnr(0x02,0x02,'') # location rights info
self.sendSNACnr(0x03,0x02,'') # buddy list rights
self.sendSNACnr(0x04,0x04,'') # ICBM parms
self.sendSNACnr(0x09,0x02,'') # BOS rights
def oscar_01_10(self,snac):
"""
we've been warned
"""
skip = struct.unpack('!H',snac[3][:2])[0]
newLevel = struct.unpack('!H',snac[3][2+skip:4+skip])[0]/10
if len(snac[3])>4+skip:
by = self.parseUser(snac[3][4+skip:])
else:
by = None
self.receiveWarning(newLevel, by)
def oscar_01_13(self,snac):
"""
MOTD
"""
pass # we don't care for now
def oscar_02_03(self, snac):
"""
location rights response
"""
tlvs = readTLVs(snac[3])
self.maxProfileLength = tlvs[1]
def oscar_03_03(self, snac):
"""
buddy list rights response
"""
tlvs = readTLVs(snac[3])
self.maxBuddies = tlvs[1]
self.maxWatchers = tlvs[2]
def oscar_03_0B(self, snac):
"""
buddy update
"""
self.updateBuddy(self.parseUser(snac[3]))
def oscar_03_0C(self, snac):
"""
buddy offline
"""
self.offlineBuddy(self.parseUser(snac[3]))
# def oscar_04_03(self, snac):
def oscar_04_05(self, snac):
"""
ICBM parms response
"""
self.sendSNACnr(0x04,0x02,'\x00\x00\x00\x00\x00\x0b\x1f@\x03\xe7\x03\xe7\x00\x00\x00\x00') # IM rights
def oscar_04_07(self, snac):
"""
ICBM message (instant message)
"""
data = snac[3]
cookie, data = data[:8], data[8:]
channel = struct.unpack('!H',data[:2])[0]
data = data[2:]
user, data = self.parseUser(data, 1)
tlvs = readTLVs(data)
if channel == 1: # message
flags = []
multiparts = []
for k, v in tlvs.items():
if k == 2:
while v:
v = v[2:] # skip bad data
messageLength, charSet, charSubSet = struct.unpack('!3H', v[:6])
messageLength -= 4
message = [v[6:6+messageLength]]
if charSet == 0:
pass # don't add anything special
elif charSet == 2:
message.append('unicode')
elif charSet == 3:
message.append('iso-8859-1')
elif charSet == 0xffff:
message.append('none')
if charSubSet == 0xb:
message.append('macintosh')
if messageLength > 0: multiparts.append(tuple(message))
v = v[6+messageLength:]
elif k == 3:
flags.append('acknowledge')
elif k == 4:
flags.append('auto')
elif k == 6:
flags.append('offline')
elif k == 8:
iconLength, foo, iconSum, iconStamp = struct.unpack('!LHHL',v)
if iconLength:
flags.append('icon')
flags.append((iconLength, iconSum, iconStamp))
elif k == 9:
flags.append('buddyrequest')
elif k == 0xb: # unknown
pass
elif k == 0x17:
flags.append('extradata')
flags.append(v)
else:
log.msg('unknown TLV for incoming IM, %04x, %s' % (k,repr(v)))
# unknown tlv for user SNewdorf
# t: 29
# v: '\x00\x00\x00\x05\x02\x01\xd2\x04r\x00\x01\x01\x10/\x8c\x8b\x8a\x1e\x94*\xbc\x80}\x8d\xc4;\x1dEM'
# XXX what is this?
self.receiveMessage(user, multiparts, flags)
elif channel == 2: # rendezvous
status = struct.unpack('!H',tlvs[5][:2])[0]
requestClass = tlvs[5][10:26]
moreTLVs = readTLVs(tlvs[5][26:])
if requestClass == CAP_CHAT: # a chat request
exchange = struct.unpack('!H',moreTLVs[10001][:2])[0]
name = moreTLVs[10001][3:-2]
instance = struct.unpack('!H',moreTLVs[10001][-2:])[0]
if not self.services.has_key(SERVICE_CHATNAV):
self.connectService(SERVICE_CHATNAV,1).addCallback(lambda x: self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\
addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12]))
else:
self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\
addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12])
elif requestClass == CAP_SEND_FILE:
if moreTLVs.has_key(11): # cancel
log.msg('cancelled file request')
log.msg(status)
return # handle this later
name = moreTLVs[10001][9:-7]
desc = moreTLVs[12]
log.msg('file request from %s, %s, %s' % (user, name, desc))
self.receiveSendFileRequest(user, name, desc, cookie)
else:
log.msg('unsupported rendezvous: %s' % requestClass)
log.msg(repr(moreTLVs))
else:
log.msg('unknown channel %02x' % channel)
log.msg(tlvs)
def _cbGetChatInfoForInvite(self, info, user, message):
apply(self.receiveChatInvite, (user,message)+info)
def oscar_09_03(self, snac):
"""
BOS rights response
"""
tlvs = readTLVs(snac[3])
self.maxPermitList = tlvs[1]
self.maxDenyList = tlvs[2]
def oscar_0B_02(self, snac):
"""
stats reporting interval
"""
self.reportingInterval = struct.unpack('!H',snac[3])[0]
def oscar_13_03(self, snac):
"""
SSI rights response
"""
#tlvs = readTLVs(snac[3])
pass # we don't know how to parse this
# methods to be called by the client, and their support methods
def requestSelfInfo(self):
"""
ask for the OSCARUser for ourselves
"""
d = defer.Deferred()
self.sendSNAC(0x01, 0x0E, '').addCallback(self._cbRequestSelfInfo, d)
return d
def _cbRequestSelfInfo(self, snac, d):
d.callback(self.parseUser(snac[5]))
def initSSI(self):
"""
this sends the rate request for family 0x13 (Server Side Information)
so we can then use it
"""
return self.sendSNAC(0x13, 0x02, '').addCallback(self._cbInitSSI)
def _cbInitSSI(self, snac, d):
return {} # don't even bother parsing this
def requestSSI(self, timestamp = 0, revision = 0):
"""
request the server side information
if the deferred gets None, it means the SSI is the same
"""
return self.sendSNAC(0x13, 0x05,
struct.pack('!LH',timestamp,revision)).addCallback(self._cbRequestSSI)
def _cbRequestSSI(self, snac, args = ()):
if snac[1] == 0x0f: # same SSI as we have
return
itemdata = snac[5][3:]
if args:
revision, groups, permit, deny, permitMode, visibility = args
else:
version, revision = struct.unpack('!BH', snac[5][:3])
groups = {}
permit = []
deny = []
permitMode = None
visibility = None
while len(itemdata)>4:
nameLength = struct.unpack('!H', itemdata[:2])[0]
name = itemdata[2:2+nameLength]
groupID, buddyID, itemType, restLength = \
struct.unpack('!4H', itemdata[2+nameLength:10+nameLength])
tlvs = readTLVs(itemdata[10+nameLength:10+nameLength+restLength])
itemdata = itemdata[10+nameLength+restLength:]
if itemType == 0: # buddies
groups[groupID].addUser(buddyID, SSIBuddy(name, tlvs))
elif itemType == 1: # group
g = SSIGroup(name, tlvs)
if groups.has_key(0): groups[0].addUser(groupID, g)
groups[groupID] = g
elif itemType == 2: # permit
permit.append(name)
elif itemType == 3: # deny
deny.append(name)
elif itemType == 4: # permit deny info
if not tlvs.has_key(0xcb):
continue # this happens with ICQ
permitMode = {1:'permitall',2:'denyall',3:'permitsome',4:'denysome',5:'permitbuddies'}[ord(tlvs[0xca])]
visibility = {'\xff\xff\xff\xff':'all','\x00\x00\x00\x04':'notaim'}[tlvs[0xcb]]
elif itemType == 5: # unknown (perhaps idle data)?
pass
else:
log.msg('%s %s %s %s %s' % (name, groupID, buddyID, itemType, tlvs))
timestamp = struct.unpack('!L',itemdata)[0]
if not timestamp: # we've got more packets coming
# which means add some deferred stuff
d = defer.Deferred()
self.requestCallbacks[snac[4]] = d
d.addCallback(self._cbRequestSSI, (revision, groups, permit, deny, permitMode, visibility))
return d
return (groups[0].users,permit,deny,permitMode,visibility,timestamp,revision)
def activateSSI(self):
"""
activate the data stored on the server (use buddy list, permit deny settings, etc.)
"""
self.sendSNACnr(0x13,0x07,'')
def startModifySSI(self):
"""
tell the OSCAR server to be on the lookout for SSI modifications
"""
self.sendSNACnr(0x13,0x11,'')
def addItemSSI(self, item, groupID = None, buddyID = None):
"""
add an item to the SSI server. if buddyID == 0, then this should be a group.
this gets a callback when it's finished, but you can probably ignore it.
"""
if not groupID:
groupID = item.group.group.findIDFor(item.group)
if not buddyID:
buddyID = item.group.findIDFor(item)
return self.sendSNAC(0x13,0x08, item.oscarRep(groupID, buddyID))
def modifyItemSSI(self, item, groupID = None, buddyID = None):
if not groupID:
groupID = item.group.group.findIDFor(item.group)
if not buddyID:
buddyID = item.group.findIDFor(item)
return self.sendSNAC(0x13,0x09, item.oscarRep(groupID, buddyID))
def delItemSSI(self, item, groupID = None, buddyID = None):
if not groupID:
groupID = item.group.group.findIDFor(item.group)
if not buddyID:
buddyID = item.group.findIDFor(item)
return self.sendSNAC(0x13,0x0A, item.oscarRep(groupID, buddyID))
def endModifySSI(self):
self.sendSNACnr(0x13,0x12,'')
def setProfile(self, profile):
"""
set the profile.
send None to not set a profile (different from '' for a blank one)
"""
self.profile = profile
tlvs = ''
if self.profile:
tlvs = TLV(1,'text/aolrtf; charset="us-ascii"') + \
TLV(2,self.profile)
tlvs = tlvs + TLV(5, ''.join(self.capabilities))
self.sendSNACnr(0x02, 0x04, tlvs)
def setAway(self, away = None):
"""
set the away message, or return (if away == None)
"""
self.awayMessage = away
tlvs = TLV(3,'text/aolrtf; charset="us-ascii"') + \
TLV(4,away or '')
self.sendSNACnr(0x02, 0x04, tlvs)
def setIdleTime(self, idleTime):
"""
set our idle time. don't call more than once with a non-0 idle time.
"""
self.sendSNACnr(0x01, 0x11, struct.pack('!L',idleTime))
def sendMessage(self, user, message, wantAck = 0, autoResponse = 0, offline = 0 ): \
#haveIcon = 0, ):
"""
send a message to user (a screen name string, not an OSCARUser).
message can be a string, or a multipart tuple.
if wantAck, we return a Deferred that gets a callback when the message is sent.
if autoResponse, this message is an autoResponse, as if from an away message.
if offline, this is an offline message (ICQ only, I think)
"""
data = ''.join([chr(random.randrange(0, 127)) for i in range(8)]) # cookie
data = data + '\x00\x01' + chr(len(user)) + user
if not type(message) in (types.TupleType, types.ListType):
message = [[message,]]
if type(message[0][0]) == types.UnicodeType:
message[0].append('unicode')
messageData = ''
for part in message:
charSet = 0
if 'unicode' in part[1:]:
charSet = 2
elif 'iso-8859-1' in part[1:]:
charSet = 3
elif 'none' in part[1:]:
charSet = 0xffff
if 'macintosh' in part[1:]:
charSubSet = 0xb
else:
charSubSet = 0
messageData = messageData + '\x01\x01' + \
struct.pack('!3H',len(part[0])+4,charSet,charSubSet)
messageData = messageData + part[0]
data = data + TLV(2, '\x05\x01\x00\x03\x01\x01\x02'+messageData)
if wantAck:
data = data + TLV(3,'')
if autoResponse:
data = data + TLV(4,'')
if offline:
data = data + TLV(6,'')
if wantAck:
return self.sendSNAC(0x04, 0x06, data).addCallback(self._cbSendMessageAck, user, message)
self.sendSNACnr(0x04, 0x06, data)
def _cbSendMessageAck(self, snac, user, message):
return user, message
def connectService(self, service, wantCallback = 0, extraData = ''):
"""
connect to another service
if wantCallback, we return a Deferred that gets called back when the service is online.
if extraData, append that to our request.
"""
if wantCallback:
d = defer.Deferred()
self.sendSNAC(0x01,0x04,struct.pack('!H',service) + extraData).addCallback(self._cbConnectService, d)
return d
else:
self.sendSNACnr(0x01,0x04,struct.pack('!H',service))
def _cbConnectService(self, snac, d):
d.arm()
self.oscar_01_05(snac[2:], d)
def createChat(self, shortName):
"""
create a chat room
"""
if self.services.has_key(SERVICE_CHATNAV):
return self.services[SERVICE_CHATNAV].createChat(shortName)
else:
d = defer.Deferred()
self.connectService(SERVICE_CHATNAV,1).addCallback(lambda s:d.arm() or s.createChat(shortName).chainDeferred(d))
return d
def joinChat(self, exchange, fullName, instance):
"""
join a chat room
"""
#d = defer.Deferred()
return self.connectService(0x0e, 1, TLV(0x01, struct.pack('!HB',exchange, len(fullName)) + fullName +
struct.pack('!H', instance))).addCallback(self._cbJoinChat) #, d)
#return d
def _cbJoinChat(self, chat):
del self.services[SERVICE_CHAT]
return chat
def warnUser(self, user, anon = 0):
return self.sendSNAC(0x04, 0x08, '\x00'+chr(anon)+chr(len(user))+user).addCallback(self._cbWarnUser)
def _cbWarnUser(self, snac):
oldLevel, newLevel = struct.unpack('!2H', snac[5])
return oldLevel, newLevel
def getInfo(self, user):
#if user.
return self.sendSNAC(0x02, 0x05, '\x00\x01'+chr(len(user))+user).addCallback(self._cbGetInfo)
def _cbGetInfo(self, snac):
user, rest = self.parseUser(snac[5],1)
tlvs = readTLVs(rest)
return tlvs.get(0x02,None)
def getAway(self, user):
return self.sendSNAC(0x02, 0x05, '\x00\x03'+chr(len(user))+user).addCallback(self._cbGetAway)
def _cbGetAway(self, snac):
user, rest = self.parseUser(snac[5],1)
tlvs = readTLVs(rest)
return tlvs.get(0x04,None) # return None if there is no away message
#def acceptSendFileRequest(self,
# methods to be overriden by the client
def initDone(self):
"""
called when we get the rate information, which means we should do other init. stuff.
"""
log.msg('%s initDone' % self)
pass
def updateBuddy(self, user):
"""
called when a buddy changes status, with the OSCARUser for that buddy.
"""
log.msg('%s updateBuddy %s' % (self, user))
pass
def offlineBuddy(self, user):
"""
called when a buddy goes offline
"""
log.msg('%s offlineBuddy %s' % (self, user))
pass
def receiveMessage(self, user, multiparts, flags):
"""
called when someone sends us a message
"""
pass
def receiveWarning(self, newLevel, user):
"""
called when someone warns us.
user is either None (if it was anonymous) or an OSCARUser
"""
pass
def receiveChatInvite(self, user, message, exchange, fullName, instance, shortName, inviteTime):
"""
called when someone invites us to a chat room
"""
pass
def chatReceiveMessage(self, chat, user, message):
"""
called when someone in a chatroom sends us a message in the chat
"""
pass
def chatMemberJoined(self, chat, member):
"""
called when a member joins the chat
"""
pass
def chatMemberLeft(self, chat, member):
"""
called when a member leaves the chat
"""
pass
def receiveSendFileRequest(self, user, file, description, cookie):
"""
called when someone tries to send a file to us
"""
pass
class OSCARService(SNACBased):
def __init__(self, bos, cookie, d = None):
SNACBased.__init__(self, cookie)
self.bos = bos
self.d = d
def connectionLost(self, reason):
for k,v in self.bos.services.items():
if v == self:
del self.bos.services[k]
return
def clientReady(self):
SNACBased.clientReady(self)
if self.d:
self.d.callback(self)
self.d = None
class ChatNavService(OSCARService):
snacFamilies = {
0x01:(3, 0x0010, 0x059b),
0x0d:(1, 0x0010, 0x059b)
}
def oscar_01_07(self, snac):
# rate info
self.sendSNACnr(0x01, 0x08, '\000\001\000\002\000\003\000\004\000\005')
self.sendSNACnr(0x0d, 0x02, '')
def oscar_0D_09(self, snac):
self.clientReady()
def getChatInfo(self, exchange, name, instance):
d = defer.Deferred()
self.sendSNAC(0x0d,0x04,struct.pack('!HB',exchange,len(name)) + \
name + struct.pack('!HB',instance,2)). \
addCallback(self._cbGetChatInfo, d)
return d
def _cbGetChatInfo(self, snac, d):
data = snac[5][4:]
exchange, length = struct.unpack('!HB',data[:3])
fullName = data[3:3+length]
instance = struct.unpack('!H',data[3+length:5+length])[0]
tlvs = readTLVs(data[8+length:])
shortName = tlvs[0x6a]
inviteTime = struct.unpack('!L',tlvs[0xca])[0]
info = (exchange,fullName,instance,shortName,inviteTime)
d.callback(info)
def createChat(self, shortName):
#d = defer.Deferred()
data = '\x00\x04\x06create\xff\xff\x01\x00\x03'
data = data + TLV(0xd7, 'en')
data = data + TLV(0xd6, 'us-ascii')
data = data + TLV(0xd3, shortName)
return self.sendSNAC(0x0d, 0x08, data).addCallback(self._cbCreateChat)
#return d
def _cbCreateChat(self, snac): #d):
exchange, length = struct.unpack('!HB',snac[5][4:7])
fullName = snac[5][7:7+length]
instance = struct.unpack('!H',snac[5][7+length:9+length])[0]
#d.callback((exchange, fullName, instance))
return exchange, fullName, instance
class ChatService(OSCARService):
snacFamilies = {
0x01:(3, 0x0010, 0x059b),
0x0E:(1, 0x0010, 0x059b)
}
def __init__(self,bos,cookie, d = None):
OSCARService.__init__(self,bos,cookie,d)
self.exchange = None
self.fullName = None
self.instance = None
self.name = None
self.members = None
clientReady = SNACBased.clientReady # we'll do our own callback
def oscar_01_07(self,snac):
self.sendSNAC(0x01,0x08,"\000\001\000\002\000\003\000\004\000\005")
self.clientReady()
def oscar_0E_02(self, snac):
# try: # this is EVIL
# data = snac[3][4:]
# self.exchange, length = struct.unpack('!HB',data[:3])
# self.fullName = data[3:3+length]
# self.instance = struct.unpack('!H',data[3+length:5+length])[0]
# tlvs = readTLVs(data[8+length:])
# self.name = tlvs[0xd3]
# self.d.callback(self)
# except KeyError:
data = snac[3]
self.exchange, length = struct.unpack('!HB',data[:3])
self.fullName = data[3:3+length]
self.instance = struct.unpack('!H',data[3+length:5+length])[0]
tlvs = readTLVs(data[8+length:])
self.name = tlvs[0xd3]
self.d.callback(self)
def oscar_0E_03(self,snac):
users=[]
rest=snac[3]
while rest:
user, rest = self.bos.parseUser(rest, 1)
users.append(user)
if not self.fullName:
self.members = users
else:
self.members.append(users[0])
self.bos.chatMemberJoined(self,users[0])
def oscar_0E_04(self,snac):
user=self.bos.parseUser(snac[3])
for u in self.members:
if u.name == user.name: # same person!
self.members.remove(u)
self.bos.chatMemberLeft(self,user)
def oscar_0E_06(self,snac):
data = snac[3]
user,rest=self.bos.parseUser(snac[3][14:],1)
tlvs = readTLVs(rest[8:])
message=tlvs[1]
self.bos.chatReceiveMessage(self,user,message)
def sendMessage(self,message):
tlvs=TLV(0x02,"us-ascii")+TLV(0x03,"en")+TLV(0x01,message)
self.sendSNAC(0x0e,0x05,
"\x46\x30\x38\x30\x44\x00\x63\x00\x00\x03\x00\x01\x00\x00\x00\x06\x00\x00\x00\x05"+
struct.pack("!H",len(tlvs))+
tlvs)
def leaveChat(self):
self.disconnect()
class OscarAuthenticator(OscarConnection):
BOSClass = BOSConnection
def __init__(self,username,password,deferred=None,icq=0):
self.username=username
self.password=password
self.deferred=deferred
self.icq=icq # icq mode is disabled
#if icq and self.BOSClass==BOSConnection:
# self.BOSClass=ICQConnection
def oscar_(self,flap):
if not self.icq:
self.sendFLAP("\000\000\000\001", 0x01)
self.sendFLAP(SNAC(0x17,0x06,0,
TLV(TLV_USERNAME,self.username)+
TLV(0x004B,'')))
self.state="Key"
else:
encpass=encryptPasswordICQ(self.password)
self.sendFLAP('\000\000\000\001'+
TLV(0x01,self.username)+
TLV(0x02,encpass)+
TLV(0x03,'ICQ Inc. - Product of ICQ (TM).2001b.5.18.1.3659.85')+
TLV(0x16,"\x01\x0a")+
TLV(0x17,"\x00\x05")+
TLV(0x18,"\x00\x12")+
TLV(0x19,"\000\001")+
TLV(0x1a,"\x0eK")+
TLV(0x14,"\x00\x00\x00U")+
TLV(0x0f,"en")+
TLV(0x0e,"us"),0x01)
self.state="Cookie"
def oscar_Key(self,data):
snac=readSNAC(data[1])
key=snac[5][2:]
encpass=encryptPasswordMD5(self.password,key)
self.sendFLAP(SNAC(0x17,0x02,0,
TLV(TLV_USERNAME,self.username)+
TLV(TLV_PASSWORD,encpass)+
TLV(0x004C, '')+ # unknown
TLV(TLV_CLIENTNAME,"AOL Instant Messenger (SM), version 4.8.2790/WIN32")+
TLV(0x0016,"\x01\x09")+
TLV(TLV_CLIENTMAJOR,"\000\004")+
TLV(TLV_CLIENTMINOR,"\000\010")+
TLV(0x0019,"\000\000")+
TLV(TLV_CLIENTSUB,"\x0A\xE6")+
TLV(0x0014,"\x00\x00\x00\xBB")+
TLV(TLV_LANG,"en")+
TLV(TLV_COUNTRY,"us")+
TLV(TLV_USESSI,"\001")))
return "Cookie"
def oscar_Cookie(self,data):
snac=readSNAC(data[1])
if self.icq:
i=snac[5].find("\000")
snac[5]=snac[5][i:]
tlvs=readTLVs(snac[5])
if tlvs.has_key(6):
self.cookie=tlvs[6]
server,port=string.split(tlvs[5],":")
c = protocol.ClientCreator(reactor, self.BOSClass, self.username, self.cookie)
d = c.connectTCP(server, int(port))
d.addErrback(lambda x: log.msg("Connection Failed! Reason: %s" % x))
if self.deferred:
d.chainDeferred(self.deferred)
self.disconnect()
elif tlvs.has_key(8):
errorcode=tlvs[8]
errorurl=tlvs[4]
if errorcode=='\000\030':
error="You are attempting to sign on again too soon. Please try again later."
elif errorcode=='\000\005':
error="Invalid Username or Password."
else: error=repr(errorcode)
self.error(error,errorurl)
else:
log.msg('hmm, weird tlvs for %s cookie packet' % str(self))
log.msg(tlvs)
log.msg('snac')
log.msg(str(snac))
return "None"
def oscar_None(self,data): pass
def error(self,error,url):
log.msg("ERROR! %s %s" % (error,url))
if self.deferred: self.deferred.errback((error,url))
self.transport.loseConnection()
FLAP_CHANNEL_NEW_CONNECTION = 0x01
FLAP_CHANNEL_DATA = 0x02
FLAP_CHANNEL_ERROR = 0x03
FLAP_CHANNEL_CLOSE_CONNECTION = 0x04
SERVICE_CHATNAV = 0x0d
SERVICE_CHAT = 0x0e
serviceClasses = {
SERVICE_CHATNAV:ChatNavService,
SERVICE_CHAT:ChatService
}
TLV_USERNAME = 0x0001
TLV_CLIENTNAME = 0x0003
TLV_COUNTRY = 0x000E
TLV_LANG = 0x000F
TLV_CLIENTMAJOR = 0x0017
TLV_CLIENTMINOR = 0x0018
TLV_CLIENTSUB = 0x001A
TLV_PASSWORD = 0x0025
TLV_USESSI = 0x004A
CAP_ICON = '\011F\023FL\177\021\321\202"DEST\000\000'
CAP_VOICE = '\011F\023AL\177\021\321\202"DEST\000\000'
CAP_IMAGE = '\011F\023EL\177\021\321\202"DEST\000\000'
CAP_CHAT = 't\217$ b\207\021\321\202"DEST\000\000'
CAP_GET_FILE = '\011F\023HL\177\021\321\202"DEST\000\000'
CAP_SEND_FILE = '\011F\023CL\177\021\321\202"DEST\000\000'
CAP_GAMES = '\011F\023GL\177\021\321\202"DEST\000\000'
CAP_SEND_LIST = '\011F\023KL\177\021\321\202"DEST\000\000'
| gpl-2.0 |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Lib/lib2to3/fixes/fix_intern.py | 315 | 1405 | # Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Name, Attr, touch_import
class FixIntern(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'intern'
trailer< lpar='('
( not(arglist | argument<any '=' any>) obj=any
| obj=arglist<(not argument<any '=' any>) any ','> )
rpar=')' >
after=any*
>
"""
def transform(self, node, results):
syms = self.syms
obj = results["obj"].clone()
if obj.type == syms.arglist:
newarglist = obj.clone()
else:
newarglist = pytree.Node(syms.arglist, [obj.clone()])
after = results["after"]
if after:
after = [n.clone() for n in after]
new = pytree.Node(syms.power,
Attr(Name(u"sys"), Name(u"intern")) +
[pytree.Node(syms.trailer,
[results["lpar"].clone(),
newarglist,
results["rpar"].clone()])] + after)
new.prefix = node.prefix
touch_import(None, u'sys', node)
return new
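# Example (descriptive, not part of the original fixer): applied by 2to3,
# the transform above rewrites
#
#   intern(s)
#
# into
#
#   sys.intern(s)
#
# while touch_import() guarantees that "import sys" exists in the module.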
| mit |
EricMuller/mynotes-backend | requirements/twisted/Twisted-17.1.0/build/lib.linux-x86_64-3.5/twisted/test/iosim.py | 12 | 17684 | # -*- test-case-name: twisted.test.test_amp,twisted.test.test_iosim -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utilities and helpers for simulating a network
"""
from __future__ import absolute_import, division, print_function
import itertools
try:
from OpenSSL.SSL import Error as NativeOpenSSLError
except ImportError:
pass
from zope.interface import implementer, directlyProvides
from twisted.internet.endpoints import TCP4ClientEndpoint, TCP4ServerEndpoint
from twisted.internet.protocol import Factory, Protocol
from twisted.internet.error import ConnectionRefusedError
from twisted.python.failure import Failure
from twisted.internet import error
from twisted.internet import interfaces
from .proto_helpers import MemoryReactorClock
class TLSNegotiation:
def __init__(self, obj, connectState):
self.obj = obj
self.connectState = connectState
self.sent = False
self.readyToSend = connectState
def __repr__(self):
return 'TLSNegotiation(%r)' % (self.obj,)
def pretendToVerify(self, other, tpt):
# Set the transport problems list here? disconnections?
# hmmmmm... need some negative path tests.
if not self.obj.iosimVerify(other.obj):
tpt.disconnectReason = NativeOpenSSLError()
tpt.loseConnection()
@implementer(interfaces.IAddress)
class FakeAddress(object):
"""
The default address type for the host and peer of L{FakeTransport}
connections.
"""
@implementer(interfaces.ITransport,
interfaces.ITLSTransport)
class FakeTransport:
"""
A wrapper around a file-like object to make it behave as a Transport.
This doesn't actually stream the file to the attached protocol,
and is thus useful mainly as a utility for debugging protocols.
"""
_nextserial = staticmethod(lambda counter=itertools.count(): next(counter))
closed = 0
disconnecting = 0
disconnected = 0
disconnectReason = error.ConnectionDone("Connection done")
producer = None
streamingProducer = 0
tls = None
def __init__(self, protocol, isServer, hostAddress=None, peerAddress=None):
"""
@param protocol: This transport will deliver bytes to this protocol.
@type protocol: L{IProtocol} provider
@param isServer: C{True} if this is the accepting side of the
connection, C{False} if it is the connecting side.
@type isServer: L{bool}
@param hostAddress: The value to return from C{getHost}. L{None}
results in a new L{FakeAddress} being created to use as the value.
@type hostAddress: L{IAddress} provider or L{None}
@param peerAddress: The value to return from C{getPeer}. L{None}
results in a new L{FakeAddress} being created to use as the value.
@type peerAddress: L{IAddress} provider or L{None}
"""
self.protocol = protocol
self.isServer = isServer
self.stream = []
self.serial = self._nextserial()
if hostAddress is None:
hostAddress = FakeAddress()
self.hostAddress = hostAddress
if peerAddress is None:
peerAddress = FakeAddress()
self.peerAddress = peerAddress
def __repr__(self):
return 'FakeTransport<%s,%s,%s>' % (
self.isServer and 'S' or 'C', self.serial,
self.protocol.__class__.__name__)
def write(self, data):
if self.tls is not None:
self.tlsbuf.append(data)
else:
self.stream.append(data)
def _checkProducer(self):
# Cheating; this is called at "idle" times to allow producers to be
# found and dealt with
if self.producer:
self.producer.resumeProducing()
def registerProducer(self, producer, streaming):
"""
From abstract.FileDescriptor
"""
self.producer = producer
self.streamingProducer = streaming
if not streaming:
producer.resumeProducing()
def unregisterProducer(self):
self.producer = None
def stopConsuming(self):
self.unregisterProducer()
self.loseConnection()
def writeSequence(self, iovec):
self.write(b"".join(iovec))
def loseConnection(self):
self.disconnecting = True
def abortConnection(self):
"""
For the time being, this is the same as loseConnection; no buffered
data will be lost.
"""
self.disconnecting = True
def reportDisconnect(self):
if self.tls is not None:
# We were in the middle of negotiating! Must have been a TLS
# problem.
err = NativeOpenSSLError()
else:
err = self.disconnectReason
self.protocol.connectionLost(Failure(err))
def logPrefix(self):
"""
Identify this transport/event source to the logging system.
"""
return "iosim"
def getPeer(self):
return self.peerAddress
def getHost(self):
return self.hostAddress
def resumeProducing(self):
# Never sends data anyways
pass
def pauseProducing(self):
# Never sends data anyways
pass
def stopProducing(self):
self.loseConnection()
def startTLS(self, contextFactory, beNormal=True):
# Nothing's using this feature yet, but startTLS has an undocumented
# second argument which defaults to true; if set to False, servers will
# behave like clients and clients will behave like servers.
connectState = self.isServer ^ beNormal
self.tls = TLSNegotiation(contextFactory, connectState)
self.tlsbuf = []
def getOutBuffer(self):
"""
Get the pending writes from this transport, clearing them from the
pending buffer.
@return: the bytes written with C{transport.write}
@rtype: L{bytes}
"""
S = self.stream
if S:
self.stream = []
return b''.join(S)
elif self.tls is not None:
if self.tls.readyToSend:
# Only _send_ the TLS negotiation "packet" if I'm ready to.
self.tls.sent = True
return self.tls
else:
return None
else:
return None
def bufferReceived(self, buf):
if isinstance(buf, TLSNegotiation):
assert self.tls is not None # By the time you're receiving a
# negotiation, you have to have called
# startTLS already.
if self.tls.sent:
self.tls.pretendToVerify(buf, self)
self.tls = None # We're done with the handshake if we've gotten
# this far... although maybe it failed...?
# TLS started! Unbuffer...
b, self.tlsbuf = self.tlsbuf, None
self.writeSequence(b)
directlyProvides(self, interfaces.ISSLTransport)
else:
# We haven't sent our own TLS negotiation: time to do that!
self.tls.readyToSend = True
else:
self.protocol.dataReceived(buf)
def makeFakeClient(clientProtocol):
"""
Create and return a new in-memory transport hooked up to the given protocol.
@param clientProtocol: The client protocol to use.
@type clientProtocol: L{IProtocol} provider
@return: The transport.
@rtype: L{FakeTransport}
"""
return FakeTransport(clientProtocol, isServer=False)
def makeFakeServer(serverProtocol):
"""
Create and return a new in-memory transport hooked up to the given protocol.
@param serverProtocol: The server protocol to use.
@type serverProtocol: L{IProtocol} provider
@return: The transport.
@rtype: L{FakeTransport}
"""
return FakeTransport(serverProtocol, isServer=True)
class IOPump:
"""
Utility to pump data between clients and servers for protocol testing.
Perhaps this is a utility worthy of being in protocol.py?
"""
def __init__(self, client, server, clientIO, serverIO, debug):
self.client = client
self.server = server
self.clientIO = clientIO
self.serverIO = serverIO
self.debug = debug
def flush(self, debug=False):
"""
Pump until there is no more input or output.
Returns whether any data was moved.
"""
result = False
for x in range(1000):
if self.pump(debug):
result = True
else:
break
else:
assert 0, "Too long"
return result
def pump(self, debug=False):
"""
Move data back and forth.
Returns whether any data was moved.
"""
if self.debug or debug:
print('-- GLUG --')
sData = self.serverIO.getOutBuffer()
cData = self.clientIO.getOutBuffer()
self.clientIO._checkProducer()
self.serverIO._checkProducer()
if self.debug or debug:
print('.')
# XXX slightly buggy in the face of incremental output
if cData:
print('C: ' + repr(cData))
if sData:
print('S: ' + repr(sData))
if cData:
self.serverIO.bufferReceived(cData)
if sData:
self.clientIO.bufferReceived(sData)
if cData or sData:
return True
if (self.serverIO.disconnecting and
not self.serverIO.disconnected):
if self.debug or debug:
print('* C')
self.serverIO.disconnected = True
self.clientIO.disconnecting = True
self.clientIO.reportDisconnect()
return True
if self.clientIO.disconnecting and not self.clientIO.disconnected:
if self.debug or debug:
print('* S')
self.clientIO.disconnected = True
self.serverIO.disconnecting = True
self.serverIO.reportDisconnect()
return True
return False
def connect(serverProtocol, serverTransport, clientProtocol, clientTransport,
debug=False, greet=True):
"""
Create a new L{IOPump} connecting two protocols.
@param serverProtocol: The protocol to use on the accepting side of the
connection.
@type serverProtocol: L{IProtocol} provider
@param serverTransport: The transport to associate with C{serverProtocol}.
@type serverTransport: L{FakeTransport}
@param clientProtocol: The protocol to use on the initiating side of the
connection.
@type clientProtocol: L{IProtocol} provider
@param clientTransport: The transport to associate with C{clientProtocol}.
@type clientTransport: L{FakeTransport}
@param debug: A flag indicating whether to log information about what the
L{IOPump} is doing.
@type debug: L{bool}
@param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once before
returning to put the protocols into their post-handshake or
post-server-greeting state?
@type greet: L{bool}
@return: An L{IOPump} which connects C{serverProtocol} and
C{clientProtocol} and delivers bytes between them when it is pumped.
@rtype: L{IOPump}
"""
serverProtocol.makeConnection(serverTransport)
clientProtocol.makeConnection(clientTransport)
pump = IOPump(
clientProtocol, serverProtocol, clientTransport, serverTransport, debug
)
if greet:
# Kick off server greeting, etc
pump.flush()
return pump
def connectedServerAndClient(ServerClass, ClientClass,
clientTransportFactory=makeFakeClient,
serverTransportFactory=makeFakeServer,
debug=False, greet=True):
"""
Connect a given server and client class to each other.
@param ServerClass: a callable that produces the server-side protocol.
@type ServerClass: 0-argument callable returning L{IProtocol} provider.
@param ClientClass: like C{ServerClass} but for the other side of the
connection.
@type ClientClass: 0-argument callable returning L{IProtocol} provider.
@param clientTransportFactory: a callable that produces the transport which
will be attached to the protocol returned from C{ClientClass}.
@type clientTransportFactory: callable taking (L{IProtocol}) and returning
L{FakeTransport}
@param serverTransportFactory: a callable that produces the transport which
will be attached to the protocol returned from C{ServerClass}.
@type serverTransportFactory: callable taking (L{IProtocol}) and returning
L{FakeTransport}
@param debug: Should this dump an escaped version of all traffic on this
connection to stdout for inspection?
@type debug: L{bool}
@param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once before
returning to put the protocols into their post-handshake or
post-server-greeting state?
@type greet: L{bool}
@return: the client protocol, the server protocol, and an L{IOPump} which,
when its C{pump} and C{flush} methods are called, will move data
between the created client and server protocol instances.
@rtype: 3-L{tuple} of L{IProtocol}, L{IProtocol}, L{IOPump}
"""
c = ClientClass()
s = ServerClass()
cio = clientTransportFactory(c)
sio = serverTransportFactory(s)
return c, s, connect(s, sio, c, cio, debug, greet)
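# Usage sketch (illustrative, not part of the original module): wiring two
# protocols together in memory and pumping bytes between them. `Echo` is a
# throwaway protocol defined only for this example.
#
#   from twisted.internet.protocol import Protocol
#
#   class Echo(Protocol):
#       def dataReceived(self, data):
#           self.transport.write(data)
#
#   client, server, pump = connectedServerAndClient(Echo, Echo)
#   client.transport.write(b"ping")
#   pump.flush()   # delivers b"ping" to the server and the echo back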
def _factoriesShouldConnect(clientInfo, serverInfo):
"""
Should the client and server described by the arguments be connected to
each other, i.e. do their port numbers match?
@param clientInfo: the args for connectTCP
@type clientInfo: L{tuple}
@param serverInfo: the args for listenTCP
@type serverInfo: L{tuple}
@return: If they do match, return factories for the client and server that
should connect; otherwise return L{None}, indicating they shouldn't be
connected.
@rtype: L{None} or 2-L{tuple} of (L{ClientFactory},
L{IProtocolFactory})
"""
(clientHost, clientPort, clientFactory, clientTimeout,
clientBindAddress) = clientInfo
(serverPort, serverFactory, serverBacklog,
serverInterface) = serverInfo
if serverPort == clientPort:
return clientFactory, serverFactory
else:
return None
class ConnectionCompleter(object):
"""
A L{ConnectionCompleter} can cause synthetic TCP connections established by
L{MemoryReactor.connectTCP} and L{MemoryReactor.listenTCP} to succeed or
fail.
"""
def __init__(self, memoryReactor):
"""
Create a L{ConnectionCompleter} from a L{MemoryReactor}.
@param memoryReactor: The reactor to attach to.
@type memoryReactor: L{MemoryReactor}
"""
self._reactor = memoryReactor
def succeedOnce(self, debug=False):
"""
Complete a single TCP connection established on this
L{ConnectionCompleter}'s L{MemoryReactor}.
@param debug: A flag; whether to dump output from the established
connection to stdout.
@type debug: L{bool}
@return: a pump for the connection, or L{None} if no connection could
be established.
@rtype: L{IOPump} or L{None}
"""
memoryReactor = self._reactor
for clientIdx, clientInfo in enumerate(memoryReactor.tcpClients):
for serverInfo in memoryReactor.tcpServers:
factories = _factoriesShouldConnect(clientInfo, serverInfo)
if factories:
memoryReactor.tcpClients.remove(clientInfo)
memoryReactor.connectors.pop(clientIdx)
clientFactory, serverFactory = factories
clientProtocol = clientFactory.buildProtocol(None)
serverProtocol = serverFactory.buildProtocol(None)
serverTransport = makeFakeServer(serverProtocol)
clientTransport = makeFakeClient(clientProtocol)
return connect(serverProtocol, serverTransport,
clientProtocol, clientTransport,
debug)
def failOnce(self, reason=Failure(ConnectionRefusedError())):
"""
Fail a single TCP connection established on this
L{ConnectionCompleter}'s L{MemoryReactor}.
@param reason: the reason to provide that the connection failed.
@type reason: L{Failure}
"""
self._reactor.tcpClients.pop(0)[2].clientConnectionFailed(
self._reactor.connectors.pop(0), reason
)
def connectableEndpoint(debug=False):
"""
Create an endpoint that can be fired on demand.
@param debug: A flag; whether to dump output from the established
connection to stdout.
@type debug: L{bool}
@return: A client endpoint, and an object that will cause one of the
L{Deferred}s returned by that client endpoint.
@rtype: 2-L{tuple} of (L{IStreamClientEndpoint}, L{ConnectionCompleter})
"""
reactor = MemoryReactorClock()
clientEndpoint = TCP4ClientEndpoint(reactor, "0.0.0.0", 4321)
serverEndpoint = TCP4ServerEndpoint(reactor, 4321)
serverEndpoint.listen(Factory.forProtocol(Protocol))
return clientEndpoint, ConnectionCompleter(reactor)
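# Usage sketch (illustrative, not part of the original module): driving an
# in-memory endpoint connection by hand.
#
#   endpoint, completer = connectableEndpoint()
#   d = endpoint.connect(Factory.forProtocol(Protocol))
#   pump = completer.succeedOnce()  # or completer.failOnce() to errback d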
| mit |
longmen21/edx-platform | common/djangoapps/third_party_auth/admin.py | 14 | 5780 | # -*- coding: utf-8 -*-
"""
Admin site configuration for third party authentication
"""
from django import forms
from django.contrib import admin
from config_models.admin import ConfigurationModelAdmin, KeyedConfigurationModelAdmin
from .models import (
OAuth2ProviderConfig,
SAMLProviderConfig,
SAMLConfiguration,
SAMLProviderData,
LTIProviderConfig,
ProviderApiPermissions,
_PSA_OAUTH2_BACKENDS,
_PSA_SAML_BACKENDS
)
from .tasks import fetch_saml_metadata
from third_party_auth.provider import Registry
class OAuth2ProviderConfigForm(forms.ModelForm):
""" Django Admin form class for OAuth2ProviderConfig """
backend_name = forms.ChoiceField(choices=((name, name) for name in _PSA_OAUTH2_BACKENDS))
class OAuth2ProviderConfigAdmin(KeyedConfigurationModelAdmin):
""" Django Admin class for OAuth2ProviderConfig """
form = OAuth2ProviderConfigForm
def get_list_display(self, request):
""" Don't show every single field in the admin change list """
return (
'name', 'enabled', 'backend_name', 'secondary', 'skip_registration_form',
'skip_email_verification', 'change_date', 'changed_by', 'edit_link',
)
admin.site.register(OAuth2ProviderConfig, OAuth2ProviderConfigAdmin)
class SAMLProviderConfigForm(forms.ModelForm):
""" Django Admin form class for SAMLProviderConfig """
backend_name = forms.ChoiceField(choices=((name, name) for name in _PSA_SAML_BACKENDS))
class SAMLProviderConfigAdmin(KeyedConfigurationModelAdmin):
""" Django Admin class for SAMLProviderConfig """
form = SAMLProviderConfigForm
def get_list_display(self, request):
""" Don't show every single field in the admin change list """
return (
'name', 'enabled', 'backend_name', 'entity_id', 'metadata_source',
'has_data', 'icon_class', 'icon_image', 'change_date',
'changed_by', 'edit_link'
)
def has_data(self, inst):
""" Do we have cached metadata for this SAML provider? """
if not inst.is_active:
return None # N/A
data = SAMLProviderData.current(inst.entity_id)
return bool(data and data.is_valid())
has_data.short_description = u'Metadata Ready'
has_data.boolean = True
def save_model(self, request, obj, form, change):
"""
Post save: Queue an asynchronous metadata fetch to update SAMLProviderData.
We only want to do this for manual edits done using the admin interface.
Note: This only works if the celery worker and the app worker are using the
same 'configuration' cache.
"""
super(SAMLProviderConfigAdmin, self).save_model(request, obj, form, change)
fetch_saml_metadata.apply_async((), countdown=2)
admin.site.register(SAMLProviderConfig, SAMLProviderConfigAdmin)
class SAMLConfigurationAdmin(ConfigurationModelAdmin):
""" Django Admin class for SAMLConfiguration """
def get_list_display(self, request):
""" Shorten the public/private keys in the change view """
return (
'change_date', 'changed_by', 'enabled', 'entity_id',
'org_info_str', 'key_summary',
)
def key_summary(self, inst):
""" Short summary of the key pairs configured """
public_key = inst.get_setting('SP_PUBLIC_CERT')
private_key = inst.get_setting('SP_PRIVATE_KEY')
if not public_key or not private_key:
return u'<em>Key pair incomplete/missing</em>'
pub1, pub2 = public_key[0:10], public_key[-10:]
priv1, priv2 = private_key[0:10], private_key[-10:]
return u'Public: {}…{}<br>Private: {}…{}'.format(pub1, pub2, priv1, priv2)
key_summary.allow_tags = True
admin.site.register(SAMLConfiguration, SAMLConfigurationAdmin)
class SAMLProviderDataAdmin(admin.ModelAdmin):
""" Django Admin class for SAMLProviderData (Read Only) """
list_display = ('entity_id', 'is_valid', 'fetched_at', 'expires_at', 'sso_url')
readonly_fields = ('is_valid', )
def get_readonly_fields(self, request, obj=None):
if obj: # editing an existing object
return self.model._meta.get_all_field_names() # pylint: disable=protected-access
return self.readonly_fields
admin.site.register(SAMLProviderData, SAMLProviderDataAdmin)
class LTIProviderConfigAdmin(KeyedConfigurationModelAdmin):
""" Django Admin class for LTIProviderConfig """
exclude = (
'icon_class',
'icon_image',
'secondary',
)
def get_list_display(self, request):
""" Don't show every single field in the admin change list """
return (
'name',
'enabled',
'lti_consumer_key',
'lti_max_timestamp_age',
'change_date',
'changed_by',
'edit_link',
)
admin.site.register(LTIProviderConfig, LTIProviderConfigAdmin)
class ApiPermissionsAdminForm(forms.ModelForm):
""" Django admin form for ApiPermissions model """
class Meta(object):
model = ProviderApiPermissions
fields = ['client', 'provider_id']
provider_id = forms.ChoiceField(choices=[], required=True)
def __init__(self, *args, **kwargs):
super(ApiPermissionsAdminForm, self).__init__(*args, **kwargs)
self.fields['provider_id'].choices = (
(provider.provider_id, "{} ({})".format(provider.name, provider.provider_id))
for provider in Registry.enabled()
)
class ApiPermissionsAdmin(admin.ModelAdmin):
""" Django Admin class for ApiPermissions """
list_display = ('client', 'provider_id')
form = ApiPermissionsAdminForm
admin.site.register(ProviderApiPermissions, ApiPermissionsAdmin)
| agpl-3.0 |
milankl/swm | calc/misc/c_diss_plot.py | 1 | 3966 | from __future__ import print_function
path = '/home/mkloewer/python/swm/'
import os; os.chdir(path) # change working directory
import numpy as np
from scipy import sparse
import time as tictoc
from netCDF4 import Dataset
import glob
import matplotlib.pyplot as plt
# OPTIONS
runfolder = [2,3]
## read data
for r,i in zip(runfolder,range(len(runfolder))):
runpath = path+'data/run%04i' % r
if i == 0:
u = np.load(runpath+'/u_sub.npy')
v = np.load(runpath+'/v_sub.npy')
h = np.load(runpath+'/h_sub.npy')
time = np.load(runpath+'/t_sub.npy')
print('run %i read.' % r)
else:
u = np.concatenate((u,np.load(runpath+'/u_sub.npy')))
v = np.concatenate((v,np.load(runpath+'/v_sub.npy')))
h = np.concatenate((h,np.load(runpath+'/h_sub.npy')))
time = np.hstack((time,np.load(runpath+'/t_sub.npy')))
print('run %i read.' % r)
t = time / 3600. / 24. # in days
## read param
global param
param = np.load(runpath+'/param.npy').all()
param['dat_type'] = np.float32
# import functions
exec(open(path+'swm_param.py').read())
exec(open(path+'swm_operators.py').read())
exec(open(path+'swm_output.py').read())
param['output'] = 0
set_grad_mat()
set_interp_mat()
set_lapl_mat()
set_coriolis()
tlen = len(time)
## create output folder
try:
os.mkdir(runpath+'/analysis')
except OSError: # ignore if the folder already exists
pass
## reshape u,v
u = u.reshape((tlen,param['Nu'])).T
v = v.reshape((tlen,param['Nv'])).T
h = h.reshape((tlen,param['NT'])).T
print('Reshape done.')
##
dudx = Gux.dot(u)
dudy = Guy.dot(u)
dvdx = Gvx.dot(v)
dvdy = Gvy.dot(v)
n = 2
D = np.sqrt((dudx - dvdy)**2 + IqT.dot((dudy + dvdx)**2))
Ro = (D.T/f_T)
Rom = Ro.mean(axis=0)
c = (1/(1+Ro)**n).mean(axis=0)
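# Descriptive note (added): D above is the magnitude of the horizontal
# deformation rate, Ro = D/f is the associated Rossby number, and
# c = mean(1/(1+Ro)**n) with n = 2 is a dissipation scaling factor that
# approaches 1 in the nearly geostrophic limit (Ro -> 0) and shrinks where
# Ro is large.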
# REYNOLDS, ROSSBY, EKMAN NUMBER MEAN
u_T = IuT.dot(u)
v_T = IvT.dot(v)
print('u,v interpolation done.')
#advective term
adv_u = u_T*Gux.dot(u) + v_T*IqT.dot(Guy.dot(u))
adv_v = u_T*IqT.dot(Gvx.dot(v)) + v_T*Gvy.dot(v)
del u_T,v_T
adv_term = np.sqrt(adv_u**2 + adv_v**2)
del adv_u, adv_v
print('Advection term done.')
#coriolis term
cor_term = (f_T*np.sqrt(IuT.dot(u**2) + IvT.dot(v**2)).T).T
print('Coriolis term done.')
Ro2 = adv_term / cor_term
c2 = (1/(1+Ro2)**n).mean(axis=1)
Ro2m = Ro2.mean(axis=1)
##
levs1 = np.linspace(0,.2,21)
levs2 = np.linspace(0.5,1,21)
fig,axs = plt.subplots(2,3,sharex=True,sharey=True,figsize=(9,5.5))
plt.tight_layout(rect=[-.02,-.03,1.12,.97],w_pad=0.1)
axs[0,0].contourf(param['x_T'],param['y_T'],h2mat(Ro2m),levs1)
axs[0,1].contourf(param['x_T'],param['y_T'],h2mat(Rom),levs1,extend='max')
m1 = axs[0,2].contourf(param['x_T'],param['y_T'],h2mat(Ro[-1,:]),levs1,extend='max')
plt.colorbar(m1,ax=(axs[0,0],axs[0,1],axs[0,2]),ticks=np.arange(0,.22,.04))
axs[1,0].contourf(param['x_T'],param['y_T'],h2mat(c2),levs2)
m21 = axs[1,0].contour(param['x_T'],param['y_T'],h2mat(c2),[0.8],linewidths=0.7)
axs[1,1].contourf(param['x_T'],param['y_T'],h2mat(c),levs2)
m2 = axs[1,2].contourf(param['x_T'],param['y_T'],h2mat(1/(1+Ro[-1,:])**n),levs2,extend='min')
axs[1,2].contour(param['x_T'],param['y_T'],h2mat(1/(1+Ro[-1,:])**n),[0.8],linewidths=0.7)
m22 = axs[1,1].contour(param['x_T'],param['y_T'],h2mat(c),[0.8],linewidths=0.7)
plt.colorbar(m2,ax=(axs[1,0],axs[1,1],axs[1,2]),ticks=np.arange(0.5,1.05,.05))
plt.clabel(m22, inline=1, fontsize=5,fmt='%.1f')
plt.clabel(m21, inline=1, fontsize=5,fmt='%.1f')
axs[0,0].set_xticks([])
axs[0,0].set_yticks([])
axs[0,0].set_title(r'$\overline{R_o} = \overline{\frac{|(\mathbf{u} \cdot \nabla)\mathbf{u}|}{|f\mathbf{u}|}}$')
axs[0,1].set_title(r'$\overline{R_o^*} = \overline{\frac{|D|}{f}}$')
axs[0,2].set_title(r'snapshot: $R_o^*$')
axs[1,0].set_title(r'$(1+\overline{R_o})^{-2}$')
axs[1,1].set_title(r'$(1+\overline{R_o}^*)^{-2}$')
axs[1,2].set_title(r'$(1+R_o^*)^{-2}$')
axs[0,0].set_ylabel('y')
axs[1,0].set_ylabel('y')
axs[1,0].set_xlabel('x')
axs[1,1].set_xlabel('x')
plt.savefig(path+'compare/Ro_scaling.png',dpi=150)
plt.close(fig)
#plt.show()
| gpl-3.0 |
ArthurGarnier/SickRage | lib/sqlalchemy/dialects/mysql/types.py | 8 | 25137 | # mysql/types.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import datetime
from ... import exc, util
from ... import types as sqltypes
class _NumericType(object):
"""Base for MySQL numeric types.
This is the base both for NUMERIC as well as INTEGER, hence
it's a mixin.
"""
def __init__(self, unsigned=False, zerofill=False, **kw):
self.unsigned = unsigned
self.zerofill = zerofill
super(_NumericType, self).__init__(**kw)
def __repr__(self):
return util.generic_repr(self,
to_inspect=[_NumericType, sqltypes.Numeric])
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
if isinstance(self, (REAL, DOUBLE)) and \
(
(precision is None and scale is not None) or
(precision is not None and scale is None)
):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether.")
super(_FloatType, self).__init__(
precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
def __repr__(self):
return util.generic_repr(self, to_inspect=[_FloatType,
_NumericType,
sqltypes.Float])
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
self.display_width = display_width
super(_IntegerType, self).__init__(**kw)
def __repr__(self):
return util.generic_repr(self, to_inspect=[_IntegerType,
_NumericType,
sqltypes.Integer])
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
def __init__(self, charset=None, collation=None,
ascii=False, binary=False, unicode=False,
national=False, **kw):
self.charset = charset
# allow collate= or collation=
kw.setdefault('collation', kw.pop('collate', collation))
self.ascii = ascii
self.unicode = unicode
self.binary = binary
self.national = national
super(_StringType, self).__init__(**kw)
def __repr__(self):
return util.generic_repr(self,
to_inspect=[_StringType, sqltypes.String])
class _MatchType(sqltypes.Float, sqltypes.MatchType):
def __init__(self, **kw):
# TODO: float arguments?
sqltypes.Float.__init__(self)
sqltypes.MatchType.__init__(self)
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
__visit_name__ = 'NUMERIC'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a NUMERIC.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(NUMERIC, self).__init__(precision=precision,
scale=scale, asdecimal=asdecimal, **kw)
class DECIMAL(_NumericType, sqltypes.DECIMAL):
"""MySQL DECIMAL type."""
__visit_name__ = 'DECIMAL'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DECIMAL.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(DECIMAL, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
class DOUBLE(_FloatType):
"""MySQL DOUBLE type."""
__visit_name__ = 'DOUBLE'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DOUBLE.
.. note::
The :class:`.DOUBLE` type by default converts from float
to Decimal, using a truncation that defaults to 10 digits.
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
to change this scale, or ``asdecimal=False`` to return values
directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(DOUBLE, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
class REAL(_FloatType, sqltypes.REAL):
"""MySQL REAL type."""
__visit_name__ = 'REAL'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a REAL.
.. note::
The :class:`.REAL` type by default converts from float
to Decimal, using a truncation that defaults to 10 digits.
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
to change this scale, or ``asdecimal=False`` to return values
directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(REAL, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
class FLOAT(_FloatType, sqltypes.FLOAT):
"""MySQL FLOAT type."""
__visit_name__ = 'FLOAT'
def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
"""Construct a FLOAT.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(FLOAT, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
def bind_processor(self, dialect):
return None
class INTEGER(_IntegerType, sqltypes.INTEGER):
"""MySQL INTEGER type."""
__visit_name__ = 'INTEGER'
def __init__(self, display_width=None, **kw):
"""Construct an INTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(INTEGER, self).__init__(display_width=display_width, **kw)
class BIGINT(_IntegerType, sqltypes.BIGINT):
"""MySQL BIGINTEGER type."""
__visit_name__ = 'BIGINT'
def __init__(self, display_width=None, **kw):
"""Construct a BIGINTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(BIGINT, self).__init__(display_width=display_width, **kw)
class MEDIUMINT(_IntegerType):
"""MySQL MEDIUMINTEGER type."""
__visit_name__ = 'MEDIUMINT'
def __init__(self, display_width=None, **kw):
"""Construct a MEDIUMINTEGER
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
class TINYINT(_IntegerType):
"""MySQL TINYINT type."""
__visit_name__ = 'TINYINT'
def __init__(self, display_width=None, **kw):
"""Construct a TINYINT.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(TINYINT, self).__init__(display_width=display_width, **kw)
class SMALLINT(_IntegerType, sqltypes.SMALLINT):
"""MySQL SMALLINTEGER type."""
__visit_name__ = 'SMALLINT'
def __init__(self, display_width=None, **kw):
"""Construct a SMALLINTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(SMALLINT, self).__init__(display_width=display_width, **kw)
class BIT(sqltypes.TypeEngine):
"""MySQL BIT type.
This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a
MSTinyInteger() type.
"""
__visit_name__ = 'BIT'
def __init__(self, length=None):
"""Construct a BIT.
:param length: Optional, number of bits.
"""
self.length = length
def result_processor(self, dialect, coltype):
"""Convert a MySQL's 64 bit, variable length binary string to a long.
TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector
already do this, so this logic should be moved to those dialects.
"""
def process(value):
if value is not None:
v = 0
for i in value:
if not isinstance(i, int):
i = ord(i) # convert byte to int on Python 2
v = v << 8 | i
return v
return value
return process
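# Worked example (descriptive comment, added): for a BIT value delivered by
# the driver as b'\x01\x02', the loop in process() computes
# ((0 << 8) | 0x01) << 8 | 0x02 == 258.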
class TIME(sqltypes.TIME):
"""MySQL TIME type. """
__visit_name__ = 'TIME'
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL TIME type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the TIME type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
.. versionadded:: 0.8 The MySQL-specific TIME
type as well as fractional seconds support.
"""
super(TIME, self).__init__(timezone=timezone)
self.fsp = fsp
def result_processor(self, dialect, coltype):
time = datetime.time
def process(value):
# convert from a timedelta value
if value is not None:
microseconds = value.microseconds
seconds = value.seconds
minutes = seconds // 60
return time(minutes // 60,
minutes % 60,
seconds - minutes * 60,
microsecond=microseconds)
else:
return None
return process
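# Worked example (descriptive comment, added): a driver-supplied
# datetime.timedelta(seconds=3723, microseconds=500) is converted by
# process() into datetime.time(1, 2, 3, 500).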
class TIMESTAMP(sqltypes.TIMESTAMP):
"""MySQL TIMESTAMP type.
"""
__visit_name__ = 'TIMESTAMP'
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL TIMESTAMP type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6.4 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the TIMESTAMP type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
.. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP`
with fractional seconds support.
"""
super(TIMESTAMP, self).__init__(timezone=timezone)
self.fsp = fsp
class DATETIME(sqltypes.DATETIME):
"""MySQL DATETIME type.
"""
__visit_name__ = 'DATETIME'
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL DATETIME type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6.4 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the DATETIME type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
.. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME`
with fractional seconds support.
"""
super(DATETIME, self).__init__(timezone=timezone)
self.fsp = fsp
class YEAR(sqltypes.TypeEngine):
"""MySQL YEAR type, for single byte storage of years 1901-2155."""
__visit_name__ = 'YEAR'
def __init__(self, display_width=None):
self.display_width = display_width
class TEXT(_StringType, sqltypes.TEXT):
"""MySQL TEXT type, for text up to 2^16 characters."""
__visit_name__ = 'TEXT'
def __init__(self, length=None, **kw):
"""Construct a TEXT.
:param length: Optional, if provided the server may optimize storage
by substituting the smallest TEXT type sufficient to store
``length`` characters.
:param charset: Optional, a column-level character set for this string
value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(TEXT, self).__init__(length=length, **kw)
class TINYTEXT(_StringType):
"""MySQL TINYTEXT type, for text up to 2^8 characters."""
__visit_name__ = 'TINYTEXT'
def __init__(self, **kwargs):
"""Construct a TINYTEXT.
:param charset: Optional, a column-level character set for this string
value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(TINYTEXT, self).__init__(**kwargs)
class MEDIUMTEXT(_StringType):
"""MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
__visit_name__ = 'MEDIUMTEXT'
def __init__(self, **kwargs):
"""Construct a MEDIUMTEXT.
:param charset: Optional, a column-level character set for this string
value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(MEDIUMTEXT, self).__init__(**kwargs)
class LONGTEXT(_StringType):
"""MySQL LONGTEXT type, for text up to 2^32 characters."""
__visit_name__ = 'LONGTEXT'
def __init__(self, **kwargs):
"""Construct a LONGTEXT.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(LONGTEXT, self).__init__(**kwargs)
class VARCHAR(_StringType, sqltypes.VARCHAR):
"""MySQL VARCHAR type, for variable-length character data."""
__visit_name__ = 'VARCHAR'
def __init__(self, length=None, **kwargs):
"""Construct a VARCHAR.
:param charset: Optional, a column-level character set for this string
value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(VARCHAR, self).__init__(length=length, **kwargs)
class CHAR(_StringType, sqltypes.CHAR):
"""MySQL CHAR type, for fixed-length character data."""
__visit_name__ = 'CHAR'
def __init__(self, length=None, **kwargs):
"""Construct a CHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
super(CHAR, self).__init__(length=length, **kwargs)
@classmethod
def _adapt_string_for_cast(self, type_):
# copy the given string type into a CHAR
# for the purposes of rendering a CAST expression
type_ = sqltypes.to_instance(type_)
if isinstance(type_, sqltypes.CHAR):
return type_
elif isinstance(type_, _StringType):
return CHAR(
length=type_.length,
charset=type_.charset,
collation=type_.collation,
ascii=type_.ascii,
binary=type_.binary,
unicode=type_.unicode,
national=False # not supported in CAST
)
else:
return CHAR(length=type_.length)
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
For variable-length character data in the server's configured national
character set.
"""
__visit_name__ = 'NVARCHAR'
def __init__(self, length=None, **kwargs):
"""Construct an NVARCHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
kwargs['national'] = True
super(NVARCHAR, self).__init__(length=length, **kwargs)
class NCHAR(_StringType, sqltypes.NCHAR):
"""MySQL NCHAR type.
For fixed-length character data in the server's configured national
character set.
"""
__visit_name__ = 'NCHAR'
def __init__(self, length=None, **kwargs):
"""Construct an NCHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
kwargs['national'] = True
super(NCHAR, self).__init__(length=length, **kwargs)
class TINYBLOB(sqltypes._Binary):
"""MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
__visit_name__ = 'TINYBLOB'
class MEDIUMBLOB(sqltypes._Binary):
"""MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
__visit_name__ = 'MEDIUMBLOB'
class LONGBLOB(sqltypes._Binary):
"""MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
__visit_name__ = 'LONGBLOB'
| gpl-3.0 |
geoffreyporto/radartec | node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py | 2767 | 2174 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies a fix to CR LF TAB handling in xml.dom.
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""
import xml.dom.minidom
def _Replacement_write_data(writer, data, is_attrib=False):
"""Writes datachars to writer."""
data = data.replace("&", "&").replace("<", "<")
data = data.replace("\"", """).replace(">", ">")
if is_attrib:
data = data.replace(
"\r", "
").replace(
"\n", "
").replace(
"\t", "	")
writer.write(data)
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
writer.write(indent+"<" + self.tagName)
attrs = self._get_attributes()
a_names = attrs.keys()
a_names.sort()
for a_name in a_names:
writer.write(" %s=\"" % a_name)
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
writer.write("\"")
if self.childNodes:
writer.write(">%s" % newl)
for node in self.childNodes:
node.writexml(writer, indent + addindent, addindent, newl)
writer.write("%s</%s>%s" % (indent, self.tagName, newl))
else:
writer.write("/>%s" % newl)
class XmlFix(object):
"""Object to manage temporary patching of xml.dom.minidom."""
def __init__(self):
# Preserve current xml.dom.minidom functions.
self.write_data = xml.dom.minidom._write_data
self.writexml = xml.dom.minidom.Element.writexml
# Inject replacement versions of a function and a method.
xml.dom.minidom._write_data = _Replacement_write_data
xml.dom.minidom.Element.writexml = _Replacement_writexml
def Cleanup(self):
if self.write_data:
xml.dom.minidom._write_data = self.write_data
xml.dom.minidom.Element.writexml = self.writexml
self.write_data = None
def __del__(self):
self.Cleanup()
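# Usage sketch (illustrative, not part of the original module): patch
# minidom for the duration of a writexml()/toxml() call, then restore it.
#
#   import xml.dom.minidom
#
#   fix = XmlFix()
#   try:
#       doc = xml.dom.minidom.parseString("<a b='1\t2'/>")
#       print(doc.documentElement.toxml())  # tab in the attribute -> &#x9;
#   finally:
#       fix.Cleanup()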
| agpl-3.0 |
apache/thrift | lib/py/src/transport/TSocket.py | 7 | 9123 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import errno
import logging
import os
import socket
import sys
from .TTransport import TTransportBase, TTransportException, TServerTransportBase
logger = logging.getLogger(__name__)
class TSocketBase(TTransportBase):
def _resolveAddr(self):
if self._unix_socket is not None:
return [(socket.AF_UNIX, socket.SOCK_STREAM, None, None,
self._unix_socket)]
else:
return socket.getaddrinfo(self.host,
self.port,
self._socket_family,
socket.SOCK_STREAM,
0,
socket.AI_PASSIVE)
def close(self):
if self.handle:
self.handle.close()
self.handle = None
class TSocket(TSocketBase):
"""Socket implementation of TTransport base."""
def __init__(self, host='localhost', port=9090, unix_socket=None,
socket_family=socket.AF_UNSPEC,
socket_keepalive=False):
"""Initialize a TSocket
@param host(str) The host to connect to.
@param port(int) The (TCP) port to connect to.
@param unix_socket(str) The filename of a unix socket to connect to.
(host and port will be ignored.)
@param socket_family(int) The socket family to use with this socket.
@param socket_keepalive(bool) enable TCP keepalive, default off.
"""
self.host = host
self.port = port
self.handle = None
self._unix_socket = unix_socket
self._timeout = None
self._socket_family = socket_family
self._socket_keepalive = socket_keepalive
def setHandle(self, h):
self.handle = h
def isOpen(self):
if self.handle is None:
return False
# this lets us cheaply see if the other end of the socket is still
# connected. if disconnected, we'll get EOF back (expressed as zero
# bytes of data) otherwise we'll get one byte or an error indicating
# we'd have to block for data.
#
# note that we're not doing this with socket.MSG_DONTWAIT because 1)
# it's linux-specific and 2) gevent-patched sockets hide EAGAIN from us
# when timeout is non-zero.
original_timeout = self.handle.gettimeout()
try:
self.handle.settimeout(0)
try:
peeked_bytes = self.handle.recv(1, socket.MSG_PEEK)
except (socket.error, OSError) as exc: # on modern python this is just BlockingIOError
if exc.errno in (errno.EWOULDBLOCK, errno.EAGAIN):
return True
return False
finally:
self.handle.settimeout(original_timeout)
# the length will be zero if we got EOF (indicating connection closed)
return len(peeked_bytes) == 1
def setTimeout(self, ms):
if ms is None:
self._timeout = None
else:
self._timeout = ms / 1000.0
if self.handle is not None:
self.handle.settimeout(self._timeout)
def _do_open(self, family, socktype):
return socket.socket(family, socktype)
@property
def _address(self):
return self._unix_socket if self._unix_socket else '%s:%d' % (self.host, self.port)
def open(self):
if self.handle:
raise TTransportException(type=TTransportException.ALREADY_OPEN, message="already open")
try:
addrs = self._resolveAddr()
except socket.gaierror as gai:
msg = 'failed to resolve sockaddr for ' + str(self._address)
logger.exception(msg)
raise TTransportException(type=TTransportException.NOT_OPEN, message=msg, inner=gai)
for family, socktype, _, _, sockaddr in addrs:
handle = self._do_open(family, socktype)
# TCP_KEEPALIVE
if self._socket_keepalive:
handle.setsockopt(socket.IPPROTO_TCP, socket.SO_KEEPALIVE, 1)
handle.settimeout(self._timeout)
try:
handle.connect(sockaddr)
self.handle = handle
return
except socket.error:
handle.close()
logger.info('Could not connect to %s', sockaddr, exc_info=True)
msg = 'Could not connect to any of %s' % list(map(lambda a: a[4],
addrs))
logger.error(msg)
raise TTransportException(type=TTransportException.NOT_OPEN, message=msg)
def read(self, sz):
try:
buff = self.handle.recv(sz)
except socket.error as e:
if (e.args[0] == errno.ECONNRESET and
(sys.platform == 'darwin' or sys.platform.startswith('freebsd'))):
# freebsd and Mach don't follow POSIX semantic of recv
# and fail with ECONNRESET if peer performed shutdown.
# See corresponding comment and code in TSocket::read()
# in lib/cpp/src/transport/TSocket.cpp.
self.close()
# Trigger the check to raise the END_OF_FILE exception below.
buff = ''
elif e.args[0] == errno.ETIMEDOUT:
raise TTransportException(type=TTransportException.TIMED_OUT, message="read timeout", inner=e)
else:
raise TTransportException(message="unexpected exception", inner=e)
if len(buff) == 0:
raise TTransportException(type=TTransportException.END_OF_FILE,
message='TSocket read 0 bytes')
return buff
def write(self, buff):
if not self.handle:
raise TTransportException(type=TTransportException.NOT_OPEN,
message='Transport not open')
sent = 0
have = len(buff)
while sent < have:
try:
plus = self.handle.send(buff)
if plus == 0:
raise TTransportException(type=TTransportException.END_OF_FILE,
message='TSocket sent 0 bytes')
sent += plus
buff = buff[plus:]
except socket.error as e:
raise TTransportException(message="unexpected exception", inner=e)
def flush(self):
pass
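# Usage sketch (illustrative, not part of the original module): a raw
# TSocket round-trip. Real Thrift clients normally wrap this in a buffered
# or framed transport plus a protocol; the port and payload here are
# placeholders.
#
#   sock = TSocket(host='localhost', port=9090)
#   sock.setTimeout(5000)  # milliseconds
#   sock.open()
#   sock.write(b'ping')
#   sock.flush()
#   reply = sock.read(4096)
#   sock.close()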
class TServerSocket(TSocketBase, TServerTransportBase):
"""Socket implementation of TServerTransport base."""
def __init__(self, host=None, port=9090, unix_socket=None, socket_family=socket.AF_UNSPEC):
self.host = host
self.port = port
self._unix_socket = unix_socket
self._socket_family = socket_family
self.handle = None
self._backlog = 128
def setBacklog(self, backlog=None):
if not self.handle:
self._backlog = backlog
else:
# We can't update the backlog when the socket is already listening,
# since the handle has been created.
logger.warning('You have to set backlog before listen.')
def listen(self):
res0 = self._resolveAddr()
socket_family = self._socket_family == socket.AF_UNSPEC and socket.AF_INET6 or self._socket_family
for res in res0:
if res[0] is socket_family or res is res0[-1]:
break
# We need remove the old unix socket if the file exists and
# nobody is listening on it.
if self._unix_socket:
tmp = socket.socket(res[0], res[1])
try:
tmp.connect(res[4])
except socket.error as err:
eno, message = err.args
if eno == errno.ECONNREFUSED:
os.unlink(res[4])
self.handle = socket.socket(res[0], res[1])
self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(self.handle, 'settimeout'):
self.handle.settimeout(None)
self.handle.bind(res[4])
self.handle.listen(self._backlog)
def accept(self):
client, addr = self.handle.accept()
result = TSocket()
result.setHandle(client)
return result
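# Usage sketch (illustrative): a minimal blocking accept loop; the echo
# behaviour is only an example.
#
#   server = TServerSocket(port=9090)
#   server.listen()
#   while True:
#       client = server.accept()   # returns a connected TSocket
#       client.write(client.read(4096))
#       client.close()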
| apache-2.0 |
h-matsuo/memTracker | lib/track.py | 1 | 8958 | #!/usr/bin/python
# coding: UTF-8
"""
Implementation of command: track
"""
__author__ = "Hiroyuki Matsuo <h-matsuo@ist.osaka-u.ac.jp>"
# ===== Configuration ==========================================================
# ----- Disk I/O tracking ------------------------------------------------------
#DEVICE_NAME = "mmcblk0"
# Read from: "/sys/block/<DEVICE_NAME>/queue/physical_block_size"
SECTOR_SIZE = 512 # [Bytes]
# Source: https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
DISKSTATS_ROW = 24
DISKSTATS_COL_READ = 5
DISKSTATS_COL_WRITE = 9
# ----- Memory usage tracking --------------------------------------------------
# NOTE: For the purposes of this experiment, we define
#       "used memory" = "total memory" - "unallocated (free) memory".
#       Buffer and cache areas are not taken into account.
# Reference: http://nopipi.hatenablog.com/entry/2015/09/13/181026
MEMINFO_ROW_TOTAL = 0
MEMINFO_ROW_FREE = 1
# ----- Network communications tracking ----------------------------------------
NET_DEV_ROW_WLAN0 = 2
NET_DEV_ROW_LO = 3
NET_DEV_ROW_ETH0 = 4
NET_DEV_COL_RECV = 1
NET_DEV_COL_SEND = 9
# ===== END Configuration ======================================================
from datetime import datetime
import json
import os.path
import re
import signal
import sys
import time
from lib.utils import Utils
class TrackController:
"""
Control tracking memory usage
"""
def __init__(self):
"""
Constructor
"""
# Initialize output data
self.__tracked_data = []
# Stop flag for tracking
self.__stop_flag = False
# Default values
self.__interval = 1.0
self.__out_file = None
# self.__pid = None
# Default tracking mode
self.__mode_io = True
self.__mode_mem = True
self.__mode_net = True
# Compile regex pattern
self.__regex_pattern = re.compile(r"\s*") # Raw string to avoid the backslash plague
def setTrackingInterval(self, interval):
"""
Set tracking interval
@param interval Tracking interval
"""
self.__interval = interval
def setOutputFilename(self, filename):
"""
Set filename to write output
@param filename Filename to write output
"""
self.__out_file = filename
# def setPid(self, pid):
# """
# Set process ID to track
# @param process ID
# """
# if not os.path.exists("/proc/%d" % pid):
# sys.stderr.write("ERROR: PID %d: No such process.\n" % pid)
# sys.exit(1)
# self.__pid = pid
def setTrackingMode(self, io = False, mem = False, net = False):
"""
Set tracking mode
@param io True if track disk I/O
@param mem True if track memory usage
@param net True if track network communications
"""
self.__mode_io = io
self.__mode_mem = mem
self.__mode_net = net
def start(self):
"""
Start tracking
"""
# Initialize variables for analyzing "/proc/diskstats"
if self.__mode_io:
total_data = self.__getIOTotalData()
self.__io_read_bytes_begin = total_data["total_read_bytes"]
self.__io_write_bytes_begin = total_data["total_write_bytes"]
# Initialize variables for analyzing "/proc/meminfo"
if self.__mode_mem:
total_data = self.__getMemTotalData()
self.__mem_used_kilobytes_begin = total_data["used_kilobytes"]
# Initialize variables for analyzing "/proc/net/dev"
if self.__mode_net:
total_data = self.__getNetTotalData()
self.__net_recv_total_bytes_begin = total_data["total_recv_bytes"]
self.__net_send_total_bytes_begin = total_data["total_send_bytes"]
# Start tracking
self.__track()
def stop(self):
"""
Stop tracking
"""
self.__stop_flag = True
if self.__out_file != None:
fout = open(self.__out_file, "w")
json.dump(self.__tracked_data, fout, indent = 2, separators = (",", ": "))
fout.close()
def __track(self):
"""
Track procfs repeatedly
"""
while not self.__stop_flag:
begin = datetime.today()
tracked_data = self.__getTrackedData()
if self.__out_file != None:
self.__tracked_data.append(tracked_data)
else:
print json.dumps(tracked_data, indent = 2, separators = (",", ": "))
end = datetime.today()
diff = self.__interval - (end - begin).total_seconds()
if diff < 0: diff = 0
time.sleep(diff)
def __getTrackedData(self):
"""
Get data from "/proc"
@return Tracked data
"""
data = {}
now = datetime.today()
if self.__mode_io: data_io = self.__getIOData()
if self.__mode_mem: data_mem = self.__getMemData()
if self.__mode_net: data_net = self.__getNetData()
data["date"] = Utils.formatDatetime(now)
if self.__mode_io: data["io"] = data_io
if self.__mode_mem: data["mem"] = data_mem
if self.__mode_net: data["net"] = data_net
return data
def __getIOData(self):
"""
Get disk I/O data
@return Disk I/O data
"""
total_data = self.__getIOTotalData()
return {
"read_bytes" : total_data["total_read_bytes"] - self.__io_read_bytes_begin,
"write_bytes": total_data["total_write_bytes"] - self.__io_write_bytes_begin
}
def __getMemData(self):
"""
Get memory usage data
@return Memory usage data
"""
total_data = self.__getMemTotalData()
return {
"used_kilobytes": total_data["used_kilobytes"] - self.__mem_used_kilobytes_begin
}
def __getNetData(self):
"""
Get network communications data
@return Network communications data
"""
total_data = self.__getNetTotalData()
return {
"recv_bytes": total_data["total_recv_bytes"] - self.__net_recv_total_bytes_begin,
"send_bytes": total_data["total_send_bytes"] - self.__net_send_total_bytes_begin
}
def __getIOTotalData(self):
"""
Get data from "/proc/diskstats"
@return Analyzed data
"""
fin = open("/proc/diskstats", "r")
diskstats = fin.readlines()
fin.close()
diskstats = self.__regex_pattern.split(diskstats[DISKSTATS_ROW].strip())
return {
"total_read_bytes" : int(diskstats[DISKSTATS_COL_READ]) * SECTOR_SIZE,
"total_write_bytes": int(diskstats[DISKSTATS_COL_WRITE]) * SECTOR_SIZE
}
def __getMemTotalData(self):
"""
Get data from "/proc/meminfo"
@return Analyzed data
"""
fin = open("/proc/meminfo", "r")
meminfo = fin.readlines()
fin.close()
return {
"used_kilobytes": int(meminfo[MEMINFO_ROW_TOTAL][9:-3].strip()) - int(meminfo[MEMINFO_ROW_FREE][8:-3].strip())
}
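# Parsing sketch (illustrative): the slices above rely on the fixed layout of
# the first two lines of /proc/meminfo, e.g.
#   "MemTotal:        1016800 kB"
#   "MemFree:          243248 kB"
# [9:-3] drops the "MemTotal:" prefix and the trailing "kB\n"; [8:-3] does
# the same for the shorter "MemFree:" prefix.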
def __getNetTotalData(self):
"""
Get data from "/proc/net/dev"
@return Analyzed data
"""
fin = open("/proc/net/dev", "r")
net_dev = fin.readlines()
fin.close()
recv_bytes = 0
send_bytes = 0
for row in [NET_DEV_ROW_WLAN0, NET_DEV_ROW_LO, NET_DEV_ROW_ETH0]:
line = self.__regex_pattern.split(net_dev[row].strip())
recv_bytes += int(line[NET_DEV_COL_RECV])
send_bytes += int(line[NET_DEV_COL_SEND])
return {
"total_recv_bytes": recv_bytes,
"total_send_bytes": send_bytes
}
def SIGINTHandler(signum, frame):
"""
Signal SIGINT handler
"""
global controller
controller.stop()
def exec_track(flags):
"""
Execute command: track
@param flags Result of parsing argv
"""
# Instantiate controller
global controller
controller = TrackController()
# Set tracking interval
controller.setTrackingInterval(flags.interval)
# Set output filename
if flags.out_file != None:
controller.setOutputFilename(flags.out_file)
# Set process id to track
# if flags.pid != None:
# controller.setPid(flags.pid)
# Set tracking mode
controller.setTrackingMode(io = flags.mode_io,
mem = flags.mode_mem,
net = flags.mode_net)
# Print message
print "Start tracking..."
print 'Press "Ctrl + c" to quit.'
# Handle SIGINT
signal.signal(signal.SIGINT, SIGINTHandler)
# Start tracking
controller.start()
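# Usage sketch (illustrative): driving TrackController directly instead of
# going through exec_track(); the output filename is hypothetical.
#
#   controller = TrackController()
#   controller.setTrackingInterval(0.5)
#   controller.setOutputFilename("usage.json")
#   controller.setTrackingMode(io=True, mem=True, net=False)
#   controller.start()   # blocks; call controller.stop() (e.g. on SIGINT)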
| mit |
broferek/ansible | test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py | 17 | 3851 | # (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from distutils.version import LooseVersion
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
from ansible.release import __version__ as ansible_version_raw
MSGS = {
'E9501': ("Deprecated version (%r) found in call to Display.deprecated "
"or AnsibleModule.deprecate",
"ansible-deprecated-version",
"Used when a call to Display.deprecated specifies a version "
"less than or equal to the current version of Ansible",
{'minversion': (2, 6)}),
'E9502': ("Display.deprecated call without a version",
"ansible-deprecated-no-version",
"Used when a call to Display.deprecated does not specify a "
"version",
{'minversion': (2, 6)}),
'E9503': ("Invalid deprecated version (%r) found in call to "
"Display.deprecated or AnsibleModule.deprecate",
"ansible-invalid-deprecated-version",
"Used when a call to Display.deprecated specifies an invalid "
"version number",
{'minversion': (2, 6)}),
}
ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3]))
def _get_expr_name(node):
"""Funciton to get either ``attrname`` or ``name`` from ``node.func.expr``
Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated``
"""
try:
return node.func.expr.attrname
except AttributeError:
# If this fails too, we'll let it raise, the caller should catch it
return node.func.expr.name
class AnsibleDeprecatedChecker(BaseChecker):
"""Checks for Display.deprecated calls to ensure that the ``version``
has not passed or met the time for removal
"""
__implements__ = (IAstroidChecker,)
name = 'deprecated'
msgs = MSGS
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
version = None
try:
if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or
node.func.attrname == 'deprecate' and 'module' in _get_expr_name(node)):
if node.keywords:
for keyword in node.keywords:
if len(node.keywords) == 1 and keyword.arg is None:
# This is likely a **kwargs splat
return
if keyword.arg == 'version':
if isinstance(keyword.value.value, astroid.Name):
# This is likely a variable
return
version = keyword.value.value
if not version:
try:
version = node.args[1].value
except IndexError:
self.add_message('ansible-deprecated-no-version', node=node)
return
try:
if ANSIBLE_VERSION >= LooseVersion(str(version)):
self.add_message('ansible-deprecated-version', node=node, args=(version,))
except ValueError:
self.add_message('ansible-invalid-deprecated-version', node=node, args=(version,))
except AttributeError:
# Not the type of node we are interested in
pass
def register(linter):
"""required method to auto register this checker """
linter.register_checker(AnsibleDeprecatedChecker(linter))
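# Usage sketch (illustrative, assuming this plugin directory is on
# PYTHONPATH): pylint picks the checker up through its plugin mechanism, e.g.
#
#   pylint --load-plugins=deprecated --disable=all \
#          --enable=ansible-deprecated-version,ansible-deprecated-no-version \
#          lib/ansible/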
| gpl-3.0 |
salamer/django | tests/extra_regress/models.py | 281 | 1401 | from __future__ import unicode_literals
import copy
import datetime
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class RevisionableModel(models.Model):
base = models.ForeignKey('self', models.SET_NULL, null=True)
title = models.CharField(blank=True, max_length=255)
when = models.DateTimeField(default=datetime.datetime.now)
def __str__(self):
return "%s (%s, %s)" % (self.title, self.id, self.base.id)
def save(self, *args, **kwargs):
super(RevisionableModel, self).save(*args, **kwargs)
if not self.base:
self.base = self
kwargs.pop('force_insert', None)
kwargs.pop('force_update', None)
super(RevisionableModel, self).save(*args, **kwargs)
def new_revision(self):
new_revision = copy.copy(self)
new_revision.pk = None
return new_revision
class Order(models.Model):
created_by = models.ForeignKey(User, models.CASCADE)
text = models.TextField()
@python_2_unicode_compatible
class TestObject(models.Model):
first = models.CharField(max_length=20)
second = models.CharField(max_length=20)
third = models.CharField(max_length=20)
def __str__(self):
return 'TestObject: %s,%s,%s' % (self.first, self.second, self.third)
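# Usage sketch (illustrative): creating a revision chain with
# RevisionableModel above.
#
#   original = RevisionableModel.objects.create(title="first draft")
#   revision = original.new_revision()   # copy with pk reset to None
#   revision.title = "second draft"
#   revision.save()                      # revision.base still points at original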
| bsd-3-clause |
amar266/puppet-rjil | files/tests/ceph_health.py | 31 | 4048 | #!/usr/bin/env python
#
# Copyright (c) 2013 SWITCH http://www.switch.ch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import subprocess
import sys
__version__ = '1.0.1'
# default ceph values
CEPH_COMMAND = '/usr/bin/ceph'
# nagios exit code
STATUS_OK = 0
STATUS_WARNING = 1
STATUS_ERROR = 2
STATUS_UNKNOWN = 3
def main():
# parse args
parser = argparse.ArgumentParser(description="'ceph health' nagios plugin.")
parser.add_argument('-e','--exe', help='ceph executable [%s]' % CEPH_COMMAND)
parser.add_argument('-c','--conf', help='alternative ceph conf file')
parser.add_argument('-m','--monaddress', help='ceph monitor address[:port]')
parser.add_argument('-i','--id', help='ceph client id')
parser.add_argument('-k','--keyring', help='ceph client keyring file')
parser.add_argument('-d','--detail', help="exec 'ceph health detail'", action='store_true')
parser.add_argument('-V','--version', help='show version and exit', action='store_true')
args = parser.parse_args()
# validate args
ceph_exec = args.exe if args.exe else CEPH_COMMAND
if not os.path.exists(ceph_exec):
print "ERROR: ceph executable '%s' doesn't exist" % ceph_exec
return STATUS_UNKNOWN
if args.version:
print 'version %s' % __version__
return STATUS_OK
if args.conf and not os.path.exists(args.conf):
print "ERROR: ceph conf file '%s' doesn't exist" % args.conf
return STATUS_UNKNOWN
if args.keyring and not os.path.exists(args.keyring):
print "ERROR: keyring file '%s' doesn't exist" % args.keyring
return STATUS_UNKNOWN
# build command
ceph_health = [ceph_exec]
if args.monaddress:
ceph_health.append('-m')
ceph_health.append(args.monaddress)
if args.conf:
ceph_health.append('-c')
ceph_health.append(args.conf)
if args.id:
ceph_health.append('--id')
ceph_health.append(args.id)
if args.keyring:
ceph_health.append('--keyring')
ceph_health.append(args.keyring)
ceph_health.append('health')
if args.detail:
ceph_health.append('detail')
#print ceph_health
# exec command
p = subprocess.Popen(ceph_health,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
output, err = p.communicate()
# parse output
#print "output:", output
#print "err:", err
if output:
# merge multi-lines of output in one line
one_line = output.replace('\n','; ')
if one_line.startswith('HEALTH_OK'):
#print 'HEALTH OK:', one_line[len('HEALTH_OK')+1:]
one_line= one_line[len('HEALTH_OK')+1:].strip()
if one_line:
print 'HEALTH OK:', one_line
else:
print 'HEALTH OK'
return STATUS_OK
elif one_line.startswith('HEALTH_WARN'):
print 'HEALTH WARNING:', one_line[len('HEALTH_WARN')+1:]
return STATUS_WARNING
elif one_line.startswith('HEALTH_ERR'):
print 'HEALTH ERROR:', one_line[len('HEALTH_ERR')+1:]
return STATUS_ERROR
else:
print one_line
elif err:
# read only first line of error
one_line = err.split('\n')[0]
if '-1 ' in one_line:
idx = one_line.rfind('-1 ')
print 'ERROR: %s: %s' % (ceph_exec, one_line[idx+len('-1 '):])
else:
print one_line
return STATUS_UNKNOWN
if __name__ == "__main__":
sys.exit(main())
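# Usage sketch (illustrative): typical nagios invocations.
#
#   ./ceph_health.py                                  # plain 'ceph health'
#   ./ceph_health.py -c /etc/ceph/ceph.conf -d        # 'ceph health detail'
#   ./ceph_health.py --id nagios -k /etc/ceph/client.nagios.keyring
#
# The exit code follows the nagios convention defined above:
# 0 OK, 1 WARNING, 2 ERROR, 3 UNKNOWN.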
| apache-2.0 |
bruecksen/isimip | isi_mip/climatemodels/migrations/0088_attachment.py | 1 | 1304 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-26 14:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import isi_mip.climatemodels.models
class Migration(migrations.Migration):
dependencies = [
('climatemodels', '0087_datapublicationconfirmation_confirmed_license'),
]
operations = [
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('attachment1', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment2', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment3', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment4', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment5', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('impact_model', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='climatemodels.ImpactModel')),
],
),
]
| mit |
stelfrich/openmicroscopy | components/tools/OmeroCpp/ext/gtest-1.7.0/test/gtest_xml_outfiles_test.py | 2526 | 5340 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "keith.ray@gmail.com (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
"""Unit test for Google Test's XML output functionality."""
def setUp(self):
# We want the trailing '/' that the last "" provides in os.path.join; it
# tells Google Test to create an output directory instead of a single
# file for xml output.
self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_OUTPUT_SUBDIR, "")
self.DeleteFilesAndDir()
def tearDown(self):
self.DeleteFilesAndDir()
def DeleteFilesAndDir(self):
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
except os.error:
pass
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
except os.error:
pass
try:
os.rmdir(self.output_dir_)
except os.error:
pass
def testOutfile1(self):
self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)
def testOutfile2(self):
self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)
def _TestOutFile(self, test_name, expected_xml):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
p = gtest_test_utils.Subprocess(command,
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
# TODO(wan@google.com): libtool causes the built test binary to be
# named lt-gtest_xml_outfiles_test_ instead of
# gtest_xml_outfiles_test_. To account for this possibility, we
# allow both names in the following code. We should remove this
# hack when Chandler Carruth's libtool replacement tool is ready.
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
output_file1)
expected = minidom.parseString(expected_xml)
if os.path.isfile(output_file1):
actual = minidom.parse(output_file1)
else:
actual = minidom.parse(output_file2)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == "__main__":
os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
gtest_test_utils.Main()
| gpl-2.0 |
pratikmallya/hue | desktop/core/ext-py/elementtree/elementtree/XMLTreeBuilder.py | 107 | 3744 | #
# ElementTree
# $Id: XMLTreeBuilder.py 2305 2005-03-01 17:43:09Z fredrik $
#
# an XML tree builder
#
# history:
# 2001-10-20 fl created
# 2002-05-01 fl added namespace support for xmllib
# 2002-07-27 fl require expat (1.5.2 code can use SimpleXMLTreeBuilder)
# 2002-08-17 fl use tag/attribute name memo cache
# 2002-12-04 fl moved XMLTreeBuilder to the ElementTree module
#
# Copyright (c) 1999-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to build element trees from XML files.
##
import ElementTree
##
# (obsolete) ElementTree builder for XML source data, based on the
# <b>expat</b> parser.
# <p>
# This class is an alias for ElementTree.XMLTreeBuilder. New code
# should use that version instead.
#
# @see elementtree.ElementTree
class TreeBuilder(ElementTree.XMLTreeBuilder):
pass
##
# (experimental) An alternate builder that supports manipulation of
# new elements.
class FancyTreeBuilder(TreeBuilder):
def __init__(self, html=0):
TreeBuilder.__init__(self, html)
self._parser.StartNamespaceDeclHandler = self._start_ns
self._parser.EndNamespaceDeclHandler = self._end_ns
self.namespaces = []
def _start(self, tag, attrib_in):
elem = TreeBuilder._start(self, tag, attrib_in)
self.start(elem)
def _start_list(self, tag, attrib_in):
elem = TreeBuilder._start_list(self, tag, attrib_in)
self.start(elem)
def _end(self, tag):
elem = TreeBuilder._end(self, tag)
self.end(elem)
def _start_ns(self, prefix, value):
self.namespaces.insert(0, (prefix, value))
def _end_ns(self, prefix):
assert self.namespaces.pop(0)[0] == prefix, "implementation confused"
##
# Hook method that's called when a new element has been opened.
# May access the <b>namespaces</b> attribute.
#
# @param element The new element. The tag name and attributes are,
# set, but it has no children, and the text and tail attributes
# are still empty.
def start(self, element):
pass
##
# Hook method that's called when a new element has been closed.
# May access the <b>namespaces</b> attribute.
#
# @param element The new element.
def end(self, element):
pass
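# Usage sketch (illustrative, Python 2 era to match this module): subclassing
# FancyTreeBuilder to observe elements as they open and close.
#
#   class Watcher(FancyTreeBuilder):
#       def start(self, element):
#           print "open:", element.tag, self.namespaces
#       def end(self, element):
#           print "close:", element.tag
#
#   builder = Watcher()
#   builder.feed("<root xmlns:p='urn:demo'><p:item/></root>")
#   tree = builder.close()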
| apache-2.0 |
anthonydillon/horizon | openstack_dashboard/dashboards/admin/metering/tables.py | 13 | 3288 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.contrib.humanize.templatetags import humanize
from django.utils import text
from django.utils.translation import ugettext_lazy as _
import six
from horizon import tables
def show_date(datum):
return datum.split('T')[0]
class ModifyUsageReportParameters(tables.LinkAction):
name = "create"
verbose_name = _("Modify Usage Report Parameters")
url = "horizon:admin:metering:create"
classes = ("ajax-modal",)
icon = "edit"
class CreateCSVUsageReport(tables.LinkAction):
name = "csv"
verbose_name = _("Download CSV Summary")
url = "horizon:admin:metering:csvreport"
classes = ("btn-create",)
icon = "download"
class ReportTable(tables.DataTable):
project = tables.Column('project', verbose_name=_('Project'))
service = tables.Column('service', verbose_name=_('Service'))
meter = tables.Column('meter', verbose_name=_('Meter'))
description = tables.Column('description', verbose_name=_('Description'))
time = tables.Column('time', verbose_name=_('Day'),
filters=[show_date])
value = tables.Column('value', verbose_name=_('Value (Avg)'),
filters=[humanize.intcomma])
unit = tables.Column('unit', verbose_name=_('Unit'))
def get_object_id(self, obj):
return "%s-%s-%s" % (obj['project'], obj['service'], obj['meter'])
class Meta(object):
name = 'report_table'
verbose_name = _("Daily Usage Report")
table_actions = (ModifyUsageReportParameters, CreateCSVUsageReport)
multi_select = False
@six.python_2_unicode_compatible
class UsageTable(tables.DataTable):
service = tables.Column('service', verbose_name=_('Service'))
meter = tables.Column('meter', verbose_name=_('Meter'))
description = tables.Column('description', verbose_name=_('Description'))
time = tables.Column('time', verbose_name=_('Day'),
filters=[show_date])
value = tables.Column('value', verbose_name=_('Value (Avg)'),
filters=[humanize.intcomma])
def __init__(self, request, *args, **kwargs):
super(UsageTable, self).__init__(request, *args, **kwargs)
self.title = getattr(self, 'title', None)
def get_object_id(self, datum):
return datum['time'] + datum['meter']
# since these tables are dynamically created and named, we use title
@property
def name(self):
# slugify was introduced in Django 1.5
if hasattr(text, 'slugify'):
return text.slugify(six.text_type(self.title))
else:
return self.title
def __str__(self):
return self.title
class Meta(object):
name = 'daily'
| apache-2.0 |
HackerBaloo/SublimeOpenInTotalCommander | Open in Total Commander.py | 1 | 2176 | import os
import os.path
import subprocess
import sublime
import sublime_plugin
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
def which(program):
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
class SelectInTotalCommanderCommand(sublime_plugin.TextCommand):
def set_exe(self, exe):
exe = which(exe)
if exe:
#print('exe: ', exe)
self.exe = exe
return True
return False
def __init__(self, view):
self.view = view
settings = sublime.load_settings("Open in Total Commander.sublime-settings")
self.args = settings.get("aruments")
env_name = settings.get("path_environment_variable")
#print('env_name: ', env_name)
variable = ''
if env_name in os.environ:
variable = os.environ[env_name]
if not self.set_exe(variable):
if not self.set_exe(settings.get("executable")):
if not self.set_exe(settings.get("executable2")):
sublime.error_message('No executable found, check Open in Total Commander.sublime-settings!')
def run(self, edit):
path = self.view.file_name()
if path is None:
sublime.error_message('No file in view')
return
#print('path: ', path)
#print('self.args: ', self.args)
args = self.args.format(**locals())
#print('args: ', args)
cmd = '{self.exe} {args}'.format(**locals())
print('cmd: ', cmd)
if os.name == 'posix':
subprocess.call([self.exe, args])
else:
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
proc = subprocess.Popen(cmd, startupinfo=startupinfo)
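# Usage sketch (illustrative): Sublime Text derives the command name from the
# class name, so the command can be run from the console or a key binding as
#
#   view.run_command("select_in_total_commander")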
| mit |
dparnell/rethinkdb | external/gtest_1.7.0/test/gtest_color_test.py | 3259 | 4911 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly determines whether to use colors."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'  # '==' comparison; the original chained '=' assignment was a bug
COLOR_ENV_VAR = 'GTEST_COLOR'
COLOR_FLAG = 'gtest_color'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
os.environ[env_var] = value
elif env_var in os.environ:
del os.environ[env_var]
def UsesColor(term, color_env_var, color_flag):
"""Runs gtest_color_test_ and returns its exit code."""
SetEnvVar('TERM', term)
SetEnvVar(COLOR_ENV_VAR, color_env_var)
if color_flag is None:
args = []
else:
args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
p = gtest_test_utils.Subprocess([COMMAND] + args)
return not p.exited or p.exit_code
class GTestColorTest(gtest_test_utils.TestCase):
def testNoEnvVarNoFlag(self):
"""Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
if not IS_WINDOWS:
self.assert_(not UsesColor('dumb', None, None))
self.assert_(not UsesColor('emacs', None, None))
self.assert_(not UsesColor('xterm-mono', None, None))
self.assert_(not UsesColor('unknown', None, None))
self.assert_(not UsesColor(None, None, None))
self.assert_(UsesColor('linux', None, None))
self.assert_(UsesColor('cygwin', None, None))
self.assert_(UsesColor('xterm', None, None))
self.assert_(UsesColor('xterm-color', None, None))
self.assert_(UsesColor('xterm-256color', None, None))
def testFlagOnly(self):
"""Tests the case when there's --gtest_color but not GTEST_COLOR."""
self.assert_(not UsesColor('dumb', None, 'no'))
self.assert_(not UsesColor('xterm-color', None, 'no'))
if not IS_WINDOWS:
self.assert_(not UsesColor('emacs', None, 'auto'))
self.assert_(UsesColor('xterm', None, 'auto'))
self.assert_(UsesColor('dumb', None, 'yes'))
self.assert_(UsesColor('xterm', None, 'yes'))
def testEnvVarOnly(self):
"""Tests the case when there's GTEST_COLOR but not --gtest_color."""
self.assert_(not UsesColor('dumb', 'no', None))
self.assert_(not UsesColor('xterm-color', 'no', None))
if not IS_WINDOWS:
self.assert_(not UsesColor('dumb', 'auto', None))
self.assert_(UsesColor('xterm-color', 'auto', None))
self.assert_(UsesColor('dumb', 'yes', None))
self.assert_(UsesColor('xterm-color', 'yes', None))
def testEnvVarAndFlag(self):
"""Tests the case when there are both GTEST_COLOR and --gtest_color."""
self.assert_(not UsesColor('xterm-color', 'no', 'no'))
self.assert_(UsesColor('dumb', 'no', 'yes'))
self.assert_(UsesColor('xterm-color', 'no', 'auto'))
def testAliasesOfYesAndNo(self):
"""Tests using aliases in specifying --gtest_color."""
self.assert_(UsesColor('dumb', None, 'true'))
self.assert_(UsesColor('dumb', None, 'YES'))
self.assert_(UsesColor('dumb', None, 'T'))
self.assert_(UsesColor('dumb', None, '1'))
self.assert_(not UsesColor('xterm', None, 'f'))
self.assert_(not UsesColor('xterm', None, 'false'))
self.assert_(not UsesColor('xterm', None, '0'))
self.assert_(not UsesColor('xterm', None, 'unknown'))
if __name__ == '__main__':
gtest_test_utils.Main()
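# Usage sketch (illustrative): the same decision can be probed by hand; the
# helper binary reports whether it uses color through its exit status, e.g.
#
#   TERM=xterm-color GTEST_COLOR=auto ./gtest_color_test_ --gtest_color=no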
| agpl-3.0 |